Mirror of https://github.com/ccfos/nightingale.git (synced 2026-03-03 06:29:16 +00:00)

Compare commits: release-11...optimize-w (71 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 15c2eadda6 | |
| | 0aea38e564 | |
| | 45e9253b2a | |
| | 9385ca9931 | |
| | fdd3d14871 | |
| | e890034c19 | |
| | 3aaab9e6ad | |
| | 7f7d707cfc | |
| | 98402e9f8a | |
| | 017094fd78 | |
| | 8b6b896362 | |
| | acaa00cfb6 | |
| | 87f3d8595d | |
| | 42791a374d | |
| | 3855c25805 | |
| | 10ec0ccbd1 | |
| | 94cf304222 | |
| | 994de4635a | |
| | 9a0013a406 | |
| | 6dcd5dd01e | |
| | 70126e3aec | |
| | 767482d358 | |
| | 9a46106cc0 | |
| | da9ea67cee | |
| | c13ecd780b | |
| | cab37c796a | |
| | 078578772b | |
| | 31883ec844 | |
| | 6100cd084a | |
| | b82e260d65 | |
| | 3983386af3 | |
| | 83f2054062 | |
| | 83e0b3cb98 | |
| | f6bfa17e2e | |
| | 3d8019b738 | |
| | ee1be71be6 | |
| | 7f2fb459bb | |
| | fde6a9c75e | |
| | a2b506e263 | |
| | 30024a4951 | |
| | 2c3996812a | |
| | 51d35900f2 | |
| | 852fd2ea6e | |
| | e1a57217ab | |
| | 1e7dad1a67 | |
| | 534e40ad63 | |
| | 15daa3826c | |
| | d5efb5b6d4 | |
| | 7ebd776881 | |
| | 0e5cda1cee | |
| | 64dad19377 | |
| | 48f199f8f5 | |
| | f7e4df7415 | |
| | 37fe01ab54 | |
| | cbfe661bce | |
| | 890c12f0d4 | |
| | 643c6c997c | |
| | b201836b40 | |
| | b5eced1540 | |
| | a13004eab7 | |
| | a0c56548e5 | |
| | e3d97386a8 | |
| | 051b0ca045 | |
| | 2941ced011 | |
| | 97d6908edd | |
| | c7117b9461 | |
| | 78417b1d5b | |
| | 79f3404810 | |
| | 81e51c60eb | |
| | af9cd55ca5 | |
| | d4afdb2b6e | |
.github/workflows/n9e.yml (vendored) — 2 changes

@@ -5,7 +5,7 @@ on:
   tags:
     - 'v*'
 env:
-  GO_VERSION: 1.18
+  GO_VERSION: 1.23

 jobs:
   goreleaser:
.gitignore (vendored) — 1 change

@@ -9,6 +9,7 @@
 *.o
 *.a
 *.so
+*.db
 *.sw[po]
 *.tar.gz
 *.[568vq]
@@ -65,6 +65,7 @@ func Initialize(configDir string, cryptoKey string) (func(), error) {
 	configCvalCache := memsto.NewCvalCache(ctx, syncStats)

 	promClients := prom.NewPromClient(ctx)
+	dispatch.InitRegisterQueryFunc(promClients)
 	tdengineClients := tdengine.NewTdengineClient(ctx, config.Alert.Heartbeat)

 	externalProcessors := process.NewExternalProcessors()

@@ -102,7 +103,7 @@ func Start(alertc aconf.Alert, pushgwc pconf.Pushgw, syncStats *memsto.Stats, al
 	naming := naming.NewNaming(ctx, alertc.Heartbeat, alertStats)

 	writers := writer.NewWriters(pushgwc)
-	record.NewScheduler(alertc, recordingRuleCache, promClients, writers, alertStats)
+	record.NewScheduler(alertc, recordingRuleCache, promClients, writers, alertStats, datasourceCache)

 	eval.NewScheduler(alertc, externalProcessors, alertRuleCache, targetCache, targetsOfAlertRulesCache,
 		busiGroupCache, alertMuteCache, datasourceCache, promClients, tdendgineClients, naming, ctx, alertStats)
@@ -38,7 +38,7 @@ func NewSyncStats() *Stats {
 		Subsystem: subsystem,
 		Name:      "rule_eval_error_total",
 		Help:      "Number of rule eval error.",
-	}, []string{"datasource", "stage"})
+	}, []string{"datasource", "stage", "busi_group", "rule_id"})

 	CounterQueryDataErrorTotal := prometheus.NewCounterVec(prometheus.CounterOpts{
 		Namespace: namespace,
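The hunk above widens `rule_eval_error_total` from two labels to four, so every increment site in this compare must now also pass the business group name and the rule id. A minimal sketch of declaring and incrementing such a counter with client_golang; the namespace/subsystem values and the registry wiring here are assumptions, not the project's actual stats package:

```go
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Same label layout as the widened rule_eval_error_total counter.
	counterRuleEvalErrorTotal := prometheus.NewCounterVec(prometheus.CounterOpts{
		Namespace: "n9e",   // illustrative; the real namespace comes from the stats package
		Subsystem: "alert", // illustrative
		Name:      "rule_eval_error_total",
		Help:      "Number of rule eval error.",
	}, []string{"datasource", "stage", "busi_group", "rule_id"})

	reg := prometheus.NewRegistry()
	reg.MustRegister(counterRuleEvalErrorTotal)

	// WithLabelValues panics if the number of values does not match the label
	// set, which is why each call site in the diff gains two extra arguments.
	counterRuleEvalErrorTotal.WithLabelValues("1", "handle_event", "Default Busi Group", "42").Inc()

	mfs, _ := reg.Gather()
	fmt.Println(len(mfs), "metric families registered")
}
```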
@@ -1,6 +1,7 @@
 package dispatch

 import (
+	"context"
 	"encoding/json"
 	"fmt"
 	"strings"

@@ -13,8 +14,10 @@ import (
 	"github.com/ccfos/nightingale/v6/pkg/ctx"
 	"github.com/ccfos/nightingale/v6/pkg/poster"
 	promsdk "github.com/ccfos/nightingale/v6/pkg/prom"
+	"github.com/ccfos/nightingale/v6/pkg/tplx"
 	"github.com/ccfos/nightingale/v6/prom"

+	"github.com/prometheus/common/model"
 	"github.com/toolkits/pkg/concurrent/semaphore"
 	"github.com/toolkits/pkg/logger"
 )

@@ -27,6 +30,18 @@ type Consumer struct {
 	promClients *prom.PromClientMap
 }

+func InitRegisterQueryFunc(promClients *prom.PromClientMap) {
+	tplx.RegisterQueryFunc(func(datasourceID int64, promql string) model.Value {
+		if promClients.IsNil(datasourceID) {
+			return nil
+		}
+
+		readerClient := promClients.GetCli(datasourceID)
+		value, _, _ := readerClient.Query(context.Background(), promql, time.Now())
+		return value
+	})
+}
+
 // create a Consumer instance
 func NewConsumer(alerting aconf.Alerting, ctx *ctx.Context, dispatch *Dispatch, promClients *prom.PromClientMap) *Consumer {
 	return &Consumer{
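`InitRegisterQueryFunc` hooks a live PromQL lookup into the template package, so notification templates can fetch a current value at render time. The sketch below imitates that pattern with only `text/template`; `RegisterQueryFunc`, the `query` template function, and the string result type are assumptions for illustration, not the actual tplx API:

```go
package main

import (
	"bytes"
	"fmt"
	"text/template"
)

// queryFunc is the pluggable hook templates reach through; it stands in for
// whatever tplx.RegisterQueryFunc stores internally (assumption).
var queryFunc func(datasourceID int64, promql string) string

// RegisterQueryFunc installs the hook once the Prometheus clients exist,
// mirroring the call order in Initialize: build promClients, then register.
func RegisterQueryFunc(f func(datasourceID int64, promql string) string) {
	queryFunc = f
}

func render(tpl string, data any) (string, error) {
	t, err := template.New("notify").Funcs(template.FuncMap{
		"query": func(dsID int64, promql string) string {
			if queryFunc == nil {
				return "" // same spirit as the IsNil guard in the diff
			}
			return queryFunc(dsID, promql)
		},
	}).Parse(tpl)
	if err != nil {
		return "", err
	}
	var buf bytes.Buffer
	err = t.Execute(&buf, data)
	return buf.String(), err
}

func main() {
	RegisterQueryFunc(func(dsID int64, promql string) string {
		return fmt.Sprintf("<value of %q on datasource %d>", promql, dsID)
	})
	out, _ := render(`current value: {{ query 1 "up" }}`, nil)
	fmt.Println(out)
}
```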
@@ -113,7 +128,7 @@ func (e *Consumer) persist(event *models.AlertCurEvent) {
 		event.Id, err = poster.PostByUrlsWithResp[int64](e.ctx, "/v1/n9e/event-persist", event)
 		if err != nil {
 			logger.Errorf("event:%+v persist err:%v", event, err)
-			e.dispatch.Astats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", event.DatasourceId), "persist_event").Inc()
+			e.dispatch.Astats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", event.DatasourceId), "persist_event", event.GroupName, fmt.Sprintf("%v", event.RuleId)).Inc()
 		}
 		return
 	}

@@ -121,7 +136,7 @@ func (e *Consumer) persist(event *models.AlertCurEvent) {
 	err := models.EventPersist(e.ctx, event)
 	if err != nil {
 		logger.Errorf("event%+v persist err:%v", event, err)
-		e.dispatch.Astats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", event.DatasourceId), "persist_event").Inc()
+		e.dispatch.Astats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", event.DatasourceId), "persist_event", event.GroupName, fmt.Sprintf("%v", event.RuleId)).Inc()
 	}
 }

@@ -169,7 +184,7 @@ func (e *Consumer) queryRecoveryVal(event *models.AlertCurEvent) {
 		logger.Errorf("rule_eval:%s promql:%s, warnings:%v", getKey(event), promql, warnings)
 	}

-	anomalyPoints := common.ConvertAnomalyPoints(value)
+	anomalyPoints := models.ConvertAnomalyPoints(value)
 	if len(anomalyPoints) == 0 {
 		logger.Warningf("rule_eval:%s promql:%s, result is empty", getKey(event), promql)
 		event.AnnotationsJSON["recovery_promql_error"] = fmt.Sprintf("promql:%s error:%s", promql, "result is empty")
@@ -141,6 +141,7 @@ func (e *Dispatch) HandleEventNotify(event *models.AlertCurEvent, isSubscribe bo
 	}

 	if e.blockEventNotify(rule, event) {
+		logger.Infof("block event notify: rule_id:%d event:%+v", rule.Id, event)
 		return
 	}

@@ -191,10 +192,10 @@ func (e *Dispatch) blockEventNotify(rule *models.AlertRule, event *models.AlertC
 		}
 	}

-	// has the rule configuration changed?
-	if event.RuleHash != rule.Hash() {
-		return true
-	}
+	// recovery notification: check whether the rule configuration has changed
+	// if event.IsRecovered && event.RuleHash != rule.Hash() {
+	// 	return true
+	// }

 	return false
 }
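`blockEventNotify` works by comparing the hash recorded on the event when it fired against the current hash of the rule; the hunk above narrows (and comments out) that check so that only recovery notifications would be blocked by a rule edit. A small sketch of the underlying idea, using hypothetical Rule/Event types rather than the models package:

```go
package main

import (
	"crypto/md5"
	"fmt"
)

// Rule and Event are simplified stand-ins for models.AlertRule / models.AlertCurEvent.
type Rule struct {
	ID         int64
	RuleConfig string
}

type Event struct {
	RuleID      int64
	RuleHash    string // hash of the rule as it looked when the event fired
	IsRecovered bool
}

// hash mimics rule.Hash(): any change to the evaluated config changes the digest.
func (r Rule) hash() string {
	return fmt.Sprintf("%x", md5.Sum([]byte(fmt.Sprintf("%d_%s", r.ID, r.RuleConfig))))
}

// blockNotify drops a notification when the rule was edited between firing and
// recovery — the recovery-only variant kept (commented) in the diff.
func blockNotify(rule Rule, ev Event) bool {
	return ev.IsRecovered && ev.RuleHash != rule.hash()
}

func main() {
	rule := Rule{ID: 7, RuleConfig: `{"severity":2}`}
	ev := Event{RuleID: 7, RuleHash: rule.hash(), IsRecovered: true}

	rule.RuleConfig = `{"severity":1}` // rule edited after the event fired
	fmt.Println("block recovery notify:", blockNotify(rule, ev)) // true
}
```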
@@ -290,10 +291,12 @@ func (e *Dispatch) Send(rule *models.AlertRule, event *models.AlertCurEvent, not
 	e.SendCallbacks(rule, notifyTarget, event)

 	// handle global webhooks
-	if e.alerting.WebhookBatchSend {
-		sender.BatchSendWebhooks(e.ctx, notifyTarget.ToWebhookList(), event, e.Astats)
-	} else {
-		sender.SingleSendWebhooks(e.ctx, notifyTarget.ToWebhookList(), event, e.Astats)
+	if !event.OverrideGlobalWebhook() {
+		if e.alerting.WebhookBatchSend {
+			sender.BatchSendWebhooks(e.ctx, notifyTarget.ToWebhookMap(), event, e.Astats)
+		} else {
+			sender.SingleSendWebhooks(e.ctx, notifyTarget.ToWebhookMap(), event, e.Astats)
+		}
 	}

 	// handle plugin call

@@ -307,10 +310,10 @@ func (e *Dispatch) Send(rule *models.AlertRule, event *models.AlertCurEvent, not
 }

 func (e *Dispatch) SendCallbacks(rule *models.AlertRule, notifyTarget *NotifyTarget, event *models.AlertCurEvent) {
-
 	uids := notifyTarget.ToUidList()
 	urls := notifyTarget.ToCallbackList()
 	whMap := notifyTarget.ToWebhookMap()
+	ogw := event.OverrideGlobalWebhook()
 	for _, urlStr := range urls {
 		if len(urlStr) == 0 {
 			continue

@@ -318,7 +321,7 @@ func (e *Dispatch) SendCallbacks(rule *models.AlertRule, notifyTarget *NotifyTar

 		cbCtx := sender.BuildCallBackContext(e.ctx, urlStr, rule, []*models.AlertCurEvent{event}, uids, e.userCache, e.alerting.WebhookBatchSend, e.Astats)

-		if wh, ok := whMap[cbCtx.CallBackURL]; ok && wh.Enable {
+		if wh, ok := whMap[cbCtx.CallBackURL]; !ogw && ok && wh.Enable {
 			logger.Debugf("SendCallbacks: webhook[%s] is in global conf.", cbCtx.CallBackURL)
 			continue
 		}
@@ -76,52 +76,8 @@ func (s *NotifyTarget) ToCallbackList() []string {
 	return callbacks
 }

-func (s *NotifyTarget) ToWebhookList() []*models.Webhook {
-	webhooks := make([]*models.Webhook, 0, len(s.webhooks))
-	for _, wh := range s.webhooks {
-		if wh.Batch == 0 {
-			wh.Batch = 1000
-		}
-
-		if wh.Timeout == 0 {
-			wh.Timeout = 10
-		}
-
-		if wh.RetryCount == 0 {
-			wh.RetryCount = 10
-		}
-
-		if wh.RetryInterval == 0 {
-			wh.RetryInterval = 10
-		}
-
-		webhooks = append(webhooks, wh)
-	}
-	return webhooks
-}
-
 func (s *NotifyTarget) ToWebhookMap() map[string]*models.Webhook {
-	webhookMap := make(map[string]*models.Webhook, len(s.webhooks))
-	for _, wh := range s.webhooks {
-		if wh.Batch == 0 {
-			wh.Batch = 1000
-		}
-
-		if wh.Timeout == 0 {
-			wh.Timeout = 10
-		}
-
-		if wh.RetryCount == 0 {
-			wh.RetryCount = 10
-		}
-
-		if wh.RetryInterval == 0 {
-			wh.RetryInterval = 10
-		}
-
-		webhookMap[wh.Url] = wh
-	}
-	return webhookMap
+	return s.webhooks
 }

 func (s *NotifyTarget) ToUidList() []int64 {
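After this change `ToWebhookMap` simply returns `s.webhooks`, which implies the cached webhook set is already a map keyed by URL, and `SendCallbacks` uses that map to skip per-rule callbacks whose URL is already covered by an enabled global webhook. A self-contained sketch of that dedup pattern; the `Webhook` fields and default values are assumptions taken from the removed `ToWebhookList` body:

```go
package main

import "fmt"

type Webhook struct {
	Url    string
	Enable bool
	Batch  int // removed code defaulted Batch to 1000, timeouts/retries to 10
}

// toWebhookMap keys the global webhooks by URL so a callback URL can be
// checked in O(1) instead of scanning a slice.
func toWebhookMap(webhooks []*Webhook) map[string]*Webhook {
	m := make(map[string]*Webhook, len(webhooks))
	for _, wh := range webhooks {
		if wh.Batch == 0 {
			wh.Batch = 1000
		}
		m[wh.Url] = wh
	}
	return m
}

func main() {
	global := toWebhookMap([]*Webhook{{Url: "https://hooks.example.com/a", Enable: true}})
	callbacks := []string{"https://hooks.example.com/a", "https://hooks.example.com/b"}

	for _, cb := range callbacks {
		// Skip callbacks already handled by an enabled global webhook,
		// mirroring the whMap lookup in SendCallbacks.
		if wh, ok := global[cb]; ok && wh.Enable {
			fmt.Println("skip (already a global webhook):", cb)
			continue
		}
		fmt.Println("send callback:", cb)
	}
}
```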
@@ -96,8 +96,7 @@ func (s *Scheduler) syncAlertRules() {

 		ruleType := rule.GetRuleType()
 		if rule.IsPrometheusRule() || rule.IsLokiRule() || rule.IsTdengineRule() {
-			datasourceIds := s.promClients.Hit(rule.DatasourceIdsJson)
-			datasourceIds = append(datasourceIds, s.tdengineClients.Hit(rule.DatasourceIdsJson)...)
+			datasourceIds := s.datasourceCache.GetIDsByDsCateAndQueries(rule.Cate, rule.DatasourceQueries)
 			for _, dsId := range datasourceIds {
 				if !naming.DatasourceHashRing.IsHit(strconv.FormatInt(dsId, 10), fmt.Sprintf("%d", rule.Id), s.aconf.Heartbeat.Endpoint) {
 					continue
@@ -133,7 +132,8 @@ func (s *Scheduler) syncAlertRules() {
 		} else {
-			// if the rule does not alert via the prometheus engine, create it as an externalRule
+			// if rule is not processed by prometheus engine, create it as externalRule
-			for _, dsId := range rule.DatasourceIdsJson {
+			dsIds := s.datasourceCache.GetIDsByDsCateAndQueries(rule.Cate, rule.DatasourceQueries)
+			for _, dsId := range dsIds {
 				ds := s.datasourceCache.GetById(dsId)
 				if ds == nil {
 					logger.Debugf("datasource %d not found", dsId)
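In both scheduler hunks each (datasource, rule) pair is only evaluated on the node that the `DatasourceHashRing.IsHit` check assigns it to, which is how work is sharded across alert engine instances. A simplified stand-in for that assignment, using a plain FNV hash plus modulo instead of a real consistent-hash ring (the real ring presumably rebalances with fewer moves when nodes join or leave):

```go
package main

import (
	"fmt"
	"hash/fnv"
)

// isHit decides whether this node (self) owns the evaluation of a given rule
// on a given datasource. Hypothetical helper for illustration only.
func isHit(dsID, ruleID string, endpoints []string, self string) bool {
	h := fnv.New32a()
	h.Write([]byte(dsID + "/" + ruleID))
	owner := endpoints[int(h.Sum32())%len(endpoints)]
	return owner == self
}

func main() {
	nodes := []string{"n9e-alert-01:17000", "n9e-alert-02:17000"}
	for _, rule := range []string{"101", "102", "103"} {
		fmt.Println("rule", rule, "owned by n9e-alert-01?", isHit("1", rule, nodes, nodes[0]))
	}
}
```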
(File diff suppressed because it is too large.)
@@ -269,3 +269,190 @@ func allValueDeepEqual(got, want map[uint64][]uint64) bool {
 	}
 	return true
 }
+
+// allValueDeepEqualOmitOrder reports whether two string slices are equal, ignoring order
+func allValueDeepEqualOmitOrder(got, want []string) bool {
+	if len(got) != len(want) {
+		return false
+	}
+	slices.Sort(got)
+	slices.Sort(want)
+	for i := range got {
+		if got[i] != want[i] {
+			return false
+		}
+	}
+	return true
+}
+
+func Test_removeVal(t *testing.T) {
+	type args struct {
+		promql string
+	}
+	tests := []struct {
+		name string
+		args args
+		want string
+	}{
+		// TODO: Add test cases.
+		{
+			name: "removeVal1",
+			args: args{promql: "mem{test1=\"$test1\",test2=\"$test2\",test3=\"$test3\"} > $val"},
+			want: "mem{} > $val",
+		},
+		{
+			name: "removeVal2",
+			args: args{promql: "mem{test1=\"test1\",test2=\"$test2\",test3=\"$test3\"} > $val"},
+			want: "mem{test1=\"test1\"} > $val",
+		},
+		{
+			name: "removeVal3",
+			args: args{promql: "mem{test1=\"$test1\",test2=\"test2\",test3=\"$test3\"} > $val"},
+			want: "mem{test2=\"test2\"} > $val",
+		},
+		{
+			name: "removeVal4",
+			args: args{promql: "mem{test1=\"$test1\",test2=\"$test2\",test3=\"test3\"} > $val"},
+			want: "mem{test3=\"test3\"} > $val",
+		},
+		{
+			name: "removeVal5",
+			args: args{promql: "mem{test1=\"$test1\",test2=\"test2\",test3=\"test3\"} > $val"},
+			want: "mem{test2=\"test2\",test3=\"test3\"} > $val",
+		},
+		{
+			name: "removeVal6",
+			args: args{promql: "mem{test1=\"test1\",test2=\"$test2\",test3=\"test3\"} > $val"},
+			want: "mem{test1=\"test1\",test3=\"test3\"} > $val",
+		},
+		{
+			name: "removeVal7",
+			args: args{promql: "mem{test1=\"test1\",test2=\"test2\",test3='$test3'} > $val"},
+			want: "mem{test1=\"test1\",test2=\"test2\"} > $val",
+		},
+		{
+			name: "removeVal8",
+			args: args{promql: "mem{test1=\"test1\",test2=\"test2\",test3=\"test3\"} > $val"},
+			want: "mem{test1=\"test1\",test2=\"test2\",test3=\"test3\"} > $val",
+		},
+		{
+			name: "removeVal9",
+			args: args{promql: "mem{test1=\"$test1\",test2=\"test2\"} > $val1 and mem{test3=\"test3\",test4=\"test4\"} > $val2"},
+			want: "mem{test2=\"test2\"} > $val1 and mem{test3=\"test3\",test4=\"test4\"} > $val2",
+		},
+		{
+			name: "removeVal10",
+			args: args{promql: "mem{test1=\"test1\",test2='$test2'} > $val1 and mem{test3=\"test3\",test4=\"test4\"} > $val2"},
+			want: "mem{test1=\"test1\"} > $val1 and mem{test3=\"test3\",test4=\"test4\"} > $val2",
+		},
+		{
+			name: "removeVal11",
+			args: args{promql: "mem{test1='test1',test2=\"test2\"} > $val1 and mem{test3=\"$test3\",test4=\"test4\"} > $val2"},
+			want: "mem{test1='test1',test2=\"test2\"} > $val1 and mem{test4=\"test4\"} > $val2",
+		},
+		{
+			name: "removeVal12",
+			args: args{promql: "mem{test1=\"test1\",test2=\"test2\"} > $val1 and mem{test3=\"test3\",test4=\"$test4\"} > $val2"},
+			want: "mem{test1=\"test1\",test2=\"test2\"} > $val1 and mem{test3=\"test3\"} > $val2",
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			if got := removeVal(tt.args.promql); got != tt.want {
+				t.Errorf("removeVal() = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
+
+func TestExtractVarMapping(t *testing.T) {
+	tests := []struct {
+		name   string
+		promql string
+		want   map[string]string
+	}{
+		{
+			name:   "single brace group, single variable",
+			promql: `mem_used_percent{host="$my_host"} > $val`,
+			want:   map[string]string{"my_host": "host"},
+		},
+		{
+			name:   "single brace group, multiple variables",
+			promql: `mem_used_percent{host="$my_host",region="$region",env="prod"} > $val`,
+			want:   map[string]string{"my_host": "host", "region": "region"},
+		},
+		{
+			name:   "multiple brace groups, multiple variables",
+			promql: `sum(rate(mem_used_percent{host="$my_host"})) by (instance) + avg(node_load1{region="$region"}) > $val`,
+			want:   map[string]string{"my_host": "host", "region": "region"},
+		},
+		{
+			name:   "same variable appears multiple times",
+			promql: `sum(rate(mem_used_percent{host="$my_host"})) + avg(node_load1{host="$my_host"}) > $val`,
+			want:   map[string]string{"my_host": "host"},
+		},
+		{
+			name:   "no variables",
+			promql: `mem_used_percent{host="localhost",region="cn"} > 80`,
+			want:   map[string]string{},
+		},
+		{
+			name:   "no braces",
+			promql: `80 > $val`,
+			want:   map[string]string{},
+		},
+		{
+			name:   "loosely formatted labels",
+			promql: `mem_used_percent{host=$my_host,region = $region} > $val`,
+			want:   map[string]string{"my_host": "host", "region": "region"},
+		},
+		{
+			name:   "empty braces",
+			promql: `mem_used_percent{} > $val`,
+			want:   map[string]string{},
+		},
+		{
+			name:   "unclosed brace",
+			promql: `mem_used_percent{host="$my_host"`,
+			want:   map[string]string{},
+		},
+		{
+			name:   "complex expression",
+			promql: `sum(rate(http_requests_total{handler="$handler",code="$code"}[5m])) by (handler) / sum(rate(http_requests_total{handler="$handler"}[5m])) by (handler) * 100 > $threshold`,
+			want:   map[string]string{"handler": "handler", "code": "code"},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got := ExtractVarMapping(tt.promql)
+			if !reflect.DeepEqual(got, tt.want) {
+				t.Errorf("ExtractVarMapping() = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
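The tests above pin down the behaviour of two helpers that are added in the (suppressed) promql file: `removeVal` drops label matchers whose value is still an unresolved `$variable`, and `ExtractVarMapping` maps each `$variable` to the label it filters. One possible implementation consistent with those cases is sketched below; it is not the project's code, and the comma-split assumes label values do not themselves contain commas:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// bracketRe matches each complete {...} label-matcher block.
var bracketRe = regexp.MustCompile(`\{[^}]*\}`)

// varValRe reports whether a matcher value is an unresolved $variable,
// e.g. host="$my_host" or host='$my_host' or host=$my_host.
var varValRe = regexp.MustCompile(`^\s*[a-zA-Z_][a-zA-Z0-9_]*\s*(=~?|!=|!~)\s*["']?\$`)

// varMapRe captures label name and variable name, e.g. host="$my_host".
var varMapRe = regexp.MustCompile(`([a-zA-Z_][a-zA-Z0-9_]*)\s*(=~?|!=|!~)\s*["']?\$([a-zA-Z_][a-zA-Z0-9_]*)`)

// removeVal drops matchers whose value is still an unresolved $variable.
func removeVal(promql string) string {
	return bracketRe.ReplaceAllStringFunc(promql, func(block string) string {
		inner := strings.TrimSuffix(strings.TrimPrefix(block, "{"), "}")
		if inner == "" {
			return block
		}
		var kept []string
		for _, m := range strings.Split(inner, ",") {
			if varValRe.MatchString(m) {
				continue
			}
			kept = append(kept, m)
		}
		return "{" + strings.Join(kept, ",") + "}"
	})
}

// ExtractVarMapping maps variable name (without $) to the label it filters,
// considering only complete brace groups (an unclosed brace yields nothing).
func ExtractVarMapping(promql string) map[string]string {
	mapping := map[string]string{}
	for _, block := range bracketRe.FindAllString(promql, -1) {
		for _, m := range varMapRe.FindAllStringSubmatch(block, -1) {
			mapping[m[3]] = m[1]
		}
	}
	return mapping
}

func main() {
	fmt.Println(removeVal(`mem{test1="$test1",test2="test2",test3="$test3"} > $val`))
	// mem{test2="test2"} > $val
	fmt.Println(ExtractVarMapping(`mem_used_percent{host="$my_host",region="$region",env="prod"} > $val`))
	// map[my_host:host region:region]
}
```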
@@ -21,6 +21,7 @@ import (
 	"github.com/ccfos/nightingale/v6/pushgw/writer"

 	"github.com/prometheus/prometheus/prompb"
+	"github.com/robfig/cron/v3"
 	"github.com/toolkits/pkg/logger"
 	"github.com/toolkits/pkg/str"
 )

@@ -78,6 +79,9 @@ type Processor struct {
 	HandleFireEventHook    HandleEventFunc
 	HandleRecoverEventHook HandleEventFunc
 	EventMuteHook          EventMuteHookFunc
+
+	ScheduleEntry    cron.Entry
+	PromEvalInterval int
 }

 func (p *Processor) Key() string {

@@ -89,9 +93,9 @@ func (p *Processor) DatasourceId() int64 {
 }

 func (p *Processor) Hash() string {
-	return str.MD5(fmt.Sprintf("%d_%d_%s_%d",
+	return str.MD5(fmt.Sprintf("%d_%s_%s_%d",
 		p.rule.Id,
 		p.rule.PromEvalInterval,
 		p.rule.CronPattern,
 		p.rule.RuleConfig,
 		p.datasourceId,
 	))
@@ -126,7 +130,7 @@ func NewProcessor(engineName string, rule *models.AlertRule, datasourceId int64,
 	return p
 }

-func (p *Processor) Handle(anomalyPoints []common.AnomalyPoint, from string, inhibit bool) {
+func (p *Processor) Handle(anomalyPoints []models.AnomalyPoint, from string, inhibit bool) {
 	// some of the rule's settings (e.g. alert receivers, callbacks) may already have changed;
 	// such edits do not trigger a worker restart, but they do affect event handling,
 	// so fetch the rule from memsto.AlertRuleCache here and override it

@@ -134,7 +138,7 @@ func (p *Processor) Handle(anomalyPoints []common.AnomalyPoint, from string, inh
 	cachedRule := p.alertRuleCache.Get(p.rule.Id)
 	if cachedRule == nil {
 		logger.Errorf("rule not found %+v", anomalyPoints)
-		p.Stats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", p.DatasourceId()), "handle_event").Inc()
+		p.Stats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", p.DatasourceId()), "handle_event", p.BusiGroupCache.GetNameByBusiGroupId(p.rule.GroupId), fmt.Sprintf("%v", p.rule.Id)).Inc()
 		return
 	}

@@ -178,7 +182,7 @@ func (p *Processor) Handle(anomalyPoints []common.AnomalyPoint, from string, inh
 	}
 }

-func (p *Processor) BuildEvent(anomalyPoint common.AnomalyPoint, from string, now int64, ruleHash string) *models.AlertCurEvent {
+func (p *Processor) BuildEvent(anomalyPoint models.AnomalyPoint, from string, now int64, ruleHash string) *models.AlertCurEvent {
 	p.fillTags(anomalyPoint)
 	p.mayHandleIdent()
 	hash := Hash(p.rule.Id, p.datasourceId, anomalyPoint)
@@ -204,6 +208,7 @@ func (p *Processor) BuildEvent(anomalyPoint common.AnomalyPoint, from string, no
 	event.TargetNote = p.targetNote
 	event.TriggerValue = anomalyPoint.ReadableValue()
 	event.TriggerValues = anomalyPoint.Values
+	event.TriggerValuesJson = models.EventTriggerValues{ValuesWithUnit: anomalyPoint.ValuesUnit}
 	event.TagsJSON = p.tagsArr
 	event.Tags = strings.Join(p.tagsArr, ",,")
 	event.IsRecovered = false

@@ -419,6 +424,7 @@ func (p *Processor) handleEvent(events []*models.AlertCurEvent) {
 			p.pendingsUseByRecover.Set(event.Hash, event)
 		}

+		event.PromEvalInterval = p.PromEvalInterval
 		if p.rule.PromForDuration == 0 {
 			fireEvents = append(fireEvents, event)
 			if severity > event.Severity {

@@ -513,7 +519,7 @@ func (p *Processor) pushEventToQueue(e *models.AlertCurEvent) {
 	dispatch.LogEvent(e, "push_queue")
 	if !queue.EventQueue.PushFront(e) {
 		logger.Warningf("event_push_queue: queue is full, event:%+v", e)
-		p.Stats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", p.DatasourceId()), "push_event_queue").Inc()
+		p.Stats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", p.DatasourceId()), "push_event_queue", p.BusiGroupCache.GetNameByBusiGroupId(p.rule.GroupId), fmt.Sprintf("%v", p.rule.Id)).Inc()
 	}
 }
@@ -524,7 +530,7 @@ func (p *Processor) RecoverAlertCurEventFromDb() {
 	curEvents, err := models.AlertCurEventGetByRuleIdAndDsId(p.ctx, p.rule.Id, p.datasourceId)
 	if err != nil {
 		logger.Errorf("recover event from db for rule:%s failed, err:%s", p.Key(), err)
-		p.Stats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", p.DatasourceId()), "get_recover_event").Inc()
+		p.Stats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", p.DatasourceId()), "get_recover_event", p.BusiGroupCache.GetNameByBusiGroupId(p.rule.GroupId), fmt.Sprintf("%v", p.rule.Id)).Inc()
 		p.fires = NewAlertCurEventMap(nil)
 		return
 	}

@@ -558,7 +564,7 @@ func (p *Processor) RecoverAlertCurEventFromDb() {
 	p.pendingsUseByRecover = NewAlertCurEventMap(pendingsUseByRecoverMap)
 }

-func (p *Processor) fillTags(anomalyPoint common.AnomalyPoint) {
+func (p *Processor) fillTags(anomalyPoint models.AnomalyPoint) {
 	// handle series tags
 	tagsMap := make(map[string]string)
 	for label, value := range anomalyPoint.Labels {

@@ -648,10 +654,10 @@ func labelMapToArr(m map[string]string) []string {
 	return labelStrings
 }

-func Hash(ruleId, datasourceId int64, vector common.AnomalyPoint) string {
+func Hash(ruleId, datasourceId int64, vector models.AnomalyPoint) string {
 	return str.MD5(fmt.Sprintf("%d_%s_%d_%d_%s", ruleId, vector.Labels.String(), datasourceId, vector.Severity, vector.Query))
 }

-func TagHash(vector common.AnomalyPoint) string {
+func TagHash(vector models.AnomalyPoint) string {
 	return str.MD5(vector.Labels.String())
 }
@@ -26,9 +26,11 @@ type Scheduler struct {
 	writers *writer.WritersType

 	stats *astats.Stats
+
+	datasourceCache *memsto.DatasourceCacheType
 }

-func NewScheduler(aconf aconf.Alert, rrc *memsto.RecordingRuleCacheType, promClients *prom.PromClientMap, writers *writer.WritersType, stats *astats.Stats) *Scheduler {
+func NewScheduler(aconf aconf.Alert, rrc *memsto.RecordingRuleCacheType, promClients *prom.PromClientMap, writers *writer.WritersType, stats *astats.Stats, datasourceCache *memsto.DatasourceCacheType) *Scheduler {
 	scheduler := &Scheduler{
 		aconf:       aconf,
 		recordRules: make(map[string]*RecordRuleContext),

@@ -39,6 +41,8 @@ func NewScheduler(aconf aconf.Alert, rrc *memsto.RecordingRuleCacheType, promCli
 		writers: writers,

 		stats: stats,
+
+		datasourceCache: datasourceCache,
 	}

 	go scheduler.LoopSyncRules(context.Background())

@@ -67,7 +71,7 @@ func (s *Scheduler) syncRecordRules() {
 			continue
 		}

-		datasourceIds := s.promClients.Hit(rule.DatasourceIdsJson)
+		datasourceIds := s.datasourceCache.GetIDsByDsCateAndQueries("prometheus", rule.DatasourceQueries)
 		for _, dsId := range datasourceIds {
 			if !naming.DatasourceHashRing.IsHit(strconv.FormatInt(dsId, 10), fmt.Sprintf("%d", rule.Id), s.aconf.Heartbeat.Endpoint) {
 				continue
@@ -6,7 +6,6 @@ import (
 	"strings"
 	"time"

-	"github.com/ccfos/nightingale/v6/alert/common"
 	"github.com/ccfos/nightingale/v6/alert/dispatch"
 	"github.com/ccfos/nightingale/v6/alert/mute"
 	"github.com/ccfos/nightingale/v6/alert/naming"

@@ -92,7 +91,7 @@ func (rt *Router) eventPersist(c *gin.Context) {

 type eventForm struct {
 	Alert         bool                  `json:"alert"`
-	AnomalyPoints []common.AnomalyPoint `json:"vectors"`
+	AnomalyPoints []models.AnomalyPoint `json:"vectors"`
 	RuleId        int64                 `json:"rule_id"`
 	DatasourceId  int64                 `json:"datasource_id"`
 	Inhibit       bool                  `json:"inhibit"`
@@ -129,34 +129,39 @@ func (c *DefaultCallBacker) CallBack(ctx CallBackContext) {
 		return
 	}

-	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, event, "callback", ctx.Stats, event)
+	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, event, "callback", ctx.Stats, ctx.Events)
 }

 func doSendAndRecord(ctx *ctx.Context, url, token string, body interface{}, channel string,
-	stats *astats.Stats, event *models.AlertCurEvent) {
+	stats *astats.Stats, events []*models.AlertCurEvent) {
 	res, err := doSend(url, body, channel, stats)
-	NotifyRecord(ctx, event, channel, token, res, err)
+	NotifyRecord(ctx, events, channel, token, res, err)
 }

-func NotifyRecord(ctx *ctx.Context, evt *models.AlertCurEvent, channel, target, res string, err error) {
-	noti := models.NewNotificationRecord(evt, channel, target)
-	if err != nil {
-		noti.SetStatus(models.NotiStatusFailure)
-		noti.SetDetails(err.Error())
-	} else if res != "" {
-		noti.SetDetails(string(res))
+func NotifyRecord(ctx *ctx.Context, evts []*models.AlertCurEvent, channel, target, res string, err error) {
+	// one notification may correspond to multiple events; record all of them
+	notis := make([]*models.NotificaitonRecord, 0, len(evts))
+	for _, evt := range evts {
+		noti := models.NewNotificationRecord(evt, channel, target)
+		if err != nil {
+			noti.SetStatus(models.NotiStatusFailure)
+			noti.SetDetails(err.Error())
+		} else if res != "" {
+			noti.SetDetails(string(res))
+		}
+		notis = append(notis, noti)
 	}

 	if !ctx.IsCenter {
-		_, err := poster.PostByUrlsWithResp[int64](ctx, "/v1/n9e/notify-record", noti)
+		_, err := poster.PostByUrlsWithResp[[]int64](ctx, "/v1/n9e/notify-record", notis)
 		if err != nil {
-			logger.Errorf("add noti:%v failed, err: %v", noti, err)
+			logger.Errorf("add notis:%v failed, err: %v", notis, err)
 		}
 		return
 	}

-	if err := noti.Add(ctx); err != nil {
-		logger.Errorf("add noti:%v failed, err: %v", noti, err)
+	if err := models.DB(ctx).CreateInBatches(notis, 100).Error; err != nil {
+		logger.Errorf("add notis:%v failed, err: %v", notis, err)
 	}
 }
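The core of this hunk is that `NotifyRecord` now takes the whole event slice and fans a single send result out into one audit record per event before persisting them in a batch. A simplified, self-contained sketch of that fan-out; the record type and status constant are stand-ins, not the models package:

```go
package main

import "fmt"

// NotificationRecord is a stand-in for models.NotificaitonRecord.
type NotificationRecord struct {
	EventID int64
	Channel string
	Target  string
	Status  int
	Details string
}

type Event struct{ ID int64 }

// notifyRecord builds one record per event for a single send attempt, so a
// batched webhook that carried N events produces N audit rows.
func notifyRecord(events []*Event, channel, target, res string, sendErr error) []*NotificationRecord {
	notis := make([]*NotificationRecord, 0, len(events))
	for _, ev := range events {
		n := &NotificationRecord{EventID: ev.ID, Channel: channel, Target: target}
		if sendErr != nil {
			n.Status = 1 // failure
			n.Details = sendErr.Error()
		} else if res != "" {
			n.Details = res
		}
		notis = append(notis, n)
	}
	return notis
}

func main() {
	events := []*Event{{ID: 101}, {ID: 102}, {ID: 103}}
	notis := notifyRecord(events, "webhook", "https://hooks.example.com/x", "ok", nil)
	// In the diff this slice is then inserted in batches of 100 (CreateInBatches)
	// or posted to the center in one request.
	fmt.Println("records to persist:", len(notis))
}
```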
@@ -67,7 +67,7 @@ func (ds *DingtalkSender) Send(ctx MessageContext) {
 			}
 		}

-		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.Dingtalk, ctx.Stats, ctx.Events[0])
+		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.Dingtalk, ctx.Stats, ctx.Events)
 	}
 }

@@ -97,8 +97,7 @@ func (ds *DingtalkSender) CallBack(ctx CallBackContext) {
 		body.Markdown.Text = message
 	}

-	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body,
-		"callback", ctx.Stats, ctx.Events[0])
+	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body, "callback", ctx.Stats, ctx.Events)
 }

 // extract urls and ats from Users
@@ -25,8 +25,8 @@ type EmailSender struct {
 }

 type EmailContext struct {
-	event *models.AlertCurEvent
-	mail  *gomail.Message
+	events []*models.AlertCurEvent
+	mail   *gomail.Message
 }

 func (es *EmailSender) Send(ctx MessageContext) {

@@ -42,7 +42,7 @@ func (es *EmailSender) Send(ctx MessageContext) {
 		subject = ctx.Events[0].RuleName
 	}
 	content := BuildTplMessage(models.Email, es.contentTpl, ctx.Events)
-	es.WriteEmail(subject, content, tos, ctx.Events[0])
+	es.WriteEmail(subject, content, tos, ctx.Events)

 	ctx.Stats.AlertNotifyTotal.WithLabelValues(models.Email).Add(float64(len(tos)))
 }

@@ -79,8 +79,7 @@ func SendEmail(subject, content string, tos []string, stmp aconf.SMTPConfig) err
 	return nil
 }

-func (es *EmailSender) WriteEmail(subject, content string, tos []string,
-	event *models.AlertCurEvent) {
+func (es *EmailSender) WriteEmail(subject, content string, tos []string, events []*models.AlertCurEvent) {
 	m := gomail.NewMessage()

 	m.SetHeader("From", es.smtp.From)

@@ -88,7 +87,7 @@ func (es *EmailSender) WriteEmail(subject, content string, tos []string,
 	m.SetHeader("Subject", subject)
 	m.SetBody("text/html", content)

-	mailch <- &EmailContext{event, m}
+	mailch <- &EmailContext{events, m}
 }

 func dialSmtp(d *gomail.Dialer) gomail.SendCloser {

@@ -206,7 +205,7 @@ func startEmailSender(ctx *ctx.Context, smtp aconf.SMTPConfig) {
 			if err == nil {
 				msg = "ok"
 			}
-			NotifyRecord(ctx, m.event, models.Email, to, msg, err)
+			NotifyRecord(ctx, m.events, models.Email, to, msg, err)
 		}

 		size++
@@ -54,8 +54,7 @@ func (fs *FeishuSender) CallBack(ctx CallBackContext) {
 		},
 	}

-	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body, "callback",
-		ctx.Stats, ctx.Events[0])
+	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body, "callback", ctx.Stats, ctx.Events)
 }

 func (fs *FeishuSender) Send(ctx MessageContext) {

@@ -77,7 +76,7 @@ func (fs *FeishuSender) Send(ctx MessageContext) {
 				IsAtAll: false,
 			}
 		}
-		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.Feishu, ctx.Stats, ctx.Events[0])
+		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.Feishu, ctx.Stats, ctx.Events)
 	}
 }

@@ -135,8 +135,7 @@ func (fs *FeishuCardSender) CallBack(ctx CallBackContext) {
 	}
 	parsedURL.RawQuery = ""

-	doSendAndRecord(ctx.Ctx, parsedURL.String(), parsedURL.String(), body, "callback",
-		ctx.Stats, ctx.Events[0])
+	doSendAndRecord(ctx.Ctx, parsedURL.String(), parsedURL.String(), body, "callback", ctx.Stats, ctx.Events)
 }

 func (fs *FeishuCardSender) Send(ctx MessageContext) {

@@ -160,8 +159,7 @@ func (fs *FeishuCardSender) Send(ctx MessageContext) {
 	body.Card.Elements[0].Text.Content = message
 	body.Card.Elements[2].Elements[0].Content = SendTitle
 	for i, url := range urls {
-		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.FeishuCard,
-			ctx.Stats, ctx.Events[0])
+		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.FeishuCard, ctx.Stats, ctx.Events)
 	}
 }
@@ -118,6 +118,7 @@ func CallIbex(ctx *ctx.Context, id int64, host string,
 	// append alert severity and trigger value tags
 	tagsMap["alert_severity"] = strconv.Itoa(event.Severity)
 	tagsMap["alert_trigger_value"] = event.TriggerValue
+	tagsMap["is_recovered"] = strconv.FormatBool(event.IsRecovered)

 	tags, err := json.Marshal(tagsMap)
 	if err != nil {
@@ -27,8 +27,7 @@ func (lk *LarkSender) CallBack(ctx CallBackContext) {
 		},
 	}

-	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body, "callback",
-		ctx.Stats, ctx.Events[0])
+	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body, "callback", ctx.Stats, ctx.Events)
 }

 func (lk *LarkSender) Send(ctx MessageContext) {

@@ -44,7 +43,7 @@ func (lk *LarkSender) Send(ctx MessageContext) {
 				Text: message,
 			},
 		}
-		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.Lark, ctx.Stats, ctx.Events[0])
+		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.Lark, ctx.Stats, ctx.Events)
 	}
 }

@@ -56,8 +56,7 @@ func (fs *LarkCardSender) CallBack(ctx CallBackContext) {
 	}
 	parsedURL.RawQuery = ""

-	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body, "callback",
-		ctx.Stats, ctx.Events[0])
+	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body, "callback", ctx.Stats, ctx.Events)
 }

 func (fs *LarkCardSender) Send(ctx MessageContext) {

@@ -81,7 +80,7 @@ func (fs *LarkCardSender) Send(ctx MessageContext) {
 	body.Card.Elements[0].Text.Content = message
 	body.Card.Elements[2].Elements[0].Content = SendTitle
 	for i, url := range urls {
-		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.LarkCard, ctx.Stats, ctx.Events[0])
+		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.LarkCard, ctx.Stats, ctx.Events)
 	}
 }
@@ -43,7 +43,7 @@ func (ms *MmSender) Send(ctx MessageContext) {
 		Text:   message,
 		Tokens: urls,
 		Stats:  ctx.Stats,
-	}, ctx.Events[0], models.Mm)
+	}, ctx.Events, models.Mm)
 }

 func (ms *MmSender) CallBack(ctx CallBackContext) {

@@ -56,7 +56,7 @@ func (ms *MmSender) CallBack(ctx CallBackContext) {
 		Text:   message,
 		Tokens: []string{ctx.CallBackURL},
 		Stats:  ctx.Stats,
-	}, ctx.Events[0], "callback")
+	}, ctx.Events, "callback")
 }

 func (ms *MmSender) extract(users []*models.User) []string {

@@ -69,12 +69,12 @@ func (ms *MmSender) extract(users []*models.User) []string {
 	return tokens
 }

-func SendMM(ctx *ctx.Context, message MatterMostMessage, event *models.AlertCurEvent, channel string) {
+func SendMM(ctx *ctx.Context, message MatterMostMessage, events []*models.AlertCurEvent, channel string) {
 	for i := 0; i < len(message.Tokens); i++ {
 		u, err := url.Parse(message.Tokens[i])
 		if err != nil {
 			logger.Errorf("mm_sender: failed to parse error=%v", err)
-			NotifyRecord(ctx, event, channel, message.Tokens[i], "", err)
+			NotifyRecord(ctx, events, channel, message.Tokens[i], "", err)
 			continue
 		}

@@ -103,7 +103,7 @@ func SendMM(ctx *ctx.Context, message MatterMostMessage, event *models.AlertCurE
 				Username: username,
 				Text:     txt + message.Text,
 			}
-			doSendAndRecord(ctx, ur, message.Tokens[i], body, channel, message.Stats, event)
+			doSendAndRecord(ctx, ur, message.Tokens[i], body, channel, message.Stats, events)
 		}
 	}
 }
@@ -85,7 +85,7 @@ func alertingCallScript(ctx *ctx.Context, stdinBytes []byte, notifyScript models
 	}

 	err, isTimeout := sys.WrapTimeout(cmd, time.Duration(config.Timeout)*time.Second)
-	NotifyRecord(ctx, event, channel, cmd.String(), "", buildErr(err, isTimeout))
+	NotifyRecord(ctx, []*models.AlertCurEvent{event}, channel, cmd.String(), "", buildErr(err, isTimeout))

 	if isTimeout {
 		if err == nil {
@@ -41,7 +41,7 @@ func (ts *TelegramSender) CallBack(ctx CallBackContext) {
 		Text:   message,
 		Tokens: []string{ctx.CallBackURL},
 		Stats:  ctx.Stats,
-	}, ctx.Events[0], "callback")
+	}, ctx.Events, "callback")
 }

 func (ts *TelegramSender) Send(ctx MessageContext) {

@@ -55,7 +55,7 @@ func (ts *TelegramSender) Send(ctx MessageContext) {
 		Text:   message,
 		Tokens: tokens,
 		Stats:  ctx.Stats,
-	}, ctx.Events[0], models.Telegram)
+	}, ctx.Events, models.Telegram)
 }

 func (ts *TelegramSender) extract(users []*models.User) []string {

@@ -68,11 +68,11 @@ func (ts *TelegramSender) extract(users []*models.User) []string {
 	return tokens
 }

-func SendTelegram(ctx *ctx.Context, message TelegramMessage, event *models.AlertCurEvent, channel string) {
+func SendTelegram(ctx *ctx.Context, message TelegramMessage, events []*models.AlertCurEvent, channel string) {
 	for i := 0; i < len(message.Tokens); i++ {
 		if !strings.Contains(message.Tokens[i], "/") && !strings.HasPrefix(message.Tokens[i], "https://") {
 			logger.Errorf("telegram_sender: result=fail invalid token=%s", message.Tokens[i])
-			NotifyRecord(ctx, event, channel, message.Tokens[i], "", errors.New("invalid token"))
+			NotifyRecord(ctx, events, channel, message.Tokens[i], "", errors.New("invalid token"))
 			continue
 		}
 		var url string

@@ -93,6 +93,6 @@ func SendTelegram(ctx *ctx.Context, message TelegramMessage, event *models.Alert
 			Text: message.Text,
 		}

-		doSendAndRecord(ctx, url, message.Tokens[i], body, channel, message.Stats, event)
+		doSendAndRecord(ctx, url, message.Tokens[i], body, channel, message.Stats, events)
 	}
 }
@@ -59,17 +59,21 @@ func sendWebhook(webhook *models.Webhook, event interface{}, stats *astats.Stats
 	if webhook != nil {
 		insecureSkipVerify = webhook.SkipVerify
 	}
-	client := http.Client{
-		Timeout: time.Duration(conf.Timeout) * time.Second,
-		Transport: &http.Transport{
-			TLSClientConfig: &tls.Config{InsecureSkipVerify: insecureSkipVerify},
-		},
+
+	if conf.Client == nil {
+		logger.Warningf("event_%s, event:%s, url: [%s], error: [%s]", channel, string(bs), conf.Url, "client is nil")
+		conf.Client = &http.Client{
+			Timeout: time.Duration(conf.Timeout) * time.Second,
+			Transport: &http.Transport{
+				TLSClientConfig: &tls.Config{InsecureSkipVerify: insecureSkipVerify},
+			},
+		}
 	}

 	stats.AlertNotifyTotal.WithLabelValues(channel).Inc()
 	var resp *http.Response
 	var body []byte
-	resp, err = client.Do(req)
+	resp, err = conf.Client.Do(req)

 	if err != nil {
 		stats.AlertNotifyErrorTotal.WithLabelValues(channel).Inc()
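Instead of constructing a fresh `http.Client` on every attempt, `sendWebhook` now reuses a client stored on the webhook config and only builds one (with the per-webhook timeout and TLS skip-verify flag) when it is missing, so connections can be pooled across retries and events. A standalone sketch of that lazy-init pattern; the `WebhookConf` type is an assumption standing in for `models.Webhook`:

```go
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

// WebhookConf holds per-endpoint settings plus a reusable client.
type WebhookConf struct {
	Url        string
	Timeout    int // seconds
	SkipVerify bool
	Client     *http.Client
}

// client returns the cached client, building it on first use.
func (w *WebhookConf) client() *http.Client {
	if w.Client == nil {
		w.Client = &http.Client{
			Timeout: time.Duration(w.Timeout) * time.Second,
			Transport: &http.Transport{
				TLSClientConfig: &tls.Config{InsecureSkipVerify: w.SkipVerify},
			},
		}
	}
	return w.Client
}

func main() {
	conf := &WebhookConf{Url: "https://hooks.example.com/x", Timeout: 10}
	c1 := conf.client()
	c2 := conf.client()
	// The same client (and therefore the same transport and connection pool)
	// is reused across calls, unlike the per-call client in the old code.
	fmt.Println("same client reused:", c1 == c2)
}
```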
@@ -91,12 +95,12 @@ func sendWebhook(webhook *models.Webhook, event interface{}, stats *astats.Stats
 	return false, string(body), nil
 }

-func SingleSendWebhooks(ctx *ctx.Context, webhooks []*models.Webhook, event *models.AlertCurEvent, stats *astats.Stats) {
+func SingleSendWebhooks(ctx *ctx.Context, webhooks map[string]*models.Webhook, event *models.AlertCurEvent, stats *astats.Stats) {
 	for _, conf := range webhooks {
 		retryCount := 0
 		for retryCount < 3 {
 			needRetry, res, err := sendWebhook(conf, event, stats)
-			NotifyRecord(ctx, event, "webhook", conf.Url, res, err)
+			NotifyRecord(ctx, []*models.AlertCurEvent{event}, "webhook", conf.Url, res, err)
 			if !needRetry {
 				break
 			}

@@ -106,7 +110,7 @@ func SingleSendWebhooks(ctx *ctx.Context, webhooks []*models.Webhook, event *mod
 	}
 }

-func BatchSendWebhooks(ctx *ctx.Context, webhooks []*models.Webhook, event *models.AlertCurEvent, stats *astats.Stats) {
+func BatchSendWebhooks(ctx *ctx.Context, webhooks map[string]*models.Webhook, event *models.AlertCurEvent, stats *astats.Stats) {
 	for _, conf := range webhooks {
 		logger.Infof("push event:%+v to queue:%v", event, conf)
 		PushEvent(ctx, conf, event, stats)

@@ -166,7 +170,7 @@ func StartConsumer(ctx *ctx.Context, queue *WebhookQueue, popSize int, webhook *
 		retryCount := 0
 		for retryCount < webhook.RetryCount {
 			needRetry, res, err := sendWebhook(webhook, events, stats)
-			go RecordEvents(ctx, webhook, events, stats, res, err)
+			go NotifyRecord(ctx, events, "webhook", webhook.Url, res, err)
 			if !needRetry {
 				break
 			}

@@ -176,10 +180,3 @@ func StartConsumer(ctx *ctx.Context, queue *WebhookQueue, popSize int, webhook *
 		}
 	}
 }
-
-func RecordEvents(ctx *ctx.Context, webhook *models.Webhook, events []*models.AlertCurEvent, stats *astats.Stats, res string, err error) {
-	for _, event := range events {
-		time.Sleep(time.Millisecond * 10)
-		NotifyRecord(ctx, event, "webhook", webhook.Url, res, err)
-	}
-}
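Both webhook paths wrap the send in a bounded retry loop (a fixed 3 attempts for single sends, the configured `RetryCount` for the batching consumer) and record every attempt. A minimal sketch of that loop shape, with the send function and intervals as placeholders rather than the project's values:

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

// send is a stand-in for sendWebhook: it reports whether the attempt should be retried.
func send(attempt int) (needRetry bool, err error) {
	if attempt < 2 {
		return true, errors.New("upstream 5xx") // pretend the first attempts fail
	}
	return false, nil
}

func main() {
	const retryCount = 10                        // illustrative; the real value comes from the webhook config
	const retryInterval = 10 * time.Millisecond // shortened for the demo; the config value is in seconds

	var lastErr error
	for attempt := 0; attempt < retryCount; attempt++ {
		needRetry, err := send(attempt)
		lastErr = err
		// each attempt would also be recorded via NotifyRecord in the real consumer
		if !needRetry {
			break
		}
		time.Sleep(retryInterval)
	}
	fmt.Println("final error:", lastErr)
}
```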
@@ -37,8 +37,7 @@ func (ws *WecomSender) CallBack(ctx CallBackContext) {
 		},
 	}

-	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body, "callback",
-		ctx.Stats, ctx.Events[0])
+	doSendAndRecord(ctx.Ctx, ctx.CallBackURL, ctx.CallBackURL, body, "callback", ctx.Stats, ctx.Events)
 }

 func (ws *WecomSender) Send(ctx MessageContext) {

@@ -54,7 +53,7 @@ func (ws *WecomSender) Send(ctx MessageContext) {
 				Content: message,
 			},
 		}
-		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.Wecom, ctx.Stats, ctx.Events[0])
+		doSendAndRecord(ctx.Ctx, url, tokens[i], body, models.Wecom, ctx.Stats, ctx.Events)
 	}
 }
@@ -6,6 +6,7 @@ import (

 	"github.com/ccfos/nightingale/v6/alert"
 	"github.com/ccfos/nightingale/v6/alert/astats"
+	"github.com/ccfos/nightingale/v6/alert/dispatch"
 	"github.com/ccfos/nightingale/v6/alert/process"
 	alertrt "github.com/ccfos/nightingale/v6/alert/router"
 	"github.com/ccfos/nightingale/v6/center/cconf"

@@ -67,7 +68,7 @@ func Initialize(configDir string, cryptoKey string) (func(), error) {
 	}
 	ctx := ctx.NewContext(context.Background(), db, true)
 	migrate.Migrate(db)
-	models.InitRoot(ctx)
+	isRootInit := models.InitRoot(ctx)

 	config.HTTP.JWTAuth.SigningKey = models.InitJWTSigningKey(ctx)

@@ -103,6 +104,9 @@ func Initialize(configDir string, cryptoKey string) (func(), error) {

 	sso := sso.Init(config.Center, ctx, configCache)
 	promClients := prom.NewPromClient(ctx)
+
+	dispatch.InitRegisterQueryFunc(promClients)
+
 	tdengineClients := tdengine.NewTdengineClient(ctx, config.Alert.Heartbeat)

 	externalProcessors := process.NewExternalProcessors()

@@ -121,7 +125,7 @@ func Initialize(configDir string, cryptoKey string) (func(), error) {
 	pushgwRouter := pushgwrt.New(config.HTTP, config.Pushgw, config.Alert, targetCache, busiGroupCache, idents, metas, writers, ctx)

 	go func() {
-		if models.CanMigrateBg(ctx) {
+		if config.Center.MigrateBusiGroupLabel || models.CanMigrateBg(ctx) {
 			models.MigrateBg(ctx, pushgwRouter.Pushgw.BusiGroupLabelKey)
 		}
 	}()

@@ -140,6 +144,11 @@ func Initialize(configDir string, cryptoKey string) (func(), error) {

 	httpClean := httpx.Init(config.HTTP, r)

+	fmt.Printf("please view n9e at http://%v:%v\n", config.Alert.Heartbeat.IP, config.HTTP.Port)
+	if isRootInit {
+		fmt.Println("username/password: root/root.2020")
+	}
+
 	return func() {
 		logxClean()
 		httpClean()
@@ -113,6 +113,12 @@ func Init(ctx *ctx.Context, builtinIntegrationsDir string) {
 		logger.Warning("delete builtin metrics fail ", err)
 	}

+	// delete dashboard records where uuid%1000 != 0 and uuid > 1000000000000000000
+	err = models.DB(ctx).Exec("delete from builtin_payloads where uuid%1000 != 0 and uuid > 1000000000000000000 and type = 'dashboard' and updated_by = 'system'").Error
+	if err != nil {
+		logger.Warning("delete builtin payloads fail ", err)
+	}
+
 	// alerts
 	files, err = file.FilesUnder(componentDir + "/alerts")
 	if err == nil && len(files) > 0 {
@@ -218,7 +224,8 @@ func Init(ctx *ctx.Context, builtinIntegrationsDir string) {
 		}

 		if dashboard.UUID == 0 {
-			dashboard.UUID = time.Now().UnixNano()
+			time.Sleep(time.Microsecond)
+			dashboard.UUID = time.Now().UnixMicro()
 			// backfill the uuid in the file
 			bs, err = json.MarshalIndent(dashboard, "", " ")
 			if err != nil {
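The cleanup SQL above targets UUIDs that were generated with `UnixNano` (values above 1e18), while new payload UUIDs come from `UnixMicro`, preceded by a one-microsecond sleep so consecutive assignments stay distinct. A small snippet showing why the two ranges are easy to tell apart:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Now()
	nano := now.UnixNano()   // ~1.7e18 — matches "uuid > 1000000000000000000" in the cleanup SQL
	micro := now.UnixMicro() // ~1.7e15 — new UUIDs stay well below that threshold

	fmt.Println("UnixNano :", nano, "above 1e18:", nano > 1_000_000_000_000_000_000)
	fmt.Println("UnixMicro:", micro, "above 1e18:", micro > 1_000_000_000_000_000_000)

	// Sleeping at least one microsecond between two UnixMicro reads keeps
	// consecutive values distinct in practice, which is what the diff does
	// before assigning each dashboard UUID.
	a := time.Now().UnixMicro()
	time.Sleep(time.Microsecond)
	b := time.Now().UnixMicro()
	fmt.Println("distinct consecutive UUIDs:", a != b)
}
```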
@@ -184,6 +184,7 @@ func (rt *Router) Config(r *gin.Engine) {
 		pages.POST("/query-range-batch", rt.promBatchQueryRange)
 		pages.POST("/query-instant-batch", rt.promBatchQueryInstant)
 		pages.GET("/datasource/brief", rt.datasourceBriefs)
+		pages.POST("/datasource/query", rt.datasourceQuery)

 		pages.POST("/ds-query", rt.QueryData)
 		pages.POST("/logs-query", rt.QueryLog)

@@ -197,6 +198,7 @@ func (rt *Router) Config(r *gin.Engine) {
 		pages.POST("/query-range-batch", rt.auth(), rt.promBatchQueryRange)
 		pages.POST("/query-instant-batch", rt.auth(), rt.promBatchQueryInstant)
 		pages.GET("/datasource/brief", rt.auth(), rt.user(), rt.datasourceBriefs)
+		pages.POST("/datasource/query", rt.auth(), rt.user(), rt.datasourceQuery)

 		pages.POST("/ds-query", rt.auth(), rt.QueryData)
 		pages.POST("/logs-query", rt.auth(), rt.QueryLog)

@@ -278,6 +280,7 @@ func (rt *Router) Config(r *gin.Engine) {
 		pages.DELETE("/busi-group/:id/members", rt.auth(), rt.user(), rt.perm("/busi-groups/put"), rt.bgrw(), rt.busiGroupMemberDel)
 		pages.DELETE("/busi-group/:id", rt.auth(), rt.user(), rt.perm("/busi-groups/del"), rt.bgrw(), rt.busiGroupDel)
 		pages.GET("/busi-group/:id/perm/:perm", rt.auth(), rt.user(), rt.checkBusiGroupPerm)
+		pages.GET("/busi-groups/tags", rt.auth(), rt.user(), rt.busiGroupsGetTags)

 		pages.GET("/targets", rt.auth(), rt.user(), rt.targetGets)
 		pages.GET("/target/extra-meta", rt.auth(), rt.user(), rt.targetExtendInfoByIdent)
@@ -77,6 +77,11 @@ func (rt *Router) alertRuleGetsByGids(c *gin.Context) {
 	for i := 0; i < len(ars); i++ {
 		ars[i].FillNotifyGroups(rt.Ctx, cache)
 		ars[i].FillSeverities()
+
+		if len(ars[i].DatasourceQueries) != 0 {
+			ars[i].DatasourceIdsJson = rt.DatasourceCache.GetIDsByDsCateAndQueries(ars[i].Cate, ars[i].DatasourceQueries)
+		}
+
 		rids = append(rids, ars[i].Id)
 		names = append(names, ars[i].UpdateBy)
 	}

@@ -123,6 +128,10 @@ func (rt *Router) alertRulesGetByService(c *gin.Context) {
 		cache := make(map[int64]*models.UserGroup)
 		for i := 0; i < len(ars); i++ {
 			ars[i].FillNotifyGroups(rt.Ctx, cache)
+
+			if len(ars[i].DatasourceQueries) != 0 {
+				ars[i].DatasourceIdsJson = rt.DatasourceCache.GetIDsByDsCateAndQueries(ars[i].Cate, ars[i].DatasourceQueries)
+			}
 		}
 	}
 	ginx.NewRender(c).Data(ars, err)

@@ -157,6 +166,14 @@ func (rt *Router) alertRuleAddByImport(c *gin.Context) {
 		ginx.Bomb(http.StatusBadRequest, "input json is empty")
 	}

+	for i := range lst {
+		if len(lst[i].DatasourceQueries) == 0 {
+			lst[i].DatasourceQueries = []models.DatasourceQuery{
+				models.DataSourceQueryAll,
+			}
+		}
+	}
+
 	bgid := ginx.UrlParamInt64(c, "id")
 	reterr := rt.alertRuleAdd(lst, username, bgid, c.GetHeader("X-Language"))
@@ -164,9 +181,9 @@ func (rt *Router) alertRuleAddByImport(c *gin.Context) {
 }

 type promRuleForm struct {
-	Payload       string  `json:"payload" binding:"required"`
-	DatasourceIds []int64 `json:"datasource_ids" binding:"required"`
-	Disabled      int     `json:"disabled" binding:"gte=0,lte=1"`
+	Payload           string                   `json:"payload" binding:"required"`
+	DatasourceQueries []models.DatasourceQuery `json:"datasource_queries" binding:"required"`
+	Disabled          int                      `json:"disabled" binding:"gte=0,lte=1"`
 }

 func (rt *Router) alertRuleAddByImportPromRule(c *gin.Context) {

@@ -185,7 +202,7 @@ func (rt *Router) alertRuleAddByImportPromRule(c *gin.Context) {
 		ginx.Bomb(http.StatusBadRequest, "input yaml is empty")
 	}

-	lst := models.DealPromGroup(pr.Groups, f.DatasourceIds, f.Disabled)
+	lst := models.DealPromGroup(pr.Groups, f.DatasourceQueries, f.Disabled)
 	username := c.MustGet("username").(string)
 	bgid := ginx.UrlParamInt64(c, "id")
 	ginx.NewRender(c).Data(rt.alertRuleAdd(lst, username, bgid, c.GetHeader("X-Language")), nil)
@@ -398,6 +415,16 @@ func (rt *Router) alertRulePutFields(c *gin.Context) {
 			}
 		}

+		if f.Action == "datasource_change" {
+			// change the datasource
+			if datasourceQueries, has := f.Fields["datasource_queries"]; has {
+				bytes, err := json.Marshal(datasourceQueries)
+				ginx.Dangerous(err)
+				ginx.Dangerous(ar.UpdateFieldsMap(rt.Ctx, map[string]interface{}{"datasource_queries": bytes}))
+				continue
+			}
+		}
+
 		for k, v := range f.Fields {
 			ginx.Dangerous(ar.UpdateColumn(rt.Ctx, k, v))
 		}
@@ -417,6 +444,10 @@ func (rt *Router) alertRuleGet(c *gin.Context) {
 		return
 	}

+	if len(ar.DatasourceQueries) != 0 {
+		ar.DatasourceIdsJson = rt.DatasourceCache.GetIDsByDsCateAndQueries(ar.Cate, ar.DatasourceQueries)
+	}
+
 	err = ar.FillNotifyGroups(rt.Ctx, make(map[int64]*models.UserGroup))
 	ginx.Dangerous(err)

@@ -623,7 +654,7 @@ func (rt *Router) cloneToMachine(c *gin.Context) {
 			newRule.CreateAt = now
 			newRule.RuleConfig = alertRules[i].RuleConfig

-			exist, err := models.AlertRuleExists(rt.Ctx, 0, newRule.GroupId, newRule.DatasourceIdsJson, newRule.Name)
+			exist, err := models.AlertRuleExists(rt.Ctx, 0, newRule.GroupId, newRule.Name)
 			if err != nil {
 				errMsg[f.IdentList[j]] = err.Error()
 				continue
@@ -43,7 +43,7 @@ func (rt *Router) builtinPayloadsAdd(c *gin.Context) {

 		for _, rule := range alertRules {
 			if rule.UUID == 0 {
-				rule.UUID = time.Now().UnixNano()
+				rule.UUID = time.Now().UnixMicro()
 			}

 			contentBytes, err := json.Marshal(rule)

@@ -78,7 +78,13 @@ func (rt *Router) builtinPayloadsAdd(c *gin.Context) {
 			}

 			if alertRule.UUID == 0 {
-				alertRule.UUID = time.Now().UnixNano()
+				alertRule.UUID = time.Now().UnixMicro()
 			}

+			contentBytes, err := json.Marshal(alertRule)
+			if err != nil {
+				reterr[alertRule.Name] = err.Error()
+				continue
+			}
+
 			bp := models.BuiltinPayload{

@@ -88,7 +94,7 @@ func (rt *Router) builtinPayloadsAdd(c *gin.Context) {
 				Name:      alertRule.Name,
 				Tags:      alertRule.AppendTags,
 				UUID:      alertRule.UUID,
-				Content:   lst[i].Content,
+				Content:   string(contentBytes),
 				CreatedBy: username,
 				UpdatedBy: username,
 			}

@@ -106,7 +112,7 @@ func (rt *Router) builtinPayloadsAdd(c *gin.Context) {

 		for _, dashboard := range dashboards {
 			if dashboard.UUID == 0 {
-				dashboard.UUID = time.Now().UnixNano()
+				dashboard.UUID = time.Now().UnixMicro()
 			}

 			contentBytes, err := json.Marshal(dashboard)

@@ -141,7 +147,13 @@ func (rt *Router) builtinPayloadsAdd(c *gin.Context) {
 			}

 			if dashboard.UUID == 0 {
-				dashboard.UUID = time.Now().UnixNano()
+				dashboard.UUID = time.Now().UnixMicro()
 			}

+			contentBytes, err := json.Marshal(dashboard)
+			if err != nil {
+				reterr[dashboard.Name] = err.Error()
+				continue
+			}
+
 			bp := models.BuiltinPayload{

@@ -151,7 +163,7 @@ func (rt *Router) builtinPayloadsAdd(c *gin.Context) {
 				Name:      dashboard.Name,
 				Tags:      dashboard.Tags,
 				UUID:      dashboard.UUID,
-				Content:   lst[i].Content,
+				Content:   string(contentBytes),
 				CreatedBy: username,
 				UpdatedBy: username,
 			}
@@ -140,3 +140,12 @@ func (rt *Router) busiGroupGet(c *gin.Context) {
 	ginx.Dangerous(bg.FillUserGroups(rt.Ctx))
 	ginx.NewRender(c).Data(bg, nil)
 }
+
+func (rt *Router) busiGroupsGetTags(c *gin.Context) {
+	bgids := str.IdsInt64(ginx.QueryStr(c, "gids", ""), ",")
+	targetIdents, err := models.TargetIndentsGetByBgids(rt.Ctx, bgids)
+	ginx.Dangerous(err)
+	tags, err := models.TargetGetTags(rt.Ctx, targetIdents, true, "busigroup")
+	ginx.Dangerous(err)
+	ginx.NewRender(c).Data(tags, nil)
+}
@@ -251,3 +251,37 @@ func (rt *Router) getDatasourceIds(c *gin.Context) {

 	ginx.NewRender(c).Data(datasourceIds, err)
 }
+
+type datasourceQueryForm struct {
+	Cate              string                   `json:"datasource_cate"`
+	DatasourceQueries []models.DatasourceQuery `json:"datasource_queries"`
+}
+
+type datasourceQueryResp struct {
+	ID   int64  `json:"id"`
+	Name string `json:"name"`
+}
+
+func (rt *Router) datasourceQuery(c *gin.Context) {
+	var dsf datasourceQueryForm
+	ginx.BindJSON(c, &dsf)
+	datasources, err := models.GetDatasourcesGetsByTypes(rt.Ctx, []string{dsf.Cate})
+	ginx.Dangerous(err)
+
+	nameToID := make(map[string]int64)
+	IDToName := make(map[int64]string)
+	for _, ds := range datasources {
+		nameToID[ds.Name] = ds.Id
+		IDToName[ds.Id] = ds.Name
+	}
+
+	ids := models.GetDatasourceIDsByDatasourceQueries(dsf.DatasourceQueries, IDToName, nameToID)
+	var req []datasourceQueryResp
+	for _, id := range ids {
+		req = append(req, datasourceQueryResp{
+			ID:   id,
+			Name: IDToName[id],
+		})
+	}
+	ginx.NewRender(c).Data(req, err)
+}
@@ -100,7 +100,7 @@ func HandleHeartbeat(c *gin.Context, ctx *ctx.Context, engineName string, metaSe
 			groupIds = append(groupIds, groupId)
 		}

-		err := models.TargetOverrideBgids(ctx, []string{target.Ident}, groupIds)
+		err := models.TargetOverrideBgids(ctx, []string{target.Ident}, groupIds, nil)
 		if err != nil {
 			logger.Warningf("update target:%s group ids failed, err: %v", target.Ident, err)
 		}

@@ -113,7 +113,7 @@ func HandleHeartbeat(c *gin.Context, ctx *ctx.Context, engineName string, metaSe
 	}

 	if !target.MatchGroupId(groupId) {
-		err := models.TargetBindBgids(ctx, []string{target.Ident}, []int64{groupId})
+		err := models.TargetBindBgids(ctx, []string{target.Ident}, []int64{groupId}, nil)
 		if err != nil {
 			logger.Warningf("update target:%s group ids failed, err: %v", target.Ident, err)
 		}
@@ -35,11 +35,18 @@ type Record struct {

 // notificationRecordAdd
 func (rt *Router) notificationRecordAdd(c *gin.Context) {
-	var req models.NotificaitonRecord
+	var req []*models.NotificaitonRecord
 	ginx.BindJSON(c, &req)
-	err := req.Add(rt.Ctx)
+	err := models.DB(rt.Ctx).CreateInBatches(req, 100).Error
+	var ids []int64
+	if err == nil {
+		ids = make([]int64, len(req))
+		for i, noti := range req {
+			ids[i] = noti.Id
+		}
+	}

-	ginx.NewRender(c).Data(req.Id, err)
+	ginx.NewRender(c).Data(ids, err)
 }

 func (rt *Router) notificationRecordList(c *gin.Context) {
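The handler now accepts a slice of records, persists them with GORM's `CreateInBatches`, and returns the auto-assigned IDs. A minimal sketch of that pattern under an assumed GORM + SQLite setup; the `Record` shape and database choice are illustrative, not the project's schema:

```go
package main

import (
	"fmt"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Record is an illustrative stand-in for models.NotificaitonRecord.
type Record struct {
	Id      int64 `gorm:"primaryKey"`
	Channel string
	Target  string
}

func main() {
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	if err := db.AutoMigrate(&Record{}); err != nil {
		panic(err)
	}

	recs := []*Record{
		{Channel: "webhook", Target: "https://hooks.example.com/a"},
		{Channel: "email", Target: "ops@example.com"},
	}

	// One INSERT per batch of 100 instead of a round trip per record; GORM
	// backfills the primary keys, which is what lets the handler return ids.
	if err := db.CreateInBatches(recs, 100).Error; err != nil {
		panic(err)
	}

	ids := make([]int64, len(recs))
	for i, r := range recs {
		ids[i] = r.Id
	}
	fmt.Println("inserted ids:", ids)
}
```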
@@ -161,7 +161,11 @@ func (rt *Router) notifyTplPreview(c *gin.Context) {
 func (rt *Router) notifyTplAdd(c *gin.Context) {
 	var f models.NotifyTpl
 	ginx.BindJSON(c, &f)
+	f.Channel = strings.TrimSpace(f.Channel)
+
+	user := c.MustGet("user").(*models.User)
+	f.CreateBy = user.Username

-	f.Channel = strings.TrimSpace(f.Channel)
 	ginx.Dangerous(templateValidate(f))

 	count, err := models.Count(models.DB(rt.Ctx).Model(&models.NotifyTpl{}).Where("channel = ? or name = ?", f.Channel, f.Name))

@@ -169,6 +173,8 @@ func (rt *Router) notifyTplAdd(c *gin.Context) {
 	if count != 0 {
 		ginx.Bomb(200, "Refuse to create duplicate channel(unique)")
 	}
+
+	f.CreateAt = time.Now().Unix()
 	ginx.NewRender(c).Message(f.Create(rt.Ctx))
 }
@@ -3,8 +3,6 @@ package router
import (
	"encoding/json"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/ccfos/nightingale/v6/models"
@@ -74,6 +72,14 @@ func (rt *Router) recordingRuleAddByFE(c *gin.Context) {
		ginx.Bomb(http.StatusBadRequest, "input json is empty")
	}

	for i := range lst {
		if len(lst[i].DatasourceQueries) == 0 {
			lst[i].DatasourceQueries = []models.DatasourceQuery{
				models.DataSourceQueryAll,
			}
		}
	}

	bgid := ginx.UrlParamInt64(c, "id")
	reterr := make(map[string]string)
	for i := 0; i < count; i++ {

@@ -137,23 +143,10 @@ func (rt *Router) recordingRulePutFields(c *gin.Context) {
	f.Fields["update_by"] = c.MustGet("username").(string)
	f.Fields["update_at"] = time.Now().Unix()

	if _, ok := f.Fields["datasource_ids"]; ok {
		// datasource_ids = "1 2 3"
		idsStr := strings.Fields(f.Fields["datasource_ids"].(string))
		ids := make([]int64, 0)
		for _, idStr := range idsStr {
			id, err := strconv.ParseInt(idStr, 10, 64)
			if err != nil {
				ginx.Bomb(http.StatusBadRequest, "datasource_ids error")
			}
			ids = append(ids, id)
		}

		bs, err := json.Marshal(ids)
		if err != nil {
			ginx.Bomb(http.StatusBadRequest, "datasource_ids error")
		}
		f.Fields["datasource_ids"] = string(bs)
	if datasourceQueries, ok := f.Fields["datasource_queries"]; ok {
		bytes, err := json.Marshal(datasourceQueries)
		ginx.Dangerous(err)
		f.Fields["datasource_queries"] = string(bytes)
	}

	for i := 0; i < len(f.Ids); i++ {

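A self-contained sketch of the new datasource_queries handling above: whatever structure the frontend sends is re-marshaled to a JSON string before the column update. The inner payload shape is illustrative only; the real `models.DatasourceQuery` fields are not shown in this diff.

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// fields mimics f.Fields after BindJSON; the nested shape is an assumption.
	fields := map[string]interface{}{
		"datasource_queries": []map[string]interface{}{
			{"match_type": 0, "op": "in", "values": []int64{1, 2}},
		},
	}

	if dq, ok := fields["datasource_queries"]; ok {
		bs, err := json.Marshal(dq)
		if err != nil {
			panic(err)
		}
		// Stored as a JSON string, matching the text column in the rule table.
		fields["datasource_queries"] = string(bs)
	}

	fmt.Println(fields["datasource_queries"])
}
```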
@@ -169,7 +169,7 @@ func (rt *Router) targetGetTags(c *gin.Context) {
	idents := ginx.QueryStr(c, "idents", "")
	idents = strings.ReplaceAll(idents, ",", " ")
	ignoreHostTag := ginx.QueryBool(c, "ignore_host_tag", false)
	lst, err := models.TargetGetTags(rt.Ctx, strings.Fields(idents), ignoreHostTag)
	lst, err := models.TargetGetTags(rt.Ctx, strings.Fields(idents), ignoreHostTag, "")
	ginx.NewRender(c).Data(lst, err)
}

@@ -397,6 +397,7 @@ type targetBgidsForm struct {
	Idents  []string `json:"idents" binding:"required_without=HostIps"`
	HostIps []string `json:"host_ips" binding:"required_without=Idents"`
	Bgids   []int64  `json:"bgids"`
	Tags    []string `json:"tags"`
	Action  string   `json:"action"` // add del reset
}

@@ -452,11 +453,11 @@ func (rt *Router) targetBindBgids(c *gin.Context) {

	switch f.Action {
	case "add":
		ginx.NewRender(c).Data(failedResults, models.TargetBindBgids(rt.Ctx, f.Idents, f.Bgids))
		ginx.NewRender(c).Data(failedResults, models.TargetBindBgids(rt.Ctx, f.Idents, f.Bgids, f.Tags))
	case "del":
		ginx.NewRender(c).Data(failedResults, models.TargetUnbindBgids(rt.Ctx, f.Idents, f.Bgids))
	case "reset":
		ginx.NewRender(c).Data(failedResults, models.TargetOverrideBgids(rt.Ctx, f.Idents, f.Bgids))
		ginx.NewRender(c).Data(failedResults, models.TargetOverrideBgids(rt.Ctx, f.Idents, f.Bgids, f.Tags))
	default:
		ginx.Bomb(http.StatusBadRequest, "invalid action")
	}
@@ -478,7 +479,7 @@ func (rt *Router) targetUpdateBgidByService(c *gin.Context) {
		ginx.Bomb(http.StatusBadRequest, err.Error())
	}

	ginx.NewRender(c).Data(failedResults, models.TargetOverrideBgids(rt.Ctx, f.Idents, []int64{f.Bgid}))
	ginx.NewRender(c).Data(failedResults, models.TargetOverrideBgids(rt.Ctx, f.Idents, []int64{f.Bgid}, nil))
}

type identsForm struct {

@@ -33,7 +33,7 @@ type ClusterOptions struct {
	MaxIdleConnsPerHost int
}

func Parse(fpath string, configPtr interface{}) error {
func Parse(fpath string, configPtr *Config) error {
	var (
		tBuf []byte
	)

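A hedged sketch of what the tightened signature implies: the loader now decodes straight into the concrete Config type instead of an `interface{}`. The Config fields and the direct use of BurntSushi/toml below are assumptions for illustration; the real Parse may read and merge several files.

```go
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

// Config is a small stand-in for the project's config struct.
type Config struct {
	DB struct {
		DBType string
		DSN    string
	}
}

// parseInto mirrors the new, concretely-typed Parse signature.
func parseInto(fpath string, cfg *Config) error {
	_, err := toml.DecodeFile(fpath, cfg)
	return err
}

func main() {
	var cfg Config
	if err := parseInto("config.toml", &cfg); err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println(cfg.DB.DBType, cfg.DB.DSN)
}
```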
@@ -7,6 +7,7 @@ import (

	"github.com/ccfos/nightingale/v6/alert"
	"github.com/ccfos/nightingale/v6/alert/astats"
	"github.com/ccfos/nightingale/v6/alert/dispatch"
	"github.com/ccfos/nightingale/v6/alert/process"
	alertrt "github.com/ccfos/nightingale/v6/alert/router"
	"github.com/ccfos/nightingale/v6/center/metas"
@@ -73,6 +74,9 @@ func Initialize(configDir string, cryptoKey string) (func(), error) {
	taskTplsCache := memsto.NewTaskTplCache(ctx)

	promClients := prom.NewPromClient(ctx)

	dispatch.InitRegisterQueryFunc(promClients)

	tdengineClients := tdengine.NewTdengineClient(ctx, config.Alert.Heartbeat)
	externalProcessors := process.NewExternalProcessors()

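Speculative sketch only: the diff shows just this call site, so the hook name, shape, and purpose below are placeholders, not the real dispatch API. The pattern it suggests is wiring the shared prom clients into the dispatch package once at startup so later notification code can run queries without importing the client set.

```go
package main

import "fmt"

// promQuerier stands in for whatever the shared prom clients expose (assumption).
type promQuerier func(datasourceID int64, promql string) (string, error)

var registeredQuerier promQuerier

// initRegisterQueryFunc mimics registering the shared clients with dispatch.
func initRegisterQueryFunc(q promQuerier) { registeredQuerier = q }

func main() {
	initRegisterQueryFunc(func(id int64, q string) (string, error) {
		return fmt.Sprintf("datasource=%d promql=%s", id, q), nil
	})
	out, _ := registeredQuerier(1, "up")
	fmt.Println(out)
}
```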
@@ -19,8 +19,8 @@ precision = "ms"
# global collect interval
interval = 15

[global.labels]
source="categraf"
# [global.labels]
# source="categraf"
# region = "shanghai"
# env = "localhost"

42
docker/compose-bridge/etc-categraf/input.mysql/mysql.toml
Normal file
@@ -0,0 +1,42 @@
[[instances]]
address = "mysql:3306"
username = "root"
password = "1234"

# # set tls=custom to enable tls
# parameters = "tls=false"

# extra_status_metrics = true
# extra_innodb_metrics = false
# gather_processlist_processes_by_state = false
# gather_processlist_processes_by_user = false
# gather_schema_size = true
# gather_table_size = false
# gather_system_table_size = false
# gather_slave_status = true

# # timeout
# timeout_seconds = 3

# # interval = global.interval * interval_times
# interval_times = 1

# important! use global unique string to specify instance
labels = { instance="docker-compose-mysql" }

## Optional TLS Config
# use_tls = false
# tls_min_version = "1.2"
# tls_ca = "/etc/categraf/ca.pem"
# tls_cert = "/etc/categraf/cert.pem"
# tls_key = "/etc/categraf/key.pem"
## Use TLS but skip chain & host verification
# insecure_skip_verify = true

#[[instances.queries]]
# mesurement = "lock_wait"
# metric_fields = [ "total" ]
# timeout = "3s"
# request = '''
#SELECT count(*) as total FROM information_schema.innodb_trx WHERE trx_state='LOCK WAIT'
#'''

37
docker/compose-bridge/etc-categraf/input.redis/redis.toml
Normal file
@@ -0,0 +1,37 @@
[[instances]]
address = "redis:6379"
username = ""
password = ""
# pool_size = 2

## whether to collect slowlog
# gather_slowlog = true
## maximum number of slowlog entries to collect
# slowlog_max_len = 100
## only collect slowlog entries from the last N seconds
## note: keep this no smaller than the plugin's collect interval, otherwise some slowlog entries will be missed
# slowlog_time_window=30

# metric example
# redis_slow_log{ident=dev-01 client_addr=127.0.0.1:56364 client_name= cmd="info ALL" log_id=983} 74 (in microseconds)

# # Optional. Specify redis commands to retrieve values
# commands = [
#   {command = ["get", "sample-key1"], metric = "custom_metric_name1"},
#   {command = ["get", "sample-key2"], metric = "custom_metric_name2"}
# ]

# # interval = global.interval * interval_times
# interval_times = 1

# important! use global unique string to specify instance
labels = { instance="docker-compose-redis" }

## Optional TLS Config
# use_tls = false
# tls_min_version = "1.2"
# tls_ca = "/etc/categraf/ca.pem"
# tls_cert = "/etc/categraf/cert.pem"
# tls_key = "/etc/categraf/key.pem"
## Use TLS but skip chain & host verification
# insecure_skip_verify = true

@@ -25,7 +25,7 @@ services:
    network_mode: host

  prometheus:
    image: prom/prometheus
    image: prom/prometheus:v2.55.1
    container_name: prometheus
    hostname: prometheus
    restart: always

File diff suppressed because it is too large
@@ -1,3 +1,10 @@
GRANT ALL ON *.* TO 'root'@'127.0.0.1' IDENTIFIED BY '1234';
GRANT ALL ON *.* TO 'root'@'localhost' IDENTIFIED BY '1234';
GRANT ALL ON *.* TO 'root'@'%' IDENTIFIED BY '1234';
CREATE USER IF NOT EXISTS 'root'@'127.0.0.1' IDENTIFIED BY '1234';
GRANT ALL PRIVILEGES ON *.* TO 'root'@'127.0.0.1' WITH GRANT OPTION;

CREATE USER IF NOT EXISTS 'root'@'localhost' IDENTIFIED BY '1234';
GRANT ALL PRIVILEGES ON *.* TO 'root'@'localhost' WITH GRANT OPTION;

CREATE USER IF NOT EXISTS 'root'@'%' IDENTIFIED BY '1234';
GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' WITH GRANT OPTION;

FLUSH PRIVILEGES;

@@ -116,4 +116,8 @@ CREATE TABLE `target_busi_group` (
    `update_at` bigint NOT NULL,
    PRIMARY KEY (`id`),
    UNIQUE KEY `idx_target_group` (`target_ident`,`group_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

/* v7.7.2 2024-12-02 */
ALTER TABLE alert_subscribe MODIFY COLUMN rule_ids varchar(1024);
ALTER TABLE alert_subscribe MODIFY COLUMN busi_groups varchar(4096);

@@ -73,14 +73,14 @@ DefaultRoles = ["Standard"]
OpenRSA = false

[DB]
# mysql postgres sqlite
DBType = "sqlite"
# postgres: host=%s port=%s user=%s dbname=%s password=%s sslmode=%s
# postgres: DSN="host=127.0.0.1 port=5432 user=root dbname=n9e_v6 password=1234 sslmode=disable"
# sqlite: DSN="/path/to/filename.db"
DSN = "root:1234@tcp(127.0.0.1:3306)/n9e_v6?charset=utf8mb4&parseTime=True&loc=Local&allowNativePasswords=true"
DSN = "n9e.db"
# enable debug mode or not
Debug = false
# mysql postgres sqlite
DBType = "mysql"
# unit: s
MaxLifetime = 7200
# max open connections

@@ -98,8 +98,8 @@ Address = "127.0.0.1:6379"
# DB = 0
# UseTLS = false
# TLSMinVersion = "1.2"
# standalone cluster sentinel
RedisType = "standalone"
# standalone cluster sentinel miniredis
RedisType = "miniredis"
# Mastername for sentinel type
# MasterName = "mymaster"
# SentinelUsername = ""

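For context, a minimal sketch of what the new `miniredis` option enables: an in-process Redis (github.com/alicebob/miniredis/v2, added to go.mod later in this diff) that the storage layer can be pointed at, so a demo setup needs no external Redis. The go-redis v9 client used below is an illustrative assumption; the project's actual Redis client wiring may differ.

```go
package main

import (
	"context"
	"fmt"

	"github.com/alicebob/miniredis/v2"
	"github.com/redis/go-redis/v9"
)

func main() {
	// Start an embedded Redis listening on a random local port.
	mr, err := miniredis.Run()
	if err != nil {
		panic(err)
	}
	defer mr.Close()

	// Any regular Redis client can talk to it via mr.Addr().
	rdb := redis.NewClient(&redis.Options{Addr: mr.Addr()})
	ctx := context.Background()
	if err := rdb.Set(ctx, "k", "v", 0).Err(); err != nil {
		panic(err)
	}
	fmt.Println(rdb.Get(ctx, "k").Val()) // "v"
}
```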
@@ -138,6 +138,9 @@ ForceUseServerTS = true
# [Pushgw.WriterOpt]
# QueueMaxSize = 1000000
# QueuePopSize = 1000
# AllQueueMaxSize = 1000000
# fresh time, unit ms
# AllQueueMaxSizeInterval = 200

[[Pushgw.Writers]]
# Url = "http://127.0.0.1:8480/insert/0/prometheus/api/v1/write"

13
go.mod
@@ -1,6 +1,6 @@
module github.com/ccfos/nightingale/v6

go 1.18
go 1.22

require (
	github.com/BurntSushi/toml v0.3.1
@@ -45,10 +45,15 @@ require (
	gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde
)

require github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
require (
	github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect
	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
	github.com/yuin/gopher-lua v1.1.1 // indirect
)

require (
	github.com/Azure/go-ntlmssp v0.0.0-20220621081337-cb9428e4ac1e // indirect
	github.com/alicebob/miniredis/v2 v2.33.0
	github.com/beorn7/perks v1.0.1 // indirect
	github.com/bytedance/sonic v1.9.1 // indirect
	github.com/cespare/xxhash/v2 v2.2.0 // indirect
@@ -90,7 +95,7 @@ require (
	github.com/prometheus/client_model v0.4.0 // indirect
	github.com/prometheus/procfs v0.11.0 // indirect
	github.com/robfig/cron/v3 v3.0.1
	github.com/tidwall/match v1.1.1 // indirect
	github.com/tidwall/match v1.1.1
	github.com/tidwall/pretty v1.2.0 // indirect
	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
	github.com/ugorji/go/codec v1.2.11 // indirect
@@ -100,7 +105,7 @@ require (
	golang.org/x/crypto v0.21.0 // indirect
	golang.org/x/image v0.18.0 // indirect
	golang.org/x/net v0.23.0 // indirect
	golang.org/x/sys v0.18.0 // indirect
	golang.org/x/sys v0.21.0 // indirect
	golang.org/x/text v0.16.0 // indirect
	google.golang.org/appengine v1.6.7 // indirect
	google.golang.org/protobuf v1.33.0 // indirect

33
go.sum
@@ -1,20 +1,32 @@
|
||||
github.com/Azure/azure-sdk-for-go v65.0.0+incompatible h1:HzKLt3kIwMm4KeJYTdx9EbjRYTySD/t8i1Ee/W5EGXw=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0 h1:8q4SaHjFsClSvuVne0ID/5Ka8u3fcIHyqkLjcFpNRHQ=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.0/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0 h1:vcYCAze6p19qBW7MhZybIsqD8sMV8js0NyQM8JDnVtg=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0/go.mod h1:OQeznEEkTZ9OrhHJoDD8ZDq51FHgXjqtP9z6bEwBq9U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 h1:sXr+ck84g/ZlZUOZiNELInmMgOsuGwdjjVkEIde0OtY=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM=
|
||||
github.com/Azure/go-ntlmssp v0.0.0-20220621081337-cb9428e4ac1e h1:NeAW1fUYUEWhft7pkxDf6WoUvEZJ/uOKsvtpjLnn8MU=
|
||||
github.com/Azure/go-ntlmssp v0.0.0-20220621081337-cb9428e4ac1e/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0 h1:OBhqkivkhkMqLPymWEppkm7vgPQY2XsHoEkaMQ0AdZY=
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.0.0/go.mod h1:kgDmCTgBzIEPFElEF+FK0SdjAor06dRq2Go927dnQ6o=
|
||||
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc=
|
||||
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
|
||||
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc=
|
||||
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
|
||||
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk=
|
||||
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc=
|
||||
github.com/alicebob/miniredis/v2 v2.33.0 h1:uvTF0EDeu9RLnUEG27Db5I68ESoIxTiXbNUiji6lZrA=
|
||||
github.com/alicebob/miniredis/v2 v2.33.0/go.mod h1:MhP4a3EU7aENRi9aO+tHfTBZicLqQevyi/DJpoj6mi0=
|
||||
github.com/aws/aws-sdk-go v1.44.302 h1:ST3ko6GrJKn3Xi+nAvxjG3uk/V1pW8KC52WLeIxqqNk=
|
||||
github.com/aws/aws-sdk-go v1.44.302/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
|
||||
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
|
||||
github.com/bsm/ginkgo/v2 v2.5.0 h1:aOAnND1T40wEdAtkGSkvSICWeQ8L3UASX7YVCqQx+eQ=
|
||||
github.com/bsm/ginkgo/v2 v2.5.0/go.mod h1:AiKlXPm7ItEHNc/2+OkrNG4E0ITzojb9/xWzvQ9XZ9w=
|
||||
github.com/bsm/gomega v1.20.0 h1:JhAwLmtRzXFTx2AkALSLa8ijZafntmhSoU63Ok18Uq8=
|
||||
github.com/bsm/gomega v1.20.0/go.mod h1:JifAceMQ4crZIWYUKrlGcmbN3bqHogVTADMD2ATsbwk=
|
||||
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
|
||||
github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
|
||||
github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
|
||||
@@ -94,6 +106,7 @@ github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69
|
||||
github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
|
||||
github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
|
||||
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
|
||||
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
|
||||
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
|
||||
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
@@ -105,6 +118,7 @@ github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
@@ -168,14 +182,17 @@ github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/
|
||||
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
||||
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
|
||||
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
|
||||
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
|
||||
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=
|
||||
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/compress v1.16.7 h1:2mk3MPGNzKyxErAw8YaohYh69+pa4sIQSC0fPGCFR9I=
|
||||
github.com/klauspost/compress v1.16.7/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk=
|
||||
github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY=
|
||||
@@ -187,12 +204,14 @@ github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORN
|
||||
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
|
||||
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
|
||||
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
|
||||
@@ -223,11 +242,14 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY
|
||||
github.com/mojocn/base64Captcha v1.3.6 h1:gZEKu1nsKpttuIAQgWHO+4Mhhls8cAKyiV2Ew03H+Tw=
|
||||
github.com/mojocn/base64Captcha v1.3.6/go.mod h1:i5CtHvm+oMbj1UzEPXaA8IH/xHFZ3DGY3Wh3dBpZ28E=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
|
||||
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
|
||||
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
|
||||
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU=
|
||||
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI=
|
||||
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
@@ -238,6 +260,7 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH
|
||||
github.com/pquerna/cachecontrol v0.1.0 h1:yJMy84ti9h/+OEWa752kBTKv4XC30OtVVHYv/8cTqKc=
|
||||
github.com/pquerna/cachecontrol v0.1.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI=
|
||||
github.com/prashantv/gostub v1.1.0 h1:BTyx3RfQjRHnUWaGF9oQos79AlQ5k8WNktv7VGvVH4g=
|
||||
github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P1IAHhP5U=
|
||||
github.com/prometheus/client_golang v1.16.0 h1:yk/hx9hDbrGHovbci4BY+pRMfSuuat626eFsHb7tmT8=
|
||||
github.com/prometheus/client_golang v1.16.0/go.mod h1:Zsulrv/L9oM40tJ7T815tM89lFEugiJ9HzIqaAx4LKc=
|
||||
github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY=
|
||||
@@ -245,6 +268,7 @@ github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJ
|
||||
github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY=
|
||||
github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY=
|
||||
github.com/prometheus/common/sigv4 v0.1.0 h1:qoVebwtwwEhS85Czm2dSROY5fTo2PAPEVdDeppTwGX4=
|
||||
github.com/prometheus/common/sigv4 v0.1.0/go.mod h1:2Jkxxk9yYvCkE5G1sQT7GuEXm57JrvHu9k5YwTjsNtI=
|
||||
github.com/prometheus/procfs v0.11.0 h1:5EAgkfkMl659uZPbe9AS2N68a7Cc1TJbPEuGzFuRbyk=
|
||||
github.com/prometheus/procfs v0.11.0/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM=
|
||||
github.com/prometheus/prometheus v0.47.1 h1:bd2LiZyxzHn9Oo2Ei4eK2D86vz/L/OiqR1qYo0XmMBo=
|
||||
@@ -260,6 +284,7 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
|
||||
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
|
||||
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
|
||||
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
|
||||
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
|
||||
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
|
||||
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
|
||||
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
|
||||
@@ -307,6 +332,8 @@ github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZ
|
||||
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M=
|
||||
github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw=
|
||||
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
|
||||
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
|
||||
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
|
||||
@@ -318,6 +345,7 @@ go.uber.org/automaxprocs v1.4.0/go.mod h1:/mTEdr7LvHhs0v7mjdxDreTz1OG5zdZGqgOnhW
|
||||
go.uber.org/automaxprocs v1.5.2 h1:2LxUOGiR3O6tw8ui5sZa2LAaHnsviZdVOUZw4fvbnME=
|
||||
go.uber.org/automaxprocs v1.5.2/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0=
|
||||
go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
|
||||
go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4=
|
||||
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
|
||||
go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
|
||||
go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU=
|
||||
@@ -380,6 +408,7 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -402,8 +431,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
|
||||
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
|
||||
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
|
||||
2524
integrations/Doris/alerts/doris_by_categraf.json
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,22 @@
# doris_fe
[[instances]]
# fe metrics endpoint
urls = [
    "http://127.0.0.1:8030/metrics"
]

url_label_key = "instance"
url_label_value = "{{.Host}}"
# group and job labels for the fe service; the built-in dashboard variables rely on them, adjust as needed
labels = { group = "fe",job = "doris_cluster01"}

# doris_be
[[instances]]
# be metrics endpoint
urls = [
    "http://127.0.0.1:8040/metrics"
]
url_label_key = "instance"
url_label_value = "{{.Host}}"
# group and job labels for the be service; the built-in dashboard variables rely on them, adjust as needed
labels = { group = "be",job = "doris_cluster01"}

4698
integrations/Doris/dashboards/Doris_Overview.json
Normal file
File diff suppressed because it is too large
26
integrations/Doris/icon/doris.svg
Normal file
@@ -0,0 +1,26 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 27.6.1, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="图层_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 30 30" style="enable-background:new 0 0 30 30;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#00A5CA;}
|
||||
.st1{fill:#3ACA9B;}
|
||||
.st2{fill:#405AAD;}
|
||||
</style>
|
||||
<g>
|
||||
<g>
|
||||
<g>
|
||||
<path class="st0" d="M17.4,4.6l-3.3-3.3c-0.4-0.4-0.9-0.8-1.5-1C12.1,0.1,11.5,0,11,0C9.9,0,8.8,0.4,8,1.2C7.6,1.6,7.3,2,7.1,2.6
|
||||
c-0.3,0.5-0.4,1-0.4,1.6S6.8,5.3,7,5.9c0.2,0.5,0.5,1,0.9,1.4l5.9,5.9c0.1,0.1,0.3,0.2,0.5,0.2s0.3-0.1,0.5-0.2l2.6-2.6
|
||||
C17.6,10.5,20.2,7.4,17.4,4.6z"/>
|
||||
<path class="st1" d="M22.8,9.8c-0.6-0.6-1.3-1.2-1.9-1.9l0,0c0,0.1,0,0.1,0,0.2c-0.2,1.4-0.9,2.7-2,3.7
|
||||
c-3.4,3.4-6.9,6.9-10.3,10.3l-0.5,0.5c-0.7,0.6-1.2,1.5-1.3,2.4c-0.1,0.7,0,1.3,0.2,2c0.2,0.6,0.5,1.2,1,1.7
|
||||
c0.4,0.4,0.9,0.8,1.4,1c0.5,0.2,1.1,0.3,1.7,0.3c1.3,0,2-0.2,3-1.1c3.9-3.8,7.8-7.7,10.8-10.6c1.4-1.4,1.7-3.7,0.7-5.2
|
||||
C24.8,11.8,23.8,10.8,22.8,9.8z"/>
|
||||
<path class="st2" d="M3.8,7.8v14.5c0,0.2,0,0.3,0.1,0.4C4,22.8,4.1,22.9,4.3,23c0.1,0,0.3,0,0.5,0c0.2,0,0.3-0.1,0.4-0.2l7.3-7.3
|
||||
c0.1-0.1,0.2-0.3,0.2-0.5s-0.1-0.4-0.2-0.5L5.2,7.2C5.1,7.1,5,7.1,4.9,7C4.8,7,4.7,7,4.6,7C4.4,7,4.2,7.1,4,7.2
|
||||
C3.9,7.4,3.8,7.6,3.8,7.8z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
||||
|
39
integrations/Doris/markdown/README.md
Normal file
@@ -0,0 +1,39 @@
# Doris

Every Doris process exposes a `/metrics` endpoint that serves monitoring data in the Prometheus format.

## Collection config

Configure categraf's `conf/input.prometheus/prometheus.toml`. Since Doris already exposes Prometheus-format metrics, the categraf prometheus plugin is all that is needed to collect them.

```toml
# doris_fe
[[instances]]
urls = [
    "http://127.0.0.1:8030/metrics"
]

url_label_key = "instance"
url_label_value = "{{.Host}}"

labels = { group = "fe",job = "doris_cluster01"}

# doris_be
[[instances]]
urls = [
    "http://127.0.0.1:8040/metrics"
]
url_label_key = "instance"
url_label_value = "{{.Host}}"
labels = { group = "be",job = "doris_cluster01"}
```

## Alert rules

Nightingale ships built-in alert rules for Doris; clone them into your own business group to use them.

## Dashboards

Nightingale ships a built-in Doris dashboard; clone it into your own business group to use it.

@@ -459,5 +459,5 @@
		}
	]
},
"uuid": 1727587308068775200
"uuid": 1727587308068775000
}
@@ -1702,5 +1702,5 @@
	],
	"version": "3.0.0"
},
"uuid": 1727335102129685800
"uuid": 1727335102129685000
}

@@ -1,13 +1,7 @@
|
||||
{
|
||||
"id": 0,
|
||||
"group_id": 0,
|
||||
"name": "MongoDB Overview by exporter",
|
||||
"ident": "",
|
||||
"tags": "Prometheus MongoDB",
|
||||
"create_at": 0,
|
||||
"create_by": "",
|
||||
"update_at": 0,
|
||||
"update_by": "",
|
||||
"ident": "",
|
||||
"configs": {
|
||||
"panels": [
|
||||
{
|
||||
@@ -150,21 +144,7 @@
|
||||
"version": "2.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"description": "Memory usage (MiB)",
|
||||
"type": "timeseries",
|
||||
"id": "8446dded-9e11-4ee9-bdad-769b193ddf3e",
|
||||
"layout": {
|
||||
"h": 7,
|
||||
@@ -174,10 +154,37 @@
|
||||
"x": 12,
|
||||
"y": 1
|
||||
},
|
||||
"version": "3.0.0",
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"targets": [
|
||||
{
|
||||
"expr": "mongodb_ss_mem_resident{instance='$instance'} * 1024 * 1024",
|
||||
"legend": "{{type}}",
|
||||
"refId": "A",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"name": "Memory",
|
||||
"description": "Memory usage (MiB)",
|
||||
"maxPerRow": 4,
|
||||
"options": {
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "desc"
|
||||
},
|
||||
"legend": {
|
||||
"displayMode": "hidden"
|
||||
"displayMode": "hidden",
|
||||
"heightInPercentage": 30,
|
||||
"placement": "bottom",
|
||||
"behaviour": "showItem",
|
||||
"selectMode": "single"
|
||||
},
|
||||
"standardOptions": {
|
||||
"util": "bytesIEC"
|
||||
@@ -186,25 +193,36 @@
|
||||
"steps": [
|
||||
{
|
||||
"color": "#634CD9",
|
||||
"type": "base",
|
||||
"value": null
|
||||
"value": null,
|
||||
"type": "base"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "desc"
|
||||
}
|
||||
},
|
||||
"targets": [
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"lineInterpolation": "smooth",
|
||||
"spanNulls": false,
|
||||
"lineWidth": 2,
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"stack": "off",
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "none",
|
||||
"pointSize": 5
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"expr": "mongodb_ss_mem_resident{cluster='$cluster'} * 1024 * 1024",
|
||||
"legend": "{{type}}",
|
||||
"refId": "A"
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "2.0.0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
@@ -571,21 +589,7 @@
|
||||
"version": "2.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"description": "Number of document operations When used in combination with 'Command Operations', this graph can help identify write amplification. For example, when one insert or update command actually inserts or updates hundreds, thousands, or even millions of documents.",
|
||||
"type": "timeseries",
|
||||
"id": "7030d97a-d69f-4916-a415-ec57503ab1ed",
|
||||
"layout": {
|
||||
"h": 7,
|
||||
@@ -595,52 +599,76 @@
|
||||
"x": 12,
|
||||
"y": 16
|
||||
},
|
||||
"version": "3.0.0",
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(mongodb_ss_metrics_document{instance=\"$instance\"}[5m])",
|
||||
"legend": "",
|
||||
"refId": "A",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"name": "Document Operations",
|
||||
"description": "Number of document operations When used in combination with 'Command Operations', this graph can help identify write amplification. For example, when one insert or update command actually inserts or updates hundreds, thousands, or even millions of documents.",
|
||||
"maxPerRow": 4,
|
||||
"options": {
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
},
|
||||
"legend": {
|
||||
"displayMode": "hidden"
|
||||
"displayMode": "hidden",
|
||||
"heightInPercentage": 30,
|
||||
"placement": "bottom",
|
||||
"behaviour": "showItem",
|
||||
"selectMode": "single"
|
||||
},
|
||||
"standardOptions": {},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#634CD9",
|
||||
"type": "base",
|
||||
"value": null
|
||||
"value": null,
|
||||
"type": "base"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(mongodb_ss_metric_document{instance=\"$instance\"}[5m])",
|
||||
"legend": "",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "2.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"lineInterpolation": "smooth",
|
||||
"spanNulls": false,
|
||||
"lineWidth": 2,
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"stack": "off",
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
"showPoints": "none",
|
||||
"pointSize": 5
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"description": "Operation detail processing time (milliseconds)",
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "timeseries",
|
||||
"id": "1c3b73d5-c25c-449f-995d-26acc9c621e1",
|
||||
"layout": {
|
||||
"h": 7,
|
||||
@@ -650,10 +678,37 @@
|
||||
"x": 0,
|
||||
"y": 23
|
||||
},
|
||||
"version": "3.0.0",
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(mongodb_ss_opLatencies_latency{instance='$instance'}[5m]) / rate(mongodb_ss_opLatencies_latency{instance='$instance'}[5m]) / 1000",
|
||||
"legend": "{{op_type}}",
|
||||
"refId": "A",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"name": "Response Time",
|
||||
"description": "Operation detail processing time (milliseconds)",
|
||||
"maxPerRow": 4,
|
||||
"options": {
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
},
|
||||
"legend": {
|
||||
"displayMode": "hidden"
|
||||
"displayMode": "hidden",
|
||||
"heightInPercentage": 30,
|
||||
"placement": "bottom",
|
||||
"behaviour": "showItem",
|
||||
"selectMode": "single"
|
||||
},
|
||||
"standardOptions": {
|
||||
"util": "milliseconds"
|
||||
@@ -662,25 +717,36 @@
|
||||
"steps": [
|
||||
{
|
||||
"color": "#634CD9",
|
||||
"type": "base",
|
||||
"value": null
|
||||
"value": null,
|
||||
"type": "base"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"targets": [
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"lineInterpolation": "smooth",
|
||||
"spanNulls": false,
|
||||
"lineWidth": 2,
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"stack": "off",
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "none",
|
||||
"pointSize": 5
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"expr": "rate(mongodb_ss_opLatencies_latency{cluster='$cluster'}[5m]) / rate(mongodb_ss_opLatencies_latency{cluster='$cluster'}[5m]) / 1000",
|
||||
"legend": "{{op_type}}",
|
||||
"refId": "A"
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "2.0.0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
@@ -815,21 +881,7 @@
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"description": "cache size (byte)",
|
||||
"type": "timeseries",
|
||||
"id": "bb0ae571-43a1-430b-8f63-256f6f1ebee6",
|
||||
"layout": {
|
||||
"h": 7,
|
||||
@@ -839,10 +891,55 @@
|
||||
"x": 0,
|
||||
"y": 31
|
||||
},
|
||||
"version": "3.0.0",
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"targets": [
|
||||
{
|
||||
"expr": "mongodb_ss_wt_cache_bytes_currently_in_the_cache{instance='$instance'}",
|
||||
"legend": "total",
|
||||
"refId": "A",
|
||||
"maxDataPoints": 240
|
||||
},
|
||||
{
|
||||
"expr": "mongodb_ss_wt_cache_tracked_dirty_bytes_in_the_cache{cluster='$cluster'}",
|
||||
"legend": "dirty",
|
||||
"refId": "B",
|
||||
"maxDataPoints": 240
|
||||
},
|
||||
{
|
||||
"expr": "mongodb_ss_wt_cache_tracked_bytes_belonging_to_internal_pages_in_the_cache{cluster='$cluster'}",
|
||||
"legend": "internal_pages",
|
||||
"refId": "C",
|
||||
"maxDataPoints": 240
|
||||
},
|
||||
{
|
||||
"expr": "mongodb_ss_wt_cache_tracked_bytes_belonging_to_leaf_pages_in_the_cache{cluster='$cluster'}",
|
||||
"legend": "leaf_pages",
|
||||
"refId": "D",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"name": "Cache Size",
|
||||
"description": "cache size (byte)",
|
||||
"maxPerRow": 4,
|
||||
"options": {
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
},
|
||||
"legend": {
|
||||
"displayMode": "hidden"
|
||||
"displayMode": "hidden",
|
||||
"heightInPercentage": 30,
|
||||
"placement": "bottom",
|
||||
"behaviour": "showItem",
|
||||
"selectMode": "single"
|
||||
},
|
||||
"standardOptions": {
|
||||
"util": "bytesIEC"
|
||||
@@ -851,57 +948,39 @@
|
||||
"steps": [
|
||||
{
|
||||
"color": "#634CD9",
|
||||
"type": "base",
|
||||
"value": null
|
||||
"value": null,
|
||||
"type": "base"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"expr": "mongodb_ss_wt_cache_bytes_currently_in_the_cache{cluster='$cluster'}",
|
||||
"legend": "total",
|
||||
"refId": "A"
|
||||
},
|
||||
{
|
||||
"expr": "mongodb_ss_wt_cache_tracked_dirty_bytes_in_the_cache{cluster='$cluster'}",
|
||||
"legend": "dirty",
|
||||
"refId": "B"
|
||||
},
|
||||
{
|
||||
"expr": "mongodb_ss_wt_cache_tracked_bytes_belonging_to_internal_pages_in_the_cache{cluster='$cluster'}",
|
||||
"legend": "internal_pages",
|
||||
"refId": "C"
|
||||
},
|
||||
{
|
||||
"expr": "mongodb_ss_wt_cache_tracked_bytes_belonging_to_leaf_pages_in_the_cache{cluster='$cluster'}",
|
||||
"legend": "leaf_pages",
|
||||
"refId": "D"
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "2.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"lineInterpolation": "smooth",
|
||||
"spanNulls": false,
|
||||
"lineWidth": 2,
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"stack": "off",
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
"showPoints": "none",
|
||||
"pointSize": 5
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"description": "size of cached data written or read (in bytes)",
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "timeseries",
|
||||
"id": "f1ffd169-2a1a-42bc-9647-0e6621be0fef",
|
||||
"layout": {
|
||||
"h": 7,
|
||||
@@ -911,10 +990,43 @@
|
||||
"x": 6,
|
||||
"y": 31
|
||||
},
|
||||
"version": "3.0.0",
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(mongodb_ss_wt_cache_bytes_read_into_cache{instance='$instance'}[5m])",
|
||||
"legend": "read",
|
||||
"refId": "A",
|
||||
"maxDataPoints": 240
|
||||
},
|
||||
{
|
||||
"expr": "rate(mongodb_ss_wt_cache_bytes_written_from_cache{cluster='$cluster'}[5m])",
|
||||
"legend": "written",
|
||||
"refId": "B",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"name": "Cache I/O",
|
||||
"description": "size of cached data written or read (in bytes)",
|
||||
"maxPerRow": 4,
|
||||
"options": {
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
},
|
||||
"legend": {
|
||||
"displayMode": "hidden"
|
||||
"displayMode": "hidden",
|
||||
"heightInPercentage": 30,
|
||||
"placement": "bottom",
|
||||
"behaviour": "showItem",
|
||||
"selectMode": "single"
|
||||
},
|
||||
"standardOptions": {
|
||||
"util": "bytesSI"
|
||||
@@ -923,47 +1035,39 @@
|
||||
"steps": [
|
||||
{
|
||||
"color": "#634CD9",
|
||||
"type": "base",
|
||||
"value": null
|
||||
"value": null,
|
||||
"type": "base"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(mongodb_ss_wt_cache_bytes_read_into_cache{cluster='$cluster'}[5m])",
|
||||
"legend": "read",
|
||||
"refId": "A"
|
||||
},
|
||||
{
|
||||
"expr": "rate(mongodb_ss_wt_cache_bytes_written_from_cache{cluster='$cluster'}[5m])",
|
||||
"legend": "written",
|
||||
"refId": "B"
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "2.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"lineInterpolation": "smooth",
|
||||
"spanNulls": false,
|
||||
"lineWidth": 2,
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"stack": "off",
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
"showPoints": "none",
|
||||
"pointSize": 5
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"description": "",
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "timeseries",
|
||||
"id": "43ee140d-ae6d-474a-9892-fa4743d7f97e",
|
||||
"layout": {
|
||||
"h": 7,
|
||||
@@ -973,10 +1077,37 @@
|
||||
"x": 12,
|
||||
"y": 31
|
||||
},
|
||||
"version": "3.0.0",
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"targets": [
|
||||
{
|
||||
"expr": "100 * sum(mongodb_ss_wt_cache_tracked_dirty_pages_in_the_cache{instance='$instance'}) / sum(mongodb_ss_wt_cache_pages_currently_held_in_the_cache{instance='$instance'})",
|
||||
"legend": "dirty rate",
|
||||
"refId": "A",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"name": "Cache Dirty Pages Rate",
|
||||
"description": "",
|
||||
"maxPerRow": 4,
|
||||
"options": {
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
},
|
||||
"legend": {
|
||||
"displayMode": "hidden"
|
||||
"displayMode": "hidden",
|
||||
"heightInPercentage": 30,
|
||||
"placement": "bottom",
|
||||
"behaviour": "showItem",
|
||||
"selectMode": "single"
|
||||
},
|
||||
"standardOptions": {
|
||||
"util": "percent"
|
||||
@@ -985,42 +1116,39 @@
|
||||
"steps": [
|
||||
{
|
||||
"color": "#634CD9",
|
||||
"type": "base",
|
||||
"value": null
|
||||
"value": null,
|
||||
"type": "base"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"expr": "100 * sum(mongodb_ss_wt_cache_tracked_dirty_pages_in_the_cache{cluster='$cluster'}) / sum(mongodb_ss_wt_cache_pages_currently_held_in_the_cache{cluster='$cluster'})",
|
||||
"legend": "dirty rate",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "2.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"lineInterpolation": "smooth",
|
||||
"spanNulls": false,
|
||||
"lineWidth": 2,
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"stack": "off",
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
"showPoints": "none",
|
||||
"pointSize": 5
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"description": "",
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "timeseries",
|
||||
"id": "1a22c31a-859a-400c-af2a-ae83c308d0f2",
|
||||
"layout": {
|
||||
"h": 7,
|
||||
@@ -1030,35 +1158,73 @@
|
||||
"x": 18,
|
||||
"y": 31
|
||||
},
|
||||
"version": "3.0.0",
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(mongodb_mongod_wiredtiger_cache_evicted_total{instance='$instance'}[5m])",
|
||||
"legend": "evicted pages",
|
||||
"refId": "A",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"name": "Cache Evicted Pages",
|
||||
"description": "",
|
||||
"maxPerRow": 4,
|
||||
"options": {
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
},
|
||||
"legend": {
|
||||
"displayMode": "hidden"
|
||||
"displayMode": "hidden",
|
||||
"heightInPercentage": 30,
|
||||
"placement": "bottom",
|
||||
"behaviour": "showItem",
|
||||
"selectMode": "single"
|
||||
},
|
||||
"standardOptions": {},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#634CD9",
|
||||
"type": "base",
|
||||
"value": null
|
||||
"value": null,
|
||||
"type": "base"
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"targets": [
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"lineInterpolation": "smooth",
|
||||
"spanNulls": false,
|
||||
"lineWidth": 2,
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"stack": "off",
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "none",
|
||||
"pointSize": 5
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"expr": "rate(mongodb_mongod_wiredtiger_cache_evicted_total{cluster='$cluster'}[5m])",
|
||||
"legend": "evicted pages",
|
||||
"refId": "A"
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "2.0.0"
|
||||
]
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
@@ -1204,10 +1370,5 @@
|
||||
],
|
||||
"version": "3.0.0"
|
||||
},
|
||||
"public": 0,
|
||||
"public_cate": 0,
|
||||
"bgids": null,
|
||||
"built_in": 0,
|
||||
"hide": 0,
|
||||
"uuid": 1717556328065329000
|
||||
}
|
||||
}
|
||||
|
||||
1805
integrations/MySQL/dashboards/MySQL-by-address.json
Normal file
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"name": "MySQL 仪表盘(远端)",
|
||||
"name": "MySQL 仪表盘(使用 instance 筛选,需要采集时自行打上 instance 标签)",
|
||||
"tags": "",
|
||||
"ident": "",
|
||||
"configs": {
|
||||
@@ -1802,5 +1802,5 @@
|
||||
],
|
||||
"version": "3.0.0"
|
||||
},
|
||||
"uuid": 1717556328087994322
|
||||
"uuid": 1717556328087995000
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"name": "MySQL 仪表盘",
|
||||
"name": "MySQL 仪表盘,适用于 Categraf 采集本机 MySQL 的场景",
|
||||
"tags": "",
|
||||
"ident": "",
|
||||
"configs": {
|
||||
@@ -1798,5 +1798,5 @@
|
||||
],
|
||||
"version": "3.0.0"
|
||||
},
|
||||
"uuid": 1717556328087994321
|
||||
"uuid": 1717556328087994000
|
||||
}
|
||||
@@ -139,3 +139,11 @@ timeout = "3s"
request = '''
select METRIC_NAME,VALUE from v$sysmetric where group_id=2
'''

[[metrics]]
mesurement = "applylag"
metric_fields = [ "value" ]
timeout = "3s"
request = '''
SELECT TO_NUMBER(EXTRACT(SECOND FROM TO_DSINTERVAL (value))) as value FROM v$dataguard_stats WHERE name = 'apply lag'
'''

925
integrations/Redis/dashboards/FilterByAddress.json
Normal file
@@ -0,0 +1,925 @@
|
||||
{
|
||||
"name": "Redis by address",
|
||||
"tags": "Redis Categraf",
|
||||
"configs": {
|
||||
"panels": [
|
||||
{
|
||||
"collapsed": true,
|
||||
"id": "2ecb82c6-4d1a-41b5-8cdc-0284db16bd54",
|
||||
"layout": {
|
||||
"h": 1,
|
||||
"i": "2ecb82c6-4d1a-41b5-8cdc-0284db16bd54",
|
||||
"isResizable": false,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"name": "Basic Info",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"alignItems": "center",
|
||||
"bgColor": "rgba(0, 0, 0, 0)",
|
||||
"content": "<img src=\"https://download.flashcat.cloud/ulric/redis.png\" width=128 />",
|
||||
"justifyContent": "center",
|
||||
"textColor": "#000000",
|
||||
"textDarkColor": "#FFFFFF",
|
||||
"textSize": 12
|
||||
},
|
||||
"id": "b5acc352-a2bd-4afc-b6cd-d6db0905f807",
|
||||
"layout": {
|
||||
"h": 3,
|
||||
"i": "b5acc352-a2bd-4afc-b6cd-d6db0905f807",
|
||||
"isResizable": true,
|
||||
"w": 4,
|
||||
"x": 0,
|
||||
"y": 1
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "",
|
||||
"type": "text",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"calc": "lastNotNull",
|
||||
"colSpan": 0,
|
||||
"colorMode": "background",
|
||||
"graphMode": "none",
|
||||
"orientation": "vertical",
|
||||
"textMode": "valueAndName",
|
||||
"textSize": {},
|
||||
"valueField": "Value"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "5eb6fbcf-4260-40d0-ad6a-540e54a1f922",
|
||||
"layout": {
|
||||
"h": 3,
|
||||
"i": "2a02e1d4-2ed3-4bd2-9fa0-69bb10f13888",
|
||||
"isResizable": true,
|
||||
"w": 5,
|
||||
"x": 4,
|
||||
"y": 1
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Redis Uptime",
|
||||
"options": {
|
||||
"standardOptions": {
|
||||
"decimals": 2,
|
||||
"util": "seconds"
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "rgba(63, 196, 83, 1)",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"valueMappings": [
|
||||
{
|
||||
"match": {
|
||||
"to": 600
|
||||
},
|
||||
"result": {
|
||||
"color": "rgba(255, 101, 107, 1)"
|
||||
},
|
||||
"type": "range"
|
||||
},
|
||||
{
|
||||
"match": {
|
||||
"from": 600
|
||||
},
|
||||
"result": {
|
||||
"color": "rgba(63, 196, 83, 1)"
|
||||
},
|
||||
"type": "range"
|
||||
}
|
||||
]
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"expr": "redis_uptime_in_seconds{address=~\"$address\"}",
|
||||
"legend": "{{address}}",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "stat",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"calc": "lastNotNull",
|
||||
"colSpan": 0,
|
||||
"colorMode": "background",
|
||||
"graphMode": "none",
|
||||
"orientation": "vertical",
|
||||
"textMode": "valueAndName",
|
||||
"textSize": {},
|
||||
"valueField": "Value"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "8ccada5e-02f3-4efc-9b36-2a367612e4cb",
|
||||
"layout": {
|
||||
"h": 3,
|
||||
"i": "8ccada5e-02f3-4efc-9b36-2a367612e4cb",
|
||||
"isResizable": true,
|
||||
"w": 5,
|
||||
"x": 9,
|
||||
"y": 1
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Connected Clients",
|
||||
"options": {
|
||||
"standardOptions": {},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#6C53B1",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"valueMappings": [
|
||||
{
|
||||
"match": {
|
||||
"to": 500
|
||||
},
|
||||
"result": {
|
||||
"color": "rgba(63, 196, 83, 1)"
|
||||
},
|
||||
"type": "range"
|
||||
},
|
||||
{
|
||||
"match": {
|
||||
"from": 500
|
||||
},
|
||||
"result": {
|
||||
"color": "rgba(255, 101, 107, 1)"
|
||||
},
|
||||
"type": "range"
|
||||
}
|
||||
]
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"expr": "redis_connected_clients{address=~\"$address\"}",
|
||||
"legend": "{{address}}",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "stat",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"calc": "lastNotNull",
|
||||
"colSpan": 0,
|
||||
"colorMode": "background",
|
||||
"graphMode": "none",
|
||||
"orientation": "vertical",
|
||||
"textMode": "valueAndName",
|
||||
"textSize": {},
|
||||
"valueField": "Value"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "716dc7e7-c9ec-4195-93f6-db1c572ae8b0",
|
||||
"layout": {
|
||||
"h": 3,
|
||||
"i": "716dc7e7-c9ec-4195-93f6-db1c572ae8b0",
|
||||
"isResizable": true,
|
||||
"w": 5,
|
||||
"x": 14,
|
||||
"y": 1
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Memory Used",
|
||||
"options": {
|
||||
"standardOptions": {
|
||||
"decimals": 1,
|
||||
"util": "bytesIEC"
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#6C53B1",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"valueMappings": [
|
||||
{
|
||||
"match": {
|
||||
"to": 128000000
|
||||
},
|
||||
"result": {
|
||||
"color": "#079e05"
|
||||
},
|
||||
"type": "range"
|
||||
},
|
||||
{
|
||||
"match": {
|
||||
"from": 128000000
|
||||
},
|
||||
"result": {
|
||||
"color": "#f10909"
|
||||
},
|
||||
"type": "range"
|
||||
}
|
||||
]
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"expr": "redis_used_memory{address=~\"$address\"}",
|
||||
"legend": "{{address}}",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "stat",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"calc": "lastNotNull",
|
||||
"colSpan": 0,
|
||||
"colorMode": "background",
|
||||
"graphMode": "none",
|
||||
"orientation": "vertical",
|
||||
"textMode": "valueAndName",
|
||||
"textSize": {},
|
||||
"valueField": "Value"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "c6948161-db07-42df-beb1-765ee9c071a9",
|
||||
"layout": {
|
||||
"h": 3,
|
||||
"i": "c6948161-db07-42df-beb1-765ee9c071a9",
|
||||
"isResizable": true,
|
||||
"w": 5,
|
||||
"x": 19,
|
||||
"y": 1
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Max Memory Limit",
|
||||
"options": {
|
||||
"standardOptions": {
|
||||
"decimals": 1,
|
||||
"util": "bytesIEC"
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "rgba(63, 196, 83, 1)",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"expr": "redis_maxmemory{address=~\"$address\"}",
|
||||
"legend": "{{address}}",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "stat",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"id": "bd54cf4f-1abb-4945-8aab-f89aec16daef",
|
||||
"layout": {
|
||||
"h": 1,
|
||||
"i": "bd54cf4f-1abb-4945-8aab-f89aec16daef",
|
||||
"isResizable": false,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 4
|
||||
},
|
||||
"name": "Commands",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "3d5f8c4e-0ddf-4d68-9f6d-2cc57d864a8e",
|
||||
"layout": {
|
||||
"h": 5,
|
||||
"i": "3d5f8c4e-0ddf-4d68-9f6d-2cc57d864a8e",
|
||||
"isResizable": true,
|
||||
"w": 8,
|
||||
"x": 0,
|
||||
"y": 5
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Commands Executed / sec",
|
||||
"options": {
|
||||
"legend": {
|
||||
"behaviour": "showItem",
|
||||
"displayMode": "hidden"
|
||||
},
|
||||
"standardOptions": {
|
||||
"decimals": 2
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#6C53B1",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"targets": [
|
||||
{
|
||||
"expr": "rate(redis_total_commands_processed{address=~\"$address\"}[5m])",
|
||||
"legend": "{{address}}",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "noraml"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "344a874d-c34d-4d2d-9bb4-46e0912cd9f5",
|
||||
"layout": {
|
||||
"h": 5,
|
||||
"i": "344a874d-c34d-4d2d-9bb4-46e0912cd9f5",
|
||||
"isResizable": true,
|
||||
"w": 8,
|
||||
"x": 8,
|
||||
"y": 5
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Hits / Misses per Sec",
|
||||
"options": {
|
||||
"legend": {
|
||||
"behaviour": "showItem",
|
||||
"displayMode": "hidden"
|
||||
},
|
||||
"standardOptions": {
|
||||
"decimals": 2
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#6C53B1",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"targets": [
|
||||
{
|
||||
"expr": "irate(redis_keyspace_hits{address=~\"$address\"}[5m])",
|
||||
"legend": "{{address}} hits",
|
||||
"maxDataPoints": 240
|
||||
},
|
||||
{
|
||||
"expr": "irate(redis_keyspace_misses{address=~\"$address\"}[5m])",
|
||||
"legend": "{{address}} misses",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "3c83cd35-585c-4070-a210-1f17345f13f4",
|
||||
"layout": {
|
||||
"h": 5,
|
||||
"i": "3c83cd35-585c-4070-a210-1f17345f13f4",
|
||||
"isResizable": true,
|
||||
"w": 8,
|
||||
"x": 16,
|
||||
"y": 5
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Top Commands",
|
||||
"options": {
|
||||
"legend": {
|
||||
"behaviour": "showItem",
|
||||
"displayMode": "hidden"
|
||||
},
|
||||
"standardOptions": {
|
||||
"decimals": 2
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#6C53B1",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "desc"
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"targets": [
|
||||
{
|
||||
"expr": "topk(5, irate(redis_cmdstat_calls{address=~\"$address\"}[1m]))",
|
||||
"legend": "{{address}} {{command}}",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"id": "1ea61073-a46d-4d7c-b072-fcdcbc5ac084",
|
||||
"layout": {
|
||||
"h": 1,
|
||||
"i": "1ea61073-a46d-4d7c-b072-fcdcbc5ac084",
|
||||
"isResizable": false,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 10
|
||||
},
|
||||
"name": "Keys",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "b2b4451c-4f8a-438a-8c48-69c95c68361e",
|
||||
"layout": {
|
||||
"h": 5,
|
||||
"i": "b2b4451c-4f8a-438a-8c48-69c95c68361e",
|
||||
"isResizable": true,
|
||||
"w": 8,
|
||||
"x": 0,
|
||||
"y": 11
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Total Items per DB",
|
||||
"options": {
|
||||
"legend": {
|
||||
"behaviour": "showItem",
|
||||
"displayMode": "hidden"
|
||||
},
|
||||
"standardOptions": {
|
||||
"decimals": 2
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#6C53B1",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "desc"
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"targets": [
|
||||
{
|
||||
"expr": "sum(redis_keyspace_keys{address=~\"$address\"}) by (address, db)",
|
||||
"legend": "{{address}} {{db}}",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "894b9beb-e764-441c-ae04-13e5dbbb901d",
|
||||
"layout": {
|
||||
"h": 5,
|
||||
"i": "894b9beb-e764-441c-ae04-13e5dbbb901d",
|
||||
"isResizable": true,
|
||||
"w": 8,
|
||||
"x": 8,
|
||||
"y": 11
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Expired / Evicted",
|
||||
"options": {
|
||||
"legend": {
|
||||
"behaviour": "showItem",
|
||||
"displayMode": "hidden"
|
||||
},
|
||||
"standardOptions": {
|
||||
"decimals": 2
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#6C53B1",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "desc"
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"targets": [
|
||||
{
|
||||
"expr": "sum(rate(redis_expired_keys{address=~\"$address\"}[5m])) by (address)",
|
||||
"legend": "{{address}} expired",
|
||||
"maxDataPoints": 240
|
||||
},
|
||||
{
|
||||
"expr": "sum(rate(redis_evicted_keys{address=~\"$address\"}[5m])) by (address)",
|
||||
"legend": "{{address}} evicted",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "noraml"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "f721a641-28c7-4e82-a37c-ec17704a0c57",
|
||||
"layout": {
|
||||
"h": 5,
|
||||
"i": "f721a641-28c7-4e82-a37c-ec17704a0c57",
|
||||
"isResizable": true,
|
||||
"w": 8,
|
||||
"x": 16,
|
||||
"y": 11
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Expiring vs Not-Expiring Keys",
|
||||
"options": {
|
||||
"legend": {
|
||||
"behaviour": "showItem",
|
||||
"displayMode": "hidden"
|
||||
},
|
||||
"standardOptions": {
|
||||
"decimals": 2
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#6C53B1",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"targets": [
|
||||
{
|
||||
"expr": "sum(redis_keyspace_keys{address=~\"$address\"}) - sum(redis_keyspace_expires{address=~\"$address\"}) ",
|
||||
"legend": "{{address}} not expiring",
|
||||
"maxDataPoints": 240
|
||||
},
|
||||
{
|
||||
"expr": "sum(redis_keyspace_expires{address=~\"$address\"}) ",
|
||||
"legend": "{{address}} expiring",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "3.0.0"
|
||||
},
|
||||
{
|
||||
"collapsed": true,
|
||||
"id": "60ff41ed-9d41-40ee-a13b-c968f3ca49d0",
|
||||
"layout": {
|
||||
"h": 1,
|
||||
"i": "60ff41ed-9d41-40ee-a13b-c968f3ca49d0",
|
||||
"isResizable": false,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 16
|
||||
},
|
||||
"name": "Network",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"custom": {
|
||||
"drawStyle": "lines",
|
||||
"fillOpacity": 0.3,
|
||||
"gradientMode": "opacity",
|
||||
"lineInterpolation": "smooth",
|
||||
"lineWidth": 2,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"spanNulls": false,
|
||||
"stack": "off"
|
||||
},
|
||||
"datasourceCate": "prometheus",
|
||||
"datasourceValue": "${prom}",
|
||||
"id": "1841950c-e867-4a62-b846-78754dc0e34d",
|
||||
"layout": {
|
||||
"h": 7,
|
||||
"i": "1841950c-e867-4a62-b846-78754dc0e34d",
|
||||
"isResizable": true,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 17
|
||||
},
|
||||
"maxPerRow": 4,
|
||||
"name": "Network I/O",
|
||||
"options": {
|
||||
"legend": {
|
||||
"behaviour": "showItem",
|
||||
"displayMode": "hidden"
|
||||
},
|
||||
"standardOptions": {
|
||||
"decimals": 2,
|
||||
"util": "bytesIEC"
|
||||
},
|
||||
"thresholds": {
|
||||
"steps": [
|
||||
{
|
||||
"color": "#6C53B1",
|
||||
"type": "base",
|
||||
"value": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "all",
|
||||
"sort": "desc"
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byFrameRefID"
|
||||
},
|
||||
"properties": {
|
||||
"rightYAxisDisplay": "off"
|
||||
}
|
||||
}
|
||||
],
|
||||
"targets": [
|
||||
{
|
||||
"expr": "sum(rate(redis_total_net_input_bytes{address=~\"$address\"}[5m]))",
|
||||
"legend": "input",
|
||||
"maxDataPoints": 240
|
||||
},
|
||||
{
|
||||
"expr": "sum(rate(redis_total_net_output_bytes{address=~\"$address\"}[5m]))",
|
||||
"legend": "output",
|
||||
"maxDataPoints": 240
|
||||
}
|
||||
],
|
||||
"transformations": [
|
||||
{
|
||||
"id": "organize",
|
||||
"options": {}
|
||||
}
|
||||
],
|
||||
"type": "timeseries",
|
||||
"version": "3.0.0"
|
||||
}
|
||||
],
|
||||
"var": [
|
||||
{
|
||||
"definition": "prometheus",
|
||||
"name": "prom",
|
||||
"type": "datasource"
|
||||
},
|
||||
{
|
||||
"allOption": true,
|
||||
"datasource": {
|
||||
"cate": "prometheus",
|
||||
"value": "${prom}"
|
||||
},
|
||||
"definition": "label_values(redis_uptime_in_seconds,address)",
|
||||
"hide": false,
|
||||
"multi": true,
|
||||
"name": "address",
|
||||
"type": "query"
|
||||
}
|
||||
],
|
||||
"version": "3.0.0"
|
||||
},
|
||||
"uuid": 1732008163114399
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "Redis Overview - categraf",
|
||||
"tags": "Redis Prometheus",
|
||||
"name": "Redis by instance",
|
||||
"tags": "Redis Categraf",
|
||||
"ident": "",
|
||||
"configs": {
|
||||
"panels": [
|
||||
|
||||
@@ -124,3 +124,14 @@ func (c *BusiGroupCacheType) syncBusiGroups() error {

	return nil
}

func (c *BusiGroupCacheType) GetNameByBusiGroupId(id int64) string {
	c.RLock()
	defer c.RUnlock()

	busiGroup := c.ugs[id]
	if busiGroup == nil {
		return ""
	}
	return busiGroup.Name
}

@@ -23,7 +23,9 @@ type DatasourceCacheType struct {
	DatasourceFilter func([]*models.Datasource, *models.User) []*models.Datasource

	sync.RWMutex
	ds          map[int64]*models.Datasource // key: id
	ds          map[int64]*models.Datasource            // key: id value: datasource
	CateToIDs   map[string]map[int64]*models.Datasource // key1: cate key2: id value: datasource
	CateToNames map[string]map[string]int64             // key1: cate key2: name value: id
}

func NewDatasourceCache(ctx *ctx.Context, stats *Stats) *DatasourceCacheType {
@@ -33,6 +35,8 @@ func NewDatasourceCache(ctx *ctx.Context, stats *Stats) *DatasourceCacheType {
|
||||
ctx: ctx,
|
||||
stats: stats,
|
||||
ds: make(map[int64]*models.Datasource),
|
||||
CateToIDs: make(map[string]map[int64]*models.Datasource),
|
||||
CateToNames: make(map[string]map[string]int64),
|
||||
DatasourceCheckHook: func(ctx *gin.Context) bool { return false },
|
||||
DatasourceFilter: func(ds []*models.Datasource, user *models.User) []*models.Datasource { return ds },
|
||||
}
|
||||
@@ -40,6 +44,12 @@ func NewDatasourceCache(ctx *ctx.Context, stats *Stats) *DatasourceCacheType {
|
||||
return ds
|
||||
}
|
||||
|
||||
func (d *DatasourceCacheType) GetIDsByDsCateAndQueries(cate string, datasourceQueries []models.DatasourceQuery) []int64 {
|
||||
d.Lock()
|
||||
defer d.Unlock()
|
||||
return models.GetDatasourceIDsByDatasourceQueries(datasourceQueries, d.CateToIDs[cate], d.CateToNames[cate])
|
||||
}
|
||||
|
||||
func (d *DatasourceCacheType) StatChanged(total, lastUpdated int64) bool {
|
||||
if d.statTotal == total && d.statLastUpdated == lastUpdated {
|
||||
return false
|
||||
@@ -49,8 +59,22 @@ func (d *DatasourceCacheType) StatChanged(total, lastUpdated int64) bool {
|
||||
}
|
||||
|
||||
func (d *DatasourceCacheType) Set(ds map[int64]*models.Datasource, total, lastUpdated int64) {
|
||||
cateToDs := make(map[string]map[int64]*models.Datasource)
|
||||
cateToNames := make(map[string]map[string]int64)
|
||||
for _, datasource := range ds {
|
||||
if _, exists := cateToDs[datasource.PluginType]; !exists {
|
||||
cateToDs[datasource.PluginType] = make(map[int64]*models.Datasource)
|
||||
}
|
||||
cateToDs[datasource.PluginType][datasource.Id] = datasource
|
||||
if _, exists := cateToNames[datasource.PluginType]; !exists {
|
||||
cateToNames[datasource.PluginType] = make(map[string]int64)
|
||||
}
|
||||
cateToNames[datasource.PluginType][datasource.Name] = datasource.Id
|
||||
}
|
||||
d.Lock()
|
||||
d.CateToIDs = cateToDs
|
||||
d.ds = ds
|
||||
d.CateToNames = cateToNames
|
||||
d.Unlock()
|
||||
|
||||
// only one goroutine used, so no need lock
|
||||
@@ -99,20 +123,20 @@ func (d *DatasourceCacheType) syncDatasources() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
m, err := models.DatasourceGetMap(d.ctx)
|
||||
ds, err := models.DatasourceGetMap(d.ctx)
|
||||
if err != nil {
|
||||
dumper.PutSyncRecord("datasources", start.Unix(), -1, -1, "failed to query records: "+err.Error())
|
||||
return errors.WithMessage(err, "failed to call DatasourceGetMap")
|
||||
}
|
||||
|
||||
d.Set(m, stat.Total, stat.LastUpdated)
|
||||
d.Set(ds, stat.Total, stat.LastUpdated)
|
||||
|
||||
ms := time.Since(start).Milliseconds()
|
||||
d.stats.GaugeCronDuration.WithLabelValues("sync_datasources").Set(float64(ms))
|
||||
d.stats.GaugeSyncNumber.WithLabelValues("sync_datasources").Set(float64(len(m)))
|
||||
d.stats.GaugeSyncNumber.WithLabelValues("sync_datasources").Set(float64(len(ds)))
|
||||
|
||||
logger.Infof("timer: sync datasources done, cost: %dms, number: %d", ms, len(m))
|
||||
dumper.PutSyncRecord("datasources", start.Unix(), ms, len(m), "success")
|
||||
logger.Infof("timer: sync datasources done, cost: %dms, number: %d", ms, len(ds))
|
||||
dumper.PutSyncRecord("datasources", start.Unix(), ms, len(ds), "success")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
package memsto
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
@@ -19,7 +21,7 @@ import (
|
||||
type NotifyConfigCacheType struct {
|
||||
ctx *ctx.Context
|
||||
ConfigCache *ConfigCache
|
||||
webhooks []*models.Webhook
|
||||
webhooks map[string]*models.Webhook
|
||||
smtp aconf.SMTPConfig
|
||||
script models.NotifyScript
|
||||
|
||||
@@ -47,6 +49,7 @@ func NewNotifyConfigCache(ctx *ctx.Context, configCache *ConfigCache) *NotifyCon
|
||||
w := &NotifyConfigCacheType{
|
||||
ctx: ctx,
|
||||
ConfigCache: configCache,
|
||||
webhooks: make(map[string]*models.Webhook),
|
||||
}
|
||||
w.SyncNotifyConfigs()
|
||||
return w
|
||||
@@ -85,11 +88,55 @@ func (w *NotifyConfigCacheType) syncNotifyConfigs() error {
|
||||
}
|
||||
|
||||
if strings.TrimSpace(cval) != "" {
|
||||
err = json.Unmarshal([]byte(cval), &w.webhooks)
|
||||
var webhooks []*models.Webhook
|
||||
err = json.Unmarshal([]byte(cval), &webhooks)
|
||||
if err != nil {
|
||||
dumper.PutSyncRecord("webhooks", start.Unix(), -1, -1, "failed to unmarshal configs.webhook: "+err.Error())
|
||||
logger.Errorf("failed to unmarshal webhooks:%s error:%v", cval, err)
|
||||
}
|
||||
|
||||
newWebhooks := make(map[string]*models.Webhook, len(webhooks))
|
||||
for i := 0; i < len(webhooks); i++ {
|
||||
if webhooks[i].Batch == 0 {
|
||||
webhooks[i].Batch = 1000
|
||||
}
|
||||
|
||||
if webhooks[i].Timeout == 0 {
|
||||
webhooks[i].Timeout = 10
|
||||
}
|
||||
|
||||
if webhooks[i].RetryCount == 0 {
|
||||
webhooks[i].RetryCount = 10
|
||||
}
|
||||
|
||||
if webhooks[i].RetryInterval == 0 {
|
||||
webhooks[i].RetryInterval = 10
|
||||
}
|
||||
|
||||
if webhooks[i].Client == nil {
|
||||
webhooks[i].Client = &http.Client{
|
||||
Timeout: time.Second * time.Duration(webhooks[i].Timeout),
|
||||
Transport: &http.Transport{
|
||||
TLSClientConfig: &tls.Config{InsecureSkipVerify: webhooks[i].SkipVerify},
|
||||
},
|
||||
}
|
||||
}
|
||||
newWebhooks[webhooks[i].Url] = webhooks[i]
|
||||
}
|
||||
|
||||
for url, wh := range newWebhooks {
|
||||
if oldWh, has := w.webhooks[url]; has && oldWh.Hash() != wh.Hash() {
|
||||
w.webhooks[url] = wh
|
||||
} else {
|
||||
w.webhooks[url] = wh
|
||||
}
|
||||
}
|
||||
|
||||
for url := range w.webhooks {
|
||||
if _, has := newWebhooks[url]; !has {
|
||||
delete(w.webhooks, url)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dumper.PutSyncRecord("webhooks", start.Unix(), time.Since(start).Milliseconds(), len(w.webhooks), "success, webhooks:\n"+cval)
|
||||
@@ -133,7 +180,7 @@ func (w *NotifyConfigCacheType) syncNotifyConfigs() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (w *NotifyConfigCacheType) GetWebhooks() []*models.Webhook {
|
||||
func (w *NotifyConfigCacheType) GetWebhooks() map[string]*models.Webhook {
|
||||
w.RWMutex.RLock()
|
||||
defer w.RWMutex.RUnlock()
|
||||
return w.webhooks
|
||||
|
||||
@@ -12,67 +12,73 @@ import (
|
||||
"github.com/ccfos/nightingale/v6/pkg/ctx"
|
||||
"github.com/ccfos/nightingale/v6/pkg/poster"
|
||||
"github.com/ccfos/nightingale/v6/pkg/tplx"
|
||||
"github.com/ccfos/nightingale/v6/pkg/unit"
|
||||
|
||||
"github.com/toolkits/pkg/logger"
|
||||
)
|
||||
|
||||
type AlertCurEvent struct {
|
||||
Id int64 `json:"id" gorm:"primaryKey"`
|
||||
Cate string `json:"cate"`
|
||||
Cluster string `json:"cluster"`
|
||||
DatasourceId int64 `json:"datasource_id"`
|
||||
GroupId int64 `json:"group_id"` // busi group id
|
||||
GroupName string `json:"group_name"` // busi group name
|
||||
Hash string `json:"hash"` // rule_id + vector_key
|
||||
RuleId int64 `json:"rule_id"`
|
||||
RuleName string `json:"rule_name"`
|
||||
RuleNote string `json:"rule_note"`
|
||||
RuleProd string `json:"rule_prod"`
|
||||
RuleAlgo string `json:"rule_algo"`
|
||||
Severity int `json:"severity"`
|
||||
PromForDuration int `json:"prom_for_duration"`
|
||||
PromQl string `json:"prom_ql"`
|
||||
RuleConfig string `json:"-" gorm:"rule_config"` // rule config
|
||||
RuleConfigJson interface{} `json:"rule_config" gorm:"-"` // rule config for fe
|
||||
PromEvalInterval int `json:"prom_eval_interval"`
|
||||
Callbacks string `json:"-"` // for db
|
||||
CallbacksJSON []string `json:"callbacks" gorm:"-"` // for fe
|
||||
RunbookUrl string `json:"runbook_url"`
|
||||
NotifyRecovered int `json:"notify_recovered"`
|
||||
NotifyChannels string `json:"-"` // for db
|
||||
NotifyChannelsJSON []string `json:"notify_channels" gorm:"-"` // for fe
|
||||
NotifyGroups string `json:"-"` // for db
|
||||
NotifyGroupsJSON []string `json:"notify_groups" gorm:"-"` // for fe
|
||||
NotifyGroupsObj []*UserGroup `json:"notify_groups_obj" gorm:"-"` // for fe
|
||||
TargetIdent string `json:"target_ident"`
|
||||
TargetNote string `json:"target_note"`
|
||||
TriggerTime int64 `json:"trigger_time"`
|
||||
TriggerValue string `json:"trigger_value"`
|
||||
TriggerValues string `json:"trigger_values" gorm:"-"`
|
||||
Tags string `json:"-"` // for db
|
||||
TagsJSON []string `json:"tags" gorm:"-"` // for fe
|
||||
TagsMap map[string]string `json:"tags_map" gorm:"-"` // for internal usage
|
||||
OriginalTags string `json:"-"` // for db
|
||||
OriginalTagsJSON []string `json:"original_tags" gorm:"-"` // for fe
|
||||
Annotations string `json:"-"` //
|
||||
AnnotationsJSON map[string]string `json:"annotations" gorm:"-"` // for fe
|
||||
IsRecovered bool `json:"is_recovered" gorm:"-"` // for notify.py
|
||||
NotifyUsersObj []*User `json:"notify_users_obj" gorm:"-"` // for notify.py
|
||||
LastEvalTime int64 `json:"last_eval_time" gorm:"-"` // for notify.py 上次计算的时间
|
||||
LastSentTime int64 `json:"last_sent_time" gorm:"-"` // 上次发送时间
|
||||
NotifyCurNumber int `json:"notify_cur_number"` // notify: current number
|
||||
FirstTriggerTime int64 `json:"first_trigger_time"` // 连续告警的首次告警时间
|
||||
ExtraConfig interface{} `json:"extra_config" gorm:"-"`
|
||||
Status int `json:"status" gorm:"-"`
|
||||
Claimant string `json:"claimant" gorm:"-"`
|
||||
SubRuleId int64 `json:"sub_rule_id" gorm:"-"`
|
||||
ExtraInfo []string `json:"extra_info" gorm:"-"`
|
||||
Target *Target `json:"target" gorm:"-"`
|
||||
RecoverConfig RecoverConfig `json:"recover_config" gorm:"-"`
|
||||
RuleHash string `json:"rule_hash" gorm:"-"`
|
||||
Id int64 `json:"id" gorm:"primaryKey"`
|
||||
Cate string `json:"cate"`
|
||||
Cluster string `json:"cluster"`
|
||||
DatasourceId int64 `json:"datasource_id"`
|
||||
GroupId int64 `json:"group_id"` // busi group id
|
||||
GroupName string `json:"group_name"` // busi group name
|
||||
Hash string `json:"hash"` // rule_id + vector_key
|
||||
RuleId int64 `json:"rule_id"`
|
||||
RuleName string `json:"rule_name"`
|
||||
RuleNote string `json:"rule_note"`
|
||||
RuleProd string `json:"rule_prod"`
|
||||
RuleAlgo string `json:"rule_algo"`
|
||||
Severity int `json:"severity"`
|
||||
PromForDuration int `json:"prom_for_duration"`
|
||||
PromQl string `json:"prom_ql"`
|
||||
RuleConfig string `json:"-" gorm:"rule_config"` // rule config
|
||||
RuleConfigJson interface{} `json:"rule_config" gorm:"-"` // rule config for fe
|
||||
PromEvalInterval int `json:"prom_eval_interval"`
|
||||
Callbacks string `json:"-"` // for db
|
||||
CallbacksJSON []string `json:"callbacks" gorm:"-"` // for fe
|
||||
RunbookUrl string `json:"runbook_url"`
|
||||
NotifyRecovered int `json:"notify_recovered"`
|
||||
NotifyChannels string `json:"-"` // for db
|
||||
NotifyChannelsJSON []string `json:"notify_channels" gorm:"-"` // for fe
|
||||
NotifyGroups string `json:"-"` // for db
|
||||
NotifyGroupsJSON []string `json:"notify_groups" gorm:"-"` // for fe
|
||||
NotifyGroupsObj []*UserGroup `json:"notify_groups_obj" gorm:"-"` // for fe
|
||||
TargetIdent string `json:"target_ident"`
|
||||
TargetNote string `json:"target_note"`
|
||||
TriggerTime int64 `json:"trigger_time"`
|
||||
TriggerValue string `json:"trigger_value"`
|
||||
TriggerValues string `json:"trigger_values" gorm:"-"`
|
||||
TriggerValuesJson EventTriggerValues `json:"trigger_values_json" gorm:"-"`
|
||||
Tags string `json:"-"` // for db
|
||||
TagsJSON []string `json:"tags" gorm:"-"` // for fe
|
||||
TagsMap map[string]string `json:"tags_map" gorm:"-"` // for internal usage
|
||||
OriginalTags string `json:"-"` // for db
|
||||
OriginalTagsJSON []string `json:"original_tags" gorm:"-"` // for fe
|
||||
Annotations string `json:"-"` //
|
||||
AnnotationsJSON map[string]string `json:"annotations" gorm:"-"` // for fe
|
||||
IsRecovered bool `json:"is_recovered" gorm:"-"` // for notify.py
|
||||
NotifyUsersObj []*User `json:"notify_users_obj" gorm:"-"` // for notify.py
|
||||
LastEvalTime int64 `json:"last_eval_time" gorm:"-"` // for notify.py 上次计算的时间
|
||||
LastSentTime int64 `json:"last_sent_time" gorm:"-"` // 上次发送时间
|
||||
NotifyCurNumber int `json:"notify_cur_number"` // notify: current number
|
||||
FirstTriggerTime int64 `json:"first_trigger_time"` // 连续告警的首次告警时间
|
||||
ExtraConfig interface{} `json:"extra_config" gorm:"-"`
|
||||
Status int `json:"status" gorm:"-"`
|
||||
Claimant string `json:"claimant" gorm:"-"`
|
||||
SubRuleId int64 `json:"sub_rule_id" gorm:"-"`
|
||||
ExtraInfo []string `json:"extra_info" gorm:"-"`
|
||||
Target *Target `json:"target" gorm:"-"`
|
||||
RecoverConfig RecoverConfig `json:"recover_config" gorm:"-"`
|
||||
RuleHash string `json:"rule_hash" gorm:"-"`
|
||||
ExtraInfoMap []map[string]string `json:"extra_info_map" gorm:"-"`
|
||||
}
|
||||
|
||||
type EventTriggerValues struct {
|
||||
ValuesWithUnit map[string]unit.FormattedValue `json:"values_with_unit"`
|
||||
}
|
||||
|
||||
func (e *AlertCurEvent) TableName() string {
|
||||
return "alert_cur_event"
|
||||
}
|
||||
@@ -109,8 +115,18 @@ func (e *AlertCurEvent) ParseRule(field string) error {
|
||||
"{{$value := .TriggerValue}}",
|
||||
}
|
||||
|
||||
templateFuncMapCopy := tplx.NewTemplateFuncMap()
|
||||
templateFuncMapCopy["query"] = func(promql string, param ...int64) []AnomalyPoint {
|
||||
datasourceId := e.DatasourceId
|
||||
if len(param) > 0 {
|
||||
datasourceId = param[0]
|
||||
}
|
||||
value := tplx.Query(datasourceId, promql)
|
||||
return ConvertAnomalyPoints(value)
|
||||
}
|
||||
|
||||
text := strings.Join(append(defs, f), "")
|
||||
t, err := template.New(fmt.Sprint(e.RuleId)).Funcs(template.FuncMap(tplx.TemplateFuncMap)).Parse(text)
|
||||
t, err := template.New(fmt.Sprint(e.RuleId)).Funcs(templateFuncMapCopy).Parse(text)
|
||||
if err != nil {
|
||||
e.AnnotationsJSON[k] = fmt.Sprintf("failed to parse annotations: %v", err)
|
||||
continue
|
||||
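The "query" helper registered above exposes live PromQL results to annotation templates. A minimal sketch of an annotation body (the metric and the datasource id are illustrative, not part of this change): with no second argument the event's own datasource is used, and each returned element is an AnomalyPoint, so .Value, .Labels and .Timestamp are available inside range:

    {{ $points := query "max(redis_used_memory)" }}
    {{ range $points }}used memory now: {{ .Value }}{{ end }}
    {{ $other := query "up" 2 }}{{/* same helper, but against datasource id 2 */}}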
@@ -359,6 +375,15 @@ func (e *AlertCurEvent) DB2Mem() {
|
||||
}
|
||||
}
|
||||
|
||||
func (e *AlertCurEvent) OverrideGlobalWebhook() bool {
|
||||
var rc RuleConfig
|
||||
if err := json.Unmarshal([]byte(e.RuleConfig), &rc); err != nil {
|
||||
logger.Warningf("failed to unmarshal rule config: %v", err)
|
||||
return false
|
||||
}
|
||||
return rc.OverrideGlobalWebhook
|
||||
}
|
||||
|
||||
func FillRuleConfigTplName(ctx *ctx.Context, ruleConfig string) (interface{}, bool) {
|
||||
var config RuleConfig
|
||||
err := json.Unmarshal([]byte(ruleConfig), &config)
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
|
||||
"github.com/jinzhu/copier"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/tidwall/match"
|
||||
"github.com/toolkits/pkg/logger"
|
||||
"github.com/toolkits/pkg/str"
|
||||
)
|
||||
@@ -45,55 +46,56 @@ const (
|
||||
|
||||
type AlertRule struct {
|
||||
Id int64 `json:"id" gorm:"primaryKey"`
|
||||
GroupId int64 `json:"group_id"` // busi group id
|
||||
Cate string `json:"cate"` // alert rule cate (prometheus|elasticsearch)
|
||||
DatasourceIds string `json:"-" gorm:"datasource_ids"` // datasource ids
|
||||
DatasourceIdsJson []int64 `json:"datasource_ids" gorm:"-"` // for fe
|
||||
Cluster string `json:"cluster"` // take effect by clusters, seperated by space
|
||||
Name string `json:"name"` // rule name
|
||||
Note string `json:"note"` // will sent in notify
|
||||
Prod string `json:"prod"` // product empty means n9e
|
||||
Algorithm string `json:"algorithm"` // algorithm (''|holtwinters), empty means threshold
|
||||
AlgoParams string `json:"-" gorm:"algo_params"` // params algorithm need
|
||||
AlgoParamsJson interface{} `json:"algo_params" gorm:"-"` // for fe
|
||||
Delay int `json:"delay"` // Time (in seconds) to delay evaluation
|
||||
Severity int `json:"severity"` // 1: Emergency 2: Warning 3: Notice
|
||||
Severities []int `json:"severities" gorm:"-"` // 1: Emergency 2: Warning 3: Notice
|
||||
Disabled int `json:"disabled"` // 0: enabled, 1: disabled
|
||||
PromForDuration int `json:"prom_for_duration"` // prometheus for, unit:s
|
||||
PromQl string `json:"prom_ql"` // just one ql
|
||||
RuleConfig string `json:"-" gorm:"rule_config"` // rule config
|
||||
RuleConfigJson interface{} `json:"rule_config" gorm:"-"` // rule config for fe
|
||||
EventRelabelConfig []*pconf.RelabelConfig `json:"event_relabel_config" gorm:"-"` // event relabel config
|
||||
PromEvalInterval int `json:"prom_eval_interval"` // unit:s
|
||||
EnableStime string `json:"-"` // split by space: "00:00 10:00 12:00"
|
||||
EnableStimeJSON string `json:"enable_stime" gorm:"-"` // for fe
|
||||
EnableStimesJSON []string `json:"enable_stimes" gorm:"-"` // for fe
|
||||
EnableEtime string `json:"-"` // split by space: "00:00 10:00 12:00"
|
||||
EnableEtimeJSON string `json:"enable_etime" gorm:"-"` // for fe
|
||||
EnableEtimesJSON []string `json:"enable_etimes" gorm:"-"` // for fe
|
||||
EnableDaysOfWeek string `json:"-"` // eg: "0 1 2 3 4 5 6 ; 0 1 2"
|
||||
EnableDaysOfWeekJSON []string `json:"enable_days_of_week" gorm:"-"` // for fe
|
||||
EnableDaysOfWeeksJSON [][]string `json:"enable_days_of_weeks" gorm:"-"` // for fe
|
||||
EnableInBG int `json:"enable_in_bg"` // 0: global 1: enable one busi-group
|
||||
NotifyRecovered int `json:"notify_recovered"` // whether notify when recovery
|
||||
NotifyChannels string `json:"-"` // split by space: sms voice email dingtalk wecom
|
||||
NotifyChannelsJSON []string `json:"notify_channels" gorm:"-"` // for fe
|
||||
NotifyGroups string `json:"-"` // split by space: 233 43
|
||||
NotifyGroupsObj []UserGroup `json:"notify_groups_obj" gorm:"-"` // for fe
|
||||
NotifyGroupsJSON []string `json:"notify_groups" gorm:"-"` // for fe
|
||||
NotifyRepeatStep int `json:"notify_repeat_step"` // notify repeat interval, unit: min
|
||||
NotifyMaxNumber int `json:"notify_max_number"` // notify: max number
|
||||
RecoverDuration int64 `json:"recover_duration"` // unit: s
|
||||
Callbacks string `json:"-"` // split by space: http://a.com/api/x http://a.com/api/y'
|
||||
CallbacksJSON []string `json:"callbacks" gorm:"-"` // for fe
|
||||
RunbookUrl string `json:"runbook_url"` // sop url
|
||||
AppendTags string `json:"-"` // split by space: service=n9e mod=api
|
||||
AppendTagsJSON []string `json:"append_tags" gorm:"-"` // for fe
|
||||
Annotations string `json:"-"` //
|
||||
AnnotationsJSON map[string]string `json:"annotations" gorm:"-"` // for fe
|
||||
ExtraConfig string `json:"-" gorm:"extra_config"` // extra config
|
||||
ExtraConfigJSON interface{} `json:"extra_config" gorm:"-"` // for fe
|
||||
GroupId int64 `json:"group_id"` // busi group id
|
||||
Cate string `json:"cate"` // alert rule cate (prometheus|elasticsearch)
|
||||
DatasourceIds string `json:"-" gorm:"datasource_ids"`
|
||||
DatasourceIdsJson []int64 `json:"datasource_ids,omitempty" gorm:"-"` // alert rule list page use this field
|
||||
DatasourceQueries []DatasourceQuery `json:"datasource_queries" gorm:"datasource_queries;type:text;serializer:json"` // datasource queries
|
||||
Cluster string `json:"cluster"` // take effect by clusters, seperated by space
|
||||
Name string `json:"name"` // rule name
|
||||
Note string `json:"note"` // will sent in notify
|
||||
Prod string `json:"prod"` // product empty means n9e
|
||||
Algorithm string `json:"algorithm"` // algorithm (''|holtwinters), empty means threshold
|
||||
AlgoParams string `json:"-" gorm:"algo_params"` // params algorithm need
|
||||
AlgoParamsJson interface{} `json:"algo_params" gorm:"-"` // for fe
|
||||
Delay int `json:"delay"` // Time (in seconds) to delay evaluation
|
||||
Severity int `json:"severity"` // 1: Emergency 2: Warning 3: Notice
|
||||
Severities []int `json:"severities" gorm:"-"` // 1: Emergency 2: Warning 3: Notice
|
||||
Disabled int `json:"disabled"` // 0: enabled, 1: disabled
|
||||
PromForDuration int `json:"prom_for_duration"` // prometheus for, unit:s
|
||||
PromQl string `json:"prom_ql"` // just one ql
|
||||
RuleConfig string `json:"-" gorm:"rule_config"` // rule config
|
||||
RuleConfigJson interface{} `json:"rule_config" gorm:"-"` // rule config for fe
|
||||
EventRelabelConfig []*pconf.RelabelConfig `json:"event_relabel_config" gorm:"-"` // event relabel config
|
||||
PromEvalInterval int `json:"prom_eval_interval"` // unit:s
|
||||
EnableStime string `json:"-"` // split by space: "00:00 10:00 12:00"
|
||||
EnableStimeJSON string `json:"enable_stime" gorm:"-"` // for fe
|
||||
EnableStimesJSON []string `json:"enable_stimes" gorm:"-"` // for fe
|
||||
EnableEtime string `json:"-"` // split by space: "00:00 10:00 12:00"
|
||||
EnableEtimeJSON string `json:"enable_etime" gorm:"-"` // for fe
|
||||
EnableEtimesJSON []string `json:"enable_etimes" gorm:"-"` // for fe
|
||||
EnableDaysOfWeek string `json:"-"` // eg: "0 1 2 3 4 5 6 ; 0 1 2"
|
||||
EnableDaysOfWeekJSON []string `json:"enable_days_of_week" gorm:"-"` // for fe
|
||||
EnableDaysOfWeeksJSON [][]string `json:"enable_days_of_weeks" gorm:"-"` // for fe
|
||||
EnableInBG int `json:"enable_in_bg"` // 0: global 1: enable one busi-group
|
||||
NotifyRecovered int `json:"notify_recovered"` // whether notify when recovery
|
||||
NotifyChannels string `json:"-"` // split by space: sms voice email dingtalk wecom
|
||||
NotifyChannelsJSON []string `json:"notify_channels" gorm:"-"` // for fe
|
||||
NotifyGroups string `json:"-"` // split by space: 233 43
|
||||
NotifyGroupsObj []UserGroup `json:"notify_groups_obj" gorm:"-"` // for fe
|
||||
NotifyGroupsJSON []string `json:"notify_groups" gorm:"-"` // for fe
|
||||
NotifyRepeatStep int `json:"notify_repeat_step"` // notify repeat interval, unit: min
|
||||
NotifyMaxNumber int `json:"notify_max_number"` // notify: max number
|
||||
RecoverDuration int64 `json:"recover_duration"` // unit: s
|
||||
Callbacks string `json:"-"` // split by space: http://a.com/api/x http://a.com/api/y'
|
||||
CallbacksJSON []string `json:"callbacks" gorm:"-"` // for fe
|
||||
RunbookUrl string `json:"runbook_url"` // sop url
|
||||
AppendTags string `json:"-"` // split by space: service=n9e mod=api
|
||||
AppendTagsJSON []string `json:"append_tags" gorm:"-"` // for fe
|
||||
Annotations string `json:"-"` //
|
||||
AnnotationsJSON map[string]string `json:"annotations" gorm:"-"` // for fe
|
||||
ExtraConfig string `json:"-" gorm:"extra_config"` // extra config
|
||||
ExtraConfigJSON interface{} `json:"extra_config" gorm:"-"` // for fe
|
||||
CreateAt int64 `json:"create_at"`
|
||||
CreateBy string `json:"create_by"`
|
||||
UpdateAt int64 `json:"update_at"`
|
||||
@@ -101,6 +103,29 @@ type AlertRule struct {
|
||||
UUID int64 `json:"uuid" gorm:"-"` // tpl identifier
|
||||
CurEventCount int64 `json:"cur_event_count" gorm:"-"`
|
||||
UpdateByNickname string `json:"update_by_nickname" gorm:"-"` // for fe
|
||||
CronPattern string `json:"cron_pattern"`
|
||||
}
|
||||
|
||||
type ChildVarConfig struct {
|
||||
ParamVal []map[string]ParamQuery `json:"param_val"`
|
||||
ChildVarConfigs *ChildVarConfig `json:"child_var_configs"`
|
||||
}
|
||||
|
||||
type ParamQuery struct {
|
||||
ParamType string `json:"param_type"` // host、device、enum、threshold 三种类型
|
||||
Query interface{} `json:"query"`
|
||||
}
|
||||
|
||||
type VarConfig struct {
|
||||
ParamVal []ParamQueryForFirst `json:"param_val"`
|
||||
ChildVarConfigs *ChildVarConfig `json:"child_var_configs"`
|
||||
}
|
||||
|
||||
// ParamQueryForFirst 同 ParamQuery,仅在第一层出现
|
||||
type ParamQueryForFirst struct {
|
||||
Name string `json:"name"`
|
||||
ParamType string `json:"param_type"`
|
||||
Query interface{} `json:"query"`
|
||||
}
|
||||
|
||||
type Tpl struct {
|
||||
@@ -110,15 +135,16 @@ type Tpl struct {
|
||||
}
|
||||
|
||||
type RuleConfig struct {
|
||||
Version string `json:"version,omitempty"`
|
||||
EventRelabelConfig []*pconf.RelabelConfig `json:"event_relabel_config,omitempty"`
|
||||
TaskTpls []*Tpl `json:"task_tpls,omitempty"`
|
||||
Queries interface{} `json:"queries,omitempty"`
|
||||
Triggers []Trigger `json:"triggers,omitempty"`
|
||||
Inhibit bool `json:"inhibit,omitempty"`
|
||||
PromQl string `json:"prom_ql,omitempty"`
|
||||
Severity int `json:"severity,omitempty"`
|
||||
AlgoParams interface{} `json:"algo_params,omitempty"`
|
||||
Version string `json:"version,omitempty"`
|
||||
EventRelabelConfig []*pconf.RelabelConfig `json:"event_relabel_config,omitempty"`
|
||||
TaskTpls []*Tpl `json:"task_tpls,omitempty"`
|
||||
Queries interface{} `json:"queries,omitempty"`
|
||||
Triggers []Trigger `json:"triggers,omitempty"`
|
||||
Inhibit bool `json:"inhibit,omitempty"`
|
||||
PromQl string `json:"prom_ql,omitempty"`
|
||||
Severity int `json:"severity,omitempty"`
|
||||
AlgoParams interface{} `json:"algo_params,omitempty"`
|
||||
OverrideGlobalWebhook bool `json:"override_global_webhook,omitempty"`
|
||||
}
|
||||
|
||||
type PromRuleConfig struct {
|
||||
@@ -151,7 +177,10 @@ type HostRuleConfig struct {
|
||||
type PromQuery struct {
|
||||
PromQl string `json:"prom_ql"`
|
||||
Severity int `json:"severity"`
|
||||
VarEnabled bool `json:"var_enabled"`
|
||||
VarConfig VarConfig `json:"var_config"`
|
||||
RecoverConfig RecoverConfig `json:"recover_config"`
|
||||
Unit string `json:"unit"`
|
||||
}
|
||||
|
||||
type HostTrigger struct {
|
||||
@@ -174,11 +203,11 @@ type Trigger struct {
|
||||
Exp string `json:"exp"`
|
||||
Severity int `json:"severity"`
|
||||
|
||||
Type string `json:"type,omitempty"`
|
||||
Duration int `json:"duration,omitempty"`
|
||||
Percent int `json:"percent,omitempty"`
|
||||
Joins []Join `json:"joins"`
|
||||
JoinRef string `json:"join_ref"`
|
||||
Type string `json:"type,omitempty"`
|
||||
Duration int `json:"duration,omitempty"`
|
||||
Percent int `json:"percent,omitempty"`
|
||||
Joins []Join `json:"joins"`
|
||||
JoinRef string `json:"join_ref"`
|
||||
RecoverConfig RecoverConfig `json:"recover_config"`
|
||||
}
|
||||
|
||||
@@ -188,6 +217,132 @@ type Join struct {
|
||||
On []string `json:"on"`
|
||||
}
|
||||
|
||||
var DataSourceQueryAll = DatasourceQuery{
|
||||
MatchType: 2,
|
||||
Op: "in",
|
||||
Values: []interface{}{DatasourceIdAll},
|
||||
}
|
||||
|
||||
type DatasourceQuery struct {
|
||||
MatchType int `json:"match_type"`
|
||||
Op string `json:"op"`
|
||||
Values []interface{} `json:"values"`
|
||||
}
|
||||
|
||||
// GetDatasourceIDsByDatasourceQueries 从 datasourceQueries 中获取 datasourceIDs
|
||||
// 查询分为精确\模糊匹配,逻辑有 in 与 not in
|
||||
// idMap 为当前 datasourceQueries 对应的数据源全集
|
||||
// nameMap 为所有 datasource 的 name 到 id 的映射,用于名称的模糊匹配
|
||||
func GetDatasourceIDsByDatasourceQueries[T any](datasourceQueries []DatasourceQuery, idMap map[int64]T, nameMap map[string]int64) []int64 {
|
||||
if len(datasourceQueries) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// 所有 query 取交集,初始集合为全集
|
||||
curIDs := make(map[int64]struct{})
|
||||
for id := range idMap {
|
||||
curIDs[id] = struct{}{}
|
||||
}
|
||||
|
||||
for i := range datasourceQueries {
|
||||
// 每次 query 都在 curIDs 的基础上得到 dsIDs
|
||||
dsIDs := make(map[int64]struct{})
|
||||
q := datasourceQueries[i]
|
||||
if q.MatchType == 0 {
|
||||
// 精确匹配转为 id 匹配
|
||||
idValues := make([]int64, 0, len(q.Values))
|
||||
for v := range q.Values {
|
||||
var val int64
|
||||
switch v := q.Values[v].(type) {
|
||||
case int64:
|
||||
val = v
|
||||
case int:
|
||||
val = int64(v)
|
||||
case float64:
|
||||
val = int64(v)
|
||||
case float32:
|
||||
val = int64(v)
|
||||
case int8:
|
||||
val = int64(v)
|
||||
case int16:
|
||||
val = int64(v)
|
||||
case int32:
|
||||
val = int64(v)
|
||||
default:
|
||||
continue
|
||||
}
|
||||
idValues = append(idValues, val)
|
||||
}
|
||||
|
||||
if q.Op == "in" {
|
||||
if len(idValues) == 1 && idValues[0] == DatasourceIdAll {
|
||||
for id := range curIDs {
|
||||
dsIDs[id] = struct{}{}
|
||||
}
|
||||
} else {
|
||||
for idx := range idValues {
|
||||
if _, exist := curIDs[idValues[idx]]; exist {
|
||||
dsIDs[idValues[idx]] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if q.Op == "not in" {
|
||||
for idx := range idValues {
|
||||
delete(curIDs, idValues[idx])
|
||||
}
|
||||
dsIDs = curIDs
|
||||
}
|
||||
} else if q.MatchType == 1 {
|
||||
// 模糊匹配使用 datasource name
|
||||
if q.Op == "in" {
|
||||
for dsName, dsID := range nameMap {
|
||||
if _, exist := curIDs[dsID]; exist {
|
||||
for idx := range q.Values {
|
||||
if _, ok := q.Values[idx].(string); !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if match.Match(dsName, q.Values[idx].(string)) {
|
||||
dsIDs[nameMap[dsName]] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if q.Op == "not in" {
|
||||
for dsName := range nameMap {
|
||||
for idx := range q.Values {
|
||||
if _, ok := q.Values[idx].(string); !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if match.Match(dsName, q.Values[idx].(string)) {
|
||||
delete(curIDs, nameMap[dsName])
|
||||
}
|
||||
}
|
||||
}
|
||||
dsIDs = curIDs
|
||||
}
|
||||
} else if q.MatchType == 2 {
|
||||
// 全部数据源
|
||||
for id := range curIDs {
|
||||
dsIDs[id] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
curIDs = dsIDs
|
||||
if len(curIDs) == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
dsIds := make([]int64, 0, len(curIDs))
|
||||
for c := range curIDs {
|
||||
dsIds = append(dsIds, c)
|
||||
}
|
||||
|
||||
return dsIds
|
||||
}
|
||||
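Queries are applied in order and intersected against the initial full id set, so exact-id and fuzzy-name filters compose. A minimal usage sketch with hypothetical ids and names (the id map's value type does not matter since that parameter is generic):

	idMap := map[int64]struct{}{1: {}, 2: {}, 3: {}}
	nameMap := map[string]int64{"prom-a": 1, "prom-b": 2, "vm-c": 3}
	queries := []models.DatasourceQuery{
		{MatchType: 1, Op: "in", Values: []interface{}{"prom-*"}}, // fuzzy name match -> {1, 2}
		{MatchType: 0, Op: "not in", Values: []interface{}{2}},    // exact id exclusion -> {1}
	}
	ids := models.GetDatasourceIDsByDatasourceQueries(queries, idMap, nameMap) // ids == [1]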
|
||||
func GetHostsQuery(queries []HostQuery) []map[string]interface{} {
|
||||
var query []map[string]interface{}
|
||||
for _, q := range queries {
|
||||
@@ -217,12 +372,14 @@ func GetHostsQuery(queries []HostQuery) []map[string]interface{} {
|
||||
blank += " "
|
||||
}
|
||||
} else {
|
||||
blank := " "
|
||||
var args []interface{}
|
||||
var query []string
|
||||
for _, tag := range lst {
|
||||
m["tags not like ?"+blank] = "%" + tag + "%"
|
||||
m["host_tags not like ?"+blank] = "%" + tag + "%"
|
||||
blank += " "
|
||||
query = append(query, "tags not like ?",
|
||||
"(host_tags not like ? or host_tags is null)")
|
||||
args = append(args, "%"+tag+"%", "%"+tag+"%")
|
||||
}
|
||||
m[strings.Join(query, " and ")] = args
|
||||
}
|
||||
case "hosts":
|
||||
lst := []string{}
|
||||
@@ -243,11 +400,13 @@ func GetHostsQuery(queries []HostQuery) []map[string]interface{} {
|
||||
blank += " "
|
||||
}
|
||||
} else if q.Op == "!~" {
|
||||
blank := " "
|
||||
var args []interface{}
|
||||
var query []string
|
||||
for _, host := range lst {
|
||||
m["ident not like ?"+blank] = strings.ReplaceAll(host, "*", "%")
|
||||
blank += " "
|
||||
query = append(query, "ident not like ?")
|
||||
args = append(args, strings.ReplaceAll(host, "*", "%"))
|
||||
}
|
||||
m[strings.Join(query, " and ")] = args
|
||||
}
|
||||
}
|
||||
query = append(query, m)
|
||||
@@ -286,9 +445,9 @@ func (ar *AlertRule) Verify() error {
|
||||
return fmt.Errorf("GroupId(%d) invalid", ar.GroupId)
|
||||
}
|
||||
|
||||
if IsAllDatasource(ar.DatasourceIdsJson) {
|
||||
ar.DatasourceIdsJson = []int64{0}
|
||||
}
|
||||
//if IsAllDatasource(ar.DatasourceIdsJson) {
|
||||
// ar.DatasourceIdsJson = []int64{0}
|
||||
//}
|
||||
|
||||
if str.Dangerous(ar.Name) {
|
||||
return errors.New("Name has invalid characters")
|
||||
@@ -342,7 +501,7 @@ func (ar *AlertRule) Add(ctx *ctx.Context) error {
|
||||
return err
|
||||
}
|
||||
|
||||
exists, err := AlertRuleExists(ctx, 0, ar.GroupId, ar.DatasourceIdsJson, ar.Name)
|
||||
exists, err := AlertRuleExists(ctx, 0, ar.GroupId, ar.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -360,7 +519,7 @@ func (ar *AlertRule) Add(ctx *ctx.Context) error {
|
||||
|
||||
func (ar *AlertRule) Update(ctx *ctx.Context, arf AlertRule) error {
|
||||
if ar.Name != arf.Name {
|
||||
exists, err := AlertRuleExists(ctx, ar.Id, ar.GroupId, ar.DatasourceIdsJson, arf.Name)
|
||||
exists, err := AlertRuleExists(ctx, ar.Id, ar.GroupId, arf.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -509,11 +668,30 @@ func (ar *AlertRule) UpdateFieldsMap(ctx *ctx.Context, fields map[string]interfa
|
||||
return DB(ctx).Model(ar).Updates(fields).Error
|
||||
}
|
||||
|
||||
// for v5 rule
|
||||
func (ar *AlertRule) FillDatasourceIds() error {
|
||||
if ar.DatasourceIds != "" {
|
||||
json.Unmarshal([]byte(ar.DatasourceIds), &ar.DatasourceIdsJson)
|
||||
return nil
|
||||
func (ar *AlertRule) FillDatasourceQueries() error {
|
||||
// 兼容旧逻辑,将 datasourceIds 转换为 datasourceQueries
|
||||
if len(ar.DatasourceQueries) == 0 && len(ar.DatasourceIds) != 0 {
|
||||
datasourceQueries := DatasourceQuery{
|
||||
MatchType: 0,
|
||||
Op: "in",
|
||||
Values: make([]interface{}, 0),
|
||||
}
|
||||
|
||||
var values []int
|
||||
if ar.DatasourceIds != "" {
|
||||
json.Unmarshal([]byte(ar.DatasourceIds), &values)
|
||||
|
||||
}
|
||||
|
||||
for i := range values {
|
||||
if values[i] == 0 {
|
||||
// 0 表示所有数据源
|
||||
datasourceQueries.MatchType = 2
|
||||
break
|
||||
}
|
||||
datasourceQueries.Values = append(datasourceQueries.Values, values[i])
|
||||
}
|
||||
ar.DatasourceQueries = []DatasourceQuery{datasourceQueries}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
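For reference, the legacy conversion above maps (values illustrative): a stored datasource_ids of "[1,3]" to a single exact-match query {MatchType: 0, Op: "in", Values: [1, 3]}, and "[0]" (all datasources) to a match-all query {MatchType: 2, Op: "in", Values: []}.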
@@ -632,14 +810,6 @@ func (ar *AlertRule) FE2DB() error {
|
||||
}
|
||||
ar.AlgoParams = string(algoParamsByte)
|
||||
|
||||
if len(ar.DatasourceIdsJson) > 0 {
|
||||
idsByte, err := json.Marshal(ar.DatasourceIdsJson)
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshal datasource_ids err:%v", err)
|
||||
}
|
||||
ar.DatasourceIds = string(idsByte)
|
||||
}
|
||||
|
||||
if ar.RuleConfigJson == nil {
|
||||
query := PromQuery{
|
||||
PromQl: ar.PromQl,
|
||||
@@ -711,8 +881,17 @@ func (ar *AlertRule) DB2FE() error {
|
||||
json.Unmarshal([]byte(ar.RuleConfig), &ruleConfig)
|
||||
ar.EventRelabelConfig = ruleConfig.EventRelabelConfig
|
||||
|
||||
err := ar.FillDatasourceIds()
|
||||
return err
|
||||
// 兼容旧逻辑填充 cron_pattern
|
||||
if ar.CronPattern == "" && ar.PromEvalInterval != 0 {
|
||||
ar.CronPattern = fmt.Sprintf("@every %ds", ar.PromEvalInterval)
|
||||
}
|
||||
|
||||
err := ar.FillDatasourceQueries()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func AlertRuleDels(ctx *ctx.Context, ids []int64, bgid ...int64) error {
|
||||
@@ -726,7 +905,7 @@ func AlertRuleDels(ctx *ctx.Context, ids []int64, bgid ...int64) error {
|
||||
return ret.Error
|
||||
}
|
||||
|
||||
// 说明确实删掉了,把相关的活跃告警也删了,这些告警永远都不会恢复了,而且策略都没了,说明没人关心了
|
||||
// 说明确实删掉了,把相关的活跃告警也删了,这些告警永远都不会恢复了,而且策略都没了,说明没人关心了
|
||||
if ret.RowsAffected > 0 {
|
||||
DB(ctx).Where("rule_id = ?", ids[i]).Delete(new(AlertCurEvent))
|
||||
}
|
||||
@@ -735,7 +914,7 @@ func AlertRuleDels(ctx *ctx.Context, ids []int64, bgid ...int64) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func AlertRuleExists(ctx *ctx.Context, id, groupId int64, datasourceIds []int64, name string) (bool, error) {
|
||||
func AlertRuleExists(ctx *ctx.Context, id, groupId int64, name string) (bool, error) {
|
||||
session := DB(ctx).Where("id <> ? and group_id = ? and name = ?", id, groupId, name)
|
||||
|
||||
var lst []AlertRule
|
||||
@@ -747,15 +926,6 @@ func AlertRuleExists(ctx *ctx.Context, id, groupId int64, datasourceIds []int64,
|
||||
return false, nil
|
||||
}
|
||||
|
||||
// match cluster
|
||||
for _, r := range lst {
|
||||
r.FillDatasourceIds()
|
||||
for _, id := range r.DatasourceIdsJson {
|
||||
if MatchDatasource(datasourceIds, id) {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return false, nil
|
||||
}
|
||||
|
||||
@@ -975,7 +1145,6 @@ func (ar *AlertRule) UpdateEvent(event *AlertCurEvent) {
|
||||
event.PromForDuration = ar.PromForDuration
|
||||
event.RuleConfig = ar.RuleConfig
|
||||
event.RuleConfigJson = ar.RuleConfigJson
|
||||
event.PromEvalInterval = ar.PromEvalInterval
|
||||
event.Callbacks = ar.Callbacks
|
||||
event.CallbacksJSON = ar.CallbacksJSON
|
||||
event.RunbookUrl = ar.RunbookUrl
|
||||
|
||||
@@ -114,6 +114,11 @@ func (s *AlertSubscribe) Verify() error {
|
||||
return errors.New("severities is required")
|
||||
}
|
||||
|
||||
if s.UserGroupIds != "" && s.NewChannels == "" {
|
||||
// 如果指定了用户组,那么新告警的通知渠道必须指定,否则容易出现告警规则中没有指定通知渠道,导致订阅通知时,没有通知渠道
|
||||
return errors.New("new_channels is required")
|
||||
}
|
||||
|
||||
ugids := strings.Fields(s.UserGroupIds)
|
||||
for i := 0; i < len(ugids); i++ {
|
||||
if _, err := strconv.ParseInt(ugids[i], 10, 64); err != nil {
|
||||
|
||||
@@ -1,24 +1,25 @@
|
||||
package common
|
||||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strings"
|
||||
|
||||
"github.com/ccfos/nightingale/v6/models"
|
||||
"github.com/ccfos/nightingale/v6/pkg/unit"
|
||||
"github.com/prometheus/common/model"
|
||||
)
|
||||
|
||||
type AnomalyPoint struct {
|
||||
Key string `json:"key"`
|
||||
Labels model.Metric `json:"labels"`
|
||||
Timestamp int64 `json:"timestamp"`
|
||||
Value float64 `json:"value"`
|
||||
Severity int `json:"severity"`
|
||||
Triggered bool `json:"triggered"`
|
||||
Query string `json:"query"`
|
||||
Values string `json:"values"`
|
||||
RecoverConfig models.RecoverConfig `json:"recover_config"`
|
||||
Key string `json:"key"`
|
||||
Labels model.Metric `json:"labels"`
|
||||
Timestamp int64 `json:"timestamp"`
|
||||
Value float64 `json:"value"`
|
||||
Severity int `json:"severity"`
|
||||
Triggered bool `json:"triggered"`
|
||||
Query string `json:"query"`
|
||||
Values string `json:"values"`
|
||||
ValuesUnit map[string]unit.FormattedValue `json:"values_unit"`
|
||||
RecoverConfig RecoverConfig `json:"recover_config"`
|
||||
}
|
||||
|
||||
func NewAnomalyPoint(key string, labels map[string]string, ts int64, value float64, severity int) AnomalyPoint {
|
||||
@@ -10,9 +10,20 @@ import (
|
||||
|
||||
// BuiltinComponent represents a builtin component along with its metadata.
|
||||
type BuiltinComponent struct {
|
||||
ID uint64 `json:"id" gorm:"primaryKey;type:bigint;autoIncrement;comment:'unique identifier'"`
|
||||
Ident string `json:"ident" gorm:"type:varchar(191);not null;uniqueIndex:idx_ident,sort:asc"`
|
||||
Logo string `json:"logo" gorm:"type:mediumtext;comment:'logo of component'"`
|
||||
Readme string `json:"readme" gorm:"type:text;not null;comment:'readme of component'"`
|
||||
CreatedAt int64 `json:"created_at" gorm:"type:bigint;not null;default:0;comment:'create time'"`
|
||||
CreatedBy string `json:"created_by" gorm:"type:varchar(191);not null;default:'';comment:'creator'"`
|
||||
UpdatedAt int64 `json:"updated_at" gorm:"type:bigint;not null;default:0;comment:'update time'"`
|
||||
UpdatedBy string `json:"updated_by" gorm:"type:varchar(191);not null;default:'';comment:'updater'"`
|
||||
}
|
||||
|
||||
type PostgresBuiltinComponent struct {
|
||||
ID uint64 `json:"id" gorm:"primaryKey;type:bigint;autoIncrement;comment:'unique identifier'"`
|
||||
Ident string `json:"ident" gorm:"type:varchar(191);not null;uniqueIndex:idx_ident,sort:asc;comment:'identifier of component'"`
|
||||
Logo string `json:"logo" gorm:"type:varchar(191);not null;comment:'logo of component'"`
|
||||
Logo string `json:"logo" gorm:"type:text;comment:'logo of component'"`
|
||||
Readme string `json:"readme" gorm:"type:text;not null;comment:'readme of component'"`
|
||||
CreatedAt int64 `json:"created_at" gorm:"type:bigint;not null;default:0;comment:'create time'"`
|
||||
CreatedBy string `json:"created_by" gorm:"type:varchar(191);not null;default:'';comment:'creator'"`
|
||||
|
||||
@@ -14,12 +14,12 @@ import (
|
||||
type BuiltinMetric struct {
|
||||
ID int64 `json:"id" gorm:"primaryKey;type:bigint;autoIncrement;comment:'unique identifier'"`
|
||||
UUID int64 `json:"uuid" gorm:"type:bigint;not null;default:0;comment:'uuid'"`
|
||||
Collector string `json:"collector" gorm:"type:varchar(191);not null;index:idx_collector,sort:asc;comment:'type of collector'"` // Type of collector (e.g., 'categraf', 'telegraf')
|
||||
Typ string `json:"typ" gorm:"type:varchar(191);not null;index:idx_typ,sort:asc;comment:'type of metric'"` // Type of metric (e.g., 'host', 'mysql', 'redis')
|
||||
Name string `json:"name" gorm:"type:varchar(191);not null;index:idx_builtinmetric_name,sort:asc;comment:'name of metric'"`
|
||||
Collector string `json:"collector" gorm:"uniqueIndex:idx_collector_typ_name;type:varchar(191);not null;index:idx_collector,sort:asc;comment:'type of collector'"`
|
||||
Typ string `json:"typ" gorm:"uniqueIndex:idx_collector_typ_name;type:varchar(191);not null;index:idx_typ,sort:asc;comment:'type of metric'"`
|
||||
Name string `json:"name" gorm:"uniqueIndex:idx_collector_typ_name;type:varchar(191);not null;index:idx_builtinmetric_name,sort:asc;comment:'name of metric'"`
|
||||
Unit string `json:"unit" gorm:"type:varchar(191);not null;comment:'unit of metric'"`
|
||||
Note string `json:"note" gorm:"type:varchar(4096);not null;comment:'description of metric'"`
|
||||
Lang string `json:"lang" gorm:"type:varchar(191);not null;default:'zh';index:idx_lang,sort:asc;comment:'language'"`
|
||||
Lang string `json:"lang" gorm:"uniqueIndex:idx_collector_typ_name;type:varchar(191);not null;default:'zh';index:idx_lang,sort:asc;comment:'language'"`
|
||||
Expression string `json:"expression" gorm:"type:varchar(4096);not null;comment:'expression of metric'"`
|
||||
CreatedAt int64 `json:"created_at" gorm:"type:bigint;not null;default:0;comment:'create time'"`
|
||||
CreatedBy string `json:"created_by" gorm:"type:varchar(191);not null;default:'';comment:'creator'"`
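Tagging Collector, Typ, Name and Lang with the same uniqueIndex name makes GORM build a single composite unique index over the four columns. A small sketch of the same tag pattern on a hypothetical model; index and field names here are illustrative:

```go
package main

import (
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Metric is a hypothetical model: the four tagged columns share one composite
// unique index named idx_demo, so only the full tuple has to be unique.
type Metric struct {
	ID        int64  `gorm:"primaryKey"`
	Collector string `gorm:"uniqueIndex:idx_demo;type:varchar(191)"`
	Typ       string `gorm:"uniqueIndex:idx_demo;type:varchar(191)"`
	Name      string `gorm:"uniqueIndex:idx_demo;type:varchar(191)"`
	Lang      string `gorm:"uniqueIndex:idx_demo;type:varchar(191)"`
}

func main() {
	db, err := gorm.Open(sqlite.Open("file::memory:"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	// AutoMigrate creates the table together with the composite index.
	if err := db.AutoMigrate(&Metric{}); err != nil {
		panic(err)
	}
}
```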
|
||||
|
||||
@@ -115,68 +115,54 @@ func BusiGroupExists(ctx *ctx.Context, where string, args ...interface{}) (bool,
|
||||
return num > 0, err
|
||||
}
|
||||
|
||||
var entries = []struct {
|
||||
entry interface{}
|
||||
errorMessage string
|
||||
}{
|
||||
{
|
||||
entry: &AlertRule{},
|
||||
errorMessage: "Some alert rules still in the BusiGroup",
|
||||
},
|
||||
{
|
||||
entry: &AlertMute{},
|
||||
errorMessage: "Some alert mutes still in the BusiGroup",
|
||||
},
|
||||
{
|
||||
entry: &AlertSubscribe{},
|
||||
errorMessage: "Some alert subscribes still in the BusiGroup",
|
||||
},
|
||||
{
|
||||
entry: &Target{},
|
||||
errorMessage: "Some targets still in the BusiGroup",
|
||||
},
|
||||
{
|
||||
entry: &RecordingRule{},
|
||||
errorMessage: "Some recording rules still in the BusiGroup",
|
||||
},
|
||||
{
|
||||
entry: &TaskTpl{},
|
||||
errorMessage: "Some recovery scripts still in the BusiGroup",
|
||||
},
|
||||
{
|
||||
entry: &TaskRecord{},
|
||||
errorMessage: "Some Task Record records still in the BusiGroup",
|
||||
},
|
||||
{
|
||||
entry: &TargetBusiGroup{},
|
||||
errorMessage: "Some target busigroups still in the BusiGroup",
|
||||
},
|
||||
}
|
||||
|
||||
func (bg *BusiGroup) Del(ctx *ctx.Context) error {
|
||||
has, err := Exists(DB(ctx).Model(&AlertMute{}).Where("group_id=?", bg.Id))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, e := range entries {
|
||||
has, err := Exists(DB(ctx).Model(e.entry).Where("group_id=?", bg.Id))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if has {
|
||||
return errors.New("Some alert mutes still in the BusiGroup")
|
||||
}
|
||||
|
||||
has, err = Exists(DB(ctx).Model(&AlertSubscribe{}).Where("group_id=?", bg.Id))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if has {
|
||||
return errors.New("Some alert subscribes still in the BusiGroup")
|
||||
}
|
||||
|
||||
has, err = Exists(DB(ctx).Model(&TargetBusiGroup{}).Where("group_id=?", bg.Id))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if has {
|
||||
return errors.New("Some targets still in the BusiGroup")
|
||||
}
|
||||
|
||||
has, err = Exists(DB(ctx).Model(&Board{}).Where("group_id=?", bg.Id))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if has {
|
||||
return errors.New("Some dashboards still in the BusiGroup")
|
||||
}
|
||||
|
||||
has, err = Exists(DB(ctx).Model(&TaskTpl{}).Where("group_id=?", bg.Id))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if has {
|
||||
return errors.New("Some recovery scripts still in the BusiGroup")
|
||||
}
|
||||
|
||||
// hasCR, err := Exists(DB(ctx).Table("collect_rule").Where("group_id=?", bg.Id))
|
||||
// if err != nil {
|
||||
// return err
|
||||
// }
|
||||
|
||||
// if hasCR {
|
||||
// return errors.New("Some collect rules still in the BusiGroup")
|
||||
// }
|
||||
|
||||
has, err = Exists(DB(ctx).Model(&AlertRule{}).Where("group_id=?", bg.Id))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if has {
|
||||
return errors.New("Some alert rules still in the BusiGroup")
|
||||
if has {
|
||||
return errors.New(e.errorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
return DB(ctx).Transaction(func(tx *gorm.DB) error {
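The rewritten Del replaces one copy-pasted query/err/has block per model with a single loop over the entries table, so guarding a new child type only needs one more table row. A generic sketch of that pattern; the helper below is hypothetical and not part of this change:

```go
package main

import (
	"errors"
	"fmt"

	"gorm.io/gorm"
)

// guard pairs a model with the error returned while rows still reference the group.
type guard struct {
	model  interface{}
	errMsg string
}

// firstBlocker is a hypothetical helper: it returns the error of the first
// guard whose table still has rows for groupID, mirroring the loop in Del.
func firstBlocker(db *gorm.DB, groupID int64, guards []guard) error {
	for _, g := range guards {
		var n int64
		if err := db.Model(g.model).Where("group_id = ?", groupID).Count(&n).Error; err != nil {
			return err
		}
		if n > 0 {
			return errors.New(g.errMsg)
		}
	}
	return nil
}

func main() { fmt.Println("see firstBlocker for the table-driven pattern") }
```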
|
||||
|
||||
@@ -12,7 +12,6 @@ import (
|
||||
"github.com/ccfos/nightingale/v6/pkg/poster"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/toolkits/pkg/logger"
|
||||
"github.com/toolkits/pkg/net/httplib"
|
||||
"github.com/toolkits/pkg/str"
|
||||
)
|
||||
|
||||
@@ -104,14 +103,13 @@ func (h HTTP) NewReq(reqUrl *string) (req *http.Request, err error) {
|
||||
|
||||
func (h HTTP) ParseUrl() (target *url.URL, err error) {
|
||||
urls := h.GetUrls()
|
||||
for i := 0; i < len(urls); i++ {
|
||||
if target, err = url.Parse(urls[i]); err != nil {
|
||||
continue
|
||||
}
|
||||
if len(urls) == 0 {
|
||||
return nil, errors.New("no urls")
|
||||
}
|
||||
|
||||
if _, err = httplib.Get(urls[i]).SetTimeout(time.Duration(h.Timeout) * time.Millisecond).Response(); err == nil {
|
||||
return
|
||||
}
|
||||
target, err = url.Parse(urls[0])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return
|
||||
}
|
||||
@@ -388,12 +386,12 @@ func DatasourceGetMap(ctx *ctx.Context) (map[int64]*Datasource, error) {
|
||||
}
|
||||
}
|
||||
|
||||
ret := make(map[int64]*Datasource)
|
||||
ds := make(map[int64]*Datasource)
|
||||
for i := 0; i < len(lst); i++ {
|
||||
ret[lst[i].Id] = lst[i]
|
||||
ds[lst[i].Id] = lst[i]
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
return ds, nil
|
||||
}
|
||||
|
||||
func DatasourceStatistics(ctx *ctx.Context) (*Statistics, error) {
|
||||
|
||||
@@ -28,7 +28,7 @@ func MigrateIbexTables(db *gorm.DB) {
|
||||
db = db.Set("gorm:table_options", tableOptions)
|
||||
}
|
||||
|
||||
dts := []interface{}{&imodels.TaskMeta{}, &imodels.TaskScheduler{}, &imodels.TaskSchedulerHealth{}, &TaskHostDoing{}, &imodels.TaskAction{}}
|
||||
dts := []interface{}{&imodels.TaskMeta{}, &imodels.TaskScheduler{}, &TaskHostDoing{}, &imodels.TaskAction{}}
|
||||
for _, dt := range dts {
|
||||
err := db.AutoMigrate(dt)
|
||||
if err != nil {
|
||||
@@ -38,13 +38,22 @@ func MigrateIbexTables(db *gorm.DB) {
|
||||
|
||||
for i := 0; i < 100; i++ {
|
||||
tableName := fmt.Sprintf("task_host_%d", i)
|
||||
err := db.Table(tableName).AutoMigrate(&imodels.TaskHost{})
|
||||
if err != nil {
|
||||
logger.Errorf("failed to migrate table:%s %v", tableName, err)
|
||||
exists := db.Migrator().HasTable(tableName)
|
||||
if exists {
|
||||
continue
|
||||
} else {
|
||||
err := db.Table(tableName).AutoMigrate(&imodels.TaskHost{})
|
||||
if err != nil {
|
||||
logger.Errorf("failed to migrate table:%s %v", tableName, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func isPostgres(db *gorm.DB) bool {
|
||||
dialect := db.Dialector.Name()
|
||||
return dialect == "postgres"
|
||||
}
|
||||
func MigrateTables(db *gorm.DB) error {
|
||||
var tableOptions string
|
||||
switch db.Dialector.(type) {
|
||||
@@ -54,13 +63,22 @@ func MigrateTables(db *gorm.DB) error {
|
||||
if tableOptions != "" {
|
||||
db = db.Set("gorm:table_options", tableOptions)
|
||||
}
|
||||
|
||||
dts := []interface{}{&RecordingRule{}, &AlertRule{}, &AlertSubscribe{}, &AlertMute{},
|
||||
&TaskRecord{}, &ChartShare{}, &Target{}, &Configs{}, &Datasource{}, &NotifyTpl{},
|
||||
&Board{}, &BoardBusigroup{}, &Users{}, &SsoConfig{}, &models.BuiltinMetric{},
|
||||
&models.MetricFilter{}, &models.BuiltinComponent{}, &models.NotificaitonRecord{},
|
||||
&models.MetricFilter{}, &models.NotificaitonRecord{},
|
||||
&models.TargetBusiGroup{}}
|
||||
|
||||
if isPostgres(db) {
|
||||
dts = append(dts, &models.PostgresBuiltinComponent{})
|
||||
} else {
|
||||
dts = append(dts, &models.BuiltinComponent{})
|
||||
}
|
||||
|
||||
if !db.Migrator().HasColumn(&imodels.TaskSchedulerHealth{}, "scheduler") {
|
||||
dts = append(dts, &imodels.TaskSchedulerHealth{})
|
||||
}
|
||||
|
||||
if !columnHasIndex(db, &AlertHisEvent{}, "original_tags") ||
|
||||
!columnHasIndex(db, &AlertCurEvent{}, "original_tags") {
|
||||
asyncDts := []interface{}{&AlertHisEvent{}, &AlertCurEvent{}}
|
||||
@@ -74,7 +92,7 @@ func MigrateTables(db *gorm.DB) error {
|
||||
|
||||
for _, dt := range asyncDts {
|
||||
if err := db.AutoMigrate(dt); err != nil {
|
||||
logger.Errorf("failed to migrate table: %v", err)
|
||||
logger.Errorf("failed to migrate table %+v err:%v", dt, err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
@@ -170,14 +188,20 @@ func InsertPermPoints(db *gorm.DB) {
|
||||
})
|
||||
|
||||
for _, op := range ops {
|
||||
exists, err := models.Exists(db.Model(&models.RoleOperation{}).Where("operation = ? and role_name = ?", op.Operation, op.RoleName))
|
||||
var count int64
|
||||
|
||||
err := db.Raw("SELECT COUNT(*) FROM role_operation WHERE operation = ? AND role_name = ?",
|
||||
op.Operation, op.RoleName).Scan(&count).Error
|
||||
|
||||
if err != nil {
|
||||
logger.Errorf("check role operation exists failed, %v", err)
|
||||
continue
|
||||
}
|
||||
if exists {
|
||||
|
||||
if count > 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
err = db.Create(&op).Error
|
||||
if err != nil {
|
||||
logger.Errorf("insert role operation failed, %v", err)
|
||||
@@ -186,15 +210,17 @@ func InsertPermPoints(db *gorm.DB) {
|
||||
}
|
||||
|
||||
type AlertRule struct {
|
||||
ExtraConfig string `gorm:"type:text;column:extra_config"` // extra config
|
||||
ExtraConfig string `gorm:"type:text;column:extra_config"`
|
||||
CronPattern string `gorm:"type:varchar(64);column:cron_pattern"`
|
||||
DatasourceQueries []models.DatasourceQuery `gorm:"datasource_queries;type:text;serializer:json"` // datasource queries
|
||||
}
|
||||
|
||||
type AlertSubscribe struct {
|
||||
ExtraConfig string `gorm:"type:text;column:extra_config"` // extra config
|
||||
Severities string `gorm:"column:severities;type:varchar(32);not null;default:''"`
|
||||
BusiGroups ormx.JSONArr `gorm:"column:busi_groups;type:varchar(4096);not null;default:'[]'"`
|
||||
BusiGroups ormx.JSONArr `gorm:"column:busi_groups;type:varchar(4096)"`
|
||||
Note string `gorm:"column:note;type:varchar(1024);default:'';comment:note"`
|
||||
RuleIds []int64 `gorm:"column:rule_ids;type:varchar(1024);default:'';comment:rule_ids"`
|
||||
RuleIds []int64 `gorm:"column:rule_ids;type:varchar(1024)"`
|
||||
}
|
||||
|
||||
type AlertMute struct {
|
||||
@@ -203,9 +229,10 @@ type AlertMute struct {
|
||||
}
|
||||
|
||||
type RecordingRule struct {
|
||||
QueryConfigs string `gorm:"type:text;not null;column:query_configs"` // query_configs
|
||||
DatasourceIds string `gorm:"column:datasource_ids;type:varchar(255);default:'';comment:datasource ids"`
|
||||
CronPattern string `gorm:"column:cron_pattern;type:varchar(255);default:'';comment:cron pattern"`
|
||||
QueryConfigs string `gorm:"type:text;not null;column:query_configs"` // query_configs
|
||||
DatasourceIds string `gorm:"column:datasource_ids;type:varchar(255);default:'';comment:datasource ids"`
|
||||
CronPattern string `gorm:"column:cron_pattern;type:varchar(255);default:'';comment:cron pattern"`
|
||||
DatasourceQueries []models.DatasourceQuery `json:"datasource_queries" gorm:"datasource_queries;type:text;serializer:json"` // datasource queries
|
||||
}
|
||||
|
||||
type AlertingEngines struct {
|
||||
|
||||
models/migrate/migrate_test.go (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
package migrate
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/ccfos/nightingale/v6/models"
|
||||
"gorm.io/driver/mysql"
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/schema"
|
||||
)
|
||||
|
||||
func TestInsertPermPoints(t *testing.T) {
|
||||
db, err := gorm.Open(mysql.Open("root:1234@tcp(127.0.0.1:3306)/n9e_v6?charset=utf8mb4&parseTime=True&loc=Local&allowNativePasswords=true"), &gorm.Config{NamingStrategy: schema.NamingStrategy{
|
||||
SingularTable: true,
|
||||
}})
|
||||
if err != nil {
|
||||
fmt.Printf("failed to connect database: %v", err)
|
||||
}
|
||||
|
||||
var ops []models.RoleOperation
|
||||
ops = append(ops, models.RoleOperation{
|
||||
RoleName: "Standard",
|
||||
Operation: "/alert-mutes/put",
|
||||
})
|
||||
|
||||
ops = append(ops, models.RoleOperation{
|
||||
RoleName: "Standard",
|
||||
Operation: "/log/index-patterns",
|
||||
})
|
||||
|
||||
ops = append(ops, models.RoleOperation{
|
||||
RoleName: "Standard",
|
||||
Operation: "/help/variable-configs",
|
||||
})
|
||||
|
||||
ops = append(ops, models.RoleOperation{
|
||||
RoleName: "Admin",
|
||||
Operation: "/permissions",
|
||||
})
|
||||
|
||||
ops = append(ops, models.RoleOperation{
|
||||
RoleName: "Standard",
|
||||
Operation: "/ibex-settings",
|
||||
})
|
||||
|
||||
db = db.Debug()
|
||||
for _, op := range ops {
|
||||
var count int64
|
||||
|
||||
err := db.Raw("SELECT COUNT(*) FROM role_operation WHERE operation = ? AND role_name = ?",
|
||||
op.Operation, op.RoleName).Scan(&count).Error
|
||||
fmt.Printf("count: %d\n", count)
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("check role operation exists failed, %v", err)
|
||||
continue
|
||||
}
|
||||
|
||||
if count > 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
err = db.Create(&op).Error
|
||||
if err != nil {
|
||||
fmt.Printf("insert role operation failed, %v", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,12 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/toolkits/pkg/str"
|
||||
)
|
||||
|
||||
const WEBHOOKKEY = "webhook"
|
||||
const NOTIFYSCRIPT = "notify_script"
|
||||
const NOTIFYCHANNEL = "notify_channel"
|
||||
@@ -24,6 +31,11 @@ type Webhook struct {
|
||||
RetryCount int `json:"retry_count"`
|
||||
RetryInterval int `json:"retry_interval"`
|
||||
Batch int `json:"batch"`
|
||||
Client *http.Client `json:"-"`
|
||||
}
|
||||
|
||||
func (w *Webhook) Hash() string {
|
||||
return str.MD5(fmt.Sprintf("%d_%t_%s_%s_%s_%d_%v_%t_%s_%d_%d_%d", w.Type, w.Enable, w.Url, w.BasicAuthUser, w.BasicAuthPass, w.Timeout, w.HeaderMap, w.SkipVerify, w.Note, w.RetryCount, w.RetryInterval, w.Batch))
|
||||
}
|
||||
|
||||
type NotifyScript struct {
|
||||
|
||||
@@ -31,7 +31,7 @@ func convertInterval(interval string) int {
|
||||
return int(duration.Seconds())
|
||||
}
|
||||
|
||||
func ConvertAlert(rule PromRule, interval string, datasouceIds []int64, disabled int) AlertRule {
|
||||
func ConvertAlert(rule PromRule, interval string, datasouceQueries []DatasourceQuery, disabled int) AlertRule {
|
||||
annotations := rule.Annotations
|
||||
appendTags := []string{}
|
||||
severity := 2
|
||||
@@ -55,29 +55,31 @@ func ConvertAlert(rule PromRule, interval string, datasouceIds []int64, disabled
|
||||
}
|
||||
}
|
||||
|
||||
return AlertRule{
|
||||
Name: ruleName,
|
||||
Severity: severity,
|
||||
DatasourceIdsJson: datasouceIds,
|
||||
Disabled: disabled,
|
||||
PromForDuration: convertInterval(rule.For),
|
||||
PromQl: rule.Expr,
|
||||
PromEvalInterval: convertInterval(interval),
|
||||
EnableStimeJSON: "00:00",
|
||||
EnableEtimeJSON: "23:59",
|
||||
ar := AlertRule{
|
||||
Name: rule.Alert,
|
||||
Severity: severity,
|
||||
Disabled: disabled,
|
||||
PromForDuration: convertInterval(rule.For),
|
||||
PromQl: rule.Expr,
|
||||
PromEvalInterval: convertInterval(interval),
|
||||
EnableStimeJSON: "00:00",
|
||||
EnableEtimeJSON: "23:59",
|
||||
EnableDaysOfWeekJSON: []string{
|
||||
"1", "2", "3", "4", "5", "6", "0",
|
||||
},
|
||||
EnableInBG: AlertRuleEnableInGlobalBG,
|
||||
NotifyRecovered: AlertRuleNotifyRecovered,
|
||||
NotifyRepeatStep: AlertRuleNotifyRepeatStep60Min,
|
||||
RecoverDuration: AlertRuleRecoverDuration0Sec,
|
||||
AnnotationsJSON: annotations,
|
||||
AppendTagsJSON: appendTags,
|
||||
EnableInBG: AlertRuleEnableInGlobalBG,
|
||||
NotifyRecovered: AlertRuleNotifyRecovered,
|
||||
NotifyRepeatStep: AlertRuleNotifyRepeatStep60Min,
|
||||
RecoverDuration: AlertRuleRecoverDuration0Sec,
|
||||
AnnotationsJSON: annotations,
|
||||
AppendTagsJSON: appendTags,
|
||||
DatasourceQueries: datasouceQueries,
|
||||
}
|
||||
|
||||
return ar
|
||||
}
|
||||
|
||||
func DealPromGroup(promRule []PromRuleGroup, dataSourceIds []int64, disabled int) []AlertRule {
|
||||
func DealPromGroup(promRule []PromRuleGroup, dataSourceQueries []DatasourceQuery, disabled int) []AlertRule {
|
||||
var alertRules []AlertRule
|
||||
|
||||
for _, group := range promRule {
|
||||
@@ -88,7 +90,7 @@ func DealPromGroup(promRule []PromRuleGroup, dataSourceIds []int64, disabled int
|
||||
for _, rule := range group.Rules {
|
||||
if rule.Alert != "" {
|
||||
alertRules = append(alertRules,
|
||||
ConvertAlert(rule, interval, dataSourceIds, disabled))
|
||||
ConvertAlert(rule, interval, dataSourceQueries, disabled))
|
||||
}
|
||||
}
|
||||
}
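Callers that previously passed raw datasource ids now describe the target datasources with DatasourceQuery values. A sketch of feeding parsed Prometheus rule groups to DealPromGroup with datasources 1 and 2 selected; the empty groups slice stands in for real parsed rules:

```go
package main

import (
	"fmt"

	"github.com/ccfos/nightingale/v6/models"
)

func main() {
	// Select datasources 1 and 2; MatchType 0 with Op "in" follows the
	// convention used elsewhere in this change set.
	dsQueries := []models.DatasourceQuery{
		{MatchType: 0, Op: "in", Values: []interface{}{int64(1), int64(2)}},
	}

	// groups would normally come from parsing a Prometheus rule file.
	var groups []models.PromRuleGroup

	rules := models.DealPromGroup(groups, dsQueries, 0)
	fmt.Println(len(rules))
}
```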
|
||||
|
||||
@@ -21,7 +21,7 @@ func TestConvertAlert(t *testing.T) {
|
||||
t.Errorf("Failed to Unmarshal, err: %s", err)
|
||||
}
|
||||
t.Logf("jobMissing: %+v", jobMissing[0])
|
||||
convJobMissing := models.ConvertAlert(jobMissing[0], "30s", []int64{1}, 0)
|
||||
convJobMissing := models.ConvertAlert(jobMissing[0], "30s", []models.DatasourceQuery{}, 0)
|
||||
if convJobMissing.PromEvalInterval != 30 {
|
||||
t.Errorf("PromEvalInterval is expected to be 30, but got %d",
|
||||
convJobMissing.PromEvalInterval)
|
||||
@@ -45,7 +45,7 @@ func TestConvertAlert(t *testing.T) {
|
||||
description: "Prometheus rule evaluation took more time than the scheduled interval. It indicates a slower storage backend access or too complex query.\n VALUE = {{ $value }}\n LABELS = {{ $labels }}"
|
||||
`), &ruleEvaluationSlow)
|
||||
t.Logf("ruleEvaluationSlow: %+v", ruleEvaluationSlow[0])
|
||||
convRuleEvaluationSlow := models.ConvertAlert(ruleEvaluationSlow[0], "1m", []int64{1}, 0)
|
||||
convRuleEvaluationSlow := models.ConvertAlert(ruleEvaluationSlow[0], "1m", []models.DatasourceQuery{}, 0)
|
||||
if convRuleEvaluationSlow.PromEvalInterval != 60 {
|
||||
t.Errorf("PromEvalInterval is expected to be 60, but got %d",
|
||||
convJobMissing.PromEvalInterval)
|
||||
@@ -69,7 +69,7 @@ func TestConvertAlert(t *testing.T) {
|
||||
description: "A Prometheus target has disappeared. An exporter might be crashed.\n VALUE = {{ $value }}\n LABELS = {{ $labels }}"
|
||||
`), &targetMissing)
|
||||
t.Logf("targetMissing: %+v", targetMissing[0])
|
||||
convTargetMissing := models.ConvertAlert(targetMissing[0], "1h", []int64{1}, 0)
|
||||
convTargetMissing := models.ConvertAlert(targetMissing[0], "1h", []models.DatasourceQuery{}, 0)
|
||||
if convTargetMissing.PromEvalInterval != 3600 {
|
||||
t.Errorf("PromEvalInterval is expected to be 3600, but got %d",
|
||||
convTargetMissing.PromEvalInterval)
|
||||
|
||||
@@ -16,25 +16,25 @@ import (
|
||||
|
||||
// A RecordingRule records its vector expression into new timeseries.
|
||||
type RecordingRule struct {
|
||||
Id int64 `json:"id" gorm:"primaryKey"`
|
||||
GroupId int64 `json:"group_id"` // busi group id
|
||||
DatasourceIds string `json:"-" gorm:"datasource_ids"` // datasource ids
|
||||
DatasourceIdsJson []int64 `json:"datasource_ids" gorm:"-"` // for fe
|
||||
Cluster string `json:"cluster"` // take effect by cluster, separated by space
|
||||
Name string `json:"name"` // new metric name
|
||||
Disabled int `json:"disabled"` // 0: enabled, 1: disabled
|
||||
PromQl string `json:"prom_ql"` // just one ql for promql
|
||||
QueryConfigs string `json:"-" gorm:"query_configs"` // query_configs
|
||||
QueryConfigsJson []QueryConfig `json:"query_configs" gorm:"-"` // query_configs for fe
|
||||
PromEvalInterval int `json:"prom_eval_interval"` // unit:s
|
||||
CronPattern string `json:"cron_pattern"`
|
||||
AppendTags string `json:"-"` // split by space: service=n9e mod=api
|
||||
AppendTagsJSON []string `json:"append_tags" gorm:"-"` // for fe
|
||||
Note string `json:"note"` // note
|
||||
CreateAt int64 `json:"create_at"`
|
||||
CreateBy string `json:"create_by"`
|
||||
UpdateAt int64 `json:"update_at"`
|
||||
UpdateBy string `json:"update_by"`
|
||||
Id int64 `json:"id" gorm:"primaryKey"`
|
||||
GroupId int64 `json:"group_id"` // busi group id
|
||||
DatasourceIds string `json:"-" gorm:"datasource_ids,omitempty"` // datasource ids
|
||||
DatasourceQueries []DatasourceQuery `json:"datasource_queries,omitempty" gorm:"datasource_queries;type:text;serializer:json"` // datasource queries
|
||||
Cluster string `json:"cluster"` // take effect by cluster, separated by space
|
||||
Name string `json:"name"` // new metric name
|
||||
Disabled int `json:"disabled"` // 0: enabled, 1: disabled
|
||||
PromQl string `json:"prom_ql"` // just one ql for promql
|
||||
QueryConfigs string `json:"-" gorm:"query_configs"` // query_configs
|
||||
QueryConfigsJson []QueryConfig `json:"query_configs" gorm:"-"` // query_configs for fe
|
||||
PromEvalInterval int `json:"prom_eval_interval"` // unit:s
|
||||
CronPattern string `json:"cron_pattern"`
|
||||
AppendTags string `json:"-"` // split by space: service=n9e mod=api
|
||||
AppendTagsJSON []string `json:"append_tags" gorm:"-"` // for fe
|
||||
Note string `json:"note"` // note
|
||||
CreateAt int64 `json:"create_at"`
|
||||
CreateBy string `json:"create_by"`
|
||||
UpdateAt int64 `json:"update_at"`
|
||||
UpdateBy string `json:"update_by"`
|
||||
}
|
||||
|
||||
type QueryConfig struct {
|
||||
@@ -46,9 +46,10 @@ type QueryConfig struct {
|
||||
}
|
||||
|
||||
type Query struct {
|
||||
DatasourceIds []int64 `json:"datasource_ids"`
|
||||
Cate string `json:"cate"`
|
||||
Config interface{} `json:"config"`
|
||||
DatasourceIds []int64 `json:"datasource_ids"`
|
||||
DatasourceQueries []DatasourceQuery `json:"datasource_queries"`
|
||||
Cate string `json:"cate"`
|
||||
Config interface{} `json:"config"`
|
||||
}
|
||||
|
||||
func (re *RecordingRule) TableName() string {
|
||||
@@ -57,8 +58,6 @@ func (re *RecordingRule) TableName() string {
|
||||
|
||||
func (re *RecordingRule) FE2DB() {
|
||||
re.AppendTags = strings.Join(re.AppendTagsJSON, " ")
|
||||
idsByte, _ := json.Marshal(re.DatasourceIdsJson)
|
||||
re.DatasourceIds = string(idsByte)
|
||||
|
||||
queryConfigsByte, _ := json.Marshal(re.QueryConfigsJson)
|
||||
re.QueryConfigs = string(queryConfigsByte)
|
||||
@@ -66,9 +65,28 @@ func (re *RecordingRule) FE2DB() {
|
||||
|
||||
func (re *RecordingRule) DB2FE() error {
|
||||
re.AppendTagsJSON = strings.Fields(re.AppendTags)
|
||||
json.Unmarshal([]byte(re.DatasourceIds), &re.DatasourceIdsJson)
|
||||
|
||||
re.FillDatasourceQueries()
|
||||
|
||||
json.Unmarshal([]byte(re.QueryConfigs), &re.QueryConfigsJson)
|
||||
// Legacy rules do not contain the DatasourceQueries field, so convert DatasourceIds into DatasourceQueries
|
||||
for i := range re.QueryConfigsJson {
|
||||
for j := range re.QueryConfigsJson[i].Queries {
|
||||
if len(re.QueryConfigsJson[i].Queries[j].DatasourceQueries) == 0 {
|
||||
values := make([]interface{}, 0, len(re.QueryConfigsJson[i].Queries[j].DatasourceIds))
|
||||
for _, dsID := range re.QueryConfigsJson[i].Queries[j].DatasourceIds {
|
||||
values = append(values, dsID)
|
||||
}
|
||||
re.QueryConfigsJson[i].Queries[j].DatasourceQueries = []DatasourceQuery{
|
||||
{
|
||||
MatchType: 0,
|
||||
Op: "in",
|
||||
Values: values,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if re.CronPattern == "" && re.PromEvalInterval != 0 {
|
||||
re.CronPattern = fmt.Sprintf("@every %ds", re.PromEvalInterval)
|
||||
@@ -77,14 +95,42 @@ func (re *RecordingRule) DB2FE() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (re *RecordingRule) FillDatasourceQueries() error {
|
||||
// Backward compatibility with the old logic: convert datasourceIds into datasourceQueries
|
||||
if len(re.DatasourceQueries) == 0 && len(re.DatasourceIds) != 0 {
|
||||
datasourceQueries := DatasourceQuery{
|
||||
MatchType: 0,
|
||||
Op: "in",
|
||||
Values: make([]interface{}, 0),
|
||||
}
|
||||
|
||||
var values []int64
|
||||
if re.DatasourceIds != "" {
|
||||
json.Unmarshal([]byte(re.DatasourceIds), &values)
|
||||
}
|
||||
|
||||
for i := range values {
|
||||
if values[i] == 0 {
|
||||
// 0 means all datasources
|
||||
datasourceQueries.MatchType = 2
|
||||
break
|
||||
}
|
||||
datasourceQueries.Values = append(datasourceQueries.Values, values[i])
|
||||
}
|
||||
|
||||
re.DatasourceQueries = []DatasourceQuery{datasourceQueries}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (re *RecordingRule) Verify() error {
|
||||
if re.GroupId < 0 {
|
||||
return fmt.Errorf("GroupId(%d) invalid", re.GroupId)
|
||||
}
|
||||
|
||||
if IsAllDatasource(re.DatasourceIdsJson) {
|
||||
re.DatasourceIdsJson = []int64{0}
|
||||
}
|
||||
//if IsAllDatasource(re.DatasourceIdsJson) {
|
||||
// re.DatasourceIdsJson = []int64{0}
|
||||
//}
|
||||
|
||||
if re.PromQl != "" && !model.MetricNameRE.MatchString(re.Name) {
|
||||
return errors.New("Name has invalid chreacters")
|
||||
|
||||
@@ -185,8 +185,16 @@ func BuildTargetWhereWithQuery(query string) BuildTargetWhereOption {
|
||||
if query != "" {
|
||||
arr := strings.Fields(query)
|
||||
for i := 0; i < len(arr); i++ {
|
||||
q := "%" + arr[i] + "%"
|
||||
session = session.Where("ident like ? or host_ip like ? or note like ? or tags like ? or host_tags like ? or os like ?", q, q, q, q, q, q)
|
||||
if strings.HasPrefix(arr[i], "-") {
|
||||
q := "%" + arr[i][1:] + "%"
|
||||
session = session.Where("ident not like ? and host_ip not like ? and "+
|
||||
"note not like ? and tags not like ? and (host_tags not like ? or "+
|
||||
"host_tags is null) and os not like ?", q, q, q, q, q, q)
|
||||
} else {
|
||||
q := "%" + arr[i] + "%"
|
||||
session = session.Where("ident like ? or host_ip like ? or note like ? or "+
|
||||
"tags like ? or host_tags like ? or os like ?", q, q, q, q, q, q)
|
||||
}
|
||||
}
|
||||
}
|
||||
return session
|
||||
@@ -197,6 +205,8 @@ func BuildTargetWhereWithDowntime(downtime int64) BuildTargetWhereOption {
|
||||
return func(session *gorm.DB) *gorm.DB {
|
||||
if downtime > 0 {
|
||||
session = session.Where("target.update_at < ?", time.Now().Unix()-downtime)
|
||||
} else if downtime < 0 {
|
||||
session = session.Where("target.update_at > ?", time.Now().Unix()+downtime)
|
||||
}
|
||||
return session
|
||||
}
|
||||
@@ -270,7 +280,11 @@ func TargetFilterQueryBuild(ctx *ctx.Context, query []map[string]interface{}, li
|
||||
for _, q := range query {
|
||||
tx := DB(ctx).Model(&Target{})
|
||||
for k, v := range q {
|
||||
tx = tx.Or(k, v)
|
||||
if strings.Count(k, "?") > 1 {
|
||||
tx = tx.Or(k, v.([]interface{})...)
|
||||
} else {
|
||||
tx = tx.Or(k, v)
|
||||
}
|
||||
}
|
||||
sub = sub.Where(tx)
|
||||
}
|
||||
@@ -409,7 +423,8 @@ func TargetsGetIdentsByIdentsAndHostIps(ctx *ctx.Context, idents, hostIps []stri
|
||||
return inexistence, identSet.ToSlice(), nil
|
||||
}
|
||||
|
||||
func TargetGetTags(ctx *ctx.Context, idents []string, ignoreHostTag bool) ([]string, error) {
|
||||
func TargetGetTags(ctx *ctx.Context, idents []string, ignoreHostTag bool, bgLabelKey string) (
|
||||
[]string, error) {
|
||||
session := DB(ctx).Model(new(Target))
|
||||
|
||||
var arr []*Target
|
||||
@@ -447,7 +462,22 @@ func TargetGetTags(ctx *ctx.Context, idents []string, ignoreHostTag bool) ([]str
|
||||
ret = append(ret, key)
|
||||
}
|
||||
|
||||
sort.Strings(ret)
|
||||
if bgLabelKey != "" {
|
||||
sort.Slice(ret, func(i, j int) bool {
|
||||
if strings.HasPrefix(ret[i], bgLabelKey) && strings.HasPrefix(ret[j], bgLabelKey) {
|
||||
return ret[i] < ret[j]
|
||||
}
|
||||
if strings.HasPrefix(ret[i], bgLabelKey) {
|
||||
return true
|
||||
}
|
||||
if strings.HasPrefix(ret[j], bgLabelKey) {
|
||||
return false
|
||||
}
|
||||
return ret[i] < ret[j]
|
||||
})
|
||||
} else {
|
||||
sort.Strings(ret)
|
||||
}
|
||||
|
||||
return ret, err
|
||||
}
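With a non-empty bgLabelKey the tag list is sorted so that business-group labels come first and each partition stays alphabetical. A standalone sketch of the comparator on sample data; the label key and tag values are illustrative:

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	bgLabelKey := "busigroup" // illustrative label key
	ret := []string{"env=prod", "busigroup=infra", "app=n9e", "busigroup=biz"}

	sort.Slice(ret, func(i, j int) bool {
		pi, pj := strings.HasPrefix(ret[i], bgLabelKey), strings.HasPrefix(ret[j], bgLabelKey)
		if pi && pj {
			return ret[i] < ret[j]
		}
		if pi {
			return true
		}
		if pj {
			return false
		}
		return ret[i] < ret[j]
	})

	fmt.Println(ret) // [busigroup=biz busigroup=infra app=n9e env=prod]
}
```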
|
||||
@@ -585,9 +615,9 @@ func CanMigrateBg(ctx *ctx.Context) bool {
|
||||
}
|
||||
|
||||
if maxGroupId == 0 {
|
||||
log.Println("migration bgid has been completed.")
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -626,7 +656,7 @@ func DoMigrateBg(ctx *ctx.Context, bgLabelKey string) error {
|
||||
}
|
||||
err := DB(ctx).Transaction(func(tx *gorm.DB) error {
|
||||
// 4.1 migrate group_id into the association table
|
||||
if err := TargetBindBgids(ctx, []string{t.Ident}, []int64{t.GroupId}); err != nil {
|
||||
if err := TargetBindBgids(ctx, []string{t.Ident}, []int64{t.GroupId}, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := TargetUpdateBgid(ctx, []string{t.Ident}, 0, false); err != nil {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package models
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ccfos/nightingale/v6/pkg/ctx"
|
||||
@@ -20,6 +21,10 @@ func (t *TargetBusiGroup) TableName() string {
|
||||
return "target_busi_group"
|
||||
}
|
||||
|
||||
func (t *TargetBusiGroup) TableOptions() string {
|
||||
return "ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci"
|
||||
}
|
||||
|
||||
func TargetBusiGroupsGetAll(ctx *ctx.Context) (map[string][]int64, error) {
|
||||
var lst []*TargetBusiGroup
|
||||
err := DB(ctx).Find(&lst).Error
|
||||
@@ -60,7 +65,7 @@ func TargetGroupIdsGetByIdents(ctx *ctx.Context, idents []string) ([]int64, erro
|
||||
return groupIds, nil
|
||||
}
|
||||
|
||||
func TargetBindBgids(ctx *ctx.Context, idents []string, bgids []int64) error {
|
||||
func TargetBindBgids(ctx *ctx.Context, idents []string, bgids []int64, tags []string) error {
|
||||
lst := make([]TargetBusiGroup, 0, len(bgids)*len(idents))
|
||||
updateAt := time.Now().Unix()
|
||||
for _, bgid := range bgids {
|
||||
@@ -73,7 +78,6 @@ func TargetBindBgids(ctx *ctx.Context, idents []string, bgids []int64) error {
|
||||
lst = append(lst, cur)
|
||||
}
|
||||
}
|
||||
|
||||
var cl clause.Expression = clause.Insert{Modifier: "ignore"}
|
||||
switch DB(ctx).Dialector.Name() {
|
||||
case "sqlite":
|
||||
@@ -81,7 +85,23 @@ func TargetBindBgids(ctx *ctx.Context, idents []string, bgids []int64) error {
|
||||
case "postgres":
|
||||
cl = clause.OnConflict{DoNothing: true}
|
||||
}
|
||||
return DB(ctx).Clauses(cl).CreateInBatches(&lst, 10).Error
|
||||
|
||||
return DB(ctx).Transaction(func(tx *gorm.DB) error {
|
||||
if err := DB(ctx).Clauses(cl).CreateInBatches(&lst, 10).Error; err != nil {
|
||||
return err
|
||||
}
|
||||
if targets, err := TargetsGetByIdents(ctx, idents); err != nil {
|
||||
return err
|
||||
} else if len(tags) > 0 {
|
||||
for _, t := range targets {
|
||||
if err := t.AddTags(ctx, tags); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func TargetUnbindBgids(ctx *ctx.Context, idents []string, bgids []int64) error {
|
||||
@@ -93,7 +113,7 @@ func TargetDeleteBgids(ctx *ctx.Context, idents []string) error {
|
||||
return DB(ctx).Where("target_ident in ?", idents).Delete(&TargetBusiGroup{}).Error
|
||||
}
|
||||
|
||||
func TargetOverrideBgids(ctx *ctx.Context, idents []string, bgids []int64) error {
|
||||
func TargetOverrideBgids(ctx *ctx.Context, idents []string, bgids []int64, tags []string) error {
|
||||
return DB(ctx).Transaction(func(tx *gorm.DB) error {
|
||||
// delete the old associations first
|
||||
if err := tx.Where("target_ident IN ?", idents).Delete(&TargetBusiGroup{}).Error; err != nil {
|
||||
@@ -126,7 +146,15 @@ func TargetOverrideBgids(ctx *ctx.Context, idents []string, bgids []int64) error
|
||||
case "postgres":
|
||||
cl = clause.OnConflict{DoNothing: true}
|
||||
}
|
||||
return tx.Clauses(cl).CreateInBatches(&lst, 10).Error
|
||||
if err := tx.Clauses(cl).CreateInBatches(&lst, 10).Error; err != nil {
|
||||
return err
|
||||
}
|
||||
if len(tags) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
return tx.Model(Target{}).Where("ident IN ?", idents).Updates(map[string]interface{}{
|
||||
"tags": strings.Join(tags, " ") + " ", "update_at": updateAt}).Error
|
||||
})
|
||||
}
|
||||
|
||||
@@ -156,3 +184,13 @@ func SeparateTargetIdents(ctx *ctx.Context, idents []string) (existing, nonExist
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func TargetIndentsGetByBgids(ctx *ctx.Context, bgids []int64) ([]string, error) {
|
||||
var idents []string
|
||||
err := DB(ctx).Model(&TargetBusiGroup{}).
|
||||
Where("group_id IN ?", bgids).
|
||||
Distinct("target_ident").
|
||||
Pluck("target_ident", &idents).
|
||||
Error
|
||||
return idents, err
|
||||
}
|
||||
|
||||
@@ -348,7 +348,7 @@ func UsersGetByGroupIds(ctx *ctx.Context, groupIds []int64) ([]User, error) {
|
||||
return users, nil
|
||||
}
|
||||
|
||||
func InitRoot(ctx *ctx.Context) {
|
||||
func InitRoot(ctx *ctx.Context) bool {
|
||||
user, err := UserGetByUsername(ctx, "root")
|
||||
if err != nil {
|
||||
fmt.Println("failed to query user root:", err)
|
||||
@@ -356,12 +356,12 @@ func InitRoot(ctx *ctx.Context) {
|
||||
}
|
||||
|
||||
if user == nil {
|
||||
return
|
||||
return false
|
||||
}
|
||||
|
||||
if len(user.Password) > 31 {
|
||||
// already done before
|
||||
return
|
||||
return false
|
||||
}
|
||||
|
||||
newPass, err := CryptoPass(ctx, user.Password)
|
||||
@@ -377,6 +377,7 @@ func InitRoot(ctx *ctx.Context) {
|
||||
}
|
||||
|
||||
fmt.Println("root password init done")
|
||||
return true
|
||||
}
|
||||
|
||||
func reachLoginFailCount(ctx *ctx.Context, redisObj storage.Redis, username string, count int64) (bool, error) {
|
||||
@@ -803,6 +804,10 @@ func (u *User) BusiGroups(ctx *ctx.Context, limit int, query string, all ...bool
|
||||
return lst, err
|
||||
}
|
||||
|
||||
if t == nil {
|
||||
return lst, nil
|
||||
}
|
||||
|
||||
t.GroupIds, err = TargetGroupIdsGetByIdent(ctx, t.Ident)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
pkg/ormx/database_init.go (new file, 1995 lines): file diff suppressed because it is too large
pkg/ormx/database_init_test.go (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
package ormx
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"gorm.io/driver/mysql"
|
||||
"gorm.io/driver/postgres"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func TestDataBaseInit(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
config DBConfig
|
||||
}{
|
||||
{
|
||||
name: "MySQL",
|
||||
config: DBConfig{
|
||||
DBType: "mysql",
|
||||
DSN: "root:1234@tcp(127.0.0.1:3306)/test?charset=utf8mb4&parseTime=True&loc=Local&allowNativePasswords=true",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Postgres",
|
||||
config: DBConfig{
|
||||
DBType: "postgres",
|
||||
DSN: "host=127.0.0.1 port=5432 user=postgres dbname=test password=1234 sslmode=disable",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "SQLite",
|
||||
config: DBConfig{
|
||||
DBType: "sqlite",
|
||||
DSN: "./test.db",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := createDatabase(tt.config, &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
var dialector gorm.Dialector
|
||||
switch tt.config.DBType {
|
||||
case "mysql":
|
||||
dialector = mysql.Open(tt.config.DSN)
|
||||
case "postgres":
|
||||
dialector = postgres.Open(tt.config.DSN)
|
||||
case "sqlite":
|
||||
dialector = sqlite.Open(tt.config.DSN)
|
||||
}
|
||||
db, err := gorm.Open(dialector, &gorm.Config{})
|
||||
assert.NoError(t, err)
|
||||
err = DataBaseInit(tt.config, db)
|
||||
assert.NoError(t, err)
|
||||
})
|
||||
}
|
||||
}
|
||||
pkg/ormx/ormx.go (252 lines changed)
@@ -2,6 +2,7 @@ package ormx
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
@@ -70,6 +71,234 @@ func (l *TKitLogger) Printf(s string, i ...interface{}) {
|
||||
}
|
||||
}
|
||||
|
||||
func createDatabase(c DBConfig, gconfig *gorm.Config) error {
|
||||
switch strings.ToLower(c.DBType) {
|
||||
case "mysql":
|
||||
return createMysqlDatabase(c.DSN, gconfig)
|
||||
case "postgres":
|
||||
return createPostgresDatabase(c.DSN, gconfig)
|
||||
case "sqlite":
|
||||
return createSqliteDatabase(c.DSN, gconfig)
|
||||
default:
|
||||
return fmt.Errorf("dialector(%s) not supported", c.DBType)
|
||||
}
|
||||
}
|
||||
|
||||
func createSqliteDatabase(dsn string, gconfig *gorm.Config) error {
|
||||
tempDialector := sqlite.Open(dsn)
|
||||
|
||||
_, err := gorm.Open(tempDialector, gconfig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open temporary connection: %v", err)
|
||||
}
|
||||
|
||||
fmt.Println("sqlite file created")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func createPostgresDatabase(dsn string, gconfig *gorm.Config) error {
|
||||
dsnParts := strings.Split(dsn, " ")
|
||||
dbName := ""
|
||||
connectionWithoutDB := ""
|
||||
for _, part := range dsnParts {
|
||||
if strings.HasPrefix(part, "dbname=") {
|
||||
dbName = part[strings.Index(part, "=")+1:]
|
||||
} else {
|
||||
connectionWithoutDB += part
|
||||
connectionWithoutDB += " "
|
||||
}
|
||||
}
|
||||
|
||||
createDBQuery := fmt.Sprintf("CREATE DATABASE %s ENCODING='UTF8' LC_COLLATE='en_US.UTF-8' LC_CTYPE='en_US.UTF-8';", dbName)
|
||||
|
||||
tempDialector := postgres.Open(connectionWithoutDB)
|
||||
|
||||
tempDB, err := gorm.Open(tempDialector, gconfig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open temporary connection: %v", err)
|
||||
}
|
||||
|
||||
result := tempDB.Exec(createDBQuery)
|
||||
if result.Error != nil {
|
||||
return fmt.Errorf("failed to execute create database query: %v", result.Error)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func createMysqlDatabase(dsn string, gconfig *gorm.Config) error {
|
||||
dsnParts := strings.SplitN(dsn, "/", 2)
|
||||
if len(dsnParts) != 2 {
|
||||
return fmt.Errorf("failed to parse DSN: %s", dsn)
|
||||
}
|
||||
|
||||
connectionInfo := dsnParts[0]
|
||||
dbInfo := dsnParts[1]
|
||||
dbName := dbInfo
|
||||
|
||||
queryIndex := strings.Index(dbInfo, "?")
|
||||
if queryIndex != -1 {
|
||||
dbName = dbInfo[:queryIndex]
|
||||
} else {
|
||||
return fmt.Errorf("failed to parse database name from DSN: %s", dsn)
|
||||
}
|
||||
|
||||
connectionWithoutDB := connectionInfo + "/?" + dbInfo[queryIndex+1:]
|
||||
createDBQuery := fmt.Sprintf("CREATE DATABASE IF NOT EXISTS %s CHARACTER SET utf8mb4", dbName)
|
||||
|
||||
tempDialector := mysql.Open(connectionWithoutDB)
|
||||
|
||||
tempDB, err := gorm.Open(tempDialector, gconfig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to open temporary connection: %v", err)
|
||||
}
|
||||
|
||||
result := tempDB.Exec(createDBQuery)
|
||||
if result.Error != nil {
|
||||
return fmt.Errorf("failed to execute create database query: %v", result.Error)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
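createMysqlDatabase derives a server-only DSN by cutting the database segment out of the Go MySQL DSN and then issues CREATE DATABASE IF NOT EXISTS through a temporary connection. A standalone sketch of the same string handling; the DSN below is an example value:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	dsn := "root:1234@tcp(127.0.0.1:3306)/n9e_v6?charset=utf8mb4&parseTime=True"

	// Split "user:pass@tcp(host:port)" from "dbname?params".
	parts := strings.SplitN(dsn, "/", 2)
	connInfo, dbInfo := parts[0], parts[1]

	q := strings.Index(dbInfo, "?")
	dbName := dbInfo[:q]
	withoutDB := connInfo + "/?" + dbInfo[q+1:]

	fmt.Println(dbName)    // n9e_v6
	fmt.Println(withoutDB) // root:1234@tcp(127.0.0.1:3306)/?charset=utf8mb4&parseTime=True
	fmt.Printf("CREATE DATABASE IF NOT EXISTS %s CHARACTER SET utf8mb4\n", dbName)
}
```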
|
||||
|
||||
func checkDatabaseExist(c DBConfig) (bool, error) {
|
||||
switch strings.ToLower(c.DBType) {
|
||||
case "mysql":
|
||||
return checkMysqlDatabaseExist(c)
|
||||
case "postgres":
|
||||
return checkPostgresDatabaseExist(c)
|
||||
case "sqlite":
|
||||
return checkSqliteDatabaseExist(c)
|
||||
default:
|
||||
return false, fmt.Errorf("dialector(%s) not supported", c.DBType)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func checkSqliteDatabaseExist(c DBConfig) (bool, error) {
|
||||
if _, err := os.Stat(c.DSN); os.IsNotExist(err) {
|
||||
fmt.Printf("sqlite file not exists: %s\n", c.DSN)
|
||||
return false, nil
|
||||
} else {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
|
||||
func checkPostgresDatabaseExist(c DBConfig) (bool, error) {
|
||||
dsnParts := strings.Split(c.DSN, " ")
|
||||
dbName := ""
|
||||
connectionWithoutDB := ""
|
||||
for _, part := range dsnParts {
|
||||
if strings.HasPrefix(part, "dbname=") {
|
||||
dbName = part[strings.Index(part, "=")+1:]
|
||||
} else {
|
||||
connectionWithoutDB += part
|
||||
connectionWithoutDB += " "
|
||||
}
|
||||
}
|
||||
|
||||
dialector := postgres.Open(connectionWithoutDB)
|
||||
|
||||
gconfig := &gorm.Config{
|
||||
NamingStrategy: schema.NamingStrategy{
|
||||
TablePrefix: c.TablePrefix,
|
||||
SingularTable: true,
|
||||
},
|
||||
Logger: gormLogger,
|
||||
}
|
||||
|
||||
db, err := gorm.Open(dialector, gconfig)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to open database: %v", err)
|
||||
}
|
||||
|
||||
var databases []string
|
||||
query := genQuery(c)
|
||||
if err := db.Raw(query).Scan(&databases).Error; err != nil {
|
||||
return false, fmt.Errorf("failed to query: %v", err)
|
||||
}
|
||||
|
||||
for _, database := range databases {
|
||||
if database == dbName {
|
||||
fmt.Println("Database exist")
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func checkMysqlDatabaseExist(c DBConfig) (bool, error) {
|
||||
dsnParts := strings.SplitN(c.DSN, "/", 2)
|
||||
if len(dsnParts) != 2 {
|
||||
return false, fmt.Errorf("failed to parse DSN: %s", c.DSN)
|
||||
}
|
||||
|
||||
connectionInfo := dsnParts[0]
|
||||
dbInfo := dsnParts[1]
|
||||
dbName := dbInfo
|
||||
|
||||
queryIndex := strings.Index(dbInfo, "?")
|
||||
if queryIndex != -1 {
|
||||
dbName = dbInfo[:queryIndex]
|
||||
} else {
|
||||
return false, fmt.Errorf("failed to parse database name from DSN: %s", c.DSN)
|
||||
}
|
||||
|
||||
connectionWithoutDB := connectionInfo + "/?" + dbInfo[queryIndex+1:]
|
||||
|
||||
var dialector gorm.Dialector
|
||||
switch strings.ToLower(c.DBType) {
|
||||
case "mysql":
|
||||
dialector = mysql.Open(connectionWithoutDB)
|
||||
case "postgres":
|
||||
dialector = postgres.Open(connectionWithoutDB)
|
||||
default:
|
||||
return false, fmt.Errorf("unsupported database type: %s", c.DBType)
|
||||
}
|
||||
|
||||
gconfig := &gorm.Config{
|
||||
NamingStrategy: schema.NamingStrategy{
|
||||
TablePrefix: c.TablePrefix,
|
||||
SingularTable: true,
|
||||
},
|
||||
Logger: gormLogger,
|
||||
}
|
||||
|
||||
db, err := gorm.Open(dialector, gconfig)
|
||||
if err != nil {
|
||||
return false, fmt.Errorf("failed to open database: %v", err)
|
||||
}
|
||||
|
||||
var databases []string
|
||||
query := genQuery(c)
|
||||
if err := db.Raw(query).Scan(&databases).Error; err != nil {
|
||||
return false, fmt.Errorf("failed to query: %v", err)
|
||||
}
|
||||
|
||||
for _, database := range databases {
|
||||
if database == dbName {
|
||||
return true, nil
|
||||
}
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func genQuery(c DBConfig) string {
|
||||
switch strings.ToLower(c.DBType) {
|
||||
case "mysql":
|
||||
return "SHOW DATABASES"
|
||||
case "postgres":
|
||||
return "SELECT datname FROM pg_database"
|
||||
case "sqlite":
|
||||
return ""
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
// New Create gorm.DB instance
|
||||
func New(c DBConfig) (*gorm.DB, error) {
|
||||
var dialector gorm.Dialector
|
||||
@@ -95,9 +324,30 @@ func New(c DBConfig) (*gorm.DB, error) {
|
||||
Logger: gormLogger,
|
||||
}
|
||||
|
||||
dbExist, checkErr := checkDatabaseExist(c)
|
||||
if checkErr != nil {
|
||||
return nil, checkErr
|
||||
}
|
||||
if !dbExist {
|
||||
fmt.Println("Database not exist, trying to create it")
|
||||
createErr := createDatabase(c, gconfig)
|
||||
if createErr != nil {
|
||||
return nil, fmt.Errorf("failed to create database: %v", createErr)
|
||||
}
|
||||
|
||||
db, err := gorm.Open(dialector, gconfig)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to reopen database after creation: %v", err)
|
||||
}
|
||||
err = DataBaseInit(c, db)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to init database: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
db, err := gorm.Open(dialector, gconfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("failed to open database: %v", err)
|
||||
}
|
||||
|
||||
if c.Debug {
|
||||
|
||||
@@ -7,6 +7,8 @@ import (
|
||||
"io/ioutil"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/ccfos/nightingale/v6/conf"
|
||||
@@ -61,6 +63,10 @@ func GetByUrl[T any](url string, cfg conf.CenterApi) (T, error) {
|
||||
Timeout: time.Duration(cfg.Timeout) * time.Millisecond,
|
||||
}
|
||||
|
||||
if useProxy(url) {
|
||||
client.Transport = ProxyTransporter
|
||||
}
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return dat, fmt.Errorf("failed to fetch from url: %w", err)
|
||||
@@ -141,6 +147,10 @@ func PostByUrl[T any](url string, cfg conf.CenterApi, v interface{}) (t T, err e
|
||||
Timeout: time.Duration(cfg.Timeout) * time.Millisecond,
|
||||
}
|
||||
|
||||
if useProxy(url) {
|
||||
client.Transport = ProxyTransporter
|
||||
}
|
||||
|
||||
req, err := http.NewRequest("POST", url, bf)
|
||||
if err != nil {
|
||||
return t, fmt.Errorf("failed to create request %q: %w", url, err)
|
||||
@@ -181,6 +191,29 @@ func PostByUrl[T any](url string, cfg conf.CenterApi, v interface{}) (t T, err e
|
||||
|
||||
}
|
||||
|
||||
var ProxyTransporter = &http.Transport{
|
||||
Proxy: http.ProxyFromEnvironment,
|
||||
}
|
||||
|
||||
func useProxy(url string) bool {
|
||||
// N9E_PROXY_URL=oapi.dingtalk.com,feishu.com
|
||||
patterns := os.Getenv("N9E_PROXY_URL")
|
||||
if patterns != "" {
|
||||
// some URLs are required to go through the proxy
|
||||
for _, u := range strings.Split(patterns, ",") {
|
||||
u = strings.TrimSpace(u)
|
||||
if u == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
if strings.Contains(url, u) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
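useProxy makes the proxy opt-in per request: any URL containing one of the comma-separated patterns in N9E_PROXY_URL is sent through http.ProxyFromEnvironment. A quick illustration of the matching rule; the environment values are examples only:

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// matchesProxyPattern mirrors the substring check in useProxy.
func matchesProxyPattern(url string) bool {
	for _, u := range strings.Split(os.Getenv("N9E_PROXY_URL"), ",") {
		u = strings.TrimSpace(u)
		if u != "" && strings.Contains(url, u) {
			return true
		}
	}
	return false
}

func main() {
	// HTTPS_PROXY (or HTTP_PROXY) supplies the proxy address picked up by
	// http.ProxyFromEnvironment; N9E_PROXY_URL selects which URLs use it.
	os.Setenv("HTTPS_PROXY", "http://127.0.0.1:7890")
	os.Setenv("N9E_PROXY_URL", "oapi.dingtalk.com,open.feishu.cn")

	fmt.Println(matchesProxyPattern("https://oapi.dingtalk.com/robot/send")) // true
	fmt.Println(matchesProxyPattern("https://n9e.internal/api/push"))        // false
}
```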
|
||||
|
||||
func PostJSON(url string, timeout time.Duration, v interface{}, retries ...int) (response []byte, code int, err error) {
|
||||
var bs []byte
|
||||
|
||||
@@ -195,6 +228,10 @@ func PostJSON(url string, timeout time.Duration, v interface{}, retries ...int)
|
||||
Timeout: timeout,
|
||||
}
|
||||
|
||||
if useProxy(url) {
|
||||
client.Transport = ProxyTransporter
|
||||
}
|
||||
|
||||
req, err := http.NewRequest("POST", url, bf)
|
||||
if err != nil {
|
||||
return
|
||||
|
||||
@@ -700,18 +700,24 @@ func (h *httpAPI) Query(ctx context.Context, query string, ts time.Time) (model.
|
||||
var err error
|
||||
var warnings Warnings
|
||||
var value model.Value
|
||||
var statusCode int
|
||||
for i := 0; i < 3; i++ {
|
||||
value, warnings, err = h.query(ctx, query, ts)
|
||||
value, warnings, statusCode, err = h.query(ctx, query, ts)
|
||||
if err == nil {
|
||||
return value, warnings, nil
|
||||
}
|
||||
|
||||
// statusCode 4xx do not retry
|
||||
if statusCode >= 400 && statusCode < 500 {
|
||||
return nil, warnings, err
|
||||
}
|
||||
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
}
|
||||
return nil, warnings, err
|
||||
}
|
||||
|
||||
func (h *httpAPI) query(ctx context.Context, query string, ts time.Time) (model.Value, Warnings, error) {
|
||||
func (h *httpAPI) query(ctx context.Context, query string, ts time.Time) (model.Value, Warnings, int, error) {
|
||||
u := h.client.URL(epQuery, nil)
|
||||
q := u.Query()
|
||||
|
||||
@@ -722,15 +728,11 @@ func (h *httpAPI) query(ctx context.Context, query string, ts time.Time) (model.
|
||||
|
||||
resp, body, warnings, err := h.client.DoGetFallback(ctx, u, q)
|
||||
if err != nil {
|
||||
return nil, warnings, err
|
||||
}
|
||||
|
||||
if resp.StatusCode > 200 {
|
||||
fmt.Println("status code:", resp.StatusCode)
|
||||
return nil, warnings, 0, err
|
||||
}
|
||||
|
||||
var qres queryResult
|
||||
return model.Value(qres.v), warnings, json.Unmarshal(body, &qres)
|
||||
return model.Value(qres.v), warnings, resp.StatusCode, json.Unmarshal(body, &qres)
|
||||
}
|
||||
|
||||
func (h *httpAPI) QueryRange(ctx context.Context, query string, r Range) (model.Value, Warnings, error) {
|
||||
|
||||
@@ -27,6 +27,15 @@ type sample struct {
|
||||
Value float64
|
||||
}
|
||||
|
||||
type QueryFunc func(int64, string) model.Value
|
||||
|
||||
var queryFunc QueryFunc
|
||||
|
||||
// RegisterQueryFunc registers queryFunc via external injection to avoid a circular import
|
||||
func RegisterQueryFunc(f QueryFunc) {
|
||||
queryFunc = f
|
||||
}
|
||||
|
||||
type queryResult []*sample
|
||||
|
||||
type queryResultByLabelSorter struct {
|
||||
@@ -564,3 +573,13 @@ func convertToFloat(i interface{}) (float64, error) {
|
||||
return 0, fmt.Errorf("can't convert %T to float", v)
|
||||
}
|
||||
}
|
||||
|
||||
func Query(datasourceID int64, promql string) model.Value {
|
||||
|
||||
value := queryFunc(datasourceID, promql)
|
||||
if value != nil {
|
||||
return value
|
||||
}
|
||||
|
||||
return nil
|
||||
}
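RegisterQueryFunc avoids an import cycle by letting the caller inject the query implementation at start-up instead of this package importing the datasource clients. A minimal, self-contained sketch that recreates the same shape with a stub query function:

```go
package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

// QueryFunc matches the signature registered via RegisterQueryFunc.
type QueryFunc func(int64, string) model.Value

var queryFunc QueryFunc

func RegisterQueryFunc(f QueryFunc) { queryFunc = f }

func main() {
	// A stub standing in for the real datasource-backed implementation.
	RegisterQueryFunc(func(datasourceID int64, promql string) model.Value {
		fmt.Printf("query ds=%d promql=%q\n", datasourceID, promql)
		return model.Vector{}
	})

	v := queryFunc(1, "up")
	fmt.Println(v.Type()) // vector
}
```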
|
||||
|
||||
@@ -54,6 +54,15 @@ var TemplateFuncMap = template.FuncMap{
|
||||
"printf": Printf,
|
||||
}
|
||||
|
||||
// NewTemplateFuncMap copy on write for TemplateFuncMap
|
||||
func NewTemplateFuncMap() template.FuncMap {
|
||||
m := template.FuncMap{}
|
||||
for k, v := range TemplateFuncMap {
|
||||
m[k] = v
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// ReplaceTemplateUseHtml replaces variables in a template string with values.
|
||||
//
|
||||
// It accepts the following parameters:
|
||||
|
||||
pkg/unit/unit_convert.go (new file, 320 lines)
@@ -0,0 +1,320 @@
|
||||
package unit
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// FormattedValue is the structure of a formatted value
|
||||
type FormattedValue struct {
|
||||
Value float64 `json:"value"`
|
||||
Unit string `json:"unit"`
|
||||
Text string `json:"text"`
|
||||
Stat float64 `json:"stat"`
|
||||
}
|
||||
|
||||
// FormatOptions holds the formatting options
|
||||
type FormatOptions struct {
|
||||
Type string // "si" or "iec"
|
||||
Base string // "bits" or "bytes"
|
||||
Decimals int // number of decimal places
|
||||
Postfix string // suffix appended after the unit
|
||||
}
|
||||
|
||||
// time-related constants, expressed in seconds
|
||||
const (
|
||||
NanosecondVal = 0.000000001
|
||||
MicrosecondVal = 0.000001
|
||||
MillisecondVal = 0.001
|
||||
SecondVal = 1
|
||||
MinuteVal = 60
|
||||
HourVal = 3600
|
||||
DayVal = 86400
|
||||
WeekVal = 86400 * 7
|
||||
YearVal = 86400 * 365
|
||||
)
|
||||
|
||||
var (
|
||||
valueMap = []struct {
|
||||
Exp int
|
||||
Si string
|
||||
Iec string
|
||||
IecExp int
|
||||
}{
|
||||
{0, "", "", 1},
|
||||
{3, "k", "Ki", 10},
|
||||
{6, "M", "Mi", 20},
|
||||
{9, "G", "Gi", 30},
|
||||
{12, "T", "Ti", 40},
|
||||
{15, "P", "Pi", 50},
|
||||
{18, "E", "Ei", 60},
|
||||
{21, "Z", "Zi", 70},
|
||||
{24, "Y", "Yi", 80},
|
||||
}
|
||||
|
||||
baseUtilMap = map[string]string{
|
||||
"bits": "b",
|
||||
"bytes": "B",
|
||||
}
|
||||
)
|
||||
|
||||
// ValueFormatter is the formatting entry point
|
||||
func ValueFormatter(unit string, decimals int, value float64) FormattedValue {
|
||||
if math.IsNaN(value) {
|
||||
return FormattedValue{
|
||||
Value: 0,
|
||||
Unit: "",
|
||||
Text: "NaN",
|
||||
Stat: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// handle time units
|
||||
switch unit {
|
||||
case "none":
|
||||
return formatNone(value, decimals)
|
||||
case "ns", "nanoseconds":
|
||||
return formatDuration(value, "ns", decimals)
|
||||
case "µs", "microseconds":
|
||||
return formatDuration(value, "µs", decimals)
|
||||
case "ms", "milliseconds":
|
||||
return formatDuration(value, "ms", decimals)
|
||||
case "s", "seconds":
|
||||
return formatDuration(value, "s", decimals)
|
||||
case "min", "h", "d", "w":
|
||||
return formatDuration(value, unit, decimals)
|
||||
case "percent":
|
||||
return formatPercent(value, decimals, false)
|
||||
case "percentUnit":
|
||||
return formatPercent(value, decimals, true)
|
||||
case "bytesIEC", "bytes(IEC)", "bitsIEC", "bits(IEC)":
|
||||
base := unit
|
||||
base = strings.TrimSuffix(base, "(IEC)")
|
||||
base = strings.TrimSuffix(base, "IEC")
|
||||
base = strings.TrimSuffix(base, "s")
|
||||
opts := FormatOptions{
|
||||
Type: "iec",
|
||||
Base: base,
|
||||
Decimals: decimals,
|
||||
}
|
||||
return formatBytes(value, opts)
|
||||
case "bytesSI", "bytes(SI)", "bitsSI", "bits(SI)", "default", "sishort":
|
||||
base := unit
|
||||
base = strings.TrimSuffix(base, "(SI)")
|
||||
base = strings.TrimSuffix(base, "SI")
|
||||
base = strings.TrimSuffix(base, "s")
|
||||
opts := FormatOptions{
|
||||
Type: "si",
|
||||
Base: base,
|
||||
Decimals: decimals,
|
||||
}
|
||||
return formatBytes(value, opts)
|
||||
case "bytesSecIEC":
|
||||
opts := FormatOptions{
|
||||
Type: "iec",
|
||||
Base: "bytes",
|
||||
Decimals: decimals,
|
||||
Postfix: "/s",
|
||||
}
|
||||
return formatBytes(value, opts)
|
||||
case "bitsSecIEC":
|
||||
opts := FormatOptions{
|
||||
Type: "iec",
|
||||
Base: "bits",
|
||||
Decimals: decimals,
|
||||
Postfix: "/s",
|
||||
}
|
||||
return formatBytes(value, opts)
|
||||
case "bytesSecSI":
|
||||
opts := FormatOptions{
|
||||
Type: "si",
|
||||
Base: "bytes",
|
||||
Decimals: decimals,
|
||||
Postfix: "/s",
|
||||
}
|
||||
return formatBytes(value, opts)
|
||||
case "bitsSecSI":
|
||||
opts := FormatOptions{
|
||||
Type: "si",
|
||||
Base: "bits",
|
||||
Decimals: decimals,
|
||||
Postfix: "/s",
|
||||
}
|
||||
return formatBytes(value, opts)
|
||||
case "datetimeSeconds", "datetimeMilliseconds":
|
||||
return formatDateTime(unit, value)
|
||||
default:
|
||||
return formatNone(value, decimals)
|
||||
}
|
||||
}
|
||||
|
||||
// formatDuration handles conversion between time units
|
||||
func formatDuration(originValue float64, unit string, decimals int) FormattedValue {
|
||||
var converted float64
|
||||
var targetUnit string
|
||||
value := originValue
|
||||
// normalize to seconds
|
||||
switch unit {
|
||||
case "ns":
|
||||
value *= NanosecondVal
|
||||
case "µs":
|
||||
value *= MicrosecondVal
|
||||
case "ms":
|
||||
value *= MillisecondVal
|
||||
case "min":
|
||||
value *= MinuteVal
|
||||
case "h":
|
||||
value *= HourVal
|
||||
case "d":
|
||||
value *= DayVal
|
||||
case "w":
|
||||
value *= WeekVal
|
||||
}
|
||||
|
||||
// pick an appropriate target unit
|
||||
switch {
|
||||
case value >= YearVal:
|
||||
converted = value / YearVal
|
||||
targetUnit = "y"
|
||||
case value >= WeekVal:
|
||||
converted = value / WeekVal
|
||||
targetUnit = "w"
|
||||
case value >= DayVal:
|
||||
converted = value / DayVal
|
||||
targetUnit = "d"
|
||||
case value >= HourVal:
|
||||
converted = value / HourVal
|
||||
targetUnit = "h"
|
||||
case value >= MinuteVal:
|
||||
converted = value / MinuteVal
|
||||
targetUnit = "min"
|
||||
case value >= SecondVal:
|
||||
converted = value
|
||||
targetUnit = "s"
|
||||
case value >= MillisecondVal:
|
||||
converted = value / MillisecondVal
|
||||
targetUnit = "ms"
|
||||
case value >= MicrosecondVal:
|
||||
converted = value / MicrosecondVal
|
||||
targetUnit = "µs"
|
||||
default:
|
||||
converted = value / NanosecondVal
|
||||
targetUnit = "ns"
|
||||
}
|
||||
|
||||
return FormattedValue{
|
||||
Value: roundFloat(converted, decimals),
|
||||
Unit: targetUnit,
|
||||
Text: fmt.Sprintf("%.*f %s", decimals, converted, targetUnit),
|
||||
Stat: originValue,
|
||||
}
|
||||
}
|
||||
|
||||
// formatBytes handles byte-related conversions
|
||||
func formatBytes(value float64, opts FormatOptions) FormattedValue {
|
||||
if value == 0 {
|
||||
baseUtil := baseUtilMap[opts.Base]
|
||||
return FormattedValue{
|
||||
Value: 0,
|
||||
Unit: baseUtil + opts.Postfix,
|
||||
Text: fmt.Sprintf("0%s%s", baseUtil, opts.Postfix),
|
||||
Stat: 0,
|
||||
}
|
||||
}
|
||||
|
||||
baseUtil := baseUtilMap[opts.Base]
|
||||
threshold := 1000.0
|
||||
if opts.Type == "iec" {
|
||||
threshold = 1024.0
|
||||
}
|
||||
|
||||
if math.Abs(value) < threshold {
|
||||
return FormattedValue{
|
||||
Value: roundFloat(value, opts.Decimals),
|
||||
Unit: baseUtil + opts.Postfix,
|
||||
Text: fmt.Sprintf("%.*f%s%s", opts.Decimals, value, baseUtil, opts.Postfix),
|
||||
Stat: value,
|
||||
}
|
||||
}
|
||||
|
||||
// 计算指数
|
||||
exp := int(math.Floor(math.Log10(math.Abs(value))/3.0)) * 3
|
||||
if exp > 24 {
|
||||
exp = 24
|
||||
}
|
||||
|
||||
var unit string
|
||||
var divider float64
|
||||
|
||||
// 查找对应的单位
|
||||
for _, v := range valueMap {
|
||||
if v.Exp == exp {
|
||||
if opts.Type == "iec" {
|
||||
unit = v.Iec
|
||||
divider = math.Pow(2, float64(v.IecExp))
|
||||
} else {
|
||||
unit = v.Si
|
||||
divider = math.Pow(10, float64(v.Exp))
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
newValue := value / divider
|
||||
return FormattedValue{
|
||||
Value: roundFloat(newValue, opts.Decimals),
|
||||
Unit: unit + baseUtil + opts.Postfix,
|
||||
Text: fmt.Sprintf("%.*f%s%s%s", opts.Decimals, newValue, unit, baseUtil, opts.Postfix),
|
||||
Stat: value,
|
||||
}
|
||||
}
|
||||
|
||||
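To make the exponent step concrete: for the bytesSecSI test case below (value 1,000,000), exp = floor(log10(1e6)/3)*3 = 6, the SI divider is 10^6, and the result renders as "1.00MB/s"; in IEC mode the same exponent row maps to the "Mi" prefix with divider 2^20 = 1,048,576, so 1024*1024 renders as "1.00MiB/s".
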
// formatPercent handles percentage formatting
func formatPercent(value float64, decimals int, isUnit bool) FormattedValue {
	if isUnit {
		value = value * 100
	}
	return FormattedValue{
		Value: roundFloat(value, decimals),
		Unit:  "%",
		Text:  fmt.Sprintf("%.*f%%", decimals, value),
		Stat:  value,
	}
}

// formatNone handles values with no unit
func formatNone(value float64, decimals int) FormattedValue {
	return FormattedValue{
		Value: value,
		Unit:  "",
		Text:  fmt.Sprintf("%.*f", decimals, value),
		Stat:  value,
	}
}

// formatDateTime handles timestamp formatting
func formatDateTime(unit string, value float64) FormattedValue {
	var t time.Time
	switch unit {
	case "datetimeSeconds":
		t = time.Unix(int64(value), 0)
	case "datetimeMilliseconds":
		t = time.Unix(0, int64(value)*int64(time.Millisecond))
	}

	text := t.Format("2006-01-02 15:04:05")
	return FormattedValue{
		Value: value,
		Unit:  "",
		Text:  text,
		Stat:  value,
	}
}

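Note that time.Unix returns the timestamp in the machine's local timezone, so the rendered text (and the two datetime test cases below, which expect "2023-05-08 12:00:00" for 1683518400) depends on the TZ the code runs under. A UTC-pinned variant would look like the following sketch (hypothetical, not part of the change):

	t := time.Unix(int64(value), 0).UTC()
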
// roundFloat rounds to the given number of decimal places
func roundFloat(val float64, precision int) float64 {
	ratio := math.Pow(10, float64(precision))
	return math.Round(val*ratio) / ratio
}
318
pkg/unit/unit_convert_test.go
Normal file
@@ -0,0 +1,318 @@
package unit

import (
	"math"
	"testing"
)

func TestValueFormatter(t *testing.T) {
	tests := []struct {
		name     string
		unit     string
		decimals int
		value    float64
		want     FormattedValue
	}{
		// byte tests
		{
			name:     "IEC bytes",
			unit:     "bytes(IEC)",
			decimals: 2,
			value:    1024 * 1024,
			want:     FormattedValue{Value: 1, Unit: "Mi", Text: "1.00Mi", Stat: 1024 * 1024},
		},
		{
			name:     "SI bytes",
			unit:     "bytes(SI)",
			decimals: 2,
			value:    1000 * 1000,
			want:     FormattedValue{Value: 1, Unit: "M", Text: "1.00M", Stat: 1000 * 1000},
		},
		// time unit tests
		{
			name:     "milliseconds to seconds",
			unit:     "ms",
			decimals: 2,
			value:    1500,
			want: FormattedValue{
				Value: 1.50,
				Unit:  "s",
				Text:  "1.50 s",
				Stat:  1500,
			},
		},
		{
			name:     "seconds to minutes",
			unit:     "s",
			decimals: 1,
			value:    150,
			want: FormattedValue{
				Value: 2.5,
				Unit:  "min",
				Text:  "2.5 min",
				Stat:  150,
			},
		},
		// percentage tests
		{
			name:     "percent",
			unit:     "percent",
			decimals: 2,
			value:    0.9555,
			want: FormattedValue{
				Value: 0.96,
				Unit:  "%",
				Text:  "0.96%",
				Stat:  0.9555,
			},
		},
		{
			name:     "percent unit",
			unit:     "percentUnit",
			decimals: 1,
			value:    0.95,
			want: FormattedValue{
				Value: 95.0,
				Unit:  "%",
				Text:  "95.0%",
				Stat:  95.0,
			},
		},
		// SI format tests
		{
			name:     "SI short format",
			unit:     "sishort",
			decimals: 2,
			value:    1500,
			want: FormattedValue{
				Value: 1.50,
				Unit:  "k",
				Text:  "1.50k",
				Stat:  1500,
			},
		},
		// timestamp tests
		{
			name:     "timestamp in seconds",
			unit:     "datetimeSeconds",
			decimals: 0,
			value:    1683518400,
			want: FormattedValue{
				Value: 1683518400,
				Unit:  "",
				Text:  "2023-05-08 12:00:00",
				Stat:  1683518400,
			},
		},
		{
			name:     "timestamp in milliseconds",
			unit:     "datetimeMilliseconds",
			decimals: 0,
			value:    1683518400000,
			want: FormattedValue{
				Value: 1683518400000,
				Unit:  "",
				Text:  "2023-05-08 12:00:00",
				Stat:  1683518400000,
			},
		},
		// additional time unit tests
		{
			name:     "nanoseconds",
			unit:     "ns",
			decimals: 2,
			value:    1500,
			want: FormattedValue{
				Value: 1.50,
				Unit:  "µs",
				Text:  "1.50 µs",
				Stat:  1500,
			},
		},
		{
			name:     "microseconds",
			unit:     "µs",
			decimals: 2,
			value:    1500,
			want: FormattedValue{
				Value: 1.50,
				Unit:  "ms",
				Text:  "1.50 ms",
				Stat:  1500,
			},
		},
		{
			name:     "hours",
			unit:     "h",
			decimals: 1,
			value:    2.5,
			want: FormattedValue{
				Value: 2.5,
				Unit:  "h",
				Text:  "2.5 h",
				Stat:  2.5,
			},
		},
		{
			name:     "days",
			unit:     "d",
			decimals: 1,
			value:    1.5,
			want: FormattedValue{
				Value: 1.5,
				Unit:  "d",
				Text:  "1.5 d",
				Stat:  1.5,
			},
		},
		{
			name:     "weeks",
			unit:     "w",
			decimals: 1,
			value:    1.5,
			want: FormattedValue{
				Value: 1.5,
				Unit:  "w",
				Text:  "1.5 w",
				Stat:  1.5,
			},
		},
		// additional byte-rate tests
		{
			name:     "IEC bytes per second",
			unit:     "bytesSecIEC",
			decimals: 2,
			value:    1024 * 1024,
			want: FormattedValue{
				Value: 1,
				Unit:  "MiB/s",
				Text:  "1.00MiB/s",
				Stat:  1024 * 1024,
			},
		},
		{
			name:     "IEC bits per second",
			unit:     "bitsSecIEC",
			decimals: 2,
			value:    1024 * 1024,
			want: FormattedValue{
				Value: 1,
				Unit:  "Mib/s",
				Text:  "1.00Mib/s",
				Stat:  1024 * 1024,
			},
		},
		{
			name:     "SI bytes per second",
			unit:     "bytesSecSI",
			decimals: 2,
			value:    1000 * 1000,
			want: FormattedValue{
				Value: 1,
				Unit:  "MB/s",
				Text:  "1.00MB/s",
				Stat:  1000 * 1000,
			},
		},
		{
			name:     "SI bits per second",
			unit:     "bitsSecSI",
			decimals: 2,
			value:    1000 * 1000,
			want: FormattedValue{
				Value: 1,
				Unit:  "Mb/s",
				Text:  "1.00Mb/s",
				Stat:  1000 * 1000,
			},
		},
		// none type tests
		{
			name:     "no unit",
			unit:     "none",
			decimals: 2,
			value:    1234.5678,
			want: FormattedValue{
				Value: 1234.5678,
				Unit:  "",
				Text:  "1234.57",
				Stat:  1234.5678,
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := ValueFormatter(tt.unit, tt.decimals, tt.value)
			if !compareFormattedValues(got, tt.want) {
				t.Errorf("ValueFormatter() = %v, want %v", got, tt.want)
			}
		})
	}
}

func TestEdgeCases(t *testing.T) {
	tests := []struct {
		name     string
		unit     string
		decimals int
		value    float64
		wantNil  bool
	}{
		{
			name:     "NaN value",
			unit:     "bytes",
			decimals: 2,
			value:    math.NaN(),
			wantNil:  false,
		},
		{
			name:     "zero value",
			unit:     "bytes",
			decimals: 2,
			value:    0,
			wantNil:  false,
		},
		{
			name:     "very small value",
			unit:     "bytes",
			decimals: 2,
			value:    0.0000001,
			wantNil:  false,
		},
		{
			name:     "very large value",
			unit:     "bytes",
			decimals: 2,
			value:    1e30,
			wantNil:  false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := ValueFormatter(tt.unit, tt.decimals, tt.value)
			if (got == FormattedValue{}) == !tt.wantNil {
				t.Errorf("ValueFormatter() got = %v, wantNil = %v", got, tt.wantNil)
			}
		})
	}
}

// compareFormattedValues reports whether two FormattedValue results are equal within a small epsilon
func compareFormattedValues(a, b FormattedValue) bool {
	const epsilon = 0.0001
	if math.Abs(a.Value-b.Value) > epsilon {
		return false
	}
	if math.Abs(a.Stat-b.Stat) > epsilon {
		return false
	}
	if a.Unit != b.Unit {
		return false
	}
	if a.Text != b.Text {
		return false
	}
	return true
}
@@ -3,7 +3,6 @@ package prom
import (
	"sync"

	"github.com/ccfos/nightingale/v6/models"
	"github.com/ccfos/nightingale/v6/pkg/ctx"
	"github.com/ccfos/nightingale/v6/pkg/prom"
)
@@ -62,29 +61,6 @@ func (pc *PromClientMap) IsNil(datasourceId int64) bool {
	return c == nil
}

// Hit computes the effective cluster list from the currently available datasourceIds and the rule's datasourceId configuration
func (pc *PromClientMap) Hit(datasourceIds []int64) []int64 {
	pc.RLock()
	defer pc.RUnlock()
	dsIds := make([]int64, 0, len(pc.ReaderClients))
	if len(datasourceIds) == 1 && datasourceIds[0] == models.DatasourceIdAll {
		for c := range pc.ReaderClients {
			dsIds = append(dsIds, c)
		}
		return dsIds
	}

	for dsId := range pc.ReaderClients {
		for _, id := range datasourceIds {
			if id == dsId {
				dsIds = append(dsIds, id)
				continue
			}
		}
	}
	return dsIds
}

func (pc *PromClientMap) Reset() {
	pc.Lock()
	defer pc.Unlock()

@@ -24,8 +24,10 @@ type Pushgw struct {
}

type WriterGlobalOpt struct {
	QueueMaxSize int
	QueuePopSize int
	QueueMaxSize            int
	QueuePopSize            int
	AllQueueMaxSize         int
	AllQueueMaxSizeInterval int
}

type WriterOptions struct {
@@ -77,6 +79,14 @@ func (p *Pushgw) PreCheck() {
		p.WriterOpt.QueuePopSize = 1000
	}

	if p.WriterOpt.AllQueueMaxSize <= 0 {
		p.WriterOpt.AllQueueMaxSize = 10000000
	}

	if p.WriterOpt.AllQueueMaxSizeInterval <= 0 {
		p.WriterOpt.AllQueueMaxSizeInterval = 200
	}

	if p.WriteConcurrency <= 0 {
		p.WriteConcurrency = 5000
	}

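A minimal sketch of how the new knobs could be populated in code (field names taken from the struct above; the values are illustrative and the surrounding pconf.Pushgw wiring and config-file mapping are assumed):

	opt := WriterGlobalOpt{
		QueueMaxSize:            100000,   // per-ident queue capacity
		QueuePopSize:            1000,     // batch size popped per write
		AllQueueMaxSize:         10000000, // cap on samples buffered across all ident queues
		AllQueueMaxSizeInterval: 200,      // ms between recomputations of the global backlog
	}
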
@@ -79,6 +79,10 @@ func (rt *Router) AppendLabels(pt *prompb.TimeSeries, target *models.Target, bgC
// }

func (rt *Router) debugSample(remoteAddr string, v *prompb.TimeSeries) {
	if v == nil {
		return
	}

	filter := rt.Pushgw.DebugSample
	if len(filter) == 0 {
		return
@@ -164,6 +168,7 @@ func (rt *Router) ForwardByIdent(clientIP string, ident string, v *prompb.TimeSe

func (rt *Router) ForwardByMetric(clientIP string, metric string, v *prompb.TimeSeries) {
	v = rt.BeforePush(clientIP, v)
	rt.debugSample(clientIP, v)
	if v == nil {
		return
	}

@@ -8,6 +8,7 @@ import (
	"net/http"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	"github.com/ccfos/nightingale/v6/pkg/fasttime"
@@ -138,9 +139,10 @@ func (w WriterType) Post(req []byte, headers ...map[string]string) error {
}

type WritersType struct {
	pushgw   pconf.Pushgw
	backends map[string]WriterType
	queues   map[string]*IdentQueue
	pushgw      pconf.Pushgw
	backends    map[string]WriterType
	queues      map[string]*IdentQueue
	allQueueLen atomic.Value
	sync.RWMutex
}

@@ -160,14 +162,30 @@ func (ws *WritersType) ReportQueueStats(ident string, identQueue *IdentQueue) (i
	}
}

func (ws *WritersType) SetAllQueueLen() {
	for {
		curMetricLen := 0
		ws.RLock()
		for _, q := range ws.queues {
			curMetricLen += q.list.Len()
		}
		ws.RUnlock()
		ws.allQueueLen.Store(curMetricLen)
		time.Sleep(time.Duration(ws.pushgw.WriterOpt.AllQueueMaxSizeInterval) * time.Millisecond)
	}
}

func NewWriters(pushgwConfig pconf.Pushgw) *WritersType {
	writers := &WritersType{
		backends: make(map[string]WriterType),
		queues:   make(map[string]*IdentQueue),
		pushgw:   pushgwConfig,
		backends:    make(map[string]WriterType),
		queues:      make(map[string]*IdentQueue),
		pushgw:      pushgwConfig,
		allQueueLen: atomic.Value{},
	}

	writers.Init()

	go writers.SetAllQueueLen()
	go writers.CleanExpQueue()
	return writers
}

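This sampler refreshes the global backlog only every WriterOpt.AllQueueMaxSizeInterval milliseconds (200 ms by default per the PreCheck change above) instead of on every push, so the limit check in PushSample below reads a value that may lag by up to one interval; the apparent trade-off is avoiding a full walk of all ident queues on the hot path.
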
@@ -217,6 +235,13 @@ func (ws *WritersType) PushSample(ident string, v interface{}) {
	}

	identQueue.ts = time.Now().Unix()
	curLen := ws.allQueueLen.Load().(int)
	if curLen > ws.pushgw.WriterOpt.AllQueueMaxSize {
		logger.Warningf("Write %+v full, metric count over limit: %d", v, curLen)
		CounterPushQueueErrorTotal.WithLabelValues(ident).Inc()
		return
	}

	succ := identQueue.list.PushFront(v)
	if !succ {
		logger.Warningf("Write channel(%s) full, current channel size: %d", ident, identQueue.list.Len())
@@ -245,6 +270,7 @@ func (ws *WritersType) StartConsumer(identQueue *IdentQueue) {

func (ws *WritersType) Init() error {
	opts := ws.pushgw.Writers
	ws.allQueueLen.Store(0)

	for i := 0; i < len(opts); i++ {
		tlsConf, err := opts[i].ClientConfig.TLSConfig()

@@ -7,6 +7,7 @@ import (
	"os"
	"strings"

	"github.com/alicebob/miniredis/v2"
	"github.com/ccfos/nightingale/v6/pkg/tlsx"
	"github.com/redis/go-redis/v9"
	"github.com/toolkits/pkg/logger"
@@ -28,6 +29,7 @@ type Redis redis.Cmdable

func NewRedis(cfg RedisConfig) (Redis, error) {
	var redisClient Redis

	switch cfg.RedisType {
	case "standalone", "":
		redisOptions := &redis.Options{
@@ -88,6 +90,16 @@ func NewRedis(cfg RedisConfig) (Redis, error) {

		redisClient = redis.NewFailoverClient(redisOptions)

	case "miniredis":
		s, err := miniredis.Run()
		if err != nil {
			fmt.Println("failed to init miniredis:", err)
			os.Exit(1)
		}
		redisClient = redis.NewClient(&redis.Options{
			Addr: s.Addr(),
		})

	default:
		fmt.Println("failed to init redis , redis type is illegal:", cfg.RedisType)
		os.Exit(1)

44
storage/redis_test.go
Normal file
@@ -0,0 +1,44 @@
package storage

import (
	"context"
	"testing"

	"github.com/alicebob/miniredis/v2"
	"github.com/redis/go-redis/v9"
	"github.com/stretchr/testify/assert"
)

func TestMiniRedisMGet(t *testing.T) {
	s, err := miniredis.Run()
	if err != nil {
		t.Fatalf("failed to start miniredis: %v", err)
	}
	defer s.Close()

	rdb := redis.NewClient(&redis.Options{
		Addr: s.Addr(),
	})

	err = rdb.Ping(context.Background()).Err()
	if err != nil {
		t.Fatalf("failed to ping miniredis: %v", err)
	}

	mp := make(map[string]interface{})
	mp["key1"] = "value1"
	mp["key2"] = "value2"
	mp["key3"] = "value3"

	err = MSet(context.Background(), rdb, mp)
	if err != nil {
		t.Fatalf("failed to set miniredis value: %v", err)
	}

	ctx := context.Background()
	keys := []string{"key1", "key2", "key3", "key4"}
	vals := MGet(ctx, rdb, keys)

	expected := [][]byte{[]byte("value1"), []byte("value2"), []byte("value3")}
	assert.Equal(t, expected, vals)
}
@@ -80,29 +80,6 @@ func (pc *TdengineClientMap) IsNil(datasourceId int64) bool {
	return c == nil
}

// Hit computes the effective cluster list from the currently available datasourceIds and the rule's datasourceId configuration
func (pc *TdengineClientMap) Hit(datasourceIds []int64) []int64 {
	pc.RLock()
	defer pc.RUnlock()
	dsIds := make([]int64, 0, len(pc.ReaderClients))
	if len(datasourceIds) == 1 && datasourceIds[0] == models.DatasourceIdAll {
		for c := range pc.ReaderClients {
			dsIds = append(dsIds, c)
		}
		return dsIds
	}

	for dsId := range pc.ReaderClients {
		for _, id := range datasourceIds {
			if id == dsId {
				dsIds = append(dsIds, id)
				continue
			}
		}
	}
	return dsIds
}

func (pc *TdengineClientMap) Reset() {
	pc.Lock()
	defer pc.Unlock()
