Compare commits

..

10 Commits

Author SHA1 Message Date
ning
f5ef0f07be add validateCronPattern 2025-02-17 10:59:07 +08:00
Yening Qin
80b31c0e70 add DisableNotifyRecord (#2448) 2025-01-24 16:19:59 +08:00
ning
9677c42385 update toolkits/pkg version 2025-01-16 16:26:43 +08:00
Yening Qin
a66f98ed8b refactor: event set AnnotationsJSON 2024-12-29 10:22:15 +08:00
Yening Qin
41a72a1de8 fix: the dedup logic when adding tags to target (#2387)
Co-authored-by: flashbo <36443248+lwb0214@users.noreply.github.com>
2024-12-24 11:49:59 +08:00
Yening Qin
39cb026b10 feat: support webhook proxy configuration (#2376) 2024-12-20 14:36:24 +08:00
Yening Qin
e0eb7e72d4 refactor: es_index_pattern add cross_cluster_enabled (#2371) 2024-12-19 14:11:51 +08:00
Yening Qin
33118c53c7 refactor: group delete check (#2369) 2024-12-18 17:52:40 +08:00
Yening Qin
54732cf387 fix: alert rule with var (#2358) 2024-12-12 16:59:41 +08:00
Yening Qin
90fb400a10 fix: alert rule with var (#2355) 2024-12-12 13:20:50 +08:00
60 changed files with 381 additions and 1450 deletions

View File

@@ -29,10 +29,11 @@ type HeartbeatConfig struct {
}
type Alerting struct {
Timeout int64
TemplatesDir string
NotifyConcurrency int
WebhookBatchSend bool
Timeout int64
TemplatesDir string
NotifyConcurrency int
WebhookBatchSend bool
DisableNotifyRecord bool
}
type CallPlugin struct {

View File

@@ -41,7 +41,7 @@ func Initialize(configDir string, cryptoKey string) (func(), error) {
return nil, err
}
ctx := ctx.NewContext(context.Background(), nil, false, config.CenterApi)
ctx := ctx.NewContext(context.Background(), nil, false, config.Alert.Alerting.DisableNotifyRecord, config.CenterApi)
var redis storage.Redis
redis, err = storage.NewRedis(config.Redis)

View File

@@ -11,6 +11,7 @@ const (
type Stats struct {
AlertNotifyTotal *prometheus.CounterVec
CounterConsumeAlertsTotal *prometheus.CounterVec
AlertNotifyErrorTotal *prometheus.CounterVec
CounterAlertsTotal *prometheus.CounterVec
GaugeAlertQueueSize prometheus.Gauge
@@ -90,6 +91,13 @@ func NewSyncStats() *Stats {
Help: "Total number alert events.",
}, []string{"cluster", "type", "busi_group"})
CounterConsumeAlertsTotal := prometheus.NewCounterVec(prometheus.CounterOpts{
Namespace: namespace,
Subsystem: subsystem,
Name: "consume_alerts_total",
Help: "Total number alert events consume.",
}, []string{"cluster", "type", "busi_group"})
// 内存中的告警事件队列的长度
GaugeAlertQueueSize := prometheus.NewGauge(prometheus.GaugeOpts{
Namespace: namespace,
@@ -121,6 +129,7 @@ func NewSyncStats() *Stats {
prometheus.MustRegister(
CounterAlertsTotal,
CounterConsumeAlertsTotal,
GaugeAlertQueueSize,
AlertNotifyTotal,
AlertNotifyErrorTotal,
@@ -137,6 +146,7 @@ func NewSyncStats() *Stats {
return &Stats{
CounterAlertsTotal: CounterAlertsTotal,
CounterConsumeAlertsTotal: CounterConsumeAlertsTotal,
GaugeAlertQueueSize: GaugeAlertQueueSize,
AlertNotifyTotal: AlertNotifyTotal,
AlertNotifyErrorTotal: AlertNotifyErrorTotal,

View File

@@ -88,7 +88,7 @@ func (e *Consumer) consumeOne(event *models.AlertCurEvent) {
eventType = "recovery"
}
e.dispatch.Astats.CounterAlertsTotal.WithLabelValues(event.Cluster, eventType, event.GroupName).Inc()
e.dispatch.Astats.CounterConsumeAlertsTotal.WithLabelValues(event.Cluster, eventType, event.GroupName).Inc()
if err := event.ParseRule("rule_name"); err != nil {
logger.Warningf("ruleid:%d failed to parse rule name: %v", event.RuleId, err)

View File

@@ -293,9 +293,9 @@ func (e *Dispatch) Send(rule *models.AlertRule, event *models.AlertCurEvent, not
// handle global webhooks
if !event.OverrideGlobalWebhook() {
if e.alerting.WebhookBatchSend {
sender.BatchSendWebhooks(e.ctx, notifyTarget.ToWebhookMap(), event, e.Astats)
sender.BatchSendWebhooks(e.ctx, notifyTarget.ToWebhookList(), event, e.Astats)
} else {
sender.SingleSendWebhooks(e.ctx, notifyTarget.ToWebhookMap(), event, e.Astats)
sender.SingleSendWebhooks(e.ctx, notifyTarget.ToWebhookList(), event, e.Astats)
}
}

View File

@@ -76,8 +76,52 @@ func (s *NotifyTarget) ToCallbackList() []string {
return callbacks
}
// ToWebhookList returns every configured webhook as a slice. Before a
// webhook is included, any zero-valued tuning field on it is replaced
// with a fallback: Batch 1000, Timeout 10, RetryCount 10,
// RetryInterval 10. The webhooks are pointers, so the fallbacks are
// written back onto the shared webhook values themselves.
func (s *NotifyTarget) ToWebhookList() []*models.Webhook {
	list := make([]*models.Webhook, 0, len(s.webhooks))
	for _, hook := range s.webhooks {
		// Fill in fallback values for unset tuning knobs.
		if hook.RetryInterval == 0 {
			hook.RetryInterval = 10
		}
		if hook.RetryCount == 0 {
			hook.RetryCount = 10
		}
		if hook.Timeout == 0 {
			hook.Timeout = 10
		}
		if hook.Batch == 0 {
			hook.Batch = 1000
		}
		list = append(list, hook)
	}
	return list
}
// ToWebhookMap returns the configured webhooks keyed by URL, filling
// zero-valued tuning fields with the same fallbacks used by
// ToWebhookList (Batch 1000, Timeout 10, RetryCount 10,
// RetryInterval 10).
//
// Fix: a stray `return s.webhooks` at the top of the function made the
// defaulting loop below unreachable dead code; it has been removed so
// the defaults are actually applied.
func (s *NotifyTarget) ToWebhookMap() map[string]*models.Webhook {
	webhookMap := make(map[string]*models.Webhook, len(s.webhooks))
	for _, wh := range s.webhooks {
		if wh.Batch == 0 {
			wh.Batch = 1000
		}
		if wh.Timeout == 0 {
			wh.Timeout = 10
		}
		if wh.RetryCount == 0 {
			wh.RetryCount = 10
		}
		if wh.RetryInterval == 0 {
			wh.RetryInterval = 10
		}
		webhookMap[wh.Url] = wh
	}
	return webhookMap
}
func (s *NotifyTarget) ToUidList() []int64 {

View File

@@ -20,7 +20,6 @@ import (
"github.com/ccfos/nightingale/v6/pkg/hash"
"github.com/ccfos/nightingale/v6/pkg/parser"
promsdk "github.com/ccfos/nightingale/v6/pkg/prom"
promql2 "github.com/ccfos/nightingale/v6/pkg/promql"
"github.com/ccfos/nightingale/v6/pkg/unit"
"github.com/ccfos/nightingale/v6/prom"
"github.com/ccfos/nightingale/v6/tdengine"
@@ -60,10 +59,6 @@ const (
QUERY_DATA = "query_data"
)
const (
JoinMark = "@@"
)
type JoinType string
const (
@@ -381,7 +376,7 @@ func (arw *AlertRuleWorker) VarFillingAfterQuery(query models.PromQuery, readerC
}
seqVals := getSamples(value)
// 得到参数变量的所有组合
paramPermutation, err := arw.getParamPermutation(param, ParamKeys, varToLabel, query.PromQl, readerClient)
paramPermutation, err := arw.getParamPermutation(param, ParamKeys)
if err != nil {
logger.Errorf("rule_eval:%s, paramPermutation error:%v", arw.Key(), err)
continue
@@ -396,8 +391,8 @@ func (arw *AlertRuleWorker) VarFillingAfterQuery(query models.PromQuery, readerC
curRealQuery = fillVar(curRealQuery, paramKey, val)
}
if _, ok := paramPermutation[strings.Join(cur, JoinMark)]; ok {
anomalyPointsMap[strings.Join(cur, JoinMark)] = models.AnomalyPoint{
if _, ok := paramPermutation[strings.Join(cur, "-")]; ok {
anomalyPointsMap[strings.Join(cur, "-")] = models.AnomalyPoint{
Key: seqVals[i].Metric.String(),
Timestamp: seqVals[i].Timestamp.Unix(),
Value: float64(seqVals[i].Value),
@@ -406,7 +401,7 @@ func (arw *AlertRuleWorker) VarFillingAfterQuery(query models.PromQuery, readerC
Query: curRealQuery,
}
// 生成异常点后,删除该参数组合
delete(paramPermutation, strings.Join(cur, JoinMark))
delete(paramPermutation, strings.Join(cur, "-"))
}
}
@@ -502,7 +497,7 @@ func removeVal(promql string) string {
}
// 获取参数变量的所有组合
func (arw *AlertRuleWorker) getParamPermutation(paramVal map[string]models.ParamQuery, paramKeys []string, varToLabel map[string]string, originPromql string, readerClient promsdk.API) (map[string]struct{}, error) {
func (arw *AlertRuleWorker) getParamPermutation(paramVal map[string]models.ParamQuery, paramKeys []string) (map[string]struct{}, error) {
// 参数变量查询,得到参数变量值
paramMap := make(map[string][]string)
@@ -534,15 +529,7 @@ func (arw *AlertRuleWorker) getParamPermutation(paramVal map[string]models.Param
if err != nil {
logger.Errorf("query:%s fail to unmarshalling into string slice, error:%v", paramQuery.Query, err)
}
if len(query) == 0 {
paramsKeyAllLabel, err := getParamKeyAllLabel(varToLabel[paramKey], originPromql, readerClient)
if err != nil {
logger.Errorf("rule_eval:%s, fail to getParamKeyAllLabel, error:%v", arw.Key(), paramQuery.Query, err)
}
params = paramsKeyAllLabel
} else {
params = query
}
params = query
default:
return nil, fmt.Errorf("unknown param type: %s", paramQuery.ParamType)
}
@@ -560,63 +547,12 @@ func (arw *AlertRuleWorker) getParamPermutation(paramVal map[string]models.Param
res := make(map[string]struct{})
for i := range permutation {
res[strings.Join(permutation[i], JoinMark)] = struct{}{}
res[strings.Join(permutation[i], "@@")] = struct{}{}
}
return res, nil
}
// getParamKeyAllLabel queries the datasource with a matcher expression
// derived from promql (dropping any matcher whose value still contains
// an unresolved "$" variable) and returns the distinct values of the
// label named paramKey observed in the result.
//
// Improvements over the original: the duplicated Vector/Matrix
// label-collection loops are folded into one shared closure, the
// non-idiomatic `for labelValue, _ := range` is simplified
// (gofmt -s / staticcheck), and the result slice is pre-sized.
func getParamKeyAllLabel(paramKey string, promql string, client promsdk.API) ([]string, error) {
	labels, metricName, err := promql2.GetLabelsAndMetricNameWithReplace(promql, "$")
	if err != nil {
		return nil, fmt.Errorf("promql:%s, get labels error:%v", promql, err)
	}

	// Keep only matchers whose value is fully resolved (no "$" prefix).
	labelstrs := make([]string, 0, len(labels))
	for _, label := range labels {
		if strings.HasPrefix(label.Value, "$") {
			continue
		}
		labelstrs = append(labelstrs, label.Name+label.Op+label.Value)
	}

	pr := metricName + "{" + strings.Join(labelstrs, ",") + "}"
	value, _, err := client.Query(context.Background(), pr, time.Now())
	if err != nil {
		return nil, fmt.Errorf("promql: %s query error: %v", pr, err)
	}

	// Collect distinct values of the paramKey label; one closure serves
	// both the instant-vector and range-vector result shapes.
	labelValuesMap := make(map[string]struct{})
	collect := func(metric model.Metric) {
		for labelName, labelValue := range metric {
			// 只处理ParamKeys中指定的label
			if string(labelName) == paramKey {
				labelValuesMap[string(labelValue)] = struct{}{}
			}
		}
	}
	switch value.Type() {
	case model.ValVector:
		for _, sample := range value.(model.Vector) {
			collect(sample.Metric)
		}
	case model.ValMatrix:
		for _, series := range value.(model.Matrix) {
			collect(series.Metric)
		}
	}

	result := make([]string, 0, len(labelValuesMap))
	for labelValue := range labelValuesMap {
		result = append(result, labelValue)
	}
	return result, nil
}
func (arw *AlertRuleWorker) getHostIdents(paramQuery models.ParamQuery) ([]string, error) {
var params []string
q, _ := json.Marshal(paramQuery.Query)
@@ -1294,7 +1230,6 @@ func GetQueryRefAndUnit(query interface{}) (string, string, error) {
// 再查询得到满足值变量的所有结果加入异常点列表
// 参数变量的值不满足的组合,需要覆盖上层筛选中产生的异常点
func (arw *AlertRuleWorker) VarFillingBeforeQuery(query models.PromQuery, readerClient promsdk.API) []models.AnomalyPoint {
varToLabel := ExtractVarMapping(query.PromQl)
// 存储异常点的 mapkey 为参数变量的组合,可以实现子筛选对上一层筛选的覆盖
anomalyPointsMap := sync.Map{}
// 统一变量配置格式
@@ -1337,7 +1272,7 @@ func (arw *AlertRuleWorker) VarFillingBeforeQuery(query models.PromQuery, reader
curPromql = strings.Replace(curPromql, fmt.Sprintf("$%s", key), val, -1)
}
// 得到参数变量的所有组合
paramPermutation, err := arw.getParamPermutation(param, ParamKeys, varToLabel, query.PromQl, readerClient)
paramPermutation, err := arw.getParamPermutation(param, ParamKeys)
if err != nil {
logger.Errorf("rule_eval:%s, paramPermutation error:%v", arw.Key(), err)
continue
@@ -1346,7 +1281,7 @@ func (arw *AlertRuleWorker) VarFillingBeforeQuery(query models.PromQuery, reader
keyToPromql := make(map[string]string)
for paramPermutationKeys, _ := range paramPermutation {
realPromql := curPromql
split := strings.Split(paramPermutationKeys, JoinMark)
split := strings.Split(paramPermutationKeys, "@@")
for j := range ParamKeys {
realPromql = fillVar(realPromql, ParamKeys[j], split[j])
}

View File

@@ -226,7 +226,7 @@ func (p *Processor) BuildEvent(anomalyPoint models.AnomalyPoint, from string, no
if err := json.Unmarshal([]byte(p.rule.Annotations), &event.AnnotationsJSON); err != nil {
event.AnnotationsJSON = make(map[string]string) // 解析失败时使用空 map
logger.Warningf("unmarshal annotations json failed: %v, rule: %d", err, p.rule.Id)
logger.Warningf("unmarshal annotations json failed:%v, rule:%d annotations:%s", err, p.rule.Id, p.rule.Annotations)
}
if p.target != "" {
@@ -522,6 +522,14 @@ func (p *Processor) pushEventToQueue(e *models.AlertCurEvent) {
}
dispatch.LogEvent(e, "push_queue")
eventType := "alert"
if e.IsRecovered {
eventType = "recovery"
}
p.Stats.CounterAlertsTotal.WithLabelValues(e.Cluster, eventType, e.GroupName).Inc()
if !queue.EventQueue.PushFront(e) {
logger.Warningf("event_push_queue: queue is full, event:%+v", e)
p.Stats.CounterRuleEvalErrorTotal.WithLabelValues(fmt.Sprintf("%v", p.DatasourceId()), "push_event_queue", p.BusiGroupCache.GetNameByBusiGroupId(p.rule.GroupId), fmt.Sprintf("%v", p.rule.Id)).Inc()

View File

@@ -139,6 +139,10 @@ func doSendAndRecord(ctx *ctx.Context, url, token string, body interface{}, chan
}
func NotifyRecord(ctx *ctx.Context, evts []*models.AlertCurEvent, channel, target, res string, err error) {
if ctx.DisableNotifyRecord {
return
}
// 一个通知可能对应多个 event,都需要记录
notis := make([]*models.NotificaitonRecord, 0, len(evts))
for _, evt := range evts {

View File

@@ -13,6 +13,7 @@ import (
"github.com/ccfos/nightingale/v6/alert/astats"
"github.com/ccfos/nightingale/v6/models"
"github.com/ccfos/nightingale/v6/pkg/ctx"
"github.com/ccfos/nightingale/v6/pkg/poster"
"github.com/toolkits/pkg/logger"
)
@@ -60,20 +61,22 @@ func sendWebhook(webhook *models.Webhook, event interface{}, stats *astats.Stats
insecureSkipVerify = webhook.SkipVerify
}
if conf.Client == nil {
logger.Warningf("event_%s, event:%s, url: [%s], error: [%s]", channel, string(bs), conf.Url, "client is nil")
conf.Client = &http.Client{
Timeout: time.Duration(conf.Timeout) * time.Second,
Transport: &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: insecureSkipVerify},
},
}
transport := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: insecureSkipVerify},
}
if poster.UseProxy(conf.Url) {
transport.Proxy = http.ProxyFromEnvironment
}
client := http.Client{
Timeout: time.Duration(conf.Timeout) * time.Second,
Transport: transport,
}
stats.AlertNotifyTotal.WithLabelValues(channel).Inc()
var resp *http.Response
var body []byte
resp, err = conf.Client.Do(req)
resp, err = client.Do(req)
if err != nil {
stats.AlertNotifyErrorTotal.WithLabelValues(channel).Inc()
@@ -95,7 +98,7 @@ func sendWebhook(webhook *models.Webhook, event interface{}, stats *astats.Stats
return false, string(body), nil
}
func SingleSendWebhooks(ctx *ctx.Context, webhooks map[string]*models.Webhook, event *models.AlertCurEvent, stats *astats.Stats) {
func SingleSendWebhooks(ctx *ctx.Context, webhooks []*models.Webhook, event *models.AlertCurEvent, stats *astats.Stats) {
for _, conf := range webhooks {
retryCount := 0
for retryCount < 3 {
@@ -110,7 +113,7 @@ func SingleSendWebhooks(ctx *ctx.Context, webhooks map[string]*models.Webhook, e
}
}
func BatchSendWebhooks(ctx *ctx.Context, webhooks map[string]*models.Webhook, event *models.AlertCurEvent, stats *astats.Stats) {
func BatchSendWebhooks(ctx *ctx.Context, webhooks []*models.Webhook, event *models.AlertCurEvent, stats *astats.Stats) {
for _, conf := range webhooks {
logger.Infof("push event:%+v to queue:%v", event, conf)
PushEvent(ctx, conf, event, stats)

View File

@@ -66,7 +66,7 @@ func Initialize(configDir string, cryptoKey string) (func(), error) {
if err != nil {
return nil, err
}
ctx := ctx.NewContext(context.Background(), db, true)
ctx := ctx.NewContext(context.Background(), db, true, config.Alert.Alerting.DisableNotifyRecord)
migrate.Migrate(db)
isRootInit := models.InitRoot(ctx)

View File

@@ -322,11 +322,6 @@ func (rt *Router) Config(r *gin.Engine) {
pages.GET("/share-charts", rt.chartShareGets)
pages.POST("/share-charts", rt.auth(), rt.chartShareAdd)
pages.POST("/dashboard-annotations", rt.auth(), rt.user(), rt.perm("/dashboards/put"), rt.dashAnnotationAdd)
pages.GET("/dashboard-annotations", rt.dashAnnotationGets)
pages.PUT("/dashboard-annotation/:id", rt.auth(), rt.user(), rt.perm("/dashboards/put"), rt.dashAnnotationPut)
pages.DELETE("/dashboard-annotation/:id", rt.auth(), rt.user(), rt.perm("/dashboards/del"), rt.dashAnnotationDel)
// pages.GET("/alert-rules/builtin/alerts-cates", rt.auth(), rt.user(), rt.builtinAlertCateGets)
// pages.GET("/alert-rules/builtin/list", rt.auth(), rt.user(), rt.builtinAlertRules)
pages.GET("/alert-rules/callbacks", rt.auth(), rt.user(), rt.alertRuleCallbacks)
@@ -484,7 +479,6 @@ func (rt *Router) Config(r *gin.Engine) {
pages.PUT("/builtin-payloads", rt.auth(), rt.user(), rt.perm("/built-in-components/put"), rt.builtinPayloadsPut)
pages.DELETE("/builtin-payloads", rt.auth(), rt.user(), rt.perm("/built-in-components/del"), rt.builtinPayloadsDel)
pages.GET("/builtin-payload", rt.auth(), rt.user(), rt.builtinPayloadsGetByUUIDOrID)
}
r.GET("/api/n9e/versions", func(c *gin.Context) {

View File

@@ -36,9 +36,8 @@ func (rt *Router) builtinComponentsAdd(c *gin.Context) {
func (rt *Router) builtinComponentsGets(c *gin.Context) {
query := ginx.QueryStr(c, "query", "")
disabled := ginx.QueryInt(c, "disabled", -1)
bc, err := models.BuiltinComponentGets(rt.Ctx, query, disabled)
bc, err := models.BuiltinComponentGets(rt.Ctx, query)
ginx.Dangerous(err)
ginx.NewRender(c).Data(bc, nil)

View File

@@ -1,99 +0,0 @@
package router
import (
"fmt"
"net/http"
"time"
"github.com/ccfos/nightingale/v6/models"
"github.com/ccfos/nightingale/v6/pkg/ctx"
"github.com/gin-gonic/gin"
"github.com/toolkits/pkg/ginx"
)
// checkAnnotationPermission verifies that the signed-in user has
// read-write access to the busi group owning the given dashboard.
// On any failure (missing dashboard, lookup error, or no permission)
// it terminates request handling with the matching HTTP status via
// ginx.Bomb / ginx.Dangerous.
func checkAnnotationPermission(c *gin.Context, ctx *ctx.Context, dashboardId int64) {
	board, err := models.BoardGetByID(ctx, dashboardId)
	if err != nil {
		ginx.Bomb(http.StatusInternalServerError, "failed to get dashboard: %v", err)
	}
	if board == nil {
		ginx.Bomb(http.StatusNotFound, "dashboard not found")
	}

	user := c.MustGet("user").(*models.User)
	group := BusiGroup(ctx, board.GroupId)
	writable, err := user.CanDoBusiGroup(ctx, group, "rw")
	ginx.Dangerous(err)
	if !writable {
		ginx.Bomb(http.StatusForbidden, "forbidden")
	}
}
// dashAnnotationAdd creates a dashboard annotation from the request
// body. The caller must have rw permission on the busi group owning
// the target dashboard.
func (rt *Router) dashAnnotationAdd(c *gin.Context) {
	var f models.DashAnnotation
	ginx.BindJSON(c, &f)

	checkAnnotationPermission(c, rt.Ctx, f.DashboardId)

	// Stamp creator/updater and timestamps before persisting.
	operator := c.MustGet("username").(string)
	ts := time.Now().Unix()
	f.CreateBy = operator
	f.UpdateBy = operator
	f.CreateAt = ts
	f.UpdateAt = ts

	ginx.NewRender(c).Data(f.Id, f.Add(rt.Ctx))
}
// dashAnnotationGets lists annotations for one dashboard within a time
// window. Query params: dashboard_id, from, to, and optional limit
// (defaults to 100).
func (rt *Router) dashAnnotationGets(c *gin.Context) {
	lst, err := models.DashAnnotationGets(
		rt.Ctx,
		ginx.QueryInt64(c, "dashboard_id"),
		ginx.QueryInt64(c, "from"),
		ginx.QueryInt64(c, "to"),
		ginx.QueryInt(c, "limit", 100),
	)
	ginx.NewRender(c).Data(lst, err)
}
// dashAnnotationPut updates an existing annotation identified by the
// :id URL parameter, after verifying the caller's rw permission on the
// annotation's dashboard.
func (rt *Router) dashAnnotationPut(c *gin.Context) {
	var payload models.DashAnnotation
	ginx.BindJSON(c, &payload)

	annotationId := ginx.UrlParamInt64(c, "id")
	existing, err := getAnnotationById(rt.Ctx, annotationId)
	ginx.Dangerous(err)
	checkAnnotationPermission(c, rt.Ctx, existing.DashboardId)

	// The stored id wins over anything sent in the body.
	payload.Id = annotationId
	payload.UpdateBy = c.MustGet("username").(string)
	payload.UpdateAt = time.Now().Unix()

	ginx.NewRender(c).Message(payload.Update(rt.Ctx))
}
// dashAnnotationDel deletes the annotation identified by the :id URL
// parameter, after verifying the caller's rw permission on the
// annotation's dashboard.
func (rt *Router) dashAnnotationDel(c *gin.Context) {
	annotationId := ginx.UrlParamInt64(c, "id")
	target, err := getAnnotationById(rt.Ctx, annotationId)
	ginx.Dangerous(err)

	checkAnnotationPermission(c, rt.Ctx, target.DashboardId)
	ginx.NewRender(c).Message(models.DashAnnotationDel(rt.Ctx, annotationId))
}
// getAnnotationById is the shared lookup helper for annotation
// handlers: it fetches one annotation by id and turns a missing row
// into an explicit error instead of a nil result.
func getAnnotationById(ctx *ctx.Context, id int64) (*models.DashAnnotation, error) {
	record, err := models.DashAnnotationGet(ctx, "id=?", id)
	if err != nil {
		return nil, err
	}
	if record == nil {
		return nil, fmt.Errorf("annotation not found")
	}
	return record, nil
}

View File

@@ -130,9 +130,9 @@ func (rt *Router) User() gin.HandlerFunc {
func (rt *Router) user() gin.HandlerFunc {
return func(c *gin.Context) {
username := c.MustGet("username").(string)
userid := c.MustGet("userid").(int64)
user, err := models.UserGetByUsername(rt.Ctx, username)
user, err := models.UserGetById(rt.Ctx, userid)
if err != nil {
ginx.Bomb(http.StatusUnauthorized, "unauthorized")
}

View File

@@ -9,7 +9,6 @@ import (
"time"
"github.com/ccfos/nightingale/v6/models"
"github.com/ccfos/nightingale/v6/pkg/ctx"
"github.com/ccfos/nightingale/v6/storage"
"github.com/gin-gonic/gin"
@@ -400,22 +399,6 @@ type targetBgidsForm struct {
Action string `json:"action"` // add del reset
}
func haveNeverGroupedIdent(ctx *ctx.Context, idents []string) (bool, error) {
for _, ident := range idents {
bgids, err := models.TargetGroupIdsGetByIdent(ctx, ident)
if err != nil {
return false, err
}
if len(bgids) <= 0 {
return true, nil
}
}
return false, nil
}
func (rt *Router) targetBindBgids(c *gin.Context) {
var f targetBgidsForm
var err error
@@ -458,15 +441,11 @@ func (rt *Router) targetBindBgids(c *gin.Context) {
ginx.Bomb(http.StatusForbidden, "No permission. You are not admin of BG(%s)", bg.Name)
}
}
isNeverGrouped, checkErr := haveNeverGroupedIdent(rt.Ctx, f.Idents)
ginx.Dangerous(checkErr)
if isNeverGrouped {
can, err := user.CheckPerm(rt.Ctx, "/targets/bind")
ginx.Dangerous(err)
if !can {
ginx.Bomb(http.StatusForbidden, "No permission. Only admin can assign BG")
}
can, err := user.CheckPerm(rt.Ctx, "/targets/bind")
ginx.Dangerous(err)
if !can {
ginx.Bomb(http.StatusForbidden, "No permission. Only admin can assign BG")
}
}

View File

@@ -18,7 +18,7 @@ func Upgrade(configFile string) error {
return err
}
ctx := ctx.NewContext(context.Background(), db, true)
ctx := ctx.NewContext(context.Background(), db, true, false)
for _, cluster := range config.Clusters {
count, err := models.GetDatasourcesCountByName(ctx, cluster.Name)
if err != nil {

View File

@@ -41,7 +41,7 @@ func Initialize(configDir string, cryptoKey string) (func(), error) {
if len(config.CenterApi.Addrs) < 1 {
return nil, errors.New("failed to init config: the CenterApi configuration is missing")
}
ctx := ctx.NewContext(context.Background(), nil, false, config.CenterApi)
ctx := ctx.NewContext(context.Background(), nil, false, config.Alert.Alerting.DisableNotifyRecord, config.CenterApi)
var redis storage.Redis
redis, err = storage.NewRedis(config.Redis)

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = false
Enable = true
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = false
Enable = true
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = false
Enable = true
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

View File

@@ -790,7 +790,6 @@ CREATE TABLE es_index_pattern (
time_field varchar(128) not null default '@timestamp',
allow_hide_system_indices smallint not null default 0,
fields_format varchar(4096) not null default '',
cross_cluster_enabled int not null default 0,
create_at bigint default '0',
create_by varchar(64) default '',
update_at bigint default '0',
@@ -860,7 +859,6 @@ CREATE TABLE builtin_components (
ident VARCHAR(191) NOT NULL,
logo VARCHAR(191) NOT NULL,
readme TEXT NOT NULL,
disabled INT NOT NULL DEFAULT 0,
created_at BIGINT NOT NULL DEFAULT 0,
created_by VARCHAR(191) NOT NULL DEFAULT '',
updated_at BIGINT NOT NULL DEFAULT 0,
@@ -887,20 +885,4 @@ CREATE TABLE builtin_payloads (
CREATE INDEX idx_component ON builtin_payloads (component);
CREATE INDEX idx_builtin_payloads_name ON builtin_payloads (name);
CREATE INDEX idx_cate ON builtin_payloads (cate);
CREATE INDEX idx_type ON builtin_payloads (type);
CREATE TABLE dash_annotation (
id bigserial PRIMARY KEY,
dashboard_id bigint not null,
panel_id varchar(191) not null,
tags text,
description text,
config text,
time_start bigint not null default 0,
time_end bigint not null default 0,
create_at bigint not null default 0,
create_by varchar(64) not null default '',
update_at bigint not null default 0,
update_by varchar(64) not null default ''
);
CREATE INDEX idx_type ON builtin_payloads (type);

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = false
Enable = true
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

View File

@@ -531,7 +531,6 @@ CREATE TABLE `builtin_components` (
`created_by` varchar(191) NOT NULL DEFAULT '' COMMENT '''creator''',
`updated_at` bigint NOT NULL DEFAULT 0 COMMENT '''update time''',
`updated_by` varchar(191) NOT NULL DEFAULT '' COMMENT '''updater''',
`disabled` int NOT NULL DEFAULT 0 COMMENT '''is disabled or not''',
PRIMARY KEY (`id`),
UNIQUE KEY `idx_ident` (`ident`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -694,7 +693,6 @@ CREATE TABLE `es_index_pattern` (
`time_field` varchar(128) not null default '@timestamp',
`allow_hide_system_indices` tinyint(1) not null default 0,
`fields_format` varchar(4096) not null default '',
`cross_cluster_enabled` int not null default 0,
`create_at` bigint default '0',
`create_by` varchar(64) default '',
`update_at` bigint default '0',
@@ -747,24 +745,6 @@ CREATE TABLE `target_busi_group` (
UNIQUE KEY `idx_target_group` (`target_ident`,`group_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `dash_annotation` (
`id` bigint unsigned not null auto_increment,
`dashboard_id` bigint not null comment 'dashboard id',
`panel_id` varchar(191) not null comment 'panel id',
`tags` text comment 'tags array json string',
`description` text comment 'annotation description',
`config` text comment 'annotation config',
`time_start` bigint not null default 0 comment 'start timestamp',
`time_end` bigint not null default 0 comment 'end timestamp',
`create_at` bigint not null default 0 comment 'create time',
`create_by` varchar(64) not null default '' comment 'creator',
`update_at` bigint not null default 0 comment 'update time',
`update_by` varchar(64) not null default '' comment 'updater',
PRIMARY KEY (`id`),
KEY `idx_dashboard_id` (`dashboard_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `task_meta`
(
`id` bigint unsigned NOT NULL AUTO_INCREMENT,

View File

@@ -120,31 +120,4 @@ CREATE TABLE `target_busi_group` (
/* v7.7.2 2024-12-02 */
ALTER TABLE alert_subscribe MODIFY COLUMN rule_ids varchar(1024);
ALTER TABLE alert_subscribe MODIFY COLUMN busi_groups varchar(4096);
/* v8.0.0-beta.1 2024-12-13 */
ALTER TABLE `alert_rule` ADD COLUMN `cron_pattern` VARCHAR(64);
ALTER TABLE `builtin_components` MODIFY COLUMN `logo` mediumtext COMMENT '''logo of component''';
/* v8.0.0-beta.2 2024-12-26 */
ALTER TABLE `es_index_pattern` ADD COLUMN `cross_cluster_enabled` int not null default 0;
/* v8.0.0-beta.3 2024-01-03 */
ALTER TABLE `builtin_components` ADD COLUMN `disabled` INT NOT NULL DEFAULT 0 COMMENT 'is disabled or not';
CREATE TABLE `dash_annotation` (
`id` bigint unsigned not null auto_increment,
`dashboard_id` bigint not null comment 'dashboard id',
`panel_id` varchar(191) not null comment 'panel id',
`tags` text comment 'tags array json string',
`description` text comment 'annotation description',
`config` text comment 'annotation config',
`time_start` bigint not null default 0 comment 'start timestamp',
`time_end` bigint not null default 0 comment 'end timestamp',
`create_at` bigint not null default 0 comment 'create time',
`create_by` varchar(64) not null default '' comment 'creator',
`update_at` bigint not null default 0 comment 'update time',
`update_by` varchar(64) not null default '' comment 'updater',
PRIMARY KEY (`id`),
KEY `idx_dashboard_id` (`dashboard_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
ALTER TABLE alert_subscribe MODIFY COLUMN busi_groups varchar(4096);

View File

@@ -17,8 +17,6 @@ CREATE TABLE `users` (
`update_by` varchar(64) not null default ''
);
CREATE UNIQUE INDEX idx_users_username ON `users` (username);
insert into `users`(id, username, nickname, password, roles, create_at, create_by, update_at, update_by) values(1, 'root', '超管', 'root.2020', 'Admin', strftime('%s', 'now'), 'system', strftime('%s', 'now'), 'system');
CREATE TABLE `user_group` (
@@ -184,9 +182,8 @@ CREATE TABLE `board` (
`create_by` varchar(64) not null default '',
`update_at` bigint not null default 0,
`update_by` varchar(64) not null default '',
`public_cate` bigint not null default 0
unique (`group_id`, `name`)
);
CREATE UNIQUE INDEX idx_board_group_id_name ON `board` (group_id, name);
CREATE INDEX `idx_board_ident` ON `board` (`ident` asc);
-- for dashboard new version
@@ -195,15 +192,6 @@ CREATE TABLE `board_payload` (
`payload` mediumtext not null
);
CREATE TABLE `chart` (
`id` integer primary key autoincrement,
`group_id` integer not null,
`configs` text,
`weight` integer not null default 0
);
CREATE INDEX idx_chart_group_id ON `chart` (group_id);
CREATE TABLE `chart_share` (
`id` integer primary key autoincrement,
`cluster` varchar(128) not null,
@@ -250,9 +238,7 @@ CREATE TABLE `alert_rule` (
`create_at` bigint not null default 0,
`create_by` varchar(64) not null default '',
`update_at` bigint not null default 0,
`update_by` varchar(64) not null default '',
`cron_pattern` varchar(64),
`datasource_queries` text
`update_by` varchar(64) not null default ''
);
CREATE INDEX `idx_alert_rule_group_id` ON `alert_rule` (`group_id` asc);
CREATE INDEX `idx_alert_rule_update_at` ON `alert_rule` (`update_at` asc);
@@ -322,18 +308,11 @@ CREATE TABLE `target` (
`tags` varchar(512) not null default '',
`host_ip` varchar(15) default '',
`agent_version` varchar(255) default '',
`host_tags` text,
`engine_name` varchar(255) default '',
`os` varchar(31) default '',
`update_at` bigint not null default 0
);
CREATE INDEX `idx_target_group_id` ON `target` (`group_id` asc);
CREATE UNIQUE INDEX idx_target_ident ON `target` (ident);
CREATE INDEX idx_host_ip ON `target` (host_ip);
CREATE INDEX idx_agent_version ON `target` (agent_version);
CREATE INDEX idx_engine_name ON `target` (engine_name);
CREATE INDEX idx_os ON `target` (os);
CREATE TABLE `metric_view` (
`id` integer primary key autoincrement,
@@ -358,14 +337,12 @@ CREATE TABLE `recording_rule` (
`disabled` tinyint(1) not null default 0,
`prom_ql` varchar(8192) not null,
`prom_eval_interval` int not null,
`cron_pattern` varchar(255) default '',
`append_tags` varchar(255) default '',
`query_configs` text not null,
`create_at` bigint default '0',
`create_by` varchar(64) default '',
`update_at` bigint default '0',
`update_by` varchar(64) default '',
`datasource_queries` text
`update_by` varchar(64) default ''
);
CREATE INDEX `idx_recording_rule_group_id` ON `recording_rule` (`group_id` asc);
CREATE INDEX `idx_recording_rule_update_at` ON `recording_rule` (`update_at` asc);
@@ -453,7 +430,6 @@ CREATE TABLE `alert_his_event` (
`trigger_value` varchar(2048) not null,
`recover_time` bigint not null default 0,
`last_eval_time` bigint not null default 0,
`original_tags` varchar(8192),
`tags` varchar(1024) not null default '',
`annotations` text not null,
`rule_config` text not null
@@ -483,8 +459,6 @@ CREATE INDEX `idx_builtin_components_ident` ON `builtin_components` (`ident` asc
CREATE TABLE `builtin_payloads` (
`id` integer primary key autoincrement,
`component_id` integer not null default 0,
`uuid` integer not null,
`type` varchar(191) not null,
`component` varchar(191) not null,
`cate` varchar(191) not null,
@@ -500,20 +474,6 @@ CREATE INDEX `idx_builtin_payloads_component` ON `builtin_payloads` (`component`
CREATE INDEX `idx_builtin_payloads_name` ON `builtin_payloads` (`name` asc);
CREATE INDEX `idx_builtin_payloads_cate` ON `builtin_payloads` (`cate` asc);
CREATE INDEX `idx_builtin_payloads_type` ON `builtin_payloads` (`type` asc);
CREATE INDEX idx_uuid ON `builtin_payloads` (uuid);
CREATE TABLE `notification_record` (
`id` integer primary key autoincrement,
`event_id` integer not null,
`sub_id` integer,
`channel` varchar(255) not null,
`status` integer,
`target` varchar(1024) not null,
`details` varchar(2048) default '',
`created_at` integer not null
);
CREATE INDEX idx_evt ON notification_record (event_id);
CREATE TABLE `task_tpl` (
`id` integer primary key autoincrement,
@@ -593,8 +553,6 @@ CREATE TABLE `datasource`
`updated_by` varchar(64) not null default ''
);
CREATE UNIQUE INDEX idx_datasource_name ON datasource (name);
CREATE TABLE `builtin_cate` (
`id` integer primary key autoincrement,
`name` varchar(191) not null,
@@ -612,8 +570,6 @@ CREATE TABLE `notify_tpl` (
`update_by` varchar(64) not null default ''
);
CREATE UNIQUE INDEX idx_notify_tpl_channel ON notify_tpl (channel);
CREATE TABLE `sso_config` (
`id` integer primary key autoincrement,
`name` varchar(191) not null unique,
@@ -621,8 +577,6 @@ CREATE TABLE `sso_config` (
`update_at` bigint not null default 0
);
CREATE UNIQUE INDEX idx_sso_config_name ON sso_config (name);
CREATE TABLE `es_index_pattern` (
`id` integer primary key autoincrement,
`datasource_id` bigint not null default 0,
@@ -630,7 +584,6 @@ CREATE TABLE `es_index_pattern` (
`time_field` varchar(128) not null default '@timestamp',
`allow_hide_system_indices` tinyint(1) not null default 0,
`fields_format` varchar(4096) not null default '',
`cross_cluster_enabled` int not null default 0,
`create_at` bigint default '0',
`create_by` varchar(64) default '',
`update_at` bigint default '0',
@@ -638,8 +591,6 @@ CREATE TABLE `es_index_pattern` (
unique (`datasource_id`, `name`)
);
CREATE UNIQUE INDEX idx_es_index_pattern_datasource_id_name ON es_index_pattern (datasource_id, name);
CREATE TABLE `builtin_metrics` (
`id` integer primary key autoincrement,
`collector` varchar(191) NOT NULL,
@@ -652,15 +603,13 @@ CREATE TABLE `builtin_metrics` (
`created_at` bigint NOT NULL DEFAULT 0,
`created_by` varchar(191) NOT NULL DEFAULT '',
`updated_at` bigint NOT NULL DEFAULT 0,
`updated_by` varchar(191) NOT NULL DEFAULT '',
`uuid integer` not null default 0
`updated_by` varchar(191) NOT NULL DEFAULT ''
);
CREATE UNIQUE INDEX idx_collector_typ_name ON builtin_metrics (lang, collector, typ, name);
CREATE INDEX idx_collector ON builtin_metrics (collector);
CREATE INDEX idx_typ ON builtin_metrics (typ);
CREATE INDEX idx_builtinmetric_name ON builtin_metrics (name);
CREATE INDEX idx_lang ON builtin_metrics (lang);
-- CREATE UNIQUE INDEX `idx_builtin_metrics_collector_typ_name` ON `builtin_metrics` (`lang`,`collector`, `typ`, `name` asc);
-- CREATE INDEX `idx_builtin_metrics_collector` ON `builtin_metrics` (`collector` asc);
-- CREATE INDEX `idx_builtin_metrics_typ` ON `builtin_metrics` (`typ` asc);
-- CREATE INDEX `idx_builtin_metrics_name` ON `builtin_metrics` (`name` asc);
-- CREATE INDEX `idx_builtin_metrics_lang` ON `builtin_metrics` (`lang` asc);
CREATE TABLE `metric_filter` (
@@ -675,30 +624,6 @@ CREATE TABLE `metric_filter` (
);
CREATE INDEX `idx_metric_filter_name` ON `metric_filter` (`name` asc);
CREATE TABLE `target_busi_group` (
`id` integer primary key autoincrement,
`target_ident` varchar(191) not null,
`group_id` integer not null,
`update_at` integer not null
);
CREATE UNIQUE INDEX idx_target_busi_group ON target_busi_group (target_ident, group_id);
CREATE TABLE `dash_annotation` (
`id` integer primary key autoincrement,
`dashboard_id` bigint not null,
`panel_id` varchar(191) not null,
`tags` text,
`description` text,
`config` text,
`time_start` bigint not null default 0,
`time_end` bigint not null default 0,
`create_at` bigint not null default 0,
`create_by` varchar(64) not null default '',
`update_at` bigint not null default 0,
`update_by` varchar(64) not null default ''
);
CREATE TABLE `task_meta`
(

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = false
Enable = true
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
@@ -77,7 +77,7 @@ OpenRSA = false
DBType = "sqlite"
# postgres: host=%s port=%s user=%s dbname=%s password=%s sslmode=%s
# postgres: DSN="host=127.0.0.1 port=5432 user=root dbname=n9e_v6 password=1234 sslmode=disable"
# mysql: DSN="root:1234@tcp(localhost:3306)/n9e_v6?charset=utf8mb4&parseTime=True&loc=Local"
# sqlite: DSN="/path/to/filename.db"
DSN = "n9e.db"
# enable debug mode or not
Debug = false
@@ -138,9 +138,6 @@ ForceUseServerTS = true
# [Pushgw.WriterOpt]
# QueueMaxSize = 1000000
# QueuePopSize = 1000
# AllQueueMaxSize = 1000000
# fresh time, unit ms
# AllQueueMaxSizeInterval = 200
[[Pushgw.Writers]]
# Url = "http://127.0.0.1:8480/insert/0/prometheus/api/v1/write"

View File

@@ -54,7 +54,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = false
Enable = true
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

29
go.mod
View File

@@ -4,7 +4,6 @@ go 1.22
require (
github.com/BurntSushi/toml v0.3.1
github.com/VictoriaMetrics/metricsql v0.81.1
github.com/coreos/go-oidc v2.2.1+incompatible
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
github.com/dgrijalva/jwt-go v3.2.0+incompatible
@@ -12,7 +11,6 @@ require (
github.com/flashcatcloud/ibex v1.3.5
github.com/gin-contrib/pprof v1.4.0
github.com/gin-gonic/gin v1.9.1
github.com/glebarez/sqlite v1.11.0
github.com/go-ldap/ldap/v3 v3.4.4
github.com/gogo/protobuf v1.3.2
github.com/golang-jwt/jwt v3.2.2+incompatible
@@ -36,34 +34,21 @@ require (
github.com/spaolacci/murmur3 v1.1.0
github.com/stretchr/testify v1.8.4
github.com/tidwall/gjson v1.14.0
github.com/toolkits/pkg v1.3.8
github.com/toolkits/pkg v1.3.9
golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1
golang.org/x/oauth2 v0.10.0
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
gopkg.in/yaml.v2 v2.4.0
gorm.io/driver/mysql v1.4.4
gorm.io/driver/postgres v1.5.11
gorm.io/driver/postgres v1.4.5
gorm.io/driver/sqlite v1.5.5
gorm.io/gorm v1.25.12
gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde
)
require (
github.com/VictoriaMetrics/metrics v1.34.0 // indirect
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/glebarez/go-sqlite v1.21.2 // indirect
github.com/jackc/pgx/v5 v5.7.1 // indirect
github.com/jackc/puddle/v2 v2.2.2 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/valyala/fastrand v1.1.0 // indirect
github.com/valyala/histogram v1.2.0 // indirect
github.com/yuin/gopher-lua v1.1.1 // indirect
golang.org/x/sync v0.10.0 // indirect
modernc.org/libc v1.22.5 // indirect
modernc.org/mathutil v1.5.0 // indirect
modernc.org/memory v1.5.0 // indirect
modernc.org/sqlite v1.23.1 // indirect
)
require (
@@ -94,7 +79,7 @@ require (
github.com/jackc/pgio v1.0.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgproto3/v2 v2.3.1 // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
github.com/jackc/pgtype v1.12.0 // indirect
github.com/jackc/pgx/v4 v4.17.2 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
@@ -117,11 +102,11 @@ require (
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/automaxprocs v1.5.2 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/crypto v0.31.0 // indirect
golang.org/x/crypto v0.21.0 // indirect
golang.org/x/image v0.18.0 // indirect
golang.org/x/net v0.23.0 // indirect
golang.org/x/sys v0.28.0 // indirect
golang.org/x/text v0.21.0 // indirect
golang.org/x/sys v0.21.0 // indirect
golang.org/x/text v0.16.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/protobuf v1.33.0 // indirect
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect

62
go.sum
View File

@@ -13,10 +13,6 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc=
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/VictoriaMetrics/metrics v1.34.0 h1:0i8k/gdOJdSoZB4Z9pikVnVQXfhcIvnG7M7h2WaQW2w=
github.com/VictoriaMetrics/metrics v1.34.0/go.mod h1:r7hveu6xMdUACXvB8TYdAj8WEsKzWB0EkpJN+RDtOf8=
github.com/VictoriaMetrics/metricsql v0.81.1 h1:1gpqI3Mwru1tCM8nZiKxBG0P+DNkjlRwLhRPII3cuho=
github.com/VictoriaMetrics/metricsql v0.81.1/go.mod h1:1g4hdCwlbJZ851PU9VN65xy9Rdlzupo6fx3SNZ8Z64U=
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc=
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk=
@@ -57,8 +53,6 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumC
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/expr-lang/expr v1.16.1 h1:Na8CUcMdyGbnNpShY7kzcHCU7WqxuL+hnxgHZ4vaz/A=
github.com/expr-lang/expr v1.16.1/go.mod h1:uCkhfG+x7fcZ5A5sXHKuQ07jGZRl6J0FCAaf2k4PtVQ=
github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8=
@@ -77,10 +71,6 @@ github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm
github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
github.com/glebarez/go-sqlite v1.21.2 h1:3a6LFC4sKahUunAmynQKLZceZCOzUthkRkEAl9gAXWo=
github.com/glebarez/go-sqlite v1.21.2/go.mod h1:sfxdZyhQjTM2Wry3gVYWaW072Ri1WMdWJi0k6+3382k=
github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw=
github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ=
github.com/go-asn1-ber/asn1-ber v1.5.4 h1:vXT6d/FNDiELJnLb6hGNa309LMsrCoYFvpwHDF0+Y1A=
github.com/go-asn1-ber/asn1-ber v1.5.4/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
@@ -130,8 +120,6 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8 h1:n6vlPhxsA+BW/XsS5+uqi7GyzaLa5MH7qlSLBZtRdiA=
github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8/go.mod h1:Jh3hGz2jkYak8qXPD19ryItVnUgpgeqzdkY/D0EaeuA=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -170,8 +158,6 @@ github.com/jackc/pgproto3/v2 v2.3.1 h1:nwj7qwf0S+Q7ISFfBndqeLwSwxs+4DPsbRFjECT1Y
github.com/jackc/pgproto3/v2 v2.3.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg=
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
@@ -184,15 +170,10 @@ github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQ
github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs=
github.com/jackc/pgx/v4 v4.17.2 h1:0Ut0rpeKwvIVbMQ1KbMBU4h6wxehBI535LK6Flheh8E=
github.com/jackc/pgx/v4 v4.17.2/go.mod h1:lcxIZN44yMIrWI78a5CpucdD14hX0SBDbNRvjDBItsw=
github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs=
github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA=
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0=
github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
@@ -296,9 +277,6 @@ github.com/rakyll/statik v0.1.7 h1:OF3QCZUuyPxuGEP7B4ypUa7sB/iHtqOTDYZXGM8KOdQ=
github.com/rakyll/statik v0.1.7/go.mod h1:AlZONWzMtEnMs7W4e/1LURLiI49pIMmp6V9Unghqrcc=
github.com/redis/go-redis/v9 v9.0.2 h1:BA426Zqe/7r56kCcvxYLWe1mkaz71LKF77GwgFzSxfE=
github.com/redis/go-redis/v9 v9.0.2/go.mod h1:/xDTe9EF1LM61hek62Poq2nzQSGj0xSrEtEHbBQevps=
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/robfig/go-cache v0.0.0-20130306151617-9fc39e0dbf62/go.mod h1:65XQgovT59RWatovFwnwocoUxiI/eENTnOY5GK3STuY=
@@ -343,18 +321,14 @@ github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/toolkits/pkg v1.3.8 h1:2yamC20c5mHRtbcGiLY99Lm/2mVitFn6onE8KKvMT1o=
github.com/toolkits/pkg v1.3.8/go.mod h1:M9ecwFGW1vxCTUFM9sr2ZjXSKb04N+1sTQ6SA3RNAIU=
github.com/toolkits/pkg v1.3.9 h1:ua4kVPNgsugBtZcd7kepbkuXoszzkifsG7P+Ku4nYVk=
github.com/toolkits/pkg v1.3.9/go.mod h1:M9ecwFGW1vxCTUFM9sr2ZjXSKb04N+1sTQ6SA3RNAIU=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/valyala/fastrand v1.1.0 h1:f+5HkLW4rsgzdNoleUOB69hyT9IlD2ZQh9GyDMfb5G8=
github.com/valyala/fastrand v1.1.0/go.mod h1:HWqCzkrkg6QXT8V2EXWvXCoow7vLwOFN002oeRzjapQ=
github.com/valyala/histogram v1.2.0 h1:wyYGAZZt3CpwUiIb9AU/Zbllg1llXyrtApRS815OLoQ=
github.com/valyala/histogram v1.2.0/go.mod h1:Hb4kBwb4UxsaNbbbh+RRz8ZR6pdodR57tzWUS3BUzXY=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
@@ -396,8 +370,8 @@ golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 h1:MGwJjxBy0HJshjDNfLsYO8xppfqWlA5ZT9OhtUUhTNw=
golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
golang.org/x/image v0.13.0/go.mod h1:6mmbMOeV28HuMTgA6OSRkdXKYw/t5W9Uwn2Yv1r3Yxk=
@@ -433,8 +407,8 @@ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -457,10 +431,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -478,8 +450,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
@@ -530,23 +502,11 @@ gorm.io/driver/mysql v1.4.4 h1:MX0K9Qvy0Na4o7qSC/YI7XxqUw5KDw01umqgID+svdQ=
gorm.io/driver/mysql v1.4.4/go.mod h1:BCg8cKI+R0j/rZRQxeKis/forqRwRSYOR8OM3Wo6hOM=
gorm.io/driver/postgres v1.4.5 h1:mTeXTTtHAgnS9PgmhN2YeUbazYpLhUI1doLnw42XUZc=
gorm.io/driver/postgres v1.4.5/go.mod h1:GKNQYSJ14qvWkvPwXljMGehpKrhlDNsqYRr5HnYGncg=
gorm.io/driver/postgres v1.5.11 h1:ubBVAfbKEUld/twyKZ0IYn9rSQh448EdelLYk9Mv314=
gorm.io/driver/postgres v1.5.11/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
gorm.io/driver/sqlite v1.5.5 h1:7MDMtUZhV065SilG62E0MquljeArQZNfJnjd9i9gx3E=
gorm.io/driver/sqlite v1.5.5/go.mod h1:6NgQ7sQWAIFsPrJJl1lSNSu2TABh0ZZ/zm5fosATavE=
gorm.io/gorm v1.23.8/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
gorm.io/gorm v1.24.1-0.20221019064659-5dd2bb482755/go.mod h1:DVrVomtaYTbqs7gB/x2uVvqnXzv0nqjB396B8cG4dBA=
gorm.io/gorm v1.25.7 h1:VsD6acwRjz2zFxGO50gPO6AkNs7KKnvfzUjHQhZDz/A=
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8=
gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde h1:9DShaph9qhkIYw7QF91I/ynrr4cOO2PZra2PFD7Mfeg=
gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
modernc.org/libc v1.22.5 h1:91BNch/e5B0uPbJFgqbxXuOnxBQjlS//icfQEGmvyjE=
modernc.org/libc v1.22.5/go.mod h1:jj+Z7dTNX8fBScMVNRAYZ/jF91K8fdT2hYMThc3YjBY=
modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ=
modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
modernc.org/memory v1.5.0 h1:N+/8c5rE6EqugZwHii4IFsaJ7MUhoWX07J5tC/iI5Ds=
modernc.org/memory v1.5.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU=
modernc.org/sqlite v1.23.1 h1:nrSBg4aRQQwq59JpvGEQ15tNxoO5pX/kUjcRNwSAGQM=
modernc.org/sqlite v1.23.1/go.mod h1:OrDj17Mggn6MhE+iPbBNf7RGKODDE9NFT0f3EwDzJqk=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

View File

Before

Width:  |  Height:  |  Size: 1.6 KiB

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

Before

Width:  |  Height:  |  Size: 377 KiB

After

Width:  |  Height:  |  Size: 377 KiB

View File

@@ -1,28 +1,25 @@
package memsto
import (
"crypto/tls"
"encoding/json"
"net/http"
"strings"
"sync"
"time"
"github.com/ccfos/nightingale/v6/pkg/tplx"
"github.com/BurntSushi/toml"
"github.com/ccfos/nightingale/v6/alert/aconf"
"github.com/ccfos/nightingale/v6/dumper"
"github.com/ccfos/nightingale/v6/models"
"github.com/ccfos/nightingale/v6/pkg/ctx"
"github.com/ccfos/nightingale/v6/pkg/poster"
"github.com/ccfos/nightingale/v6/pkg/tplx"
"github.com/BurntSushi/toml"
"github.com/toolkits/pkg/logger"
)
type NotifyConfigCacheType struct {
ctx *ctx.Context
ConfigCache *ConfigCache
webhooks map[string]*models.Webhook
webhooks []*models.Webhook
smtp aconf.SMTPConfig
script models.NotifyScript
@@ -50,7 +47,6 @@ func NewNotifyConfigCache(ctx *ctx.Context, configCache *ConfigCache) *NotifyCon
w := &NotifyConfigCacheType{
ctx: ctx,
ConfigCache: configCache,
webhooks: make(map[string]*models.Webhook),
}
w.SyncNotifyConfigs()
return w
@@ -89,60 +85,11 @@ func (w *NotifyConfigCacheType) syncNotifyConfigs() error {
}
if strings.TrimSpace(cval) != "" {
var webhooks []*models.Webhook
err = json.Unmarshal([]byte(cval), &webhooks)
err = json.Unmarshal([]byte(cval), &w.webhooks)
if err != nil {
dumper.PutSyncRecord("webhooks", start.Unix(), -1, -1, "failed to unmarshal configs.webhook: "+err.Error())
logger.Errorf("failed to unmarshal webhooks:%s error:%v", cval, err)
}
newWebhooks := make(map[string]*models.Webhook, len(webhooks))
for i := 0; i < len(webhooks); i++ {
if webhooks[i].Batch == 0 {
webhooks[i].Batch = 1000
}
if webhooks[i].Timeout == 0 {
webhooks[i].Timeout = 10
}
if webhooks[i].RetryCount == 0 {
webhooks[i].RetryCount = 10
}
if webhooks[i].RetryInterval == 0 {
webhooks[i].RetryInterval = 10
}
if webhooks[i].Client == nil {
transport := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: webhooks[i].SkipVerify},
}
if poster.UseProxy(webhooks[i].Url) {
transport.Proxy = http.ProxyFromEnvironment
}
webhooks[i].Client = &http.Client{
Timeout: time.Second * time.Duration(webhooks[i].Timeout),
Transport: transport,
}
}
newWebhooks[webhooks[i].Url] = webhooks[i]
}
for url, wh := range newWebhooks {
if oldWh, has := w.webhooks[url]; has && oldWh.Hash() != wh.Hash() {
w.webhooks[url] = wh
} else {
w.webhooks[url] = wh
}
}
for url := range w.webhooks {
if _, has := newWebhooks[url]; !has {
delete(w.webhooks, url)
}
}
}
dumper.PutSyncRecord("webhooks", start.Unix(), time.Since(start).Milliseconds(), len(w.webhooks), "success, webhooks:\n"+cval)
@@ -186,7 +133,7 @@ func (w *NotifyConfigCacheType) syncNotifyConfigs() error {
return nil
}
func (w *NotifyConfigCacheType) GetWebhooks() map[string]*models.Webhook {
func (w *NotifyConfigCacheType) GetWebhooks() []*models.Webhook {
w.RWMutex.RLock()
defer w.RWMutex.RUnlock()
return w.webhooks

View File

@@ -10,6 +10,7 @@ import (
"github.com/ccfos/nightingale/v6/pkg/ctx"
"github.com/ccfos/nightingale/v6/pkg/poster"
"github.com/ccfos/nightingale/v6/pushgw/pconf"
"github.com/robfig/cron/v3"
"github.com/jinzhu/copier"
"github.com/pkg/errors"
@@ -493,6 +494,27 @@ func (ar *AlertRule) Verify() error {
}
}
if err := ar.validateCronPattern(); err != nil {
return err
}
return nil
}
func (ar *AlertRule) validateCronPattern() error {
if ar.CronPattern == "" {
return nil
}
// 创建一个临时的 cron scheduler 来验证表达式
scheduler := cron.New(cron.WithSeconds())
// 尝试添加一个空函数来验证 cron 表达式
_, err := scheduler.AddFunc(ar.CronPattern, func() {})
if err != nil {
return fmt.Errorf("invalid cron pattern: %s, error: %v", ar.CronPattern, err)
}
return nil
}

View File

@@ -14,7 +14,6 @@ type BuiltinComponent struct {
Ident string `json:"ident" gorm:"type:varchar(191);not null;uniqueIndex:idx_ident,sort:asc"`
Logo string `json:"logo" gorm:"type:mediumtext;comment:'logo of component'"`
Readme string `json:"readme" gorm:"type:text;not null;comment:'readme of component'"`
Disabled int `json:"disabled" gorm:"type:int;not null;default:0;comment:'is disabled or not'"`
CreatedAt int64 `json:"created_at" gorm:"type:bigint;not null;default:0;comment:'create time'"`
CreatedBy string `json:"created_by" gorm:"type:varchar(191);not null;default:'';comment:'creator'"`
UpdatedAt int64 `json:"updated_at" gorm:"type:bigint;not null;default:0;comment:'update time'"`
@@ -26,17 +25,12 @@ type PostgresBuiltinComponent struct {
Ident string `json:"ident" gorm:"type:varchar(191);not null;uniqueIndex:idx_ident,sort:asc;comment:'identifier of component'"`
Logo string `json:"logo" gorm:"type:text;comment:'logo of component'"`
Readme string `json:"readme" gorm:"type:text;not null;comment:'readme of component'"`
Disabled int `json:"disabled" gorm:"type:int;not null;default:0;comment:'is disabled or not'"`
CreatedAt int64 `json:"created_at" gorm:"type:bigint;not null;default:0;comment:'create time'"`
CreatedBy string `json:"created_by" gorm:"type:varchar(191);not null;default:'';comment:'creator'"`
UpdatedAt int64 `json:"updated_at" gorm:"type:bigint;not null;default:0;comment:'update time'"`
UpdatedBy string `json:"updated_by" gorm:"type:varchar(191);not null;default:'';comment:'updater'"`
}
func (bc *PostgresBuiltinComponent) TableName() string {
return "builtin_components"
}
func (bc *BuiltinComponent) TableName() string {
return "builtin_components"
}
@@ -103,15 +97,12 @@ func BuiltinComponentDels(ctx *ctx.Context, ids []int64) error {
return DB(ctx).Where("id in ?", ids).Delete(new(BuiltinComponent)).Error
}
func BuiltinComponentGets(ctx *ctx.Context, query string, disabled int) ([]*BuiltinComponent, error) {
func BuiltinComponentGets(ctx *ctx.Context, query string) ([]*BuiltinComponent, error) {
session := DB(ctx)
if query != "" {
queryPattern := "%" + query + "%"
session = session.Where("ident LIKE ?", queryPattern)
}
if disabled == 0 || disabled == 1 {
session = session.Where("disabled = ?", disabled)
}
var lst []*BuiltinComponent

View File

@@ -168,7 +168,7 @@ func BuiltinPayloadComponents(ctx *ctx.Context, typ, cate string) (string, error
func InitBuiltinPayloads(ctx *ctx.Context) error {
var lst []*BuiltinPayload
components, err := BuiltinComponentGets(ctx, "", -1)
components, err := BuiltinComponentGets(ctx, "")
if err != nil {
return err
}

View File

@@ -130,7 +130,7 @@ func ConfigsGetAll(ctx *ctx.Context) ([]*Configs, error) { // select built-in ty
}
var lst []*Configs
err := DB(ctx).Model(&Configs{}).Select("id, ckey, cval").
err := DB(ctx).Model(&Configs{}).Select("ckey, cval").
Where("ckey!='' and external=? ", 0).Find(&lst).Error
if err != nil {
return nil, errors.WithMessage(err, "failed to query configs")

View File

@@ -1,89 +0,0 @@
package models
import (
"encoding/json"
"github.com/ccfos/nightingale/v6/pkg/ctx"
)
type DashAnnotation struct {
Id int64 `json:"id" gorm:"primaryKey"`
DashboardId int64 `json:"dashboard_id"`
PanelId string `json:"panel_id"`
Tags string `json:"-"`
TagsJSON []string `json:"tags" gorm:"-"`
Description string `json:"description"`
Config string `json:"config"`
TimeStart int64 `json:"time_start"`
TimeEnd int64 `json:"time_end"`
CreateAt int64 `json:"create_at"`
CreateBy string `json:"create_by"`
UpdateAt int64 `json:"update_at"`
UpdateBy string `json:"update_by"`
}
func (da *DashAnnotation) TableName() string {
return "dash_annotation"
}
func (da *DashAnnotation) DB2FE() error {
return json.Unmarshal([]byte(da.Tags), &da.TagsJSON)
}
func (da *DashAnnotation) FE2DB() error {
b, err := json.Marshal(da.TagsJSON)
if err != nil {
return err
}
da.Tags = string(b)
return nil
}
func (da *DashAnnotation) Add(ctx *ctx.Context) error {
if err := da.FE2DB(); err != nil {
return err
}
return Insert(ctx, da)
}
func (da *DashAnnotation) Update(ctx *ctx.Context) error {
if err := da.FE2DB(); err != nil {
return err
}
return DB(ctx).Model(da).Select("dashboard_id", "panel_id", "tags", "description", "config", "time_start", "time_end", "update_at", "update_by").Updates(da).Error
}
func DashAnnotationDel(ctx *ctx.Context, id int64) error {
return DB(ctx).Where("id = ?", id).Delete(&DashAnnotation{}).Error
}
func DashAnnotationGet(ctx *ctx.Context, where string, args ...interface{}) (*DashAnnotation, error) {
var lst []*DashAnnotation
err := DB(ctx).Where(where, args...).Find(&lst).Error
if err != nil {
return nil, err
}
if len(lst) == 0 {
return nil, nil
}
err = lst[0].DB2FE()
return lst[0], err
}
func DashAnnotationGets(ctx *ctx.Context, dashboardId int64, from, to int64, limit int) ([]DashAnnotation, error) {
session := DB(ctx).Where("dashboard_id = ? AND time_start <= ? AND time_end >= ?", dashboardId, to, from)
var lst []DashAnnotation
err := session.Order("id").Limit(limit).Find(&lst).Error
if err != nil {
return nil, err
}
for i := 0; i < len(lst); i++ {
lst[i].DB2FE()
}
return lst, nil
}

View File

@@ -67,7 +67,7 @@ func MigrateTables(db *gorm.DB) error {
&TaskRecord{}, &ChartShare{}, &Target{}, &Configs{}, &Datasource{}, &NotifyTpl{},
&Board{}, &BoardBusigroup{}, &Users{}, &SsoConfig{}, &models.BuiltinMetric{},
&models.MetricFilter{}, &models.NotificaitonRecord{},
&models.TargetBusiGroup{}, &EsIndexPatternMigrate{}, &DashAnnotation{}}
&models.TargetBusiGroup{}, &EsIndexPatternMigrate{}}
if isPostgres(db) {
dts = append(dts, &models.PostgresBuiltinComponent{})
@@ -327,22 +327,3 @@ type EsIndexPatternMigrate struct {
func (EsIndexPatternMigrate) TableName() string {
return "es_index_pattern"
}
// DashAnnotation is the gorm model for the dash_annotation table: a user
// annotation attached to a dashboard panel over a time interval.
type DashAnnotation struct {
	Id int64 `gorm:"column:id;primaryKey;autoIncrement"`
	DashboardId int64 `gorm:"column:dashboard_id;not null"`
	PanelId string `gorm:"column:panel_id;type:varchar(191);not null"`
	// Tags and Config are stored as raw text; presumably JSON-encoded — TODO confirm against DB2FE.
	Tags string `gorm:"column:tags;type:text"`
	Description string `gorm:"column:description;type:text"`
	Config string `gorm:"column:config;type:text"`
	// TimeStart/TimeEnd bound the annotated interval (unix timestamps, presumably seconds — confirm).
	TimeStart int64 `gorm:"column:time_start;not null;default:0"`
	TimeEnd int64 `gorm:"column:time_end;not null;default:0"`
	// Audit fields: creation/update time and the acting username.
	CreateAt int64 `gorm:"column:create_at;not null;default:0"`
	CreateBy string `gorm:"column:create_by;type:varchar(64);not null;default:''"`
	UpdateAt int64 `gorm:"column:update_at;not null;default:0"`
	UpdateBy string `gorm:"column:update_by;type:varchar(64);not null;default:''"`
}

// TableName pins the table name so gorm does not derive/pluralize it.
func (DashAnnotation) TableName() string {
	return "dash_annotation"
}

View File

@@ -1,12 +1,5 @@
package models
import (
"fmt"
"net/http"
"github.com/toolkits/pkg/str"
)
// Well-known configuration item identifiers for notification-related
// settings (presumably keys into the configs table — confirm at call sites).
const WEBHOOKKEY = "webhook"
const NOTIFYSCRIPT = "notify_script"
const NOTIFYCHANNEL = "notify_channel"
@@ -31,11 +24,6 @@ type Webhook struct {
RetryCount int `json:"retry_count"`
RetryInterval int `json:"retry_interval"`
Batch int `json:"batch"`
Client *http.Client `json:"-"`
}
// Hash returns an MD5 fingerprint over every configurable webhook field,
// presumably used to detect configuration changes cheaply — confirm at call
// sites. HeaderMap is rendered with %v, which fmt prints in sorted key order,
// so the digest is deterministic.
func (w *Webhook) Hash() string {
	return str.MD5(fmt.Sprintf("%d_%t_%s_%s_%s_%d_%v_%t_%s_%d_%d_%d", w.Type, w.Enable, w.Url, w.BasicAuthUser, w.BasicAuthPass, w.Timeout, w.HeaderMap, w.SkipVerify, w.Note, w.RetryCount, w.RetryInterval, w.Batch))
}
type NotifyScript struct {

View File

@@ -29,22 +29,16 @@ func LoadConfigByDir(configDir string, configPtr interface{}) error {
if err != nil {
return fmt.Errorf("failed to list files under: %s : %v", configDir, err)
}
found := false
s := NewFileScanner()
for _, fpath := range files {
switch {
case strings.HasSuffix(fpath, ".toml"):
found = true
s.Read(path.Join(configDir, fpath))
tBuf = append(tBuf, s.Data()...)
tBuf = append(tBuf, []byte("\n")...)
case strings.HasSuffix(fpath, ".json"):
found = true
loaders = append(loaders, &multiconfig.JSONLoader{Path: path.Join(configDir, fpath)})
case strings.HasSuffix(fpath, ".yaml") || strings.HasSuffix(fpath, ".yml"):
found = true
loaders = append(loaders, &multiconfig.YAMLLoader{Path: path.Join(configDir, fpath)})
}
if s.Err() != nil {
@@ -52,10 +46,6 @@ func LoadConfigByDir(configDir string, configPtr interface{}) error {
}
}
if !found {
return fmt.Errorf("fail to found config file, config dir path: %v", configDir)
}
if len(tBuf) != 0 {
loaders = append(loaders, &multiconfig.TOMLLoader{Reader: bytes.NewReader(tBuf)})
}

View File

@@ -9,23 +9,25 @@ import (
)
type Context struct {
DB *gorm.DB
CenterApi conf.CenterApi
Ctx context.Context
IsCenter bool
DB *gorm.DB
CenterApi conf.CenterApi
Ctx context.Context
IsCenter bool
DisableNotifyRecord bool
}
func NewContext(ctx context.Context, db *gorm.DB, isCenter bool, centerApis ...conf.CenterApi) *Context {
func NewContext(ctx context.Context, db *gorm.DB, isCenter bool, disableNotifyRecord bool, centerApis ...conf.CenterApi) *Context {
var api conf.CenterApi
if len(centerApis) > 0 {
api = centerApis[0]
}
return &Context{
Ctx: ctx,
DB: db,
CenterApi: api,
IsCenter: isCenter,
Ctx: ctx,
DB: db,
CenterApi: api,
IsCenter: isCenter,
DisableNotifyRecord: disableNotifyRecord,
}
}

View File

@@ -340,43 +340,42 @@ func (InitChartShare) TableOptions() string {
}
type InitAlertRule struct {
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:busi group id;index"`
Cate string `gorm:"size:128;not null"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null"`
Note string `gorm:"size:1024;not null;default:''"`
Prod string `gorm:"size:255;not null;default:''"`
Algorithm string `gorm:"size:255;not null;default:''"`
AlgoParams string `gorm:"size:255"`
Delay int32 `gorm:"not null;default:0"`
Severity int16 `gorm:"type:tinyint(1);not null;comment:1:Emergency 2:Warning 3:Notice"`
Disabled bool `gorm:"type:tinyint(1);not null;comment:0:enabled 1:disabled"`
PromForDuration int32 `gorm:"not null;comment:prometheus for, unit:s"`
RuleConfig string `gorm:"type:text;not null;comment:rule_config"`
PromQL string `gorm:"type:text;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
EnableStime string `gorm:"size:255;not null;default:'00:00'"`
EnableEtime string `gorm:"size:255;not null;default:'23:59'"`
EnableDaysOfWeek string `gorm:"size:255;not null;default:'';comment:split by space: 0 1 2 3 4 5 6"`
EnableInBg bool `gorm:"type:tinyint(1);not null;default:0;comment:1: only this bg 0: global"`
NotifyRecovered bool `gorm:"type:tinyint(1);not null;comment:whether notify when recovery"`
NotifyChannels string `gorm:"size:255;not null;default:'';comment:split by space: sms voice email dingtalk wecom"`
NotifyGroups string `gorm:"size:255;not null;default:'';comment:split by space: 233 43"`
NotifyRepeatStep int32 `gorm:"not null;default:0;comment:unit: min"`
NotifyMaxNumber int32 `gorm:"not null;default:0"`
RecoverDuration int32 `gorm:"not null;default:0;comment:unit: s"`
Callbacks string `gorm:"size:4096;not null;default:'';comment:split by space: http://a.com/api/x http://a.com/api/y"`
RunbookURL string `gorm:"size:4096"`
AppendTags string `gorm:"size:255;not null;default:'';comment:split by space: service=n9e mod=api"`
Annotations string `gorm:"type:text;not null;comment:annotations"`
ExtraConfig string `gorm:"type:text;not null;comment:extra_config"`
CreateAt int64 `gorm:"not null;default:0"`
CreateBy string `gorm:"size:64;not null;default:''"`
UpdateAt int64 `gorm:"not null;default:0;index"`
UpdateBy string `gorm:"size:64;not null;default:''"`
DatasourceQueries string `gorm:"type:text"`
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:busi group id;index"`
Cate string `gorm:"size:128;not null"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null"`
Note string `gorm:"size:1024;not null;default:''"`
Prod string `gorm:"size:255;not null;default:''"`
Algorithm string `gorm:"size:255;not null;default:''"`
AlgoParams string `gorm:"size:255"`
Delay int32 `gorm:"not null;default:0"`
Severity int16 `gorm:"type:tinyint(1);not null;comment:1:Emergency 2:Warning 3:Notice"`
Disabled bool `gorm:"type:tinyint(1);not null;comment:0:enabled 1:disabled"`
PromForDuration int32 `gorm:"not null;comment:prometheus for, unit:s"`
RuleConfig string `gorm:"type:text;not null;comment:rule_config"`
PromQL string `gorm:"type:text;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
EnableStime string `gorm:"size:255;not null;default:'00:00'"`
EnableEtime string `gorm:"size:255;not null;default:'23:59'"`
EnableDaysOfWeek string `gorm:"size:255;not null;default:'';comment:split by space: 0 1 2 3 4 5 6"`
EnableInBg bool `gorm:"type:tinyint(1);not null;default:0;comment:1: only this bg 0: global"`
NotifyRecovered bool `gorm:"type:tinyint(1);not null;comment:whether notify when recovery"`
NotifyChannels string `gorm:"size:255;not null;default:'';comment:split by space: sms voice email dingtalk wecom"`
NotifyGroups string `gorm:"size:255;not null;default:'';comment:split by space: 233 43"`
NotifyRepeatStep int32 `gorm:"not null;default:0;comment:unit: min"`
NotifyMaxNumber int32 `gorm:"not null;default:0"`
RecoverDuration int32 `gorm:"not null;default:0;comment:unit: s"`
Callbacks string `gorm:"size:4096;not null;default:'';comment:split by space: http://a.com/api/x http://a.com/api/y"`
RunbookURL string `gorm:"size:4096"`
AppendTags string `gorm:"size:255;not null;default:'';comment:split by space: service=n9e mod=api"`
Annotations string `gorm:"type:text;not null;comment:annotations"`
ExtraConfig string `gorm:"type:text;not null;comment:extra_config"`
CreateAt int64 `gorm:"not null;default:0"`
CreateBy string `gorm:"size:64;not null;default:''"`
UpdateAt int64 `gorm:"not null;default:0;index"`
UpdateBy string `gorm:"size:64;not null;default:''"`
}
func (InitAlertRule) TableName() string {
@@ -388,43 +387,42 @@ func (InitAlertRule) TableOptions() string {
}
type InitPostgresAlertRule struct {
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:busi group id;index"`
Cate string `gorm:"size:128;not null"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null"`
Note string `gorm:"size:1024;not null;default:''"`
Prod string `gorm:"size:255;not null;default:''"`
Algorithm string `gorm:"size:255;not null;default:''"`
AlgoParams string `gorm:"size:255"`
Delay int32 `gorm:"not null;default:0"`
Severity int16 `gorm:"type:smallint;not null;comment:1:Emergency 2:Warning 3:Notice"`
Disabled int16 `gorm:"type:smallint;not null;comment:0:enabled 1:disabled"`
PromForDuration int32 `gorm:"not null;comment:prometheus for, unit:s"`
RuleConfig string `gorm:"type:text;not null;comment:rule_config"`
PromQL string `gorm:"type:text;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
EnableStime string `gorm:"size:255;not null;default:'00:00'"`
EnableEtime string `gorm:"size:255;not null;default:'23:59'"`
EnableDaysOfWeek string `gorm:"size:255;not null;default:'';comment:split by space: 0 1 2 3 4 5 6"`
EnableInBg int16 `gorm:"type:smallint;not null;default:0;comment:1: only this bg 0: global"`
NotifyRecovered int16 `gorm:"type:smallint;not null;comment:whether notify when recovery"`
NotifyChannels string `gorm:"size:255;not null;default:'';comment:split by space: sms voice email dingtalk wecom"`
NotifyGroups string `gorm:"size:255;not null;default:'';comment:split by space: 233 43"`
NotifyRepeatStep int32 `gorm:"not null;default:0;comment:unit: min"`
NotifyMaxNumber int32 `gorm:"not null;default:0"`
RecoverDuration int32 `gorm:"not null;default:0;comment:unit: s"`
Callbacks string `gorm:"size:4096;not null;default:'';comment:split by space: http://a.com/api/x http://a.com/api/y"`
RunbookURL string `gorm:"size:4096"`
AppendTags string `gorm:"size:255;not null;default:'';comment:split by space: service=n9e mod=api"`
Annotations string `gorm:"type:text;not null;comment:annotations"`
ExtraConfig string `gorm:"type:text;not null;comment:extra_config"`
CreateAt int64 `gorm:"not null;default:0"`
CreateBy string `gorm:"size:64;not null;default:''"`
UpdateAt int64 `gorm:"not null;default:0;index"`
UpdateBy string `gorm:"size:64;not null;default:''"`
DatasourceQueries string `gorm:"type:text"`
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:busi group id;index"`
Cate string `gorm:"size:128;not null"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null"`
Note string `gorm:"size:1024;not null;default:''"`
Prod string `gorm:"size:255;not null;default:''"`
Algorithm string `gorm:"size:255;not null;default:''"`
AlgoParams string `gorm:"size:255"`
Delay int32 `gorm:"not null;default:0"`
Severity int16 `gorm:"type:smallint;not null;comment:1:Emergency 2:Warning 3:Notice"`
Disabled int16 `gorm:"type:smallint;not null;comment:0:enabled 1:disabled"`
PromForDuration int32 `gorm:"not null;comment:prometheus for, unit:s"`
RuleConfig string `gorm:"type:text;not null;comment:rule_config"`
PromQL string `gorm:"type:text;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
EnableStime string `gorm:"size:255;not null;default:'00:00'"`
EnableEtime string `gorm:"size:255;not null;default:'23:59'"`
EnableDaysOfWeek string `gorm:"size:255;not null;default:'';comment:split by space: 0 1 2 3 4 5 6"`
EnableInBg int16 `gorm:"type:smallint;not null;default:0;comment:1: only this bg 0: global"`
NotifyRecovered int16 `gorm:"type:smallint;not null;comment:whether notify when recovery"`
NotifyChannels string `gorm:"size:255;not null;default:'';comment:split by space: sms voice email dingtalk wecom"`
NotifyGroups string `gorm:"size:255;not null;default:'';comment:split by space: 233 43"`
NotifyRepeatStep int32 `gorm:"not null;default:0;comment:unit: min"`
NotifyMaxNumber int32 `gorm:"not null;default:0"`
RecoverDuration int32 `gorm:"not null;default:0;comment:unit: s"`
Callbacks string `gorm:"size:4096;not null;default:'';comment:split by space: http://a.com/api/x http://a.com/api/y"`
RunbookURL string `gorm:"size:4096"`
AppendTags string `gorm:"size:255;not null;default:'';comment:split by space: service=n9e mod=api"`
Annotations string `gorm:"type:text;not null;comment:annotations"`
ExtraConfig string `gorm:"type:text;not null;comment:extra_config"`
CreateAt int64 `gorm:"not null;default:0"`
CreateBy string `gorm:"size:64;not null;default:''"`
UpdateAt int64 `gorm:"not null;default:0;index"`
UpdateBy string `gorm:"size:64;not null;default:''"`
}
func (InitPostgresAlertRule) TableName() string {
@@ -614,23 +612,22 @@ func (InitPostgresMetricView) TableName() string {
}
type InitRecordingRule struct {
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:group_id;index"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null;comment:new metric name"`
Note string `gorm:"size:255;not null;comment:rule note"`
Disabled bool `gorm:"type:tinyint(1);not null;default:0;comment:0:enabled 1:disabled"`
PromQL string `gorm:"size:8192;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
CronPattern string `gorm:"size:255;default:'';comment:cron pattern"`
AppendTags string `gorm:"size:255;default:'';comment:split by space: service=n9e mod=api"`
QueryConfigs string `gorm:"type:text;not null;comment:query configs"`
CreateAt int64 `gorm:"default:0"`
CreateBy string `gorm:"size:64;default:''"`
UpdateAt int64 `gorm:"default:0;index"`
UpdateBy string `gorm:"size:64;default:''"`
DatasourceQueries string `gorm:"type:text"`
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:group_id;index"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null;comment:new metric name"`
Note string `gorm:"size:255;not null;comment:rule note"`
Disabled bool `gorm:"type:tinyint(1);not null;default:0;comment:0:enabled 1:disabled"`
PromQL string `gorm:"size:8192;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
CronPattern string `gorm:"size:255;default:'';comment:cron pattern"`
AppendTags string `gorm:"size:255;default:'';comment:split by space: service=n9e mod=api"`
QueryConfigs string `gorm:"type:text;not null;comment:query configs"`
CreateAt int64 `gorm:"default:0"`
CreateBy string `gorm:"size:64;default:''"`
UpdateAt int64 `gorm:"default:0;index"`
UpdateBy string `gorm:"size:64;default:''"`
}
func (InitRecordingRule) TableName() string {
@@ -642,23 +639,22 @@ func (InitRecordingRule) TableOptions() string {
}
type InitPostgresRecordingRule struct {
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:group_id;index"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null;comment:new metric name"`
Note string `gorm:"size:255;not null;comment:rule note"`
Disabled int16 `gorm:"type:smallint;not null;default:0;comment:0:enabled 1:disabled"`
PromQL string `gorm:"size:8192;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
CronPattern string `gorm:"size:255;default:'';comment:cron pattern"`
AppendTags string `gorm:"size:255;default:'';comment:split by space: service=n9e mod=api"`
QueryConfigs string `gorm:"type:text;not null;comment:query configs"`
CreateAt int64 `gorm:"default:0"`
CreateBy string `gorm:"size:64;default:''"`
UpdateAt int64 `gorm:"default:0;index"`
UpdateBy string `gorm:"size:64;default:''"`
DatasourceQueries string `gorm:"type:text"`
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:group_id;index"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null;comment:new metric name"`
Note string `gorm:"size:255;not null;comment:rule note"`
Disabled int16 `gorm:"type:smallint;not null;default:0;comment:0:enabled 1:disabled"`
PromQL string `gorm:"size:8192;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
CronPattern string `gorm:"size:255;default:'';comment:cron pattern"`
AppendTags string `gorm:"size:255;default:'';comment:split by space: service=n9e mod=api"`
QueryConfigs string `gorm:"type:text;not null;comment:query configs"`
CreateAt int64 `gorm:"default:0"`
CreateBy string `gorm:"size:64;default:''"`
UpdateAt int64 `gorm:"default:0;index"`
UpdateBy string `gorm:"size:64;default:''"`
}
func (InitPostgresRecordingRule) TableName() string {
@@ -1211,7 +1207,6 @@ type InitBuiltinMetric struct {
CreatedBy string `gorm:"size:191;not null;default:'';comment:creator"`
UpdatedAt int64 `gorm:"not null;default:0;comment:update time"`
UpdatedBy string `gorm:"size:191;not null;default:'';comment:updater"`
UUID int64 `gorm:"not null;default:0;comment:'uuid'"`
}
func (InitBuiltinMetric) TableName() string {
@@ -1235,7 +1230,6 @@ type InitSqliteBuiltinMetric struct {
CreatedBy string `gorm:"size:191;not null;default:'';comment:creator"`
UpdatedAt int64 `gorm:"not null;default:0;comment:update time"`
UpdatedBy string `gorm:"size:191;not null;default:'';comment:updater"`
UUID int64 `gorm:"not null;default:0;comment:'uuid'"`
}
func (InitSqliteBuiltinMetric) TableName() string {

View File

@@ -1,7 +1,6 @@
package ormx
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
@@ -11,43 +10,6 @@ import (
"gorm.io/gorm"
)
func TestCheckPostgresDatabaseExist(t *testing.T) {
tests := []struct {
name string
config DBConfig
}{
{
name: "MySQL",
config: DBConfig{
DBType: "mysql",
DSN: "root:1234@tcp(127.0.0.1:3306)/test?charset=utf8mb4&parseTime=True&loc=Local&allowNativePasswords=true",
},
},
{
name: "Postgres",
config: DBConfig{
DBType: "postgres",
DSN: "host=127.0.0.1 port=5432 user=root dbname=n9e_v6 password=1234 sslmode=disable",
},
},
{
name: "SQLite",
config: DBConfig{
DBType: "sqlite",
DSN: "./test.db",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
exist, err := checkPostgresDatabaseExist(tt.config)
fmt.Printf("exitst: %v", exist)
assert.NoError(t, err)
})
}
}
func TestDataBaseInit(t *testing.T) {
tests := []struct {
name string

View File

@@ -7,10 +7,10 @@ import (
"strings"
"time"
"github.com/glebarez/sqlite"
tklog "github.com/toolkits/pkg/logger"
"gorm.io/driver/mysql"
"gorm.io/driver/postgres"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"gorm.io/gorm/schema"
@@ -91,7 +91,7 @@ func createSqliteDatabase(dsn string, gconfig *gorm.Config) error {
if err != nil {
return fmt.Errorf("failed to open temporary connection: %v", err)
}
fmt.Println("sqlite file created")
return nil
@@ -110,7 +110,7 @@ func createPostgresDatabase(dsn string, gconfig *gorm.Config) error {
}
}
createDBQuery := fmt.Sprintf("CREATE DATABASE %s ENCODING='UTF8' LC_COLLATE='en_US.utf8' LC_CTYPE='en_US.utf8';", dbName)
createDBQuery := fmt.Sprintf("CREATE DATABASE %s ENCODING='UTF8' LC_COLLATE='en_US.UTF-8' LC_CTYPE='en_US.UTF-8';", dbName)
tempDialector := postgres.Open(connectionWithoutDB)
@@ -187,16 +187,18 @@ func checkSqliteDatabaseExist(c DBConfig) (bool, error) {
func checkPostgresDatabaseExist(c DBConfig) (bool, error) {
dsnParts := strings.Split(c.DSN, " ")
dbName := ""
dbpair := ""
for _, part := range dsnParts {
if strings.HasPrefix(part, "dbname=") {
dbName = part[strings.Index(part, "=")+1:]
dbpair = part
}
}
connectionStr := strings.Replace(c.DSN, dbpair, "dbname=postgres", 1)
dialector := postgres.Open(connectionStr)
dbName := ""
connectionWithoutDB := ""
for _, part := range dsnParts {
if strings.HasPrefix(part, "dbname=") {
dbName = part[strings.Index(part, "=")+1:]
} else {
connectionWithoutDB += part
connectionWithoutDB += " "
}
}
dialector := postgres.Open(connectionWithoutDB)
gconfig := &gorm.Config{
NamingStrategy: schema.NamingStrategy{
@@ -211,89 +213,89 @@ func checkPostgresDatabaseExist(c DBConfig) (bool, error) {
return false, fmt.Errorf("failed to open database: %v", err)
}
var databases []string
query := genQuery(c)
if err := db.Raw(query).Scan(&databases).Error; err != nil {
return false, fmt.Errorf("failed to query: %v", err)
}
var databases []string
query := genQuery(c)
if err := db.Raw(query).Scan(&databases).Error; err != nil {
return false, fmt.Errorf("failed to query: %v", err)
}
for _, database := range databases {
if database == dbName {
for _, database := range databases {
if database == dbName {
fmt.Println("Database exist")
return true, nil
}
}
return true, nil
}
}
return false, nil
return false, nil
}
func checkMysqlDatabaseExist(c DBConfig) (bool, error) {
dsnParts := strings.SplitN(c.DSN, "/", 2)
if len(dsnParts) != 2 {
return false, fmt.Errorf("failed to parse DSN: %s", c.DSN)
}
if len(dsnParts) != 2 {
return false, fmt.Errorf("failed to parse DSN: %s", c.DSN)
}
connectionInfo := dsnParts[0]
dbInfo := dsnParts[1]
dbName := dbInfo
connectionInfo := dsnParts[0]
dbInfo := dsnParts[1]
dbName := dbInfo
queryIndex := strings.Index(dbInfo, "?")
if queryIndex != -1 {
dbName = dbInfo[:queryIndex]
} else {
return false, fmt.Errorf("failed to parse database name from DSN: %s", c.DSN)
}
queryIndex := strings.Index(dbInfo, "?")
if queryIndex != -1 {
dbName = dbInfo[:queryIndex]
} else {
return false, fmt.Errorf("failed to parse database name from DSN: %s", c.DSN)
}
connectionWithoutDB := connectionInfo + "/?" + dbInfo[queryIndex+1:]
connectionWithoutDB := connectionInfo + "/?" + dbInfo[queryIndex+1:]
var dialector gorm.Dialector
switch strings.ToLower(c.DBType) {
case "mysql":
dialector = mysql.Open(connectionWithoutDB)
case "postgres":
dialector = postgres.Open(connectionWithoutDB)
default:
return false, fmt.Errorf("unsupported database type: %s", c.DBType)
}
var dialector gorm.Dialector
switch strings.ToLower(c.DBType) {
case "mysql":
dialector = mysql.Open(connectionWithoutDB)
case "postgres":
dialector = postgres.Open(connectionWithoutDB)
default:
return false, fmt.Errorf("unsupported database type: %s", c.DBType)
}
gconfig := &gorm.Config{
NamingStrategy: schema.NamingStrategy{
TablePrefix: c.TablePrefix,
SingularTable: true,
},
Logger: gormLogger,
}
gconfig := &gorm.Config{
NamingStrategy: schema.NamingStrategy{
TablePrefix: c.TablePrefix,
SingularTable: true,
},
Logger: gormLogger,
}
db, err := gorm.Open(dialector, gconfig)
if err != nil {
return false, fmt.Errorf("failed to open database: %v", err)
}
db, err := gorm.Open(dialector, gconfig)
if err != nil {
return false, fmt.Errorf("failed to open database: %v", err)
}
var databases []string
query := genQuery(c)
if err := db.Raw(query).Scan(&databases).Error; err != nil {
return false, fmt.Errorf("failed to query: %v", err)
}
var databases []string
query := genQuery(c)
if err := db.Raw(query).Scan(&databases).Error; err != nil {
return false, fmt.Errorf("failed to query: %v", err)
}
for _, database := range databases {
if database == dbName {
return true, nil
}
}
for _, database := range databases {
if database == dbName {
return true, nil
}
}
return false, nil
return false, nil
}
func genQuery(c DBConfig) string {
switch strings.ToLower(c.DBType) {
case "mysql":
return "SHOW DATABASES"
case "postgres":
return "SELECT datname FROM pg_database"
case "sqlite":
return ""
default:
return ""
case "mysql":
return "SHOW DATABASES"
case "postgres":
return "SELECT datname FROM pg_database"
case "sqlite":
return ""
default:
return ""
}
}

View File

@@ -1,181 +0,0 @@
package promql
import (
"regexp"
"strings"
"github.com/VictoriaMetrics/metricsql"
"github.com/prometheus/prometheus/promql/parser"
)
// SplitBinaryOp parses a MetricsQL expression and returns the distinct
// sub-expressions collected by ParseExpr after splitting on binary operators.
// The result order is unspecified (map iteration).
func SplitBinaryOp(code string) ([]string, error) {
	var parts []string
	expr, err := metricsql.Parse(code)
	if err != nil {
		return parts, err
	}
	seen := make(map[string]struct{})
	ParseExpr(expr, false, seen)
	for part := range seen {
		parts = append(parts, part)
	}
	return parts, nil
}
// GetMetric parses a PromQL query and returns, for every vector selector,
// the reconstructed selector string keyed by its metric name, e.g.
// {"m": `m{job="x"}`}. A selector without extra labels maps to its bare name.
func GetMetric(ql string) (map[string]string, error) {
	metrics := make(map[string]string)
	expr, err := parser.ParseExpr(ql)
	if err != nil {
		return metrics, err
	}
	for _, selector := range parser.ExtractSelectors(expr) {
		var name string
		var matchers []string
		for _, lm := range selector {
			if lm.Name == "__name__" {
				name = lm.Value
				continue
			}
			// Rebuild the matcher text: name, operator, quoted value.
			matchers = append(matchers, lm.Name+lm.Type.String()+"\""+lm.Value+"\"")
		}
		if len(matchers) == 0 {
			metrics[name] = name
		} else {
			metrics[name] = name + "{" + strings.Join(matchers, ",") + "}"
		}
	}
	return metrics, nil
}
// GetLabels parses a PromQL query and returns every label matcher's name and
// value, excluding the reserved __name__ label. On parse failure the empty
// map is returned along with the error.
func GetLabels(ql string) (map[string]string, error) {
	labels := make(map[string]string)
	expr, err := parser.ParseExpr(ql)
	if err != nil {
		return labels, err
	}
	for _, selector := range parser.ExtractSelectors(expr) {
		for _, matcher := range selector {
			if matcher.Name == "__name__" {
				continue
			}
			labels[matcher.Name] = matcher.Value
		}
	}
	return labels, nil
}
// GetLabelsAndMetricName parses a PromQL query and returns both the label
// matchers (name -> value, excluding __name__) and the metric name taken from
// the __name__ matcher. With multiple selectors the last metric name wins.
func GetLabelsAndMetricName(ql string) (map[string]string, string, error) {
	labels := make(map[string]string)
	var metricName string
	expr, err := parser.ParseExpr(ql)
	if err != nil {
		return labels, metricName, err
	}
	for _, selector := range parser.ExtractSelectors(expr) {
		for _, matcher := range selector {
			if matcher.Name == "__name__" {
				metricName = matcher.Value
				continue
			}
			labels[matcher.Name] = matcher.Value
		}
	}
	return labels, metricName, nil
}
// Label is a parsed PromQL label matcher: the label name, its value, and the
// match operator as a string (e.g. "=", "!=", "=~").
type Label struct {
	Name string
	Value string
	Op string
}
// GetLabelsAndMetricNameWithReplace parses a PromQL query that may contain a
// placeholder token rep (e.g. "$var"), which would otherwise break the parser.
// The token is masked as "____" before parsing and restored in the returned
// values. Range selectors like "[2m]" are stripped first (removeBrackets) so
// the expression parses as an instant query. Metric names that still carry a
// variable are skipped.
func GetLabelsAndMetricNameWithReplace(ql string, rep string) (map[string]Label, string, error) {
	labels := make(map[string]Label)
	var metricName string

	masked := removeBrackets(strings.ReplaceAll(ql, rep, "____"))
	expr, err := parser.ParseExpr(masked)
	if err != nil {
		return labels, metricName, err
	}

	for _, selector := range parser.ExtractSelectors(expr) {
		for _, matcher := range selector {
			// Restore the original placeholder in the matcher value.
			value := strings.ReplaceAll(matcher.Value, "____", rep)
			if matcher.Name == "__name__" {
				// NOTE(review): the variable check is hard-coded to "$" rather
				// than rep — confirm this is intended when rep != "$".
				if strings.Contains(value, "$") {
					continue
				}
				metricName = value
				continue
			}
			labels[matcher.Name] = Label{
				Name:  matcher.Name,
				Value: value,
				Op:    matcher.Type.String(),
			}
		}
	}
	return labels, metricName, nil
}
// GetFirstMetric parses a PromQL query and returns the first metric name
// (__name__ value) it encounters, or the empty string when the query names no
// metric or fails to parse.
func GetFirstMetric(ql string) (string, error) {
	expr, err := parser.ParseExpr(ql)
	if err != nil {
		return "", err
	}
	for _, selector := range parser.ExtractSelectors(expr) {
		for _, matcher := range selector {
			if matcher.Name == "__name__" {
				return matcher.Value, nil
			}
		}
	}
	return "", nil
}
// rangeSelectorRe matches PromQL range selectors such as "[2m]".
// Compiled once at package scope instead of on every call.
var rangeSelectorRe = regexp.MustCompile(`\[[^\]]*\]`)

// removeBrackets strips range selectors ("[...]") from a PromQL expression so
// it can be parsed as an instant query. Expressions built on range-vector
// functions (rate, increase, *_over_time, predict_linear, resets, changes,
// holt_winters, delta, deriv) are returned unchanged, since their brackets
// are semantically required.
func removeBrackets(promql string) string {
	// Substring checks, not exact function matches: e.g. "irate" also
	// contains "rate". That is fine here — both need their range selector.
	rangeFuncs := []string{
		"_over_time", "rate", "increase", "predict_linear", "resets",
		"changes", "holt_winters", "delta", "deriv",
	}
	for _, f := range rangeFuncs {
		if strings.Contains(promql, f) {
			return promql
		}
	}
	if !strings.Contains(promql, "[") {
		return promql
	}
	return rangeSelectorRe.ReplaceAllString(promql, "")
}

View File

@@ -1,218 +0,0 @@
package promql
import (
"reflect"
"testing"
)
// TestGetMetric verifies that GetMetric reconstructs the full selector
// string keyed by metric name.
func TestGetMetric(t *testing.T) {
	tests := []struct {
		name string
		ql string
		want map[string]string
		wantErr error
	}{
		{
			name: "Valid query with labels",
			ql: "metric_name{label1=\"value1\",label2=\"value2\"}",
			want: map[string]string{"metric_name": "metric_name{label1=\"value1\",label2=\"value2\"}"},
			wantErr: nil,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := GetMetric(tt.ql)
			// wantErr is always nil here; unexpected errors fail the case.
			if err != tt.wantErr && err != nil {
				t.Errorf("GetMetric() error = %v, wantErr %v ql:%s", err, tt.wantErr, tt.ql)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("GetMetric() = %v, want %v", got, tt.want)
			}
		})
	}
}
// TestGetLabels verifies label extraction, including values containing "$"
// placeholders and the empty-map result for a bare metric name.
func TestGetLabels(t *testing.T) {
	tests := []struct {
		name string
		ql string
		want map[string]string
		wantErr bool
	}{
		{
			name: "Valid query with multiple labels",
			ql: "metric_name{label1=\"value1\", label2=\"value2\"} > 3",
			want: map[string]string{"label1": "value1", "label2": "value2"},
		},
		{
			name: "Valid query with multiple labels",
			ql: "metric_name{label1=\"$value1\", label2=\"$value2\"} > 3",
			want: map[string]string{"label1": "$value1", "label2": "$value2"},
		},
		{
			// GetLabels returns a non-nil empty map, matching DeepEqual here.
			name: "Query without labels",
			ql: "metric_name",
			want: map[string]string{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := GetLabels(tt.ql)
			if (err != nil) != tt.wantErr {
				t.Errorf("GetLabels() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("GetLabels() = %v, want %v ql:%s", got, tt.want, tt.ql)
			}
		})
	}
}
// TestGetLabelsAndMetricNameWithReplace covers queries containing "$var"
// placeholders and range selectors, checking both the restored label values
// and the chosen metric name.
func TestGetLabelsAndMetricNameWithReplace(t *testing.T) {
	// Define test cases.
	tests := []struct {
		name string
		ql string
		rep string
		expectedLabels map[string]Label
		expectedMetricName string
		expectError bool
	}{
		{
			name: "正常情况",
			ql: `(snmp_arista_system_cpuuse{ent_descr="$ent_descr"} / 100 > $cpu_high_threshold[1m])`,
			rep: "$",
			expectedLabels: map[string]Label{
				"ent_descr": {Name: "ent_descr", Value: "$ent_descr", Op: "="},
			},
			expectedMetricName: "snmp_arista_system_cpuuse",
			expectError: false,
		},
		{
			name: "正常情况",
			ql: `rate(snmp_interface_incoming{agent_host='$agent_host',ifname='$ifname'}[2m]) * 8 / 10^9 > snmp_interface_speed{agent_host='$agent_host',ifname='$ifname'}/ 10^3 * $traffic_in and snmp_interface_speed{agent_host='$agent_host',ifname='$ifname'} > 0`,
			rep: "$",
			expectedLabels: map[string]Label{
				"agent_host": {Name: "agent_host", Value: "$agent_host", Op: "="},
				"ifname": {Name: "ifname", Value: "$ifname", Op: "="},
			},
			expectedMetricName: "snmp_interface_speed",
			expectError: false,
		},
		{
			name: "正常情况",
			ql: `rate(snmp_interface_incoming{agent_host='$agent_host',ifname='$ifname'}[2m]) * 8 / 10^9 > snmp_interface_speed{agent_host='$agent_host',ifname='$ifname'}/ 10^3 * $traffic_in`,
			rep: "$",
			expectedLabels: map[string]Label{
				"agent_host": {Name: "agent_host", Value: "$agent_host", Op: "="},
				"ifname": {Name: "ifname", Value: "$ifname", Op: "="},
			},
			expectedMetricName: "snmp_interface_speed",
			expectError: false,
		},
		{
			name: "正常情况",
			ql: `rate(snmp_interface_incoming{agent_host='$agent_host',ifname='$ifname'}[2m]) * 8 / 10^9 > 10`,
			rep: "$",
			expectedLabels: map[string]Label{
				"agent_host": {Name: "agent_host", Value: "$agent_host", Op: "="},
				"ifname": {Name: "ifname", Value: "$ifname", Op: "="},
			},
			expectedMetricName: "snmp_interface_incoming",
			expectError: false,
		},
		{
			name: "带有替换字符",
			ql: `rate(snmp_interface_outgoing{Role=~'ZRT.*',agent_host='$agent_host',ifname='$ifname'}[2m]) * 8 / 10^9 > snmp_interface_speed{Role=~'ZRT.*',agent_host='$agent_host',ifname='$ifname'}/ 10^3 * $outgoing_warning and snmp_interface_speed{Role=~'ZRT.*',agent_host='$agent_host',ifname='$ifname'} > 0`,
			rep: "$",
			expectedLabels: map[string]Label{
				"agent_host": {Name: "agent_host", Value: "$agent_host", Op: "="},
				"ifname": {Name: "ifname", Value: "$ifname", Op: "="},
				"Role": {Name: "Role", Value: "ZRT.*", Op: "=~"},
			},
			expectedMetricName: "snmp_interface_speed",
			expectError: false,
		},
		// More test cases...
		{
			// NOTE(review): these expectations look inconsistent — the query
			// carries label matchers yet expects an empty label map, and the
			// expected metric name does not appear in the query. Confirm
			// whether this case documents intended behavior or is stale.
			name: "告警规则支持变量",
			ql: `mem{test1="$test1", test2="$test2", test3="test3"} > $val`,
			rep: "$",
			expectedLabels: map[string]Label{},
			expectedMetricName: "snmp_interface_speed",
			expectError: false,
		},
	}
	// Run test cases.
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			labels, metricName, err := GetLabelsAndMetricNameWithReplace(tc.ql, tc.rep)
			if (err != nil) != tc.expectError {
				t.Errorf("ql:%s 测试 '%v' 发生错误: %v, 期望的错误状态: %v", tc.ql, tc.name, err, tc.expectError)
			}
			if !reflect.DeepEqual(labels, tc.expectedLabels) {
				t.Errorf("ql:%s 测试 '%v' 返回的标签不匹配: got %v, want %v", tc.ql, tc.name, labels, tc.expectedLabels)
			}
			if metricName != tc.expectedMetricName {
				t.Errorf("ql:%s 测试 '%v' 返回的度量名称不匹配: got %s, want %s", tc.ql, tc.name, metricName, tc.expectedMetricName)
			}
		})
	}
}
// TestSplitBinaryOp verifies that SplitBinaryOp breaks a query into its
// comparable sub-expressions:
//   - operands joined by logical operators (and/or/unless) are returned
//     separately, with the comparison threshold stripped;
//   - arithmetic sub-expressions stay grouped as a single unit;
//   - malformed input yields an error.
func TestSplitBinaryOp(t *testing.T) {
	tests := []struct {
		name    string
		code    string
		want    []string
		wantErr bool
	}{
		{
			name: "valid binary operation with spaces",
			code: "cpu_usage + memory_usage",
			want: []string{"cpu_usage + memory_usage"},
		},
		{
			// "and" is a logical operator, so each comparison's query
			// side is returned on its own.
			name: "logical and splits into operands",
			code: "cpu_usage > 0 and memory_usage>0",
			want: []string{"cpu_usage", "memory_usage"},
		},
		{
			// Arithmetic on the left of a comparison is kept as one unit.
			name: "arithmetic kept together before compare",
			code: "cpu_usage +1> 0",
			want: []string{"cpu_usage + 1"},
		},
		{
			name: "valid complex binary operation",
			code: "(cpu_usage + memory_usage) / 2",
			want: []string{"(cpu_usage + memory_usage) / 2"},
		},
		{
			name:    "invalid binary operation",
			code:    "cpu_usage + ",
			wantErr: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := SplitBinaryOp(tt.code)
			if (err != nil) != tt.wantErr {
				t.Errorf("SplitBinaryOp() code:%s error = %v, wantErr %v", tt.code, err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("SplitBinaryOp() got = %v, want %v", got, tt.want)
			}
		})
	}
}

View File

@@ -1,104 +0,0 @@
package promql
import (
"github.com/VictoriaMetrics/metricsql"
)
// copy from https://github.com/laixintao/promqlpy/blob/main/go/promql/promql.go
// ModifierExpr represents MetricsQL modifier such as `<op> (...)`.
// It mirrors metricsql's own modifier struct so that ParseExpr can
// convert one to the other with a plain type conversion.
type ModifierExpr struct {
// Op is modifier operation.
Op string `json:"op"`
// Args contains modifier args from parens.
Args []string `json:"args"`
}
// Expression is one node of a parsed query tree built by ParseExpr.
// A binary node (IsBinaryOp == true) carries Left/Right operands, the
// operator and its modifiers; a leaf node carries only Code.
type Expression struct {
// if true, all fields are set
// if false, then it's a normal expression, only `code` is set
IsBinaryOp bool `json:"is_binary_op"`
// Left and Right are the operand subtrees; unset on leaf nodes.
Left *Expression `json:"left"`
Right *Expression `json:"right"`
// Op is the binary operator, e.g. "and" or ">"; empty on leaf nodes.
Op string `json:"op"`
// GroupModifier contains modifier such as "on" or "ignoring".
GroupModifier ModifierExpr `json:"group_modifier"`
// JoinModifier contains modifier such as "group_left" or "group_right".
JoinModifier ModifierExpr `json:"join_modifier"`
// Code is the textual form of this (sub)expression.
Code string `json:"code"`
}
// Operator tables consulted by ParseExpr when walking a binary
// expression tree.
var (
	// compareOps holds the comparison operators that separate a query
	// from its threshold (e.g. `metric > 10`).
	compareOps = map[string]bool{
		"==": true,
		"!=": true,
		">":  true,
		"<":  true,
		">=": true,
		"<=": true,
	}

	// logicalOps holds the set-style operators that join whole
	// sub-expressions.
	logicalOps = map[string]bool{
		"and":    true,
		"or":     true,
		"unless": true,
	}
)
// ParseExpr recursively converts a metricsql AST node into an
// *Expression tree.
//
// mustBeExpression short-circuits the walk: when true, the caller has
// already classified this subtree as a plain (non-splittable)
// expression. Example:
//
//	(a > 10) > b
//
// here `a > 10` is an expression compared against b.
//
// Side effect on m: the code of every comparison's left-hand side is
// recorded as a key; a query containing no comparison at all records
// itself once instead.
func ParseExpr(expr metricsql.Expr, mustBeExpression bool, m map[string]struct{}) *Expression {
	code := string(expr.AppendString(nil))

	// The caller already decided this subtree is a plain expression.
	if mustBeExpression {
		return &Expression{
			Code:       code,
			IsBinaryOp: false,
		}
	}

	if bop, ok := expr.(*metricsql.BinaryOpExpr); ok {
		switch {
		case logicalOps[bop.Op]:
			// and/or/unless: either side may itself contain
			// comparisons, so keep walking both operands.
			return &Expression{
				Left:          ParseExpr(bop.Left, false, m),
				Right:         ParseExpr(bop.Right, false, m),
				GroupModifier: ModifierExpr(bop.GroupModifier),
				JoinModifier:  ModifierExpr(bop.JoinModifier),
				Op:            bop.Op,
				Code:          code,
				IsBinaryOp:    true,
			}
		case compareOps[bop.Op]:
			// A comparison ends the splitting: record the query side
			// and treat both operands as opaque expressions.
			m[string(bop.Left.AppendString(nil))] = struct{}{}
			return &Expression{
				Left:          ParseExpr(bop.Left, true, m),
				Right:         ParseExpr(bop.Right, true, m),
				GroupModifier: ModifierExpr(bop.GroupModifier),
				JoinModifier:  ModifierExpr(bop.JoinModifier),
				Op:            bop.Op,
				Code:          code,
				IsBinaryOp:    true,
			}
		}
		// Arithmetic operators (+, -, * etc.) fall through and are
		// still treated as part of a normal expression.
	}

	// A query with no comparison anywhere contributes itself once.
	if len(m) == 0 {
		m[code] = struct{}{}
	}

	return &Expression{
		Code:       code,
		IsBinaryOp: false,
	}
}

View File

@@ -24,10 +24,8 @@ type Pushgw struct {
}
type WriterGlobalOpt struct {
QueueMaxSize int
QueuePopSize int
AllQueueMaxSize int
AllQueueMaxSizeInterval int
QueueMaxSize int
QueuePopSize int
}
type WriterOptions struct {
@@ -79,14 +77,6 @@ func (p *Pushgw) PreCheck() {
p.WriterOpt.QueuePopSize = 1000
}
if p.WriterOpt.AllQueueMaxSize <= 0 {
p.WriterOpt.AllQueueMaxSize = 10000000
}
if p.WriterOpt.AllQueueMaxSizeInterval <= 0 {
p.WriterOpt.AllQueueMaxSizeInterval = 200
}
if p.WriteConcurrency <= 0 {
p.WriteConcurrency = 5000
}

View File

@@ -114,7 +114,7 @@ func (rt *Router) remoteWrite(c *gin.Context) {
var (
ignoreIdent = ginx.QueryBool(c, "ignore_ident", false)
ignoreHost = ginx.QueryBool(c, "ignore_host", true) // 默认值改成 true要不然答疑成本太高。发版的时候通知 telegraf 用户,让他们设置 ignore_host=false
ignoreHost = ginx.QueryBool(c, "ignore_host", false)
ids = make(map[string]struct{})
)

View File

@@ -8,7 +8,6 @@ import (
"net/http"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/ccfos/nightingale/v6/pkg/fasttime"
@@ -139,10 +138,9 @@ func (w WriterType) Post(req []byte, headers ...map[string]string) error {
}
type WritersType struct {
pushgw pconf.Pushgw
backends map[string]WriterType
queues map[string]*IdentQueue
allQueueLen atomic.Value
pushgw pconf.Pushgw
backends map[string]WriterType
queues map[string]*IdentQueue
sync.RWMutex
}
@@ -162,30 +160,14 @@ func (ws *WritersType) ReportQueueStats(ident string, identQueue *IdentQueue) (i
}
}
func (ws *WritersType) SetAllQueueLen() {
for {
curMetricLen := 0
ws.RLock()
for _, q := range ws.queues {
curMetricLen += q.list.Len()
}
ws.RUnlock()
ws.allQueueLen.Store(curMetricLen)
time.Sleep(time.Duration(ws.pushgw.WriterOpt.AllQueueMaxSizeInterval) * time.Millisecond)
}
}
func NewWriters(pushgwConfig pconf.Pushgw) *WritersType {
writers := &WritersType{
backends: make(map[string]WriterType),
queues: make(map[string]*IdentQueue),
pushgw: pushgwConfig,
allQueueLen: atomic.Value{},
backends: make(map[string]WriterType),
queues: make(map[string]*IdentQueue),
pushgw: pushgwConfig,
}
writers.Init()
go writers.SetAllQueueLen()
go writers.CleanExpQueue()
return writers
}
@@ -235,13 +217,6 @@ func (ws *WritersType) PushSample(ident string, v interface{}) {
}
identQueue.ts = time.Now().Unix()
curLen := ws.allQueueLen.Load().(int)
if curLen > ws.pushgw.WriterOpt.AllQueueMaxSize {
logger.Warningf("Write %+v full, metric count over limit: %d", v, curLen)
CounterPushQueueErrorTotal.WithLabelValues(ident).Inc()
return
}
succ := identQueue.list.PushFront(v)
if !succ {
logger.Warningf("Write channel(%s) full, current channel size: %d", ident, identQueue.list.Len())
@@ -270,7 +245,6 @@ func (ws *WritersType) StartConsumer(identQueue *IdentQueue) {
func (ws *WritersType) Init() error {
opts := ws.pushgw.Writers
ws.allQueueLen.Store(0)
for i := 0; i < len(opts); i++ {
tlsConf, err := opts[i].ClientConfig.TLSConfig()