Compare commits

..

30 Commits

Author SHA1 Message Date
ning
640e914420 code refactor 2025-01-03 15:40:43 +08:00
ning
6d6dee160b code refactor 2025-01-03 14:16:26 +08:00
ning
473a00bbaa add dash-annotaion api 2025-01-03 12:10:08 +08:00
flashbo
b0131a3799 feat: support setting builtin component to disabled (#2406) 2025-01-02 11:11:54 +08:00
Yening Qin
cbb03a7c63 refactor: optimize enum type for alert rule with var
Co-authored-by: Xu Bin <140785332+Reditiny@users.noreply.github.com>
2024-12-28 22:18:39 +08:00
ning
080d412124 docs: add sql 2024-12-26 14:29:50 +08:00
ning
752e02f32d docs: add sql 2024-12-26 14:26:15 +08:00
CRISPpp
e05d59d72a refactor: update target update group part (#2388) 2024-12-25 15:35:19 +08:00
Yening Qin
854e30551a fix: docker compose of postgres init error (#2370) (#2392)
Co-authored-by: CRISPpp <78430796+CRISPpp@users.noreply.github.com>
2024-12-24 15:55:03 +08:00
Yening Qin
0b6dc5beba refactor get user from context (#2391)
Co-authored-by: flashbo <36443248+lwb0214@users.noreply.github.com>
2024-12-24 15:52:55 +08:00
ning
8685a95fa5 Merge branch 'main' of github.com:ccfos/nightingale 2024-12-24 15:40:22 +08:00
ning
7ca7fd8d66 refactor: event set AnnotationsJSON 2024-12-24 15:40:08 +08:00
Yening Qin
1b5dc81b6c fix: the dedup logic when adding tags to target (#2386)
Co-authored-by: flashbo <36443248+lwb0214@users.noreply.github.com>
2024-12-24 11:48:58 +08:00
Ulric Qin
04495f0892 set ignore_host to true 2024-12-20 18:10:48 +08:00
Yening Qin
8158ce1b90 refactor: global webhook add env proxy (#2375)
Co-authored-by: Xu Bin <140785332+Reditiny@users.noreply.github.com>
2024-12-20 14:21:47 +08:00
Yening Qin
a43952e168 refactor: es_index_pattern add cross_cluster_enabled (#2372) 2024-12-19 14:12:27 +08:00
Yening Qin
5702fc81d0 refactor: group delete check (#2368)
Co-authored-by: Xu Bin <140785332+Reditiny@users.noreply.github.com>
2024-12-18 17:00:18 +08:00
Xu Bin
7cc65a2ca7 refactor: add id for configsGetAll (#2361) 2024-12-16 20:38:53 +08:00
ning
7bb6c6541a chore: uodate gomod 2024-12-15 19:42:00 +08:00
ning
8b4cfe65e3 Merge branch 'main' of github.com:ccfos/nightingale 2024-12-13 10:56:27 +08:00
ning
7227de8c22 docs: update migrate.sql 2024-12-13 10:56:15 +08:00
CRISPpp
069e267af8 docs: update sqlite.sql (#2356) 2024-12-13 10:18:59 +08:00
ning
7c5c9a95c3 refactor: change sqlite driver 2024-12-12 21:32:51 +08:00
ning
e3da7f344b docs: update goreleaser.yaml 2024-12-12 21:12:57 +08:00
Yening Qin
dd741a177f docs: rename es integration 2024-12-12 19:27:36 +08:00
ning
4fdd25f020 docs: set HTTP.APIForService.Enable to false 2024-12-12 19:24:07 +08:00
Yening Qin
62350bfbc6 fix: alert rule with var (#2357) 2024-12-12 16:59:09 +08:00
CRISPpp
5ee1baaf07 feat: add config dir and config file check (#2350)
Co-authored-by: Yening Qin <710leo@gmail.com>
2024-12-12 13:24:11 +08:00
Xu Bin
fa12889f06 fix: alert rule check with var when not exact match (#2354) 2024-12-12 11:04:48 +08:00
Yening Qin
39306a5bf0 refactor: optimize webhook send (#2352) 2024-12-11 17:51:20 +08:00
47 changed files with 1385 additions and 276 deletions

View File

@@ -20,6 +20,7 @@ import (
"github.com/ccfos/nightingale/v6/pkg/hash"
"github.com/ccfos/nightingale/v6/pkg/parser"
promsdk "github.com/ccfos/nightingale/v6/pkg/prom"
promql2 "github.com/ccfos/nightingale/v6/pkg/promql"
"github.com/ccfos/nightingale/v6/pkg/unit"
"github.com/ccfos/nightingale/v6/prom"
"github.com/ccfos/nightingale/v6/tdengine"
@@ -59,6 +60,10 @@ const (
QUERY_DATA = "query_data"
)
const (
JoinMark = "@@"
)
type JoinType string
const (
@@ -252,8 +257,8 @@ func (arw *AlertRuleWorker) GetPromAnomalyPoint(ruleConfig string) ([]models.Ano
if query.VarEnabled {
var anomalyPoints []models.AnomalyPoint
if hasLabelLossAggregator(query) {
// 若有聚合函数则需要先填充变量然后查询,这个方式效率较低
if hasLabelLossAggregator(query) || notExactMatch(query) {
// 若有聚合函数或非精确匹配则需要先填充变量然后查询,这个方式效率较低
anomalyPoints = arw.VarFillingBeforeQuery(query, readerClient)
} else {
// 先查询再过滤变量,效率较高,但无法处理有聚合函数的情况
@@ -376,7 +381,7 @@ func (arw *AlertRuleWorker) VarFillingAfterQuery(query models.PromQuery, readerC
}
seqVals := getSamples(value)
// 得到参数变量的所有组合
paramPermutation, err := arw.getParamPermutation(param, ParamKeys)
paramPermutation, err := arw.getParamPermutation(param, ParamKeys, varToLabel, query.PromQl, readerClient)
if err != nil {
logger.Errorf("rule_eval:%s, paramPermutation error:%v", arw.Key(), err)
continue
@@ -391,8 +396,8 @@ func (arw *AlertRuleWorker) VarFillingAfterQuery(query models.PromQuery, readerC
curRealQuery = fillVar(curRealQuery, paramKey, val)
}
if _, ok := paramPermutation[strings.Join(cur, "-")]; ok {
anomalyPointsMap[strings.Join(cur, "-")] = models.AnomalyPoint{
if _, ok := paramPermutation[strings.Join(cur, JoinMark)]; ok {
anomalyPointsMap[strings.Join(cur, JoinMark)] = models.AnomalyPoint{
Key: seqVals[i].Metric.String(),
Timestamp: seqVals[i].Timestamp.Unix(),
Value: float64(seqVals[i].Value),
@@ -401,7 +406,7 @@ func (arw *AlertRuleWorker) VarFillingAfterQuery(query models.PromQuery, readerC
Query: curRealQuery,
}
// 生成异常点后,删除该参数组合
delete(paramPermutation, strings.Join(cur, "-"))
delete(paramPermutation, strings.Join(cur, JoinMark))
}
}
@@ -497,7 +502,7 @@ func removeVal(promql string) string {
}
// 获取参数变量的所有组合
func (arw *AlertRuleWorker) getParamPermutation(paramVal map[string]models.ParamQuery, paramKeys []string) (map[string]struct{}, error) {
func (arw *AlertRuleWorker) getParamPermutation(paramVal map[string]models.ParamQuery, paramKeys []string, varToLabel map[string]string, originPromql string, readerClient promsdk.API) (map[string]struct{}, error) {
// 参数变量查询,得到参数变量值
paramMap := make(map[string][]string)
@@ -529,7 +534,15 @@ func (arw *AlertRuleWorker) getParamPermutation(paramVal map[string]models.Param
if err != nil {
logger.Errorf("query:%s fail to unmarshalling into string slice, error:%v", paramQuery.Query, err)
}
params = query
if len(query) == 0 {
paramsKeyAllLabel, err := getParamKeyAllLabel(varToLabel[paramKey], originPromql, readerClient)
if err != nil {
logger.Errorf("rule_eval:%s, query:%v, fail to getParamKeyAllLabel, error:%v", arw.Key(), paramQuery.Query, err)
}
params = paramsKeyAllLabel
} else {
params = query
}
default:
return nil, fmt.Errorf("unknown param type: %s", paramQuery.ParamType)
}
@@ -538,6 +551,7 @@ func (arw *AlertRuleWorker) getParamPermutation(paramVal map[string]models.Param
return nil, fmt.Errorf("param key: %s, params is empty", paramKey)
}
logger.Infof("rule_eval:%s paramKey: %s, params: %v", arw.Key(), paramKey, params)
paramMap[paramKey] = params
}
@@ -546,12 +560,63 @@ func (arw *AlertRuleWorker) getParamPermutation(paramVal map[string]models.Param
res := make(map[string]struct{})
for i := range permutation {
res[strings.Join(permutation[i], "-")] = struct{}{}
res[strings.Join(permutation[i], JoinMark)] = struct{}{}
}
return res, nil
}
// getParamKeyAllLabel queries the datasource for every existing value of the
// label that paramKey maps to. It parses promql (replacing unfilled "$var"
// matchers), rebuilds an instant query from the metric name plus the concrete
// label matchers, executes it, and returns the distinct values of the paramKey
// label found in the result.
func getParamKeyAllLabel(paramKey string, promql string, client promsdk.API) ([]string, error) {
	labels, metricName, err := promql2.GetLabelsAndMetricNameWithReplace(promql, "$")
	if err != nil {
		return nil, fmt.Errorf("promql:%s, get labels error:%v", promql, err)
	}

	// Keep only matchers with concrete values; a value starting with "$" is a
	// variable that has not been filled yet and must be dropped from the query.
	labelstrs := make([]string, 0, len(labels))
	for _, label := range labels {
		if strings.HasPrefix(label.Value, "$") {
			continue
		}
		labelstrs = append(labelstrs, label.Name+label.Op+label.Value)
	}

	pr := metricName + "{" + strings.Join(labelstrs, ",") + "}"
	value, _, err := client.Query(context.Background(), pr, time.Now())
	if err != nil {
		return nil, fmt.Errorf("promql: %s query error: %v", pr, err)
	}

	// Deduplicate the paramKey label values across all returned series.
	labelValuesMap := make(map[string]struct{})
	collect := func(metric model.Metric) {
		for labelName, labelValue := range metric {
			// only collect the label specified by paramKey
			if string(labelName) == paramKey {
				labelValuesMap[string(labelValue)] = struct{}{}
			}
		}
	}

	switch value.Type() {
	case model.ValVector:
		for _, sample := range value.(model.Vector) {
			collect(sample.Metric)
		}
	case model.ValMatrix:
		for _, series := range value.(model.Matrix) {
			collect(series.Metric)
		}
	}

	result := make([]string, 0, len(labelValuesMap))
	for labelValue := range labelValuesMap {
		result = append(result, labelValue)
	}
	return result, nil
}
func (arw *AlertRuleWorker) getHostIdents(paramQuery models.ParamQuery) ([]string, error) {
var params []string
q, _ := json.Marshal(paramQuery.Query)
@@ -1229,6 +1294,7 @@ func GetQueryRefAndUnit(query interface{}) (string, string, error) {
// 再查询得到满足值变量的所有结果加入异常点列表
// 参数变量的值不满足的组合,需要覆盖上层筛选中产生的异常点
func (arw *AlertRuleWorker) VarFillingBeforeQuery(query models.PromQuery, readerClient promsdk.API) []models.AnomalyPoint {
varToLabel := ExtractVarMapping(query.PromQl)
// 存储异常点的 mapkey 为参数变量的组合,可以实现子筛选对上一层筛选的覆盖
anomalyPointsMap := sync.Map{}
// 统一变量配置格式
@@ -1271,7 +1337,7 @@ func (arw *AlertRuleWorker) VarFillingBeforeQuery(query models.PromQuery, reader
curPromql = strings.Replace(curPromql, fmt.Sprintf("$%s", key), val, -1)
}
// 得到参数变量的所有组合
paramPermutation, err := arw.getParamPermutation(param, ParamKeys)
paramPermutation, err := arw.getParamPermutation(param, ParamKeys, varToLabel, query.PromQl, readerClient)
if err != nil {
logger.Errorf("rule_eval:%s, paramPermutation error:%v", arw.Key(), err)
continue
@@ -1280,7 +1346,7 @@ func (arw *AlertRuleWorker) VarFillingBeforeQuery(query models.PromQuery, reader
keyToPromql := make(map[string]string)
for paramPermutationKeys, _ := range paramPermutation {
realPromql := curPromql
split := strings.Split(paramPermutationKeys, "-")
split := strings.Split(paramPermutationKeys, JoinMark)
for j := range ParamKeys {
realPromql = fillVar(realPromql, ParamKeys[j], split[j])
}
@@ -1303,6 +1369,7 @@ func (arw *AlertRuleWorker) VarFillingBeforeQuery(query models.PromQuery, reader
logger.Errorf("rule_eval:%s, promql:%s, error:%v", arw.Key(), promql, err)
return
}
logger.Infof("rule_eval:%s, promql:%s, value:%+v", arw.Key(), promql, value)
points := models.ConvertAnomalyPoints(value)
if len(points) == 0 {
@@ -1353,6 +1420,15 @@ func hasLabelLossAggregator(query models.PromQuery) bool {
return false
}
// notExactMatch reports whether the query's promql contains a non-exact label
// matcher: != (not equal), =~ (regex match) or !~ (regex not match). Such
// queries cannot be filtered after querying, so variables must be filled in
// before the query is executed.
func notExactMatch(query models.PromQuery) bool {
	// The matcher operators contain no letters, so no case folding is needed
	// before the substring checks (the original ToLower was dead work).
	promql := query.PromQl
	return strings.Contains(promql, "!=") ||
		strings.Contains(promql, "=~") ||
		strings.Contains(promql, "!~")
}
// ExtractVarMapping 从 promql 中提取变量映射关系,为了在 query 之后可以将标签正确的放回 promql
// 输入: sum(rate(mem_used_percent{host="$my_host"})) by (instance) + avg(node_load1{region="$region"}) > $val
// 输出: map[string]string{"my_host":"host", "region":"region"}

View File

@@ -2,6 +2,7 @@ package process
import (
"bytes"
"encoding/json"
"fmt"
"html/template"
"sort"
@@ -215,7 +216,6 @@ func (p *Processor) BuildEvent(anomalyPoint models.AnomalyPoint, from string, no
event.Callbacks = p.rule.Callbacks
event.CallbacksJSON = p.rule.CallbacksJSON
event.Annotations = p.rule.Annotations
event.AnnotationsJSON = make(map[string]string)
event.RuleConfig = p.rule.RuleConfig
event.RuleConfigJson = p.rule.RuleConfigJson
event.Severity = anomalyPoint.Severity
@@ -224,6 +224,11 @@ func (p *Processor) BuildEvent(anomalyPoint models.AnomalyPoint, from string, no
event.RecoverConfig = anomalyPoint.RecoverConfig
event.RuleHash = ruleHash
if err := json.Unmarshal([]byte(p.rule.Annotations), &event.AnnotationsJSON); err != nil {
event.AnnotationsJSON = make(map[string]string) // 解析失败时使用空 map
logger.Warningf("unmarshal annotations json failed: %v, rule: %d", err, p.rule.Id)
}
if p.target != "" {
if pt, exist := p.TargetCache.Get(p.target); exist {
pt.GroupNames = p.BusiGroupCache.GetNamesByBusiGroupIds(pt.GroupIds)

View File

@@ -322,6 +322,11 @@ func (rt *Router) Config(r *gin.Engine) {
pages.GET("/share-charts", rt.chartShareGets)
pages.POST("/share-charts", rt.auth(), rt.chartShareAdd)
pages.POST("/dashboard-annotations", rt.auth(), rt.user(), rt.perm("/dashboards/put"), rt.dashAnnotationAdd)
pages.GET("/dashboard-annotations", rt.dashAnnotationGets)
pages.PUT("/dashboard-annotation/:id", rt.auth(), rt.user(), rt.perm("/dashboards/put"), rt.dashAnnotationPut)
pages.DELETE("/dashboard-annotation/:id", rt.auth(), rt.user(), rt.perm("/dashboards/del"), rt.dashAnnotationDel)
// pages.GET("/alert-rules/builtin/alerts-cates", rt.auth(), rt.user(), rt.builtinAlertCateGets)
// pages.GET("/alert-rules/builtin/list", rt.auth(), rt.user(), rt.builtinAlertRules)
pages.GET("/alert-rules/callbacks", rt.auth(), rt.user(), rt.alertRuleCallbacks)
@@ -479,6 +484,7 @@ func (rt *Router) Config(r *gin.Engine) {
pages.PUT("/builtin-payloads", rt.auth(), rt.user(), rt.perm("/built-in-components/put"), rt.builtinPayloadsPut)
pages.DELETE("/builtin-payloads", rt.auth(), rt.user(), rt.perm("/built-in-components/del"), rt.builtinPayloadsDel)
pages.GET("/builtin-payload", rt.auth(), rt.user(), rt.builtinPayloadsGetByUUIDOrID)
}
r.GET("/api/n9e/versions", func(c *gin.Context) {

View File

@@ -36,8 +36,9 @@ func (rt *Router) builtinComponentsAdd(c *gin.Context) {
func (rt *Router) builtinComponentsGets(c *gin.Context) {
query := ginx.QueryStr(c, "query", "")
disabled := ginx.QueryInt(c, "disabled", -1)
bc, err := models.BuiltinComponentGets(rt.Ctx, query)
bc, err := models.BuiltinComponentGets(rt.Ctx, query, disabled)
ginx.Dangerous(err)
ginx.NewRender(c).Data(bc, nil)

View File

@@ -0,0 +1,99 @@
package router
import (
"fmt"
"net/http"
"time"
"github.com/ccfos/nightingale/v6/models"
"github.com/ccfos/nightingale/v6/pkg/ctx"
"github.com/gin-gonic/gin"
"github.com/toolkits/pkg/ginx"
)
// checkAnnotationPermission verifies that the requesting user has read-write
// permission on the busi group that owns the given dashboard; it aborts the
// request with an appropriate HTTP status otherwise.
func checkAnnotationPermission(c *gin.Context, ctx *ctx.Context, dashboardId int64) {
	board, err := models.BoardGetByID(ctx, dashboardId)
	if err != nil {
		ginx.Bomb(http.StatusInternalServerError, "failed to get dashboard: %v", err)
	}
	if board == nil {
		ginx.Bomb(http.StatusNotFound, "dashboard not found")
	}

	group := BusiGroup(ctx, board.GroupId)
	user := c.MustGet("user").(*models.User)

	allowed, err := user.CanDoBusiGroup(ctx, group, "rw")
	ginx.Dangerous(err)
	if !allowed {
		ginx.Bomb(http.StatusForbidden, "forbidden")
	}
}
// dashAnnotationAdd creates a dashboard annotation after verifying the caller
// has write permission on the owning dashboard's busi group.
func (rt *Router) dashAnnotationAdd(c *gin.Context) {
	var annotation models.DashAnnotation
	ginx.BindJSON(c, &annotation)

	checkAnnotationPermission(c, rt.Ctx, annotation.DashboardId)

	operator := c.MustGet("username").(string)
	ts := time.Now().Unix()
	annotation.CreateBy = operator
	annotation.CreateAt = ts
	annotation.UpdateBy = operator
	annotation.UpdateAt = ts

	ginx.NewRender(c).Data(annotation.Id, annotation.Add(rt.Ctx))
}
// dashAnnotationGets lists annotations of one dashboard within [from, to],
// capped by limit (default 100).
func (rt *Router) dashAnnotationGets(c *gin.Context) {
	var (
		dashboardId = ginx.QueryInt64(c, "dashboard_id")
		from        = ginx.QueryInt64(c, "from")
		to          = ginx.QueryInt64(c, "to")
		limit       = ginx.QueryInt(c, "limit", 100)
	)

	lst, err := models.DashAnnotationGets(rt.Ctx, dashboardId, from, to, limit)
	ginx.NewRender(c).Data(lst, err)
}
// dashAnnotationPut updates an existing annotation; the permission check is
// done against the dashboard the STORED annotation belongs to, not the one in
// the request body.
func (rt *Router) dashAnnotationPut(c *gin.Context) {
	var payload models.DashAnnotation
	ginx.BindJSON(c, &payload)

	id := ginx.UrlParamInt64(c, "id")
	stored, err := getAnnotationById(rt.Ctx, id)
	ginx.Dangerous(err)
	checkAnnotationPermission(c, rt.Ctx, stored.DashboardId)

	payload.Id = id
	payload.UpdateBy = c.MustGet("username").(string)
	payload.UpdateAt = time.Now().Unix()

	ginx.NewRender(c).Message(payload.Update(rt.Ctx))
}
// dashAnnotationDel deletes an annotation after a permission check on its
// owning dashboard.
func (rt *Router) dashAnnotationDel(c *gin.Context) {
	id := ginx.UrlParamInt64(c, "id")

	stored, err := getAnnotationById(rt.Ctx, id)
	ginx.Dangerous(err)
	checkAnnotationPermission(c, rt.Ctx, stored.DashboardId)

	ginx.NewRender(c).Message(models.DashAnnotationDel(rt.Ctx, id))
}
// getAnnotationById fetches a single annotation by primary key, turning a
// missing row into an explicit error so callers need not nil-check.
func getAnnotationById(ctx *ctx.Context, id int64) (*models.DashAnnotation, error) {
	annotation, err := models.DashAnnotationGet(ctx, "id=?", id)
	switch {
	case err != nil:
		return nil, err
	case annotation == nil:
		return nil, fmt.Errorf("annotation not found")
	default:
		return annotation, nil
	}
}

View File

@@ -130,9 +130,9 @@ func (rt *Router) User() gin.HandlerFunc {
func (rt *Router) user() gin.HandlerFunc {
return func(c *gin.Context) {
userid := c.MustGet("userid").(int64)
username := c.MustGet("username").(string)
user, err := models.UserGetById(rt.Ctx, userid)
user, err := models.UserGetByUsername(rt.Ctx, username)
if err != nil {
ginx.Bomb(http.StatusUnauthorized, "unauthorized")
}

View File

@@ -9,6 +9,7 @@ import (
"time"
"github.com/ccfos/nightingale/v6/models"
"github.com/ccfos/nightingale/v6/pkg/ctx"
"github.com/ccfos/nightingale/v6/storage"
"github.com/gin-gonic/gin"
@@ -272,11 +273,9 @@ func (rt *Router) validateTags(tags []string) error {
}
func (rt *Router) addTagsToTarget(target *models.Target, tags []string) error {
hostTagsMap := target.GetHostTagsMap()
for _, tag := range tags {
tagKey := strings.Split(tag, "=")[0]
if _, ok := hostTagsMap[tagKey]; ok ||
strings.Contains(target.Tags, tagKey+"=") {
if _, exist := target.TagsMap[tagKey]; exist {
return fmt.Errorf("duplicate tagkey(%s)", tagKey)
}
}
@@ -401,6 +400,22 @@ type targetBgidsForm struct {
Action string `json:"action"` // add del reset
}
// haveNeverGroupedIdent reports whether at least one of the given idents has
// never been bound to any busi group.
func haveNeverGroupedIdent(ctx *ctx.Context, idents []string) (bool, error) {
	for _, ident := range idents {
		groupIds, err := models.TargetGroupIdsGetByIdent(ctx, ident)
		if err != nil {
			return false, err
		}
		// An ident with no group bindings means the batch contains a
		// never-grouped target.
		if len(groupIds) == 0 {
			return true, nil
		}
	}
	return false, nil
}
func (rt *Router) targetBindBgids(c *gin.Context) {
var f targetBgidsForm
var err error
@@ -443,11 +458,15 @@ func (rt *Router) targetBindBgids(c *gin.Context) {
ginx.Bomb(http.StatusForbidden, "No permission. You are not admin of BG(%s)", bg.Name)
}
}
isNeverGrouped, checkErr := haveNeverGroupedIdent(rt.Ctx, f.Idents)
ginx.Dangerous(checkErr)
can, err := user.CheckPerm(rt.Ctx, "/targets/bind")
ginx.Dangerous(err)
if !can {
ginx.Bomb(http.StatusForbidden, "No permission. Only admin can assign BG")
if isNeverGrouped {
can, err := user.CheckPerm(rt.Ctx, "/targets/bind")
ginx.Dangerous(err)
if !can {
ginx.Bomb(http.StatusForbidden, "No permission. Only admin can assign BG")
}
}
}

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = true
Enable = false
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = true
Enable = false
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = true
Enable = false
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

View File

@@ -790,6 +790,7 @@ CREATE TABLE es_index_pattern (
time_field varchar(128) not null default '@timestamp',
allow_hide_system_indices smallint not null default 0,
fields_format varchar(4096) not null default '',
cross_cluster_enabled int not null default 0,
create_at bigint default '0',
create_by varchar(64) default '',
update_at bigint default '0',
@@ -859,6 +860,7 @@ CREATE TABLE builtin_components (
ident VARCHAR(191) NOT NULL,
logo VARCHAR(191) NOT NULL,
readme TEXT NOT NULL,
disabled INT NOT NULL DEFAULT 0,
created_at BIGINT NOT NULL DEFAULT 0,
created_by VARCHAR(191) NOT NULL DEFAULT '',
updated_at BIGINT NOT NULL DEFAULT 0,
@@ -885,4 +887,20 @@ CREATE TABLE builtin_payloads (
CREATE INDEX idx_component ON builtin_payloads (component);
CREATE INDEX idx_builtin_payloads_name ON builtin_payloads (name);
CREATE INDEX idx_cate ON builtin_payloads (cate);
CREATE INDEX idx_type ON builtin_payloads (type);
CREATE INDEX idx_type ON builtin_payloads (type);
CREATE TABLE dash_annotation (
id bigserial PRIMARY KEY,
dashboard_id bigint not null,
panel_id varchar(191) not null,
tags text,
description text,
config text,
time_start bigint not null default 0,
time_end bigint not null default 0,
create_at bigint not null default 0,
create_by varchar(64) not null default '',
update_at bigint not null default 0,
update_by varchar(64) not null default ''
);

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = true
Enable = false
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

View File

@@ -531,6 +531,7 @@ CREATE TABLE `builtin_components` (
`created_by` varchar(191) NOT NULL DEFAULT '' COMMENT '''creator''',
`updated_at` bigint NOT NULL DEFAULT 0 COMMENT '''update time''',
`updated_by` varchar(191) NOT NULL DEFAULT '' COMMENT '''updater''',
`disabled` int NOT NULL DEFAULT 0 COMMENT '''is disabled or not''',
PRIMARY KEY (`id`),
UNIQUE KEY `idx_ident` (`ident`) USING BTREE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -693,6 +694,7 @@ CREATE TABLE `es_index_pattern` (
`time_field` varchar(128) not null default '@timestamp',
`allow_hide_system_indices` tinyint(1) not null default 0,
`fields_format` varchar(4096) not null default '',
`cross_cluster_enabled` int not null default 0,
`create_at` bigint default '0',
`create_by` varchar(64) default '',
`update_at` bigint default '0',
@@ -745,6 +747,24 @@ CREATE TABLE `target_busi_group` (
UNIQUE KEY `idx_target_group` (`target_ident`,`group_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `dash_annotation` (
`id` bigint unsigned not null auto_increment,
`dashboard_id` bigint not null comment 'dashboard id',
`panel_id` varchar(191) not null comment 'panel id',
`tags` text comment 'tags array json string',
`description` text comment 'annotation description',
`config` text comment 'annotation config',
`time_start` bigint not null default 0 comment 'start timestamp',
`time_end` bigint not null default 0 comment 'end timestamp',
`create_at` bigint not null default 0 comment 'create time',
`create_by` varchar(64) not null default '' comment 'creator',
`update_at` bigint not null default 0 comment 'update time',
`update_by` varchar(64) not null default '' comment 'updater',
PRIMARY KEY (`id`),
KEY `idx_dashboard_id` (`dashboard_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE `task_meta`
(
`id` bigint unsigned NOT NULL AUTO_INCREMENT,

View File

@@ -120,4 +120,31 @@ CREATE TABLE `target_busi_group` (
/* v7.7.2 2024-12-02 */
ALTER TABLE alert_subscribe MODIFY COLUMN rule_ids varchar(1024);
ALTER TABLE alert_subscribe MODIFY COLUMN busi_groups varchar(4096);
ALTER TABLE alert_subscribe MODIFY COLUMN busi_groups varchar(4096);
/* v8.0.0-beta.1 2024-12-13 */
ALTER TABLE `alert_rule` ADD COLUMN `cron_pattern` VARCHAR(64);
ALTER TABLE `builtin_components` MODIFY COLUMN `logo` mediumtext COMMENT '''logo of component''';
/* v8.0.0-beta.2 2024-12-26 */
ALTER TABLE `es_index_pattern` ADD COLUMN `cross_cluster_enabled` int not null default 0;
/* v8.0.0-beta.3 2025-01-03 */
ALTER TABLE `builtin_components` ADD COLUMN `disabled` INT NOT NULL DEFAULT 0 COMMENT 'is disabled or not';
CREATE TABLE `dash_annotation` (
`id` bigint unsigned not null auto_increment,
`dashboard_id` bigint not null comment 'dashboard id',
`panel_id` varchar(191) not null comment 'panel id',
`tags` text comment 'tags array json string',
`description` text comment 'annotation description',
`config` text comment 'annotation config',
`time_start` bigint not null default 0 comment 'start timestamp',
`time_end` bigint not null default 0 comment 'end timestamp',
`create_at` bigint not null default 0 comment 'create time',
`create_by` varchar(64) not null default '' comment 'creator',
`update_at` bigint not null default 0 comment 'update time',
`update_by` varchar(64) not null default '' comment 'updater',
PRIMARY KEY (`id`),
KEY `idx_dashboard_id` (`dashboard_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

View File

@@ -17,6 +17,8 @@ CREATE TABLE `users` (
`update_by` varchar(64) not null default ''
);
CREATE UNIQUE INDEX idx_users_username ON `users` (username);
insert into `users`(id, username, nickname, password, roles, create_at, create_by, update_at, update_by) values(1, 'root', '超管', 'root.2020', 'Admin', strftime('%s', 'now'), 'system', strftime('%s', 'now'), 'system');
CREATE TABLE `user_group` (
@@ -182,8 +184,9 @@ CREATE TABLE `board` (
`create_by` varchar(64) not null default '',
`update_at` bigint not null default 0,
`update_by` varchar(64) not null default '',
unique (`group_id`, `name`)
`public_cate` bigint not null default 0
);
CREATE UNIQUE INDEX idx_board_group_id_name ON `board` (group_id, name);
CREATE INDEX `idx_board_ident` ON `board` (`ident` asc);
-- for dashboard new version
@@ -192,6 +195,15 @@ CREATE TABLE `board_payload` (
`payload` mediumtext not null
);
CREATE TABLE `chart` (
`id` integer primary key autoincrement,
`group_id` integer not null,
`configs` text,
`weight` integer not null default 0
);
CREATE INDEX idx_chart_group_id ON `chart` (group_id);
CREATE TABLE `chart_share` (
`id` integer primary key autoincrement,
`cluster` varchar(128) not null,
@@ -238,7 +250,9 @@ CREATE TABLE `alert_rule` (
`create_at` bigint not null default 0,
`create_by` varchar(64) not null default '',
`update_at` bigint not null default 0,
`update_by` varchar(64) not null default ''
`update_by` varchar(64) not null default '',
`cron_pattern` varchar(64),
`datasource_queries` text
);
CREATE INDEX `idx_alert_rule_group_id` ON `alert_rule` (`group_id` asc);
CREATE INDEX `idx_alert_rule_update_at` ON `alert_rule` (`update_at` asc);
@@ -308,11 +322,18 @@ CREATE TABLE `target` (
`tags` varchar(512) not null default '',
`host_ip` varchar(15) default '',
`agent_version` varchar(255) default '',
`host_tags` text,
`engine_name` varchar(255) default '',
`os` varchar(31) default '',
`update_at` bigint not null default 0
);
CREATE INDEX `idx_target_group_id` ON `target` (`group_id` asc);
CREATE INDEX `idx_target_group_id` ON `target` (`group_id` asc);
CREATE UNIQUE INDEX idx_target_ident ON `target` (ident);
CREATE INDEX idx_host_ip ON `target` (host_ip);
CREATE INDEX idx_agent_version ON `target` (agent_version);
CREATE INDEX idx_engine_name ON `target` (engine_name);
CREATE INDEX idx_os ON `target` (os);
CREATE TABLE `metric_view` (
`id` integer primary key autoincrement,
@@ -337,12 +358,14 @@ CREATE TABLE `recording_rule` (
`disabled` tinyint(1) not null default 0,
`prom_ql` varchar(8192) not null,
`prom_eval_interval` int not null,
`cron_pattern` varchar(255) default '',
`append_tags` varchar(255) default '',
`query_configs` text not null,
`create_at` bigint default '0',
`create_by` varchar(64) default '',
`update_at` bigint default '0',
`update_by` varchar(64) default ''
`update_by` varchar(64) default '',
`datasource_queries` text
);
CREATE INDEX `idx_recording_rule_group_id` ON `recording_rule` (`group_id` asc);
CREATE INDEX `idx_recording_rule_update_at` ON `recording_rule` (`update_at` asc);
@@ -430,6 +453,7 @@ CREATE TABLE `alert_his_event` (
`trigger_value` varchar(2048) not null,
`recover_time` bigint not null default 0,
`last_eval_time` bigint not null default 0,
`original_tags` varchar(8192),
`tags` varchar(1024) not null default '',
`annotations` text not null,
`rule_config` text not null
@@ -459,6 +483,8 @@ CREATE INDEX `idx_builtin_components_ident` ON `builtin_components` (`ident` asc
CREATE TABLE `builtin_payloads` (
`id` integer primary key autoincrement,
`component_id` integer not null default 0,
`uuid` integer not null,
`type` varchar(191) not null,
`component` varchar(191) not null,
`cate` varchar(191) not null,
@@ -474,6 +500,20 @@ CREATE INDEX `idx_builtin_payloads_component` ON `builtin_payloads` (`component`
CREATE INDEX `idx_builtin_payloads_name` ON `builtin_payloads` (`name` asc);
CREATE INDEX `idx_builtin_payloads_cate` ON `builtin_payloads` (`cate` asc);
CREATE INDEX `idx_builtin_payloads_type` ON `builtin_payloads` (`type` asc);
CREATE INDEX idx_uuid ON `builtin_payloads` (uuid);
CREATE TABLE `notification_record` (
`id` integer primary key autoincrement,
`event_id` integer not null,
`sub_id` integer,
`channel` varchar(255) not null,
`status` integer,
`target` varchar(1024) not null,
`details` varchar(2048) default '',
`created_at` integer not null
);
CREATE INDEX idx_evt ON notification_record (event_id);
CREATE TABLE `task_tpl` (
`id` integer primary key autoincrement,
@@ -553,6 +593,8 @@ CREATE TABLE `datasource`
`updated_by` varchar(64) not null default ''
);
CREATE UNIQUE INDEX idx_datasource_name ON datasource (name);
CREATE TABLE `builtin_cate` (
`id` integer primary key autoincrement,
`name` varchar(191) not null,
@@ -570,6 +612,8 @@ CREATE TABLE `notify_tpl` (
`update_by` varchar(64) not null default ''
);
CREATE UNIQUE INDEX idx_notify_tpl_channel ON notify_tpl (channel);
CREATE TABLE `sso_config` (
`id` integer primary key autoincrement,
`name` varchar(191) not null unique,
@@ -577,6 +621,8 @@ CREATE TABLE `sso_config` (
`update_at` bigint not null default 0
);
CREATE UNIQUE INDEX idx_sso_config_name ON sso_config (name);
CREATE TABLE `es_index_pattern` (
`id` integer primary key autoincrement,
`datasource_id` bigint not null default 0,
@@ -584,6 +630,7 @@ CREATE TABLE `es_index_pattern` (
`time_field` varchar(128) not null default '@timestamp',
`allow_hide_system_indices` tinyint(1) not null default 0,
`fields_format` varchar(4096) not null default '',
`cross_cluster_enabled` int not null default 0,
`create_at` bigint default '0',
`create_by` varchar(64) default '',
`update_at` bigint default '0',
@@ -591,6 +638,8 @@ CREATE TABLE `es_index_pattern` (
unique (`datasource_id`, `name`)
);
CREATE UNIQUE INDEX idx_es_index_pattern_datasource_id_name ON es_index_pattern (datasource_id, name);
CREATE TABLE `builtin_metrics` (
`id` integer primary key autoincrement,
`collector` varchar(191) NOT NULL,
@@ -603,13 +652,15 @@ CREATE TABLE `builtin_metrics` (
`created_at` bigint NOT NULL DEFAULT 0,
`created_by` varchar(191) NOT NULL DEFAULT '',
`updated_at` bigint NOT NULL DEFAULT 0,
`updated_by` varchar(191) NOT NULL DEFAULT ''
`updated_by` varchar(191) NOT NULL DEFAULT '',
`uuid` integer not null default 0
);
-- CREATE UNIQUE INDEX `idx_builtin_metrics_collector_typ_name` ON `builtin_metrics` (`lang`,`collector`, `typ`, `name` asc);
-- CREATE INDEX `idx_builtin_metrics_collector` ON `builtin_metrics` (`collector` asc);
-- CREATE INDEX `idx_builtin_metrics_typ` ON `builtin_metrics` (`typ` asc);
-- CREATE INDEX `idx_builtin_metrics_name` ON `builtin_metrics` (`name` asc);
-- CREATE INDEX `idx_builtin_metrics_lang` ON `builtin_metrics` (`lang` asc);
CREATE UNIQUE INDEX idx_collector_typ_name ON builtin_metrics (lang, collector, typ, name);
CREATE INDEX idx_collector ON builtin_metrics (collector);
CREATE INDEX idx_typ ON builtin_metrics (typ);
CREATE INDEX idx_builtinmetric_name ON builtin_metrics (name);
CREATE INDEX idx_lang ON builtin_metrics (lang);
CREATE TABLE `metric_filter` (
@@ -624,6 +675,30 @@ CREATE TABLE `metric_filter` (
);
CREATE INDEX `idx_metric_filter_name` ON `metric_filter` (`name` asc);
CREATE TABLE `target_busi_group` (
`id` integer primary key autoincrement,
`target_ident` varchar(191) not null,
`group_id` integer not null,
`update_at` integer not null
);
CREATE UNIQUE INDEX idx_target_busi_group ON target_busi_group (target_ident, group_id);
CREATE TABLE `dash_annotation` (
`id` integer primary key autoincrement,
`dashboard_id` bigint not null,
`panel_id` varchar(191) not null,
`tags` text,
`description` text,
`config` text,
`time_start` bigint not null default 0,
`time_end` bigint not null default 0,
`create_at` bigint not null default 0,
`create_by` varchar(64) not null default '',
`update_at` bigint not null default 0,
`update_by` varchar(64) not null default ''
);
CREATE TABLE `task_meta`
(

View File

@@ -50,7 +50,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = true
Enable = false
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
@@ -77,7 +77,7 @@ OpenRSA = false
DBType = "sqlite"
# postgres: host=%s port=%s user=%s dbname=%s password=%s sslmode=%s
# postgres: DSN="host=127.0.0.1 port=5432 user=root dbname=n9e_v6 password=1234 sslmode=disable"
# sqlite: DSN="/path/to/filename.db"
# mysql: DSN="root:1234@tcp(localhost:3306)/n9e_v6?charset=utf8mb4&parseTime=True&loc=Local"
DSN = "n9e.db"
# enable debug mode or not
Debug = false

View File

@@ -54,7 +54,7 @@ Enable = true
# user001 = "ccc26da7b9aba533cbb263a36c07dcc5"
[HTTP.APIForService]
Enable = true
Enable = false
[HTTP.APIForService.BasicAuth]
user001 = "ccc26da7b9aba533cbb263a36c07dcc5"

27
go.mod
View File

@@ -4,6 +4,7 @@ go 1.22
require (
github.com/BurntSushi/toml v0.3.1
github.com/VictoriaMetrics/metricsql v0.81.1
github.com/coreos/go-oidc v2.2.1+incompatible
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
github.com/dgrijalva/jwt-go v3.2.0+incompatible
@@ -11,6 +12,7 @@ require (
github.com/flashcatcloud/ibex v1.3.5
github.com/gin-contrib/pprof v1.4.0
github.com/gin-gonic/gin v1.9.1
github.com/glebarez/sqlite v1.11.0
github.com/go-ldap/ldap/v3 v3.4.4
github.com/gogo/protobuf v1.3.2
github.com/golang-jwt/jwt v3.2.2+incompatible
@@ -40,15 +42,28 @@ require (
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
gopkg.in/yaml.v2 v2.4.0
gorm.io/driver/mysql v1.4.4
gorm.io/driver/postgres v1.4.5
gorm.io/driver/postgres v1.5.11
gorm.io/driver/sqlite v1.5.5
gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde
gorm.io/gorm v1.25.12
)
require (
github.com/VictoriaMetrics/metrics v1.34.0 // indirect
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/glebarez/go-sqlite v1.21.2 // indirect
github.com/jackc/pgx/v5 v5.7.1 // indirect
github.com/jackc/puddle/v2 v2.2.2 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/valyala/fastrand v1.1.0 // indirect
github.com/valyala/histogram v1.2.0 // indirect
github.com/yuin/gopher-lua v1.1.1 // indirect
golang.org/x/sync v0.10.0 // indirect
modernc.org/libc v1.22.5 // indirect
modernc.org/mathutil v1.5.0 // indirect
modernc.org/memory v1.5.0 // indirect
modernc.org/sqlite v1.23.1 // indirect
)
require (
@@ -79,7 +94,7 @@ require (
github.com/jackc/pgio v1.0.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgproto3/v2 v2.3.1 // indirect
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
github.com/jackc/pgtype v1.12.0 // indirect
github.com/jackc/pgx/v4 v4.17.2 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
@@ -102,11 +117,11 @@ require (
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/automaxprocs v1.5.2 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/crypto v0.21.0 // indirect
golang.org/x/crypto v0.31.0 // indirect
golang.org/x/image v0.18.0 // indirect
golang.org/x/net v0.23.0 // indirect
golang.org/x/sys v0.21.0 // indirect
golang.org/x/text v0.16.0 // indirect
golang.org/x/sys v0.28.0 // indirect
golang.org/x/text v0.21.0 // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/protobuf v1.33.0 // indirect
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect

58
go.sum
View File

@@ -13,6 +13,10 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030IGemrRc=
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/VictoriaMetrics/metrics v1.34.0 h1:0i8k/gdOJdSoZB4Z9pikVnVQXfhcIvnG7M7h2WaQW2w=
github.com/VictoriaMetrics/metrics v1.34.0/go.mod h1:r7hveu6xMdUACXvB8TYdAj8WEsKzWB0EkpJN+RDtOf8=
github.com/VictoriaMetrics/metricsql v0.81.1 h1:1gpqI3Mwru1tCM8nZiKxBG0P+DNkjlRwLhRPII3cuho=
github.com/VictoriaMetrics/metricsql v0.81.1/go.mod h1:1g4hdCwlbJZ851PU9VN65xy9Rdlzupo6fx3SNZ8Z64U=
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc=
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk=
@@ -53,6 +57,8 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumC
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/expr-lang/expr v1.16.1 h1:Na8CUcMdyGbnNpShY7kzcHCU7WqxuL+hnxgHZ4vaz/A=
github.com/expr-lang/expr v1.16.1/go.mod h1:uCkhfG+x7fcZ5A5sXHKuQ07jGZRl6J0FCAaf2k4PtVQ=
github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8=
@@ -71,6 +77,10 @@ github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm
github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
github.com/glebarez/go-sqlite v1.21.2 h1:3a6LFC4sKahUunAmynQKLZceZCOzUthkRkEAl9gAXWo=
github.com/glebarez/go-sqlite v1.21.2/go.mod h1:sfxdZyhQjTM2Wry3gVYWaW072Ri1WMdWJi0k6+3382k=
github.com/glebarez/sqlite v1.11.0 h1:wSG0irqzP6VurnMEpFGer5Li19RpIRi2qvQz++w0GMw=
github.com/glebarez/sqlite v1.11.0/go.mod h1:h8/o8j5wiAsqSPoWELDUdJXhjAhsVliSn7bWZjOhrgQ=
github.com/go-asn1-ber/asn1-ber v1.5.4 h1:vXT6d/FNDiELJnLb6hGNa309LMsrCoYFvpwHDF0+Y1A=
github.com/go-asn1-ber/asn1-ber v1.5.4/go.mod h1:hEBeB/ic+5LoWskz+yKT7vGhhPYkProFKoKdwZRWMe0=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
@@ -120,6 +130,8 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8 h1:n6vlPhxsA+BW/XsS5+uqi7GyzaLa5MH7qlSLBZtRdiA=
github.com/google/pprof v0.0.0-20230705174524-200ffdc848b8/go.mod h1:Jh3hGz2jkYak8qXPD19ryItVnUgpgeqzdkY/D0EaeuA=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -158,6 +170,8 @@ github.com/jackc/pgproto3/v2 v2.3.1 h1:nwj7qwf0S+Q7ISFfBndqeLwSwxs+4DPsbRFjECT1Y
github.com/jackc/pgproto3/v2 v2.3.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg=
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
@@ -170,10 +184,15 @@ github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQ
github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs=
github.com/jackc/pgx/v4 v4.17.2 h1:0Ut0rpeKwvIVbMQ1KbMBU4h6wxehBI535LK6Flheh8E=
github.com/jackc/pgx/v4 v4.17.2/go.mod h1:lcxIZN44yMIrWI78a5CpucdD14hX0SBDbNRvjDBItsw=
github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs=
github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA=
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0=
github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
@@ -277,6 +296,9 @@ github.com/rakyll/statik v0.1.7 h1:OF3QCZUuyPxuGEP7B4ypUa7sB/iHtqOTDYZXGM8KOdQ=
github.com/rakyll/statik v0.1.7/go.mod h1:AlZONWzMtEnMs7W4e/1LURLiI49pIMmp6V9Unghqrcc=
github.com/redis/go-redis/v9 v9.0.2 h1:BA426Zqe/7r56kCcvxYLWe1mkaz71LKF77GwgFzSxfE=
github.com/redis/go-redis/v9 v9.0.2/go.mod h1:/xDTe9EF1LM61hek62Poq2nzQSGj0xSrEtEHbBQevps=
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/robfig/go-cache v0.0.0-20130306151617-9fc39e0dbf62/go.mod h1:65XQgovT59RWatovFwnwocoUxiI/eENTnOY5GK3STuY=
@@ -329,6 +351,10 @@ github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/valyala/fastrand v1.1.0 h1:f+5HkLW4rsgzdNoleUOB69hyT9IlD2ZQh9GyDMfb5G8=
github.com/valyala/fastrand v1.1.0/go.mod h1:HWqCzkrkg6QXT8V2EXWvXCoow7vLwOFN002oeRzjapQ=
github.com/valyala/histogram v1.2.0 h1:wyYGAZZt3CpwUiIb9AU/Zbllg1llXyrtApRS815OLoQ=
github.com/valyala/histogram v1.2.0/go.mod h1:Hb4kBwb4UxsaNbbbh+RRz8ZR6pdodR57tzWUS3BUzXY=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
@@ -370,8 +396,8 @@ golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0
golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 h1:MGwJjxBy0HJshjDNfLsYO8xppfqWlA5ZT9OhtUUhTNw=
golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
golang.org/x/image v0.13.0/go.mod h1:6mmbMOeV28HuMTgA6OSRkdXKYw/t5W9Uwn2Yv1r3Yxk=
@@ -407,8 +433,8 @@ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -431,8 +457,10 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -450,8 +478,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
@@ -502,11 +530,23 @@ gorm.io/driver/mysql v1.4.4 h1:MX0K9Qvy0Na4o7qSC/YI7XxqUw5KDw01umqgID+svdQ=
gorm.io/driver/mysql v1.4.4/go.mod h1:BCg8cKI+R0j/rZRQxeKis/forqRwRSYOR8OM3Wo6hOM=
gorm.io/driver/postgres v1.4.5 h1:mTeXTTtHAgnS9PgmhN2YeUbazYpLhUI1doLnw42XUZc=
gorm.io/driver/postgres v1.4.5/go.mod h1:GKNQYSJ14qvWkvPwXljMGehpKrhlDNsqYRr5HnYGncg=
gorm.io/driver/postgres v1.5.11 h1:ubBVAfbKEUld/twyKZ0IYn9rSQh448EdelLYk9Mv314=
gorm.io/driver/postgres v1.5.11/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
gorm.io/driver/sqlite v1.5.5 h1:7MDMtUZhV065SilG62E0MquljeArQZNfJnjd9i9gx3E=
gorm.io/driver/sqlite v1.5.5/go.mod h1:6NgQ7sQWAIFsPrJJl1lSNSu2TABh0ZZ/zm5fosATavE=
gorm.io/gorm v1.23.8/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
gorm.io/gorm v1.24.1-0.20221019064659-5dd2bb482755/go.mod h1:DVrVomtaYTbqs7gB/x2uVvqnXzv0nqjB396B8cG4dBA=
gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde h1:9DShaph9qhkIYw7QF91I/ynrr4cOO2PZra2PFD7Mfeg=
gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
gorm.io/gorm v1.25.7 h1:VsD6acwRjz2zFxGO50gPO6AkNs7KKnvfzUjHQhZDz/A=
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
gorm.io/gorm v1.25.12 h1:I0u8i2hWQItBq1WfE0o2+WuL9+8L21K9e2HHSTE/0f8=
gorm.io/gorm v1.25.12/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
modernc.org/libc v1.22.5 h1:91BNch/e5B0uPbJFgqbxXuOnxBQjlS//icfQEGmvyjE=
modernc.org/libc v1.22.5/go.mod h1:jj+Z7dTNX8fBScMVNRAYZ/jF91K8fdT2hYMThc3YjBY=
modernc.org/mathutil v1.5.0 h1:rV0Ko/6SfM+8G+yKiyI830l3Wuz1zRutdslNoQ0kfiQ=
modernc.org/mathutil v1.5.0/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
modernc.org/memory v1.5.0 h1:N+/8c5rE6EqugZwHii4IFsaJ7MUhoWX07J5tC/iI5Ds=
modernc.org/memory v1.5.0/go.mod h1:PkUhL0Mugw21sHPeskwZW4D6VscE/GQJOnIpCnW6pSU=
modernc.org/sqlite v1.23.1 h1:nrSBg4aRQQwq59JpvGEQ15tNxoO5pX/kUjcRNwSAGQM=
modernc.org/sqlite v1.23.1/go.mod h1:OrDj17Mggn6MhE+iPbBNf7RGKODDE9NFT0f3EwDzJqk=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

View File

Before

Width:  |  Height:  |  Size: 1.6 KiB

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

Before

Width:  |  Height:  |  Size: 377 KiB

After

Width:  |  Height:  |  Size: 377 KiB

View File

@@ -8,13 +8,14 @@ import (
"sync"
"time"
"github.com/ccfos/nightingale/v6/pkg/tplx"
"github.com/BurntSushi/toml"
"github.com/ccfos/nightingale/v6/alert/aconf"
"github.com/ccfos/nightingale/v6/dumper"
"github.com/ccfos/nightingale/v6/models"
"github.com/ccfos/nightingale/v6/pkg/ctx"
"github.com/ccfos/nightingale/v6/pkg/poster"
"github.com/ccfos/nightingale/v6/pkg/tplx"
"github.com/BurntSushi/toml"
"github.com/toolkits/pkg/logger"
)
@@ -114,13 +115,18 @@ func (w *NotifyConfigCacheType) syncNotifyConfigs() error {
}
if webhooks[i].Client == nil {
transport := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: webhooks[i].SkipVerify},
}
if poster.UseProxy(webhooks[i].Url) {
transport.Proxy = http.ProxyFromEnvironment
}
webhooks[i].Client = &http.Client{
Timeout: time.Second * time.Duration(webhooks[i].Timeout),
Transport: &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: webhooks[i].SkipVerify},
},
Timeout: time.Second * time.Duration(webhooks[i].Timeout),
Transport: transport,
}
}
newWebhooks[webhooks[i].Url] = webhooks[i]
}

View File

@@ -14,6 +14,7 @@ type BuiltinComponent struct {
Ident string `json:"ident" gorm:"type:varchar(191);not null;uniqueIndex:idx_ident,sort:asc"`
Logo string `json:"logo" gorm:"type:mediumtext;comment:'logo of component'"`
Readme string `json:"readme" gorm:"type:text;not null;comment:'readme of component'"`
Disabled int `json:"disabled" gorm:"type:int;not null;default:0;comment:'is disabled or not'"`
CreatedAt int64 `json:"created_at" gorm:"type:bigint;not null;default:0;comment:'create time'"`
CreatedBy string `json:"created_by" gorm:"type:varchar(191);not null;default:'';comment:'creator'"`
UpdatedAt int64 `json:"updated_at" gorm:"type:bigint;not null;default:0;comment:'update time'"`
@@ -25,12 +26,17 @@ type PostgresBuiltinComponent struct {
Ident string `json:"ident" gorm:"type:varchar(191);not null;uniqueIndex:idx_ident,sort:asc;comment:'identifier of component'"`
Logo string `json:"logo" gorm:"type:text;comment:'logo of component'"`
Readme string `json:"readme" gorm:"type:text;not null;comment:'readme of component'"`
Disabled int `json:"disabled" gorm:"type:int;not null;default:0;comment:'is disabled or not'"`
CreatedAt int64 `json:"created_at" gorm:"type:bigint;not null;default:0;comment:'create time'"`
CreatedBy string `json:"created_by" gorm:"type:varchar(191);not null;default:'';comment:'creator'"`
UpdatedAt int64 `json:"updated_at" gorm:"type:bigint;not null;default:0;comment:'update time'"`
UpdatedBy string `json:"updated_by" gorm:"type:varchar(191);not null;default:'';comment:'updater'"`
}
func (bc *PostgresBuiltinComponent) TableName() string {
return "builtin_components"
}
func (bc *BuiltinComponent) TableName() string {
return "builtin_components"
}
@@ -97,12 +103,15 @@ func BuiltinComponentDels(ctx *ctx.Context, ids []int64) error {
return DB(ctx).Where("id in ?", ids).Delete(new(BuiltinComponent)).Error
}
func BuiltinComponentGets(ctx *ctx.Context, query string) ([]*BuiltinComponent, error) {
func BuiltinComponentGets(ctx *ctx.Context, query string, disabled int) ([]*BuiltinComponent, error) {
session := DB(ctx)
if query != "" {
queryPattern := "%" + query + "%"
session = session.Where("ident LIKE ?", queryPattern)
}
if disabled == 0 || disabled == 1 {
session = session.Where("disabled = ?", disabled)
}
var lst []*BuiltinComponent

View File

@@ -168,7 +168,7 @@ func BuiltinPayloadComponents(ctx *ctx.Context, typ, cate string) (string, error
func InitBuiltinPayloads(ctx *ctx.Context) error {
var lst []*BuiltinPayload
components, err := BuiltinComponentGets(ctx, "")
components, err := BuiltinComponentGets(ctx, "", -1)
if err != nil {
return err
}

View File

@@ -115,53 +115,74 @@ func BusiGroupExists(ctx *ctx.Context, where string, args ...interface{}) (bool,
return num > 0, err
}
var entries = []struct {
entry interface{}
errorMessage string
}{
// RegisterGroupDelCheckEntries 提供给外部注册删除 group 时需要检查的表
func RegisterGroupDelCheckEntries(e []CheckEntry) {
entries = append(entries, e...)
}
type CheckEntry struct {
Entry interface{}
ErrorMessage string
FieldName string
}
var entries = []CheckEntry{
{
entry: &AlertRule{},
errorMessage: "Some alert rules still in the BusiGroup",
Entry: &AlertRule{},
ErrorMessage: "Some alert rules still in the BusiGroup",
FieldName: "group_id",
},
{
entry: &AlertMute{},
errorMessage: "Some alert mutes still in the BusiGroup",
Entry: &AlertMute{},
ErrorMessage: "Some alert mutes still in the BusiGroup",
FieldName: "group_id",
},
{
entry: &AlertSubscribe{},
errorMessage: "Some alert subscribes still in the BusiGroup",
Entry: &AlertSubscribe{},
ErrorMessage: "Some alert subscribes still in the BusiGroup",
FieldName: "group_id",
},
{
entry: &Target{},
errorMessage: "Some targets still in the BusiGroup",
Entry: &Board{},
ErrorMessage: "Some Board still in the BusiGroup",
FieldName: "group_id",
},
{
entry: &RecordingRule{},
errorMessage: "Some recording rules still in the BusiGroup",
Entry: &Target{},
ErrorMessage: "Some targets still in the BusiGroup",
FieldName: "group_id",
},
{
entry: &TaskTpl{},
errorMessage: "Some recovery scripts still in the BusiGroup",
Entry: &RecordingRule{},
ErrorMessage: "Some recording rules still in the BusiGroup",
FieldName: "group_id",
},
{
entry: &TaskRecord{},
errorMessage: "Some Task Record records still in the BusiGroup",
Entry: &TaskTpl{},
ErrorMessage: "Some recovery scripts still in the BusiGroup",
FieldName: "group_id",
},
{
entry: &TargetBusiGroup{},
errorMessage: "Some target busigroups still in the BusiGroup",
Entry: &TaskRecord{},
ErrorMessage: "Some Task Record records still in the BusiGroup",
FieldName: "group_id",
},
{
Entry: &TargetBusiGroup{},
ErrorMessage: "Some target busigroups still in the BusiGroup",
FieldName: "group_id",
},
}
func (bg *BusiGroup) Del(ctx *ctx.Context) error {
for _, e := range entries {
has, err := Exists(DB(ctx).Model(e.entry).Where("group_id=?", bg.Id))
has, err := Exists(DB(ctx).Model(e.Entry).Where(fmt.Sprintf("%s=?", e.FieldName), bg.Id))
if err != nil {
return err
}
if has {
return errors.New(e.errorMessage)
return errors.New(e.ErrorMessage)
}
}

View File

@@ -130,7 +130,7 @@ func ConfigsGetAll(ctx *ctx.Context) ([]*Configs, error) { // select built-in ty
}
var lst []*Configs
err := DB(ctx).Model(&Configs{}).Select("ckey, cval").
err := DB(ctx).Model(&Configs{}).Select("id, ckey, cval").
Where("ckey!='' and external=? ", 0).Find(&lst).Error
if err != nil {
return nil, errors.WithMessage(err, "failed to query configs")

89
models/dash_annotation.go Normal file
View File

@@ -0,0 +1,89 @@
package models
import (
"encoding/json"
"github.com/ccfos/nightingale/v6/pkg/ctx"
)
// DashAnnotation is a user-created annotation attached to a dashboard panel,
// covering the interval [TimeStart, TimeEnd]. Tags is the JSON-encoded form
// stored in the database; TagsJSON is the decoded form exposed to the API.
type DashAnnotation struct {
	Id          int64    `json:"id" gorm:"primaryKey"`
	DashboardId int64    `json:"dashboard_id"`
	PanelId     string   `json:"panel_id"`
	Tags        string   `json:"-"`             // JSON array of tag strings, persisted column
	TagsJSON    []string `json:"tags" gorm:"-"` // in-memory view, never persisted
	Description string   `json:"description"`
	Config      string   `json:"config"`
	TimeStart   int64    `json:"time_start"`
	TimeEnd     int64    `json:"time_end"`
	CreateAt    int64    `json:"create_at"`
	CreateBy    string   `json:"create_by"`
	UpdateAt    int64    `json:"update_at"`
	UpdateBy    string   `json:"update_by"`
}

// TableName maps the model to the dash_annotation table.
func (da *DashAnnotation) TableName() string {
	return "dash_annotation"
}

// DB2FE decodes the stored Tags JSON into TagsJSON for API responses.
// An empty Tags column (the schema allows NULL/empty text) is treated as
// "no tags" instead of failing with "unexpected end of JSON input".
func (da *DashAnnotation) DB2FE() error {
	if da.Tags == "" {
		da.TagsJSON = nil
		return nil
	}
	return json.Unmarshal([]byte(da.Tags), &da.TagsJSON)
}

// FE2DB encodes TagsJSON into the Tags column before persisting.
func (da *DashAnnotation) FE2DB() error {
	b, err := json.Marshal(da.TagsJSON)
	if err != nil {
		return err
	}
	da.Tags = string(b)
	return nil
}
// Add serializes the front-end tag list into the Tags column and inserts
// the annotation as a new row.
func (da *DashAnnotation) Add(ctx *ctx.Context) error {
	err := da.FE2DB()
	if err != nil {
		return err
	}
	return Insert(ctx, da)
}
// Update re-serializes the tag list and persists the editable fields of the
// annotation (creation metadata is deliberately left untouched).
func (da *DashAnnotation) Update(ctx *ctx.Context) error {
	err := da.FE2DB()
	if err != nil {
		return err
	}
	return DB(ctx).Model(da).
		Select("dashboard_id", "panel_id", "tags", "description", "config",
			"time_start", "time_end", "update_at", "update_by").
		Updates(da).Error
}
// DashAnnotationDel removes the annotation with the given id.
func DashAnnotationDel(ctx *ctx.Context, id int64) error {
	session := DB(ctx).Where("id = ?", id)
	return session.Delete(&DashAnnotation{}).Error
}
// DashAnnotationGet returns the first annotation matching the where clause,
// with its tags decoded, or (nil, nil) when nothing matches.
func DashAnnotationGet(ctx *ctx.Context, where string, args ...interface{}) (*DashAnnotation, error) {
	var lst []*DashAnnotation
	err := DB(ctx).Where(where, args...).Find(&lst).Error
	if err != nil {
		return nil, err
	}

	if len(lst) == 0 {
		return nil, nil
	}

	// Return nil on decode failure rather than a half-populated row; a
	// non-nil result alongside a non-nil error invites misuse by callers.
	if err := lst[0].DB2FE(); err != nil {
		return nil, err
	}
	return lst[0], nil
}
// DashAnnotationGets lists annotations of a dashboard whose
// [time_start, time_end] range overlaps [from, to], ordered by id,
// capped at limit rows.
func DashAnnotationGets(ctx *ctx.Context, dashboardId int64, from, to int64, limit int) ([]DashAnnotation, error) {
	session := DB(ctx).Where("dashboard_id = ? AND time_start <= ? AND time_end >= ?", dashboardId, to, from)

	var lst []DashAnnotation
	err := session.Order("id").Limit(limit).Find(&lst).Error
	if err != nil {
		return nil, err
	}

	for i := range lst {
		// Best-effort decode: a row with empty/garbled tags is still
		// returned, just without TagsJSON populated.
		_ = lst[i].DB2FE()
	}
	return lst, nil
}

View File

@@ -20,6 +20,7 @@ type EsIndexPattern struct {
CreateBy string `json:"create_by"`
UpdateAt int64 `json:"update_at"`
UpdateBy string `json:"update_by"`
CrossClusterEnabled int `json:"cross_cluster_enabled"`
}
func (t *EsIndexPattern) TableName() string {

View File

@@ -67,7 +67,7 @@ func MigrateTables(db *gorm.DB) error {
&TaskRecord{}, &ChartShare{}, &Target{}, &Configs{}, &Datasource{}, &NotifyTpl{},
&Board{}, &BoardBusigroup{}, &Users{}, &SsoConfig{}, &models.BuiltinMetric{},
&models.MetricFilter{}, &models.NotificaitonRecord{},
&models.TargetBusiGroup{}}
&models.TargetBusiGroup{}, &EsIndexPatternMigrate{}, &DashAnnotation{}}
if isPostgres(db) {
dts = append(dts, &models.PostgresBuiltinComponent{})
@@ -319,3 +319,30 @@ type TaskHostDoing struct {
func (TaskHostDoing) TableName() string {
return "task_host_doing"
}
// EsIndexPatternMigrate is a migration-only view of es_index_pattern used to
// add the cross_cluster_enabled column (defaulting to 0) via AutoMigrate.
type EsIndexPatternMigrate struct {
	CrossClusterEnabled int `gorm:"column:cross_cluster_enabled;type:int;default:0"`
}

// TableName points the migration struct at the existing es_index_pattern
// table so AutoMigrate alters it instead of creating a new table.
func (EsIndexPatternMigrate) TableName() string {
	return "es_index_pattern"
}
// DashAnnotation is the migration schema for the dash_annotation table:
// per-panel dashboard annotations spanning [time_start, time_end]. The
// runtime model lives in the models package; this struct only drives
// AutoMigrate column definitions.
type DashAnnotation struct {
	Id          int64  `gorm:"column:id;primaryKey;autoIncrement"`
	DashboardId int64  `gorm:"column:dashboard_id;not null"`
	PanelId     string `gorm:"column:panel_id;type:varchar(191);not null"`
	Tags        string `gorm:"column:tags;type:text"` // JSON-encoded tag list
	Description string `gorm:"column:description;type:text"`
	Config      string `gorm:"column:config;type:text"`
	TimeStart   int64  `gorm:"column:time_start;not null;default:0"`
	TimeEnd     int64  `gorm:"column:time_end;not null;default:0"`
	CreateAt    int64  `gorm:"column:create_at;not null;default:0"`
	CreateBy    string `gorm:"column:create_by;type:varchar(64);not null;default:''"`
	UpdateAt    int64  `gorm:"column:update_at;not null;default:0"`
	UpdateBy    string `gorm:"column:update_by;type:varchar(64);not null;default:''"`
}

// TableName maps the migration struct to the dash_annotation table.
func (DashAnnotation) TableName() string {
	return "dash_annotation"
}

View File

@@ -29,16 +29,22 @@ func LoadConfigByDir(configDir string, configPtr interface{}) error {
if err != nil {
return fmt.Errorf("failed to list files under: %s : %v", configDir, err)
}
found := false
s := NewFileScanner()
for _, fpath := range files {
switch {
case strings.HasSuffix(fpath, ".toml"):
found = true
s.Read(path.Join(configDir, fpath))
tBuf = append(tBuf, s.Data()...)
tBuf = append(tBuf, []byte("\n")...)
case strings.HasSuffix(fpath, ".json"):
found = true
loaders = append(loaders, &multiconfig.JSONLoader{Path: path.Join(configDir, fpath)})
case strings.HasSuffix(fpath, ".yaml") || strings.HasSuffix(fpath, ".yml"):
found = true
loaders = append(loaders, &multiconfig.YAMLLoader{Path: path.Join(configDir, fpath)})
}
if s.Err() != nil {
@@ -46,6 +52,10 @@ func LoadConfigByDir(configDir string, configPtr interface{}) error {
}
}
if !found {
return fmt.Errorf("fail to found config file, config dir path: %v", configDir)
}
if len(tBuf) != 0 {
loaders = append(loaders, &multiconfig.TOMLLoader{Reader: bytes.NewReader(tBuf)})
}

View File

@@ -340,42 +340,43 @@ func (InitChartShare) TableOptions() string {
}
type InitAlertRule struct {
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:busi group id;index"`
Cate string `gorm:"size:128;not null"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null"`
Note string `gorm:"size:1024;not null;default:''"`
Prod string `gorm:"size:255;not null;default:''"`
Algorithm string `gorm:"size:255;not null;default:''"`
AlgoParams string `gorm:"size:255"`
Delay int32 `gorm:"not null;default:0"`
Severity int16 `gorm:"type:tinyint(1);not null;comment:1:Emergency 2:Warning 3:Notice"`
Disabled bool `gorm:"type:tinyint(1);not null;comment:0:enabled 1:disabled"`
PromForDuration int32 `gorm:"not null;comment:prometheus for, unit:s"`
RuleConfig string `gorm:"type:text;not null;comment:rule_config"`
PromQL string `gorm:"type:text;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
EnableStime string `gorm:"size:255;not null;default:'00:00'"`
EnableEtime string `gorm:"size:255;not null;default:'23:59'"`
EnableDaysOfWeek string `gorm:"size:255;not null;default:'';comment:split by space: 0 1 2 3 4 5 6"`
EnableInBg bool `gorm:"type:tinyint(1);not null;default:0;comment:1: only this bg 0: global"`
NotifyRecovered bool `gorm:"type:tinyint(1);not null;comment:whether notify when recovery"`
NotifyChannels string `gorm:"size:255;not null;default:'';comment:split by space: sms voice email dingtalk wecom"`
NotifyGroups string `gorm:"size:255;not null;default:'';comment:split by space: 233 43"`
NotifyRepeatStep int32 `gorm:"not null;default:0;comment:unit: min"`
NotifyMaxNumber int32 `gorm:"not null;default:0"`
RecoverDuration int32 `gorm:"not null;default:0;comment:unit: s"`
Callbacks string `gorm:"size:4096;not null;default:'';comment:split by space: http://a.com/api/x http://a.com/api/y"`
RunbookURL string `gorm:"size:4096"`
AppendTags string `gorm:"size:255;not null;default:'';comment:split by space: service=n9e mod=api"`
Annotations string `gorm:"type:text;not null;comment:annotations"`
ExtraConfig string `gorm:"type:text;not null;comment:extra_config"`
CreateAt int64 `gorm:"not null;default:0"`
CreateBy string `gorm:"size:64;not null;default:''"`
UpdateAt int64 `gorm:"not null;default:0;index"`
UpdateBy string `gorm:"size:64;not null;default:''"`
ID uint64 `gorm:"primaryKey;autoIncrement"`
GroupID uint64 `gorm:"not null;default:0;comment:busi group id;index"`
Cate string `gorm:"size:128;not null"`
DatasourceIDs string `gorm:"size:255;not null;default:'';comment:datasource ids"`
Cluster string `gorm:"size:128;not null"`
Name string `gorm:"size:255;not null"`
Note string `gorm:"size:1024;not null;default:''"`
Prod string `gorm:"size:255;not null;default:''"`
Algorithm string `gorm:"size:255;not null;default:''"`
AlgoParams string `gorm:"size:255"`
Delay int32 `gorm:"not null;default:0"`
Severity int16 `gorm:"type:tinyint(1);not null;comment:1:Emergency 2:Warning 3:Notice"`
Disabled bool `gorm:"type:tinyint(1);not null;comment:0:enabled 1:disabled"`
PromForDuration int32 `gorm:"not null;comment:prometheus for, unit:s"`
RuleConfig string `gorm:"type:text;not null;comment:rule_config"`
PromQL string `gorm:"type:text;not null;comment:promql"`
PromEvalInterval int32 `gorm:"not null;comment:evaluate interval"`
EnableStime string `gorm:"size:255;not null;default:'00:00'"`
EnableEtime string `gorm:"size:255;not null;default:'23:59'"`
EnableDaysOfWeek string `gorm:"size:255;not null;default:'';comment:split by space: 0 1 2 3 4 5 6"`
EnableInBg bool `gorm:"type:tinyint(1);not null;default:0;comment:1: only this bg 0: global"`
NotifyRecovered bool `gorm:"type:tinyint(1);not null;comment:whether notify when recovery"`
NotifyChannels string `gorm:"size:255;not null;default:'';comment:split by space: sms voice email dingtalk wecom"`
NotifyGroups string `gorm:"size:255;not null;default:'';comment:split by space: 233 43"`
NotifyRepeatStep int32 `gorm:"not null;default:0;comment:unit: min"`
NotifyMaxNumber int32 `gorm:"not null;default:0"`
RecoverDuration int32 `gorm:"not null;default:0;comment:unit: s"`
Callbacks string `gorm:"size:4096;not null;default:'';comment:split by space: http://a.com/api/x http://a.com/api/y"`
RunbookURL string `gorm:"size:4096"`
AppendTags string `gorm:"size:255;not null;default:'';comment:split by space: service=n9e mod=api"`
Annotations string `gorm:"type:text;not null;comment:annotations"`
ExtraConfig string `gorm:"type:text;not null;comment:extra_config"`
CreateAt int64 `gorm:"not null;default:0"`
CreateBy string `gorm:"size:64;not null;default:''"`
UpdateAt int64 `gorm:"not null;default:0;index"`
UpdateBy string `gorm:"size:64;not null;default:''"`
DatasourceQueries string `gorm:"type:text"`
}
func (InitAlertRule) TableName() string {
@@ -387,42 +388,43 @@ func (InitAlertRule) TableOptions() string {
}
// InitPostgresAlertRule is the PostgreSQL flavor of the alert_rule schema used
// by first-run database initialization. It mirrors InitAlertRule but uses
// smallint instead of tinyint(1) for boolean-like columns, since PostgreSQL
// has no tinyint type.
//
// Fix: the struct previously contained every field twice (diff-merge residue),
// which is invalid Go (duplicate field names); the field list below is the
// deduplicated, current version including the DatasourceQueries column.
type InitPostgresAlertRule struct {
	ID               uint64 `gorm:"primaryKey;autoIncrement"`
	GroupID          uint64 `gorm:"not null;default:0;comment:busi group id;index"`
	Cate             string `gorm:"size:128;not null"`
	DatasourceIDs    string `gorm:"size:255;not null;default:'';comment:datasource ids"`
	Cluster          string `gorm:"size:128;not null"`
	Name             string `gorm:"size:255;not null"`
	Note             string `gorm:"size:1024;not null;default:''"`
	Prod             string `gorm:"size:255;not null;default:''"`
	Algorithm        string `gorm:"size:255;not null;default:''"`
	AlgoParams       string `gorm:"size:255"`
	Delay            int32  `gorm:"not null;default:0"`
	Severity         int16  `gorm:"type:smallint;not null;comment:1:Emergency 2:Warning 3:Notice"`
	Disabled         int16  `gorm:"type:smallint;not null;comment:0:enabled 1:disabled"`
	PromForDuration  int32  `gorm:"not null;comment:prometheus for, unit:s"`
	RuleConfig       string `gorm:"type:text;not null;comment:rule_config"`
	PromQL           string `gorm:"type:text;not null;comment:promql"`
	PromEvalInterval int32  `gorm:"not null;comment:evaluate interval"`
	EnableStime      string `gorm:"size:255;not null;default:'00:00'"`
	EnableEtime      string `gorm:"size:255;not null;default:'23:59'"`
	EnableDaysOfWeek string `gorm:"size:255;not null;default:'';comment:split by space: 0 1 2 3 4 5 6"`
	EnableInBg       int16  `gorm:"type:smallint;not null;default:0;comment:1: only this bg 0: global"`
	NotifyRecovered  int16  `gorm:"type:smallint;not null;comment:whether notify when recovery"`
	NotifyChannels   string `gorm:"size:255;not null;default:'';comment:split by space: sms voice email dingtalk wecom"`
	NotifyGroups     string `gorm:"size:255;not null;default:'';comment:split by space: 233 43"`
	NotifyRepeatStep int32  `gorm:"not null;default:0;comment:unit: min"`
	NotifyMaxNumber  int32  `gorm:"not null;default:0"`
	RecoverDuration  int32  `gorm:"not null;default:0;comment:unit: s"`
	Callbacks        string `gorm:"size:4096;not null;default:'';comment:split by space: http://a.com/api/x http://a.com/api/y"`
	RunbookURL       string `gorm:"size:4096"`
	AppendTags       string `gorm:"size:255;not null;default:'';comment:split by space: service=n9e mod=api"`
	Annotations      string `gorm:"type:text;not null;comment:annotations"`
	ExtraConfig      string `gorm:"type:text;not null;comment:extra_config"`
	CreateAt         int64  `gorm:"not null;default:0"`
	CreateBy         string `gorm:"size:64;not null;default:''"`
	UpdateAt         int64  `gorm:"not null;default:0;index"`
	UpdateBy         string `gorm:"size:64;not null;default:''"`
	// DatasourceQueries is the newer, structured replacement for DatasourceIDs.
	DatasourceQueries string `gorm:"type:text"`
}
func (InitPostgresAlertRule) TableName() string {
@@ -612,22 +614,23 @@ func (InitPostgresMetricView) TableName() string {
}
// InitRecordingRule is the MySQL schema for recording rules created during
// first-run database initialization. A recording rule periodically evaluates
// PromQL and stores the result as a new metric.
//
// Fix: the struct previously contained every field twice (diff-merge residue),
// which is invalid Go (duplicate field names); the field list below is the
// deduplicated, current version including the DatasourceQueries column.
type InitRecordingRule struct {
	ID               uint64 `gorm:"primaryKey;autoIncrement"`
	GroupID          uint64 `gorm:"not null;default:0;comment:group_id;index"`
	DatasourceIDs    string `gorm:"size:255;not null;default:'';comment:datasource ids"`
	Cluster          string `gorm:"size:128;not null"`
	Name             string `gorm:"size:255;not null;comment:new metric name"`
	Note             string `gorm:"size:255;not null;comment:rule note"`
	Disabled         bool   `gorm:"type:tinyint(1);not null;default:0;comment:0:enabled 1:disabled"`
	PromQL           string `gorm:"size:8192;not null;comment:promql"`
	PromEvalInterval int32  `gorm:"not null;comment:evaluate interval"`
	CronPattern      string `gorm:"size:255;default:'';comment:cron pattern"`
	AppendTags       string `gorm:"size:255;default:'';comment:split by space: service=n9e mod=api"`
	QueryConfigs     string `gorm:"type:text;not null;comment:query configs"`
	CreateAt         int64  `gorm:"default:0"`
	CreateBy         string `gorm:"size:64;default:''"`
	UpdateAt         int64  `gorm:"default:0;index"`
	UpdateBy         string `gorm:"size:64;default:''"`
	// DatasourceQueries is the newer, structured replacement for DatasourceIDs.
	DatasourceQueries string `gorm:"type:text"`
}
func (InitRecordingRule) TableName() string {
@@ -639,22 +642,23 @@ func (InitRecordingRule) TableOptions() string {
}
// InitPostgresRecordingRule is the PostgreSQL flavor of the recording_rule
// schema. It mirrors InitRecordingRule but stores Disabled as smallint since
// PostgreSQL has no tinyint type.
//
// Fix: the struct previously contained every field twice (diff-merge residue),
// which is invalid Go (duplicate field names); the field list below is the
// deduplicated, current version including the DatasourceQueries column.
type InitPostgresRecordingRule struct {
	ID               uint64 `gorm:"primaryKey;autoIncrement"`
	GroupID          uint64 `gorm:"not null;default:0;comment:group_id;index"`
	DatasourceIDs    string `gorm:"size:255;not null;default:'';comment:datasource ids"`
	Cluster          string `gorm:"size:128;not null"`
	Name             string `gorm:"size:255;not null;comment:new metric name"`
	Note             string `gorm:"size:255;not null;comment:rule note"`
	Disabled         int16  `gorm:"type:smallint;not null;default:0;comment:0:enabled 1:disabled"`
	PromQL           string `gorm:"size:8192;not null;comment:promql"`
	PromEvalInterval int32  `gorm:"not null;comment:evaluate interval"`
	CronPattern      string `gorm:"size:255;default:'';comment:cron pattern"`
	AppendTags       string `gorm:"size:255;default:'';comment:split by space: service=n9e mod=api"`
	QueryConfigs     string `gorm:"type:text;not null;comment:query configs"`
	CreateAt         int64  `gorm:"default:0"`
	CreateBy         string `gorm:"size:64;default:''"`
	UpdateAt         int64  `gorm:"default:0;index"`
	UpdateBy         string `gorm:"size:64;default:''"`
	// DatasourceQueries is the newer, structured replacement for DatasourceIDs.
	DatasourceQueries string `gorm:"type:text"`
}
func (InitPostgresRecordingRule) TableName() string {
@@ -1207,6 +1211,7 @@ type InitBuiltinMetric struct {
CreatedBy string `gorm:"size:191;not null;default:'';comment:creator"`
UpdatedAt int64 `gorm:"not null;default:0;comment:update time"`
UpdatedBy string `gorm:"size:191;not null;default:'';comment:updater"`
UUID int64 `gorm:"not null;default:0;comment:'uuid'"`
}
func (InitBuiltinMetric) TableName() string {
@@ -1230,6 +1235,7 @@ type InitSqliteBuiltinMetric struct {
CreatedBy string `gorm:"size:191;not null;default:'';comment:creator"`
UpdatedAt int64 `gorm:"not null;default:0;comment:update time"`
UpdatedBy string `gorm:"size:191;not null;default:'';comment:updater"`
UUID int64 `gorm:"not null;default:0;comment:'uuid'"`
}
func (InitSqliteBuiltinMetric) TableName() string {

View File

@@ -1,6 +1,7 @@
package ormx
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
@@ -10,6 +11,43 @@ import (
"gorm.io/gorm"
)
// TestCheckPostgresDatabaseExist exercises checkPostgresDatabaseExist against
// locally running database servers; it only asserts that the call returns no
// error, printing the existence result for manual inspection.
//
// NOTE(review): the MySQL and SQLite configs are also fed to the *postgres*
// checker — presumably to confirm it degrades gracefully on foreign DSNs;
// confirm that intent, since each case requires a live local server to pass.
func TestCheckPostgresDatabaseExist(t *testing.T) {
	tests := []struct {
		name   string
		config DBConfig
	}{
		{
			name: "MySQL",
			config: DBConfig{
				DBType: "mysql",
				DSN:    "root:1234@tcp(127.0.0.1:3306)/test?charset=utf8mb4&parseTime=True&loc=Local&allowNativePasswords=true",
			},
		},
		{
			name: "Postgres",
			config: DBConfig{
				DBType: "postgres",
				DSN:    "host=127.0.0.1 port=5432 user=root dbname=n9e_v6 password=1234 sslmode=disable",
			},
		},
		{
			name: "SQLite",
			config: DBConfig{
				DBType: "sqlite",
				DSN:    "./test.db",
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			exist, err := checkPostgresDatabaseExist(tt.config)
			// Fix: message previously read "exitst" and lacked a newline.
			fmt.Printf("exist: %v\n", exist)
			assert.NoError(t, err)
		})
	}
}
func TestDataBaseInit(t *testing.T) {
tests := []struct {
name string

View File

@@ -7,10 +7,10 @@ import (
"strings"
"time"
"github.com/glebarez/sqlite"
tklog "github.com/toolkits/pkg/logger"
"gorm.io/driver/mysql"
"gorm.io/driver/postgres"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"gorm.io/gorm/schema"
@@ -91,7 +91,7 @@ func createSqliteDatabase(dsn string, gconfig *gorm.Config) error {
if err != nil {
return fmt.Errorf("failed to open temporary connection: %v", err)
}
fmt.Println("sqlite file created")
return nil
@@ -110,7 +110,7 @@ func createPostgresDatabase(dsn string, gconfig *gorm.Config) error {
}
}
createDBQuery := fmt.Sprintf("CREATE DATABASE %s ENCODING='UTF8' LC_COLLATE='en_US.UTF-8' LC_CTYPE='en_US.UTF-8';", dbName)
createDBQuery := fmt.Sprintf("CREATE DATABASE %s ENCODING='UTF8' LC_COLLATE='en_US.utf8' LC_CTYPE='en_US.utf8';", dbName)
tempDialector := postgres.Open(connectionWithoutDB)
@@ -187,18 +187,16 @@ func checkSqliteDatabaseExist(c DBConfig) (bool, error) {
func checkPostgresDatabaseExist(c DBConfig) (bool, error) {
dsnParts := strings.Split(c.DSN, " ")
dbName := ""
connectionWithoutDB := ""
for _, part := range dsnParts {
if strings.HasPrefix(part, "dbname=") {
dbName = part[strings.Index(part, "=")+1:]
} else {
connectionWithoutDB += part
connectionWithoutDB += " "
}
}
dialector := postgres.Open(connectionWithoutDB)
dbName := ""
dbpair := ""
for _, part := range dsnParts {
if strings.HasPrefix(part, "dbname=") {
dbName = part[strings.Index(part, "=")+1:]
dbpair = part
}
}
connectionStr := strings.Replace(c.DSN, dbpair, "dbname=postgres", 1)
dialector := postgres.Open(connectionStr)
gconfig := &gorm.Config{
NamingStrategy: schema.NamingStrategy{
@@ -213,89 +211,89 @@ func checkPostgresDatabaseExist(c DBConfig) (bool, error) {
return false, fmt.Errorf("failed to open database: %v", err)
}
var databases []string
query := genQuery(c)
if err := db.Raw(query).Scan(&databases).Error; err != nil {
return false, fmt.Errorf("failed to query: %v", err)
}
var databases []string
query := genQuery(c)
if err := db.Raw(query).Scan(&databases).Error; err != nil {
return false, fmt.Errorf("failed to query: %v", err)
}
for _, database := range databases {
if database == dbName {
for _, database := range databases {
if database == dbName {
fmt.Println("Database exist")
return true, nil
}
}
return true, nil
}
}
return false, nil
return false, nil
}
// checkMysqlDatabaseExist reports whether the database named in c.DSN already
// exists. It parses the database name out of the MySQL-style DSN, reconnects
// without selecting a database, lists all databases via genQuery, and compares
// each returned name against the parsed one.
//
// Fix: the body previously contained every statement twice (diff-merge
// residue), which did not compile; this is the deduplicated version.
func checkMysqlDatabaseExist(c DBConfig) (bool, error) {
	// MySQL DSN shape: user:pass@tcp(host:port)/dbname?params
	dsnParts := strings.SplitN(c.DSN, "/", 2)
	if len(dsnParts) != 2 {
		return false, fmt.Errorf("failed to parse DSN: %s", c.DSN)
	}

	connectionInfo := dsnParts[0]
	dbInfo := dsnParts[1]
	dbName := dbInfo

	queryIndex := strings.Index(dbInfo, "?")
	if queryIndex != -1 {
		dbName = dbInfo[:queryIndex]
	} else {
		return false, fmt.Errorf("failed to parse database name from DSN: %s", c.DSN)
	}

	// Connect with an empty database name so the check works even when the
	// target database has not been created yet.
	connectionWithoutDB := connectionInfo + "/?" + dbInfo[queryIndex+1:]

	var dialector gorm.Dialector
	switch strings.ToLower(c.DBType) {
	case "mysql":
		dialector = mysql.Open(connectionWithoutDB)
	case "postgres":
		dialector = postgres.Open(connectionWithoutDB)
	default:
		return false, fmt.Errorf("unsupported database type: %s", c.DBType)
	}

	gconfig := &gorm.Config{
		NamingStrategy: schema.NamingStrategy{
			TablePrefix:   c.TablePrefix,
			SingularTable: true,
		},
		Logger: gormLogger,
	}

	db, err := gorm.Open(dialector, gconfig)
	if err != nil {
		return false, fmt.Errorf("failed to open database: %v", err)
	}

	var databases []string
	query := genQuery(c)
	if err := db.Raw(query).Scan(&databases).Error; err != nil {
		return false, fmt.Errorf("failed to query: %v", err)
	}

	for _, database := range databases {
		if database == dbName {
			return true, nil
		}
	}

	return false, nil
}
// genQuery returns the SQL statement that lists all databases for the
// configured database type. SQLite has no such statement (a database is a
// file), so it — and any unknown type — yields an empty string.
//
// Fix: the switch arms previously appeared twice (diff-merge residue), which
// did not compile; this is the deduplicated version.
func genQuery(c DBConfig) string {
	switch strings.ToLower(c.DBType) {
	case "mysql":
		return "SHOW DATABASES"
	case "postgres":
		return "SELECT datname FROM pg_database"
	case "sqlite":
		return ""
	default:
		return ""
	}
}

View File

@@ -63,7 +63,7 @@ func GetByUrl[T any](url string, cfg conf.CenterApi) (T, error) {
Timeout: time.Duration(cfg.Timeout) * time.Millisecond,
}
if useProxy(url) {
if UseProxy(url) {
client.Transport = ProxyTransporter
}
@@ -147,7 +147,7 @@ func PostByUrl[T any](url string, cfg conf.CenterApi, v interface{}) (t T, err e
Timeout: time.Duration(cfg.Timeout) * time.Millisecond,
}
if useProxy(url) {
if UseProxy(url) {
client.Transport = ProxyTransporter
}
@@ -195,7 +195,7 @@ var ProxyTransporter = &http.Transport{
Proxy: http.ProxyFromEnvironment,
}
func useProxy(url string) bool {
func UseProxy(url string) bool {
// N9E_PROXY_URL=oapi.dingtalk.com,feishu.com
patterns := os.Getenv("N9E_PROXY_URL")
if patterns != "" {
@@ -228,7 +228,7 @@ func PostJSON(url string, timeout time.Duration, v interface{}, retries ...int)
Timeout: timeout,
}
if useProxy(url) {
if UseProxy(url) {
client.Transport = ProxyTransporter
}

181
pkg/promql/parser.go Normal file
View File

@@ -0,0 +1,181 @@
package promql
import (
"regexp"
"strings"
"github.com/VictoriaMetrics/metricsql"
"github.com/prometheus/prometheus/promql/parser"
)
// SplitBinaryOp parses code as a MetricsQL query and returns the distinct
// comparison operands found in it (see ParseExpr). The order of the returned
// slice is unspecified, because operands are collected in a map.
func SplitBinaryOp(code string) ([]string, error) {
	expr, err := metricsql.Parse(code)
	if err != nil {
		return nil, err
	}

	operands := make(map[string]struct{})
	ParseExpr(expr, false, operands)

	var result []string
	for op := range operands {
		result = append(result, op)
	}
	return result, nil
}
// GetMetric parses ql as PromQL and returns, for every metric referenced, a
// reconstructed selector string keyed by the metric name — e.g.
// `metric{label1="v1"}` — or just the bare metric name when the selector
// carries no labels.
func GetMetric(ql string) (map[string]string, error) {
	metrics := make(map[string]string)

	expr, err := parser.ParseExpr(ql)
	if err != nil {
		return metrics, err
	}

	for _, sel := range parser.ExtractSelectors(expr) {
		metric := ""
		var matchers []string
		for _, m := range sel {
			if m.Name == "__name__" {
				metric = m.Value
				continue
			}
			// Rebuild the matcher text, e.g. label1="value1" or label2=~"v.*".
			matchers = append(matchers, m.Name+m.Type.String()+"\""+m.Value+"\"")
		}
		if len(matchers) == 0 {
			metrics[metric] = metric
		} else {
			metrics[metric] = metric + "{" + strings.Join(matchers, ",") + "}"
		}
	}
	return metrics, nil
}
// GetLabels parses ql as a PromQL query and returns every label name/value
// pair appearing in its selectors, excluding the reserved __name__ label.
// When the same label occurs in several selectors, the last value wins.
func GetLabels(ql string) (map[string]string, error) {
	result := make(map[string]string)

	expr, err := parser.ParseExpr(ql)
	if err != nil {
		return result, err
	}

	for _, sel := range parser.ExtractSelectors(expr) {
		for _, m := range sel {
			if m.Name == "__name__" {
				continue
			}
			result[m.Name] = m.Value
		}
	}
	return result, nil
}
// GetLabelsAndMetricName parses ql as PromQL and returns both the label
// name/value pairs of all selectors and the metric name. When the query
// references several metrics, the last one encountered wins.
func GetLabelsAndMetricName(ql string) (map[string]string, string, error) {
	labels := make(map[string]string)
	metricName := ""

	expr, err := parser.ParseExpr(ql)
	if err != nil {
		return labels, metricName, err
	}

	for _, sel := range parser.ExtractSelectors(expr) {
		for _, m := range sel {
			if m.Name == "__name__" {
				metricName = m.Value
				continue
			}
			labels[m.Name] = m.Value
		}
	}
	return labels, metricName, nil
}
// Label is a single PromQL label matcher extracted from a query.
type Label struct {
	Name  string // label name
	Value string // matcher value
	Op    string // matcher operator as text, e.g. "=", "!=", "=~", "!~"
}
// GetLabelsAndMetricNameWithReplace parses ql after masking the variable
// placeholder rep (e.g. "$") with "____" so that variable-bearing queries
// still parse, then returns the label matchers (placeholders restored) and
// the metric name. Range selectors such as "[2m]" are stripped first via
// removeBrackets; a metric name that still contains "$" after unmasking is
// treated as a variable and skipped.
func GetLabelsAndMetricNameWithReplace(ql string, rep string) (map[string]Label, string, error) {
	labels := make(map[string]Label)
	metricName := ""

	// Mask the placeholder so the PromQL parser accepts the query.
	masked := removeBrackets(strings.ReplaceAll(ql, rep, "____"))

	expr, err := parser.ParseExpr(masked)
	if err != nil {
		return labels, metricName, err
	}

	for _, sel := range parser.ExtractSelectors(expr) {
		for _, m := range sel {
			// Restore the original placeholder in the matcher value.
			value := strings.ReplaceAll(m.Value, "____", rep)
			if m.Name == "__name__" {
				// Ignore "metrics" that are really unmasked variables.
				if strings.Contains(value, "$") {
					continue
				}
				metricName = value
				continue
			}
			labels[m.Name] = Label{
				Name:  m.Name,
				Value: value,
				Op:    m.Type.String(),
			}
		}
	}
	return labels, metricName, nil
}
// GetFirstMetric returns the first metric name (__name__ value) found in ql,
// or the empty string when the query contains no metric selector.
func GetFirstMetric(ql string) (string, error) {
	expr, err := parser.ParseExpr(ql)
	if err != nil {
		return "", err
	}

	for _, sel := range parser.ExtractSelectors(expr) {
		for _, m := range sel {
			if m.Name == "__name__" {
				return m.Value, nil
			}
		}
	}
	return "", nil
}
// rangeVectorKeywords lists function-name fragments whose presence means the
// query's "[...]" parts are genuine range selectors that must be preserved.
var rangeVectorKeywords = []string{
	"_over_time", "rate", "increase", "predict_linear", "resets",
	"changes", "holt_winters", "delta", "deriv",
}

// bracketRe matches a bracketed segment such as "[5m]" or "[$__interval]".
// Hoisted to package level so the pattern is compiled once, not on every call
// (regexp.MustCompile in the function body recompiled it per invocation).
var bracketRe = regexp.MustCompile(`\[[^\]]*\]`)

// removeBrackets strips "[...]" segments from promql so that queries whose
// range duration is a variable (e.g. "[$interval]") can still be parsed.
// Queries that use range-vector functions (rate, increase, *_over_time, ...)
// are returned unchanged, because there the brackets are required syntax.
func removeBrackets(promql string) string {
	for _, kw := range rangeVectorKeywords {
		if strings.Contains(promql, kw) {
			return promql
		}
	}
	if !strings.Contains(promql, "[") {
		return promql
	}
	return bracketRe.ReplaceAllString(promql, "")
}

218
pkg/promql/perser_test.go Normal file
View File

@@ -0,0 +1,218 @@
package promql
import (
"reflect"
"testing"
)
// TestGetMetric verifies that GetMetric reconstructs the full selector string
// for each metric referenced in a PromQL query.
func TestGetMetric(t *testing.T) {
	tests := []struct {
		name    string
		ql      string
		want    map[string]string
		wantErr error
	}{
		{
			name:    "Valid query with labels",
			ql:      "metric_name{label1=\"value1\",label2=\"value2\"}",
			want:    map[string]string{"metric_name": "metric_name{label1=\"value1\",label2=\"value2\"}"},
			wantErr: nil,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := GetMetric(tt.ql)
			// Only an unexpected non-nil error fails the test.
			if err != tt.wantErr && err != nil {
				t.Errorf("GetMetric() error = %v, wantErr %v ql:%s", err, tt.wantErr, tt.ql)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("GetMetric() = %v, want %v", got, tt.want)
			}
		})
	}
}
// TestGetLabels verifies that GetLabels extracts label name/value pairs from
// a PromQL query, excluding the reserved __name__ label.
func TestGetLabels(t *testing.T) {
	tests := []struct {
		name    string
		ql      string
		want    map[string]string
		wantErr bool
	}{
		{
			name: "Valid query with multiple labels",
			ql:   "metric_name{label1=\"value1\", label2=\"value2\"} > 3",
			want: map[string]string{"label1": "value1", "label2": "value2"},
		},
		{
			// Variable placeholders in values are returned verbatim.
			name: "Valid query with multiple labels",
			ql:   "metric_name{label1=\"$value1\", label2=\"$value2\"} > 3",
			want: map[string]string{"label1": "$value1", "label2": "$value2"},
		},
		{
			name: "Query without labels",
			ql:   "metric_name",
			want: map[string]string{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := GetLabels(tt.ql)
			if (err != nil) != tt.wantErr {
				t.Errorf("GetLabels() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("GetLabels() = %v, want %v ql:%s", got, tt.want, tt.ql)
			}
		})
	}
}
// TestGetLabelsAndMetricNameWithReplace verifies that queries containing
// dashboard-style variables ($var) are masked, parsed, and have their labels
// and metric name extracted correctly.
func TestGetLabelsAndMetricNameWithReplace(t *testing.T) {
	// Define test cases.
	tests := []struct {
		name               string
		ql                 string
		rep                string
		expectedLabels     map[string]Label
		expectedMetricName string
		expectError        bool
	}{
		{
			name: "正常情况",
			ql:   `(snmp_arista_system_cpuuse{ent_descr="$ent_descr"} / 100 > $cpu_high_threshold[1m])`,
			rep:  "$",
			expectedLabels: map[string]Label{
				"ent_descr": {Name: "ent_descr", Value: "$ent_descr", Op: "="},
			},
			expectedMetricName: "snmp_arista_system_cpuuse",
			expectError:        false,
		},
		{
			name: "正常情况",
			ql:   `rate(snmp_interface_incoming{agent_host='$agent_host',ifname='$ifname'}[2m]) * 8 / 10^9 > snmp_interface_speed{agent_host='$agent_host',ifname='$ifname'}/ 10^3 * $traffic_in and snmp_interface_speed{agent_host='$agent_host',ifname='$ifname'} > 0`,
			rep:  "$",
			expectedLabels: map[string]Label{
				"agent_host": {Name: "agent_host", Value: "$agent_host", Op: "="},
				"ifname":     {Name: "ifname", Value: "$ifname", Op: "="},
			},
			expectedMetricName: "snmp_interface_speed",
			expectError:        false,
		},
		{
			name: "正常情况",
			ql:   `rate(snmp_interface_incoming{agent_host='$agent_host',ifname='$ifname'}[2m]) * 8 / 10^9 > snmp_interface_speed{agent_host='$agent_host',ifname='$ifname'}/ 10^3 * $traffic_in`,
			rep:  "$",
			expectedLabels: map[string]Label{
				"agent_host": {Name: "agent_host", Value: "$agent_host", Op: "="},
				"ifname":     {Name: "ifname", Value: "$ifname", Op: "="},
			},
			expectedMetricName: "snmp_interface_speed",
			expectError:        false,
		},
		{
			name: "正常情况",
			ql:   `rate(snmp_interface_incoming{agent_host='$agent_host',ifname='$ifname'}[2m]) * 8 / 10^9 > 10`,
			rep:  "$",
			expectedLabels: map[string]Label{
				"agent_host": {Name: "agent_host", Value: "$agent_host", Op: "="},
				"ifname":     {Name: "ifname", Value: "$ifname", Op: "="},
			},
			expectedMetricName: "snmp_interface_incoming",
			expectError:        false,
		},
		{
			name: "带有替换字符",
			ql:   `rate(snmp_interface_outgoing{Role=~'ZRT.*',agent_host='$agent_host',ifname='$ifname'}[2m]) * 8 / 10^9 > snmp_interface_speed{Role=~'ZRT.*',agent_host='$agent_host',ifname='$ifname'}/ 10^3 * $outgoing_warning and snmp_interface_speed{Role=~'ZRT.*',agent_host='$agent_host',ifname='$ifname'} > 0`,
			rep:  "$",
			expectedLabels: map[string]Label{
				"agent_host": {Name: "agent_host", Value: "$agent_host", Op: "="},
				"ifname":     {Name: "ifname", Value: "$ifname", Op: "="},
				"Role":       {Name: "Role", Value: "ZRT.*", Op: "=~"},
			},
			expectedMetricName: "snmp_interface_speed",
			expectError:        false,
		},
		// More test cases...
		{
			// Fix: the expected values here were copy-paste residue from the
			// case above (empty labels, metric "snmp_interface_speed" that the
			// query does not even reference). The query parses to metric "mem"
			// with three label matchers; "$val" is skipped as a variable.
			name: "告警规则支持变量",
			ql:   `mem{test1="$test1", test2="$test2", test3="test3"} > $val`,
			rep:  "$",
			expectedLabels: map[string]Label{
				"test1": {Name: "test1", Value: "$test1", Op: "="},
				"test2": {Name: "test2", Value: "$test2", Op: "="},
				"test3": {Name: "test3", Value: "test3", Op: "="},
			},
			expectedMetricName: "mem",
			expectError:        false,
		},
	}
	// Run test cases.
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			labels, metricName, err := GetLabelsAndMetricNameWithReplace(tc.ql, tc.rep)
			if (err != nil) != tc.expectError {
				t.Errorf("ql:%s 测试 '%v' 发生错误: %v, 期望的错误状态: %v", tc.ql, tc.name, err, tc.expectError)
			}
			if !reflect.DeepEqual(labels, tc.expectedLabels) {
				t.Errorf("ql:%s 测试 '%v' 返回的标签不匹配: got %v, want %v", tc.ql, tc.name, labels, tc.expectedLabels)
			}
			if metricName != tc.expectedMetricName {
				t.Errorf("ql:%s 测试 '%v' 返回的度量名称不匹配: got %s, want %s", tc.ql, tc.name, metricName, tc.expectedMetricName)
			}
		})
	}
}
// TestSplitBinaryOp verifies that SplitBinaryOp splits comparison expressions
// into their operands.
//
// Fix: SplitBinaryOp collects operands in a map, so the order of the returned
// slice is not deterministic; the old reflect.DeepEqual comparison failed
// intermittently for the multi-operand case. Compare as a set instead.
func TestSplitBinaryOp(t *testing.T) {
	tests := []struct {
		name    string
		code    string
		want    []string
		wantErr bool
	}{
		{
			name: "valid binary operation with spaces",
			code: "cpu_usage + memory_usage",
			want: []string{"cpu_usage + memory_usage"},
		},
		{
			name: "12",
			code: "cpu_usage > 0 and memory_usage>0",
			want: []string{"cpu_usage", "memory_usage"},
		},
		{
			name: "12",
			code: "cpu_usage +1> 0",
			want: []string{"cpu_usage + 1"},
		},
		{
			name: "valid complex binary operation",
			code: "(cpu_usage + memory_usage) / 2",
			want: []string{"(cpu_usage + memory_usage) / 2"},
		},
		{
			name:    "invalid binary operation",
			code:    "cpu_usage + ",
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := SplitBinaryOp(tt.code)
			if (err != nil) != tt.wantErr {
				t.Errorf("SplitBinaryOp() code:%s error = %v, wantErr %v", tt.code, err, tt.wantErr)
				return
			}
			// Order-insensitive comparison: same length, same members.
			if len(got) != len(tt.want) {
				t.Errorf("SplitBinaryOp() got = %v, want %v", got, tt.want)
				return
			}
			wantSet := make(map[string]struct{}, len(tt.want))
			for _, w := range tt.want {
				wantSet[w] = struct{}{}
			}
			for _, g := range got {
				if _, ok := wantSet[g]; !ok {
					t.Errorf("SplitBinaryOp() got = %v, want %v", got, tt.want)
					return
				}
			}
		})
	}
}

104
pkg/promql/promql.go Normal file
View File

@@ -0,0 +1,104 @@
package promql
import (
"github.com/VictoriaMetrics/metricsql"
)
// copy from https://github.com/laixintao/promqlpy/blob/main/go/promql/promql.go

// ModifierExpr represents MetricsQL modifier such as `<op> (...)`,
// e.g. `on (instance)` or `group_left (job)`.
type ModifierExpr struct {
	// Op is modifier operation.
	Op string `json:"op"`

	// Args contains modifier args from parens.
	Args []string `json:"args"`
}
// Expression is a decomposed MetricsQL expression node produced by ParseExpr.
type Expression struct {
	// if true, all fields are set
	// if false, then it's a normal expression, only `code` is set
	IsBinaryOp bool        `json:"is_binary_op"`
	Left       *Expression `json:"left"`
	Right      *Expression `json:"right"`
	Op         string      `json:"op"`

	// GroupModifier contains modifier such as "on" or "ignoring".
	GroupModifier ModifierExpr `json:"group_modifier"`
	// JoinModifier contains modifier such as "group_left" or "group_right".
	JoinModifier ModifierExpr `json:"join_modifier"`

	// Code is the serialized source text of this (sub)expression.
	Code string `json:"code"`
}
// compareOps are the MetricsQL comparison operators; ParseExpr records the
// left-hand side of each comparison as a standalone operand.
var compareOps = map[string]bool{
	"==": true,
	"!=": true,
	">":  true,
	"<":  true,
	">=": true,
	"<=": true,
}

// logicalOps are the MetricsQL set operators that join two sub-expressions;
// ParseExpr keeps decomposing both sides of these.
var logicalOps = map[string]bool{
	"and":    true,
	"or":     true,
	"unless": true,
}
// ParseExpr recursively decomposes a MetricsQL expression tree into an
// Expression, collecting into m the source code of every comparison's
// left-hand side (or, for a query with no comparison at all, the whole
// query).
//
// When mustBeExpression is true the current node is already known to be a
// plain operand, so it is returned as-is without further decomposition.
// Example: in `(a > 10) > b`, `a > 10` is an operand compared to b.
func ParseExpr(expr metricsql.Expr, mustBeExpression bool, m map[string]struct{}) *Expression {
	code := string(expr.AppendString(nil))

	// Known operand: stop decomposing.
	if mustBeExpression {
		return &Expression{Code: code, IsBinaryOp: false}
	}

	if bop, ok := expr.(*metricsql.BinaryOpExpr); ok {
		switch {
		case logicalOps[bop.Op]:
			// and/or/unless: both sides may themselves contain comparisons.
			return &Expression{
				Left:          ParseExpr(bop.Left, false, m),
				Right:         ParseExpr(bop.Right, false, m),
				GroupModifier: ModifierExpr(bop.GroupModifier),
				JoinModifier:  ModifierExpr(bop.JoinModifier),
				Op:            bop.Op,
				Code:          code,
				IsBinaryOp:    true,
			}
		case compareOps[bop.Op]:
			// Record the comparison's left-hand side as an operand.
			m[string(bop.Left.AppendString(nil))] = struct{}{}
			return &Expression{
				Left:          ParseExpr(bop.Left, true, m),
				Right:         ParseExpr(bop.Right, true, m),
				GroupModifier: ModifierExpr(bop.GroupModifier),
				JoinModifier:  ModifierExpr(bop.JoinModifier),
				Op:            bop.Op,
				Code:          code,
				IsBinaryOp:    true,
			}
		}
	}

	// No comparison was found anywhere: the whole query is the one operand.
	if len(m) == 0 {
		m[code] = struct{}{}
	}

	// Arithmetic operators (+, -, *, ...) are treated as normal expressions;
	// default: return the literal code as it is.
	return &Expression{Code: code, IsBinaryOp: false}
}

View File

@@ -114,7 +114,7 @@ func (rt *Router) remoteWrite(c *gin.Context) {
var (
ignoreIdent = ginx.QueryBool(c, "ignore_ident", false)
ignoreHost = ginx.QueryBool(c, "ignore_host", false)
ignoreHost = ginx.QueryBool(c, "ignore_host", true) // 默认值改成 true要不然答疑成本太高。发版的时候通知 telegraf 用户,让他们设置 ignore_host=false
ids = make(map[string]struct{})
)