forked from pneymrl2f/nightingale
parent 2ef9a77325
commit 4e6e70c14d
@@ -0,0 +1,7 @@
package alert

import "context"

func Start(ctx context.Context) {
	go popEvent()
}
File diff suppressed because it is too large
@@ -0,0 +1,88 @@
package backend

import (
	"fmt"

	"github.com/prometheus/prometheus/promql"

	"github.com/didi/nightingale/v5/vos"
	"github.com/toolkits/pkg/container/list"

	pp "github.com/didi/nightingale/v5/backend/prome"
)

type BackendSection struct {
	DataSource string          `yaml:"datasource"`
	Prometheus pp.PromeSection `yaml:"prometheus"`
}

type DataSource interface {
	PushEndpoint

	QueryData(inputs vos.DataQueryParam) []*vos.DataQueryResp           // query data over a time range
	QueryTagKeys(recv vos.CommonTagQueryParam) *vos.TagKeyQueryResp     // fetch label names
	QueryTagValues(recv vos.CommonTagQueryParam) *vos.TagValueQueryResp // fetch the values of one label_name
	QueryTagPairs(recv vos.CommonTagQueryParam) *vos.TagPairQueryResp   // fetch all matching series; the three methods above share one param struct
	QueryMetrics(recv vos.MetricQueryParam) *vos.MetricQueryResp        // query metric_names by labels
	QueryVector(ql string) promql.Vector                                // used by prometheus pull alerting; other datasources may leave this empty
	CleanUp()                                                           // cleanup work to run when the datasource exits
}

type PushEndpoint interface {
	Push2Queue(items []*vos.MetricPoint)
}

var (
	defaultDataSource     string
	registryDataSources   = make(map[string]DataSource)
	registryPushEndpoints = make(map[string]PushEndpoint)
)

func Init(cfg BackendSection) {
	defaultDataSource = cfg.DataSource

	// init prometheus
	if cfg.Prometheus.Enable {
		promeDs := &pp.PromeDataSource{
			Section:   cfg.Prometheus,
			PushQueue: list.NewSafeListLimited(10240000),
		}
		promeDs.Init()
		RegisterDataSource(cfg.Prometheus.Name, promeDs)
	}
}

// get backend datasource
// (pluginId == "" for default datasource)
func GetDataSourceFor(pluginId string) (DataSource, error) {
	if pluginId == "" {
		pluginId = defaultDataSource
	}
	if source, exists := registryDataSources[pluginId]; exists {
		return source, nil
	}
	return nil, fmt.Errorf("could not find datasource for plugin: %s", pluginId)
}

func DatasourceCleanUp() {
	for _, ds := range registryDataSources {
		ds.CleanUp()
	}
}

// get all push endpoints
func GetPushEndpoints() ([]PushEndpoint, error) {
	if len(registryPushEndpoints) > 0 {
		items := make([]PushEndpoint, 0, len(registryPushEndpoints))
		for _, value := range registryPushEndpoints {
			items = append(items, value)
		}
		return items, nil
	}
	return nil, fmt.Errorf("could not find any pushendpoint")
}

func RegisterDataSource(pluginId string, datasource DataSource) {
	registryDataSources[pluginId] = datasource
	registryPushEndpoints[pluginId] = datasource
}
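The registry above resolves datasources by plugin name and falls back to the configured default when the id is empty; registering a datasource also registers it as a push endpoint. A minimal usage sketch follows (not part of this commit; the import paths and the config.Config.Trans.Backend field path are assumptions based on the structs shown elsewhere in this diff):

package main

import (
	"fmt"
	"log"

	"github.com/didi/nightingale/v5/backend"
	"github.com/didi/nightingale/v5/config"
	"github.com/didi/nightingale/v5/vos"
)

func main() {
	// Assumes config.Parse() populates config.Config as in the config package
	// later in this diff; the exact field path is an assumption.
	if err := config.Parse(); err != nil {
		log.Fatalln(err)
	}
	backend.Init(config.Config.Trans.Backend)
	defer backend.DatasourceCleanUp()

	// An empty pluginId resolves to the configured default datasource.
	ds, err := backend.GetDataSourceFor("")
	if err != nil {
		log.Fatalln("no datasource available:", err)
	}
	fmt.Println(len(ds.QueryVector(`up == 0`)), "series matched")

	// Every registered datasource is also a push endpoint, so this loop
	// fans points out to all backends.
	if endpoints, err := backend.GetPushEndpoints(); err == nil {
		for _, ep := range endpoints {
			ep.Push2Queue([]*vos.MetricPoint{})
		}
	}
}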
@@ -0,0 +1,162 @@
package backend

import (
	"bufio"
	"bytes"
	"context"

	"io"
	"io/ioutil"
	"net/http"
	"regexp"
	"time"

	"github.com/gogo/protobuf/proto"
	"github.com/golang/snappy"
	"github.com/opentracing-contrib/go-stdlib/nethttp"
	"github.com/opentracing/opentracing-go"
	"github.com/pkg/errors"
	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/prometheus/prometheus/prompb"

	"github.com/didi/nightingale/v5/vos"
)

var MetricNameRE = regexp.MustCompile(`^[a-zA-Z_:][a-zA-Z0-9_:]*$`)

type sample struct {
	labels labels.Labels
	t      int64
	v      float64
}

func labelsToLabelsProto(labels labels.Labels, buf []prompb.Label) []prompb.Label {
	result := buf[:0]
	if cap(buf) < len(labels) {
		result = make([]prompb.Label, 0, len(labels))
	}
	for _, l := range labels {
		result = append(result, prompb.Label{
			Name:  l.Name,
			Value: l.Value,
		})
	}
	return result
}

func (pd *PromeDataSource) convertOne(item *vos.MetricPoint) (prompb.TimeSeries, error) {
	pt := prompb.TimeSeries{}
	pt.Samples = []prompb.Sample{{}}
	s := sample{}
	s.t = item.Time
	s.v = item.Value
	// name
	if !MetricNameRE.MatchString(item.Metric) {
		return pt, errors.New("invalid metrics name")
	}
	nameLs := labels.Label{
		Name:  LABEL_NAME,
		Value: item.Metric,
	}
	s.labels = append(s.labels, nameLs)
	if item.Ident != "" {
		identLs := labels.Label{
			Name:  LABEL_IDENT,
			Value: item.Ident,
		}
		s.labels = append(s.labels, identLs)
	}

	for k, v := range item.TagsMap {
		if model.LabelNameRE.MatchString(k) {
			ls := labels.Label{
				Name:  k,
				Value: v,
			}
			s.labels = append(s.labels, ls)
		}
	}

	pt.Labels = labelsToLabelsProto(s.labels, pt.Labels)
	// the input time is in seconds; remote write expects millisecond timestamps
	tsMs := time.Unix(s.t, 0).UnixNano() / 1e6
	pt.Samples[0].Timestamp = tsMs
	pt.Samples[0].Value = s.v
	return pt, nil
}

type RecoverableError struct {
	error
}

func remoteWritePost(c *HttpClient, req []byte) error {
	httpReq, err := http.NewRequest("POST", c.url.String(), bytes.NewReader(req))
	if err != nil {
		// Errors from NewRequest are from unparsable URLs, so are not
		// recoverable.
		return err
	}

	httpReq.Header.Add("Content-Encoding", "snappy")
	httpReq.Header.Set("Content-Type", "application/x-protobuf")
	httpReq.Header.Set("User-Agent", "n9e-v5")
	httpReq.Header.Set("X-Prometheus-Remote-Write-Version", "0.1.0")
	ctx, cancel := context.WithTimeout(context.Background(), c.timeout)
	defer cancel()

	httpReq = httpReq.WithContext(ctx)

	if parentSpan := opentracing.SpanFromContext(ctx); parentSpan != nil {
		var ht *nethttp.Tracer
		httpReq, ht = nethttp.TraceRequest(
			parentSpan.Tracer(),
			httpReq,
			nethttp.OperationName("Remote Store"),
			nethttp.ClientTrace(false),
		)
		defer ht.Finish()
	}

	httpResp, err := c.Client.Do(httpReq)
	if err != nil {
		// Errors from Client.Do are from (for example) network errors, so are
		// recoverable.
		return RecoverableError{err}
	}
	defer func() {
		io.Copy(ioutil.Discard, httpResp.Body)
		httpResp.Body.Close()
	}()

	if httpResp.StatusCode/100 != 2 {
		scanner := bufio.NewScanner(io.LimitReader(httpResp.Body, 512))
		line := ""
		if scanner.Scan() {
			line = scanner.Text()
		}
		err = errors.Errorf("server returned HTTP status %s: %s", httpResp.Status, line)
	}
	if httpResp.StatusCode/100 == 5 {
		return RecoverableError{err}
	}
	return err
}

func (pd *PromeDataSource) buildWriteRequest(samples []prompb.TimeSeries) ([]byte, error) {
	req := &prompb.WriteRequest{
		Timeseries: samples,
		Metadata:   nil,
	}

	data, err := proto.Marshal(req)
	if err != nil {
		return nil, err
	}

	compressed := snappy.Encode(nil, data)
	return compressed, nil
}
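buildWriteRequest produces a snappy-compressed, protobuf-encoded prompb.WriteRequest, which is the payload remoteWritePost sends. A standalone round-trip sketch (not part of this commit) that encodes one series the same way and decodes it back, useful for sanity-checking the wire format:

package main

import (
	"fmt"
	"log"

	"github.com/gogo/protobuf/proto"
	"github.com/golang/snappy"
	"github.com/prometheus/prometheus/prompb"
)

func main() {
	// Encode one TimeSeries the same way buildWriteRequest does.
	req := &prompb.WriteRequest{
		Timeseries: []prompb.TimeSeries{{
			Labels:  []prompb.Label{{Name: "__name__", Value: "cpu_idle"}},
			Samples: []prompb.Sample{{Timestamp: 1620000000000, Value: 42}},
		}},
	}
	data, err := proto.Marshal(req)
	if err != nil {
		log.Fatalln(err)
	}
	compressed := snappy.Encode(nil, data)

	// Decode it back, as a remote-write receiver would.
	raw, err := snappy.Decode(nil, compressed)
	if err != nil {
		log.Fatalln(err)
	}
	var got prompb.WriteRequest
	if err := proto.Unmarshal(raw, &got); err != nil {
		log.Fatalln(err)
	}
	fmt.Println(got.Timeseries[0].Labels, got.Timeseries[0].Samples)
}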
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -0,0 +1,8 @@
#!/bin/bash

# release version
version=5.0.0-rc1

export GO111MODULE=on
go build -ldflags "-X main.version=${version}" -o n9e-server main.go
@@ -0,0 +1,32 @@
package cache

import (
	"regexp"
	"sync"
)

type AlertMuteMap struct {
	sync.RWMutex
	Data map[string][]Filter
}

type Filter struct {
	ResReg  *regexp.Regexp
	TagsMap map[string]string
}

var AlertMute = &AlertMuteMap{Data: make(map[string][]Filter)}

func (a *AlertMuteMap) SetAll(m map[string][]Filter) {
	a.Lock()
	defer a.Unlock()
	a.Data = m
}

func (a *AlertMuteMap) GetByKey(key string) ([]Filter, bool) {
	a.RLock()
	defer a.RUnlock()

	value, exists := a.Data[key]
	return value, exists
}
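A sketch of how a consumer might check an alert event against these mute filters; keying the map by metric name and the event fields used here (metric, resource ident, tags) are assumptions for illustration, not something this diff specifies:

package main

import (
	"fmt"
	"regexp"

	"github.com/didi/nightingale/v5/cache"
)

// isMuted reports whether an event with the given metric, resource ident and
// tags hits any mute filter. Keying AlertMute by metric name is an assumption
// made for this sketch.
func isMuted(metric, ident string, tags map[string]string) bool {
	filters, exists := cache.AlertMute.GetByKey(metric)
	if !exists {
		return false
	}
	for _, f := range filters {
		if f.ResReg != nil && !f.ResReg.MatchString(ident) {
			continue
		}
		hit := true
		for k, v := range f.TagsMap {
			if tags[k] != v {
				hit = false
				break
			}
		}
		if hit {
			return true
		}
	}
	return false
}

func main() {
	cache.AlertMute.SetAll(map[string][]cache.Filter{
		"cpu_idle": {{
			ResReg:  regexp.MustCompile(`^web-`),
			TagsMap: map[string]string{"region": "bj"},
		}},
	})
	fmt.Println(isMuted("cpu_idle", "web-01", map[string]string{"region": "bj"})) // true
}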
@@ -0,0 +1,7 @@
package cache

import (
	cmap "github.com/orcaman/concurrent-map"
)

var MetricDescMapper = cmap.New()
@@ -0,0 +1,27 @@
package cache

import (
	"sync"
)

type ClasspathPrefixMap struct {
	sync.RWMutex
	Data map[int64][]int64
}

var ClasspathPrefix = &ClasspathPrefixMap{Data: make(map[int64][]int64)}

func (c *ClasspathPrefixMap) Get(id int64) ([]int64, bool) {
	c.RLock()
	defer c.RUnlock()
	ids, exists := c.Data[id]
	return ids, exists
}

func (c *ClasspathPrefixMap) SetAll(data map[int64][]int64) {
	c.Lock()
	defer c.Unlock()

	c.Data = data
}
@@ -0,0 +1,33 @@
package cache

import (
	"sync"

	"github.com/didi/nightingale/v5/models"
)

type ClasspathResMap struct {
	sync.RWMutex
	Data map[int64]*ClasspathAndRes
}

type ClasspathAndRes struct {
	Res       []string
	Classpath *models.Classpath
}

// classpath_id -> classpath & res_idents
var ClasspathRes = &ClasspathResMap{Data: make(map[int64]*ClasspathAndRes)}

func (c *ClasspathResMap) Get(id int64) (*ClasspathAndRes, bool) {
	c.RLock()
	defer c.RUnlock()
	resources, exists := c.Data[id]
	return resources, exists
}

func (c *ClasspathResMap) SetAll(collectRulesMap map[int64]*ClasspathAndRes) {
	c.Lock()
	defer c.Unlock()
	c.Data = collectRulesMap
}
@@ -0,0 +1,32 @@
package cache

import (
	"sync"

	"github.com/didi/nightingale/v5/models"
)

type CollectRuleOfIdentMap struct {
	sync.RWMutex
	Data map[string][]*models.CollectRule
}

var CollectRulesOfIdent = &CollectRuleOfIdentMap{Data: make(map[string][]*models.CollectRule)}

func (c *CollectRuleOfIdentMap) GetBy(ident string) []*models.CollectRule {
	c.RLock()
	defer c.RUnlock()
	return c.Data[ident]
}

func (c *CollectRuleOfIdentMap) Set(node string, collectRules []*models.CollectRule) {
	c.Lock()
	defer c.Unlock()
	c.Data[node] = collectRules
}

func (c *CollectRuleOfIdentMap) SetAll(collectRulesMap map[string][]*models.CollectRule) {
	c.Lock()
	defer c.Unlock()
	c.Data = collectRulesMap
}
@@ -0,0 +1,76 @@
package cache

import (
	"sync"
)

type SafeDoubleMap struct {
	sync.RWMutex
	M map[string]map[string]struct{}
}

// res_ident -> classpath_path -> struct{}{}
var ResClasspath = &SafeDoubleMap{M: make(map[string]map[string]struct{})}

func (s *SafeDoubleMap) GetKeys() []string {
	s.RLock()
	defer s.RUnlock()

	keys := make([]string, 0, len(s.M))
	for key := range s.M {
		keys = append(keys, key)
	}

	return keys
}

func (s *SafeDoubleMap) GetValues(key string) []string {
	s.RLock()
	defer s.RUnlock()

	valueMap, exists := s.M[key]
	if !exists {
		return []string{}
	}

	values := make([]string, 0, len(valueMap))
	for value := range valueMap {
		values = append(values, value)
	}

	return values
}

func (s *SafeDoubleMap) Exists(key string, value string) bool {
	s.RLock()
	defer s.RUnlock()

	if _, exists := s.M[key]; !exists {
		return false
	}

	if _, exists := s.M[key][value]; !exists {
		return false
	}

	return true
}

func (s *SafeDoubleMap) Set(key string, value string) {
	s.Lock()
	defer s.Unlock()

	if _, exists := s.M[key]; !exists {
		s.M[key] = make(map[string]struct{})
	}

	s.M[key][value] = struct{}{}
}

func (s *SafeDoubleMap) SetAll(data map[string]map[string]struct{}) {
	s.Lock()
	defer s.Unlock()

	s.M = data
}
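A quick usage sketch for the res_ident -> classpath_path map above; the idents and paths are made-up placeholders, and the same lock-guarded read/write pattern applies to the other cache maps in this commit:

package main

import (
	"fmt"

	"github.com/didi/nightingale/v5/cache"
)

func main() {
	// Record that resource "host-01" belongs to two classpath paths.
	cache.ResClasspath.Set("host-01", "/infra/db")
	cache.ResClasspath.Set("host-01", "/infra/all")

	fmt.Println(cache.ResClasspath.Exists("host-01", "/infra/db")) // true
	fmt.Println(cache.ResClasspath.GetValues("host-01"))           // both paths, unordered
	fmt.Println(cache.ResClasspath.GetKeys())                      // [host-01]

	// Bulk replacement, e.g. after a periodic sync from the database.
	cache.ResClasspath.SetAll(map[string]map[string]struct{}{
		"host-02": {"/infra/web": {}},
	})
}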
@@ -0,0 +1,41 @@
package cache

import (
	"sync"

	"github.com/didi/nightingale/v5/models"
)

type UserMap struct {
	sync.RWMutex
	Data map[int64]*models.User
}

var UserCache = &UserMap{Data: make(map[int64]*models.User)}

func (s *UserMap) GetBy(id int64) *models.User {
	s.RLock()
	defer s.RUnlock()

	return s.Data[id]
}

func (s *UserMap) GetByIds(ids []int64) []*models.User {
	s.RLock()
	defer s.RUnlock()
	var users []*models.User
	for _, id := range ids {
		if s.Data[id] == nil {
			continue
		}
		users = append(users, s.Data[id])
	}

	return users
}

func (s *UserMap) SetAll(users map[int64]*models.User) {
	s.Lock()
	defer s.Unlock()
	s.Data = users
}
@@ -0,0 +1,38 @@
package cache

import (
	"sync"
)

type UserGroupMemberMap struct {
	sync.RWMutex
	Data map[int64]map[int64]struct{}
}

// groupid -> userid
var UserGroupMember = &UserGroupMemberMap{Data: make(map[int64]map[int64]struct{})}

func (m *UserGroupMemberMap) Get(id int64) (map[int64]struct{}, bool) {
	m.RLock()
	defer m.RUnlock()
	ids, exists := m.Data[id]
	return ids, exists
}

func (m *UserGroupMemberMap) Exists(gid, uid int64) bool {
	m.RLock()
	defer m.RUnlock()
	uidMap, exists := m.Data[gid]
	if !exists {
		return false
	}

	_, exists = uidMap[uid]
	return exists
}

func (m *UserGroupMemberMap) SetAll(data map[int64]map[int64]struct{}) {
	m.Lock()
	defer m.Unlock()
	m.Data = data
}
@@ -0,0 +1,163 @@
package config

import (
	"bytes"
	"fmt"
	"net"
	"os"
	"strings"

	"github.com/spf13/viper"
	"github.com/toolkits/pkg/file"

	"github.com/didi/nightingale/v5/backend"
	"github.com/didi/nightingale/v5/models"
	"github.com/didi/nightingale/v5/pkg/i18n"
	"github.com/didi/nightingale/v5/pkg/iconf"
	"github.com/didi/nightingale/v5/pkg/ilog"
)

type ConfigStruct struct {
	Logger         ilog.Config         `yaml:"logger"`
	HTTP           httpSection         `yaml:"http"`
	RPC            rpcSection          `yaml:"rpc"`
	LDAP           models.LdapSection  `yaml:"ldap"`
	MySQL          models.MysqlSection `yaml:"mysql"`
	Heartbeat      heartbeatSection    `yaml:"heartbeat"`
	I18N           i18n.Config         `yaml:"i18n"`
	Judge          judgeSection        `yaml:"judge"`
	Alert          alertSection        `yaml:"alert"`
	Trans          transSection        `yaml:"trans"`
	ContactKeys    []contactKey        `yaml:"contactKeys"`
	NotifyChannels []string            `yaml:"notifyChannels"`
}

type alertSection struct {
	NotifyScriptPath  string `yaml:"notifyScriptPath"`
	NotifyConcurrency int    `yaml:"notifyConcurrency"`
	MutedAlertPersist bool   `yaml:"mutedAlertPersist"`
}

type transSection struct {
	Enable  bool                   `yaml:"enable"`
	Backend backend.BackendSection `yaml:"backend"`
}

type judgeSection struct {
	ReadBatch   int `yaml:"readBatch"`
	ConnTimeout int `yaml:"connTimeout"`
	CallTimeout int `yaml:"callTimeout"`
	WriterNum   int `yaml:"writerNum"`
	ConnMax     int `yaml:"connMax"`
	ConnIdle    int `yaml:"connIdle"`
}

type heartbeatSection struct {
	IP        string `yaml:"ip"`
	LocalAddr string `yaml:"-"`
	Interval  int64  `yaml:"interval"`
}

type httpSection struct {
	Mode           string `yaml:"mode"`
	Access         bool   `yaml:"access"`
	Listen         string `yaml:"listen"`
	Pprof          bool   `yaml:"pprof"`
	CookieName     string `yaml:"cookieName"`
	CookieDomain   string `yaml:"cookieDomain"`
	CookieSecure   bool   `yaml:"cookieSecure"`
	CookieHttpOnly bool   `yaml:"cookieHttpOnly"`
	CookieMaxAge   int    `yaml:"cookieMaxAge"`
	CookieSecret   string `yaml:"cookieSecret"`
	CsrfSecret     string `yaml:"csrfSecret"`
}

type rpcSection struct {
	Listen string `yaml:"listen"`
}

type contactKey struct {
	Label string `yaml:"label" json:"label"`
	Key   string `yaml:"key" json:"key"`
}

var Config *ConfigStruct

func Parse() error {
	ymlFile := iconf.GetYmlFile("server")
	if ymlFile == "" {
		return fmt.Errorf("configuration file of server not found")
	}

	bs, err := file.ReadBytes(ymlFile)
	if err != nil {
		return fmt.Errorf("cannot read yml[%s]: %v", ymlFile, err)
	}

	viper.SetConfigType("yaml")
	err = viper.ReadConfig(bytes.NewBuffer(bs))
	if err != nil {
		return fmt.Errorf("cannot read yml[%s]: %v", ymlFile, err)
	}

	// default value settings
	viper.SetDefault("i18n.lang", "zh")
	viper.SetDefault("heartbeat.interval", 1000)
	viper.SetDefault("judge.readBatch", 2000)
	viper.SetDefault("judge.connTimeout", 2000)
	viper.SetDefault("judge.callTimeout", 5000)
	viper.SetDefault("judge.writerNum", 256)
	viper.SetDefault("judge.connMax", 2560)
	viper.SetDefault("judge.connIdle", 256)
	viper.SetDefault("alert.notifyScriptPath", "./etc/script/notify.py")
	viper.SetDefault("alert.notifyConcurrency", 200)
	viper.SetDefault("alert.mutedAlertPersist", true)
	viper.SetDefault("trans.backend.prometheus.lookbackDeltaMinute", 2)
	viper.SetDefault("trans.backend.prometheus.maxConcurrentQuery", 30)
	viper.SetDefault("trans.backend.prometheus.maxSamples", 50000000)
	viper.SetDefault("trans.backend.prometheus.maxFetchAllSeriesLimitMinute", 5)

	err = viper.Unmarshal(&Config)
	if err != nil {
		return fmt.Errorf("cannot parse yml[%s]: %v", ymlFile, err)
	}

	fmt.Println("config.file:", ymlFile)

	if Config.Heartbeat.IP == "" {
		// auto detect
		Config.Heartbeat.IP = fmt.Sprint(GetOutboundIP())

		if Config.Heartbeat.IP == "" {
			fmt.Println("heartbeat ip auto got is blank")
			os.Exit(1)
		}
		port := strings.Split(Config.RPC.Listen, ":")[1]
		endpoint := Config.Heartbeat.IP + ":" + port
		Config.Heartbeat.LocalAddr = endpoint
	}

	// Normally this will not be 127.0.0.1, but it can be for a single-node
	// deployment on a machine without network access, e.g. local debugging
	// while offline.
	// if Config.Heartbeat.IP == "127.0.0.1" {
	// 	fmt.Println("heartbeat ip is 127.0.0.1 and it is useless, so, exit")
	// 	os.Exit(1)
	// }

	fmt.Println("heartbeat.ip:", Config.Heartbeat.IP)
	fmt.Printf("heartbeat.interval: %dms\n", Config.Heartbeat.Interval)
	return nil
}

// Get preferred outbound ip of this machine
func GetOutboundIP() net.IP {
	conn, err := net.Dial("udp", "8.8.8.8:80")
	if err != nil {
		fmt.Println("auto get outbound ip fail:", err)
		os.Exit(1)
	}
	defer conn.Close()

	localAddr := conn.LocalAddr().(*net.UDPAddr)

	return localAddr.IP
}
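For reference, a minimal server.yml sketch matching the yaml tags above; every value is a placeholder, the prometheus sub-keys are assumptions about pp.PromeSection (only Enable and Name are visible in this diff), and anything omitted falls back to the viper defaults set in Parse():

http:
  mode: release
  listen: 0.0.0.0:8000
  cookieName: n9e
  cookieSecret: examplesecret
rpc:
  listen: 0.0.0.0:9000
heartbeat:
  ip: ""          # empty -> auto-detected via GetOutboundIP()
  interval: 1000  # milliseconds
alert:
  notifyScriptPath: ./etc/script/notify.py
  notifyConcurrency: 100
  mutedAlertPersist: true
trans:
  enable: true
  backend:
    datasource: prometheus
    prometheus:
      enable: true       # assumed key on pp.PromeSection
      name: prometheus   # assumed key on pp.PromeSection
notifyChannels: [sms, voice, email]
contactKeys:
  - label: Wecom
    key: wecom_robot_token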
@@ -0,0 +1,65 @@
package config

import "github.com/didi/nightingale/v5/pkg/i18n"

var (
	dict = map[string]string{
		"Login fail, check your username and password":      "登录失败,请检查您的用户名和密码",
		"Internal server error, try again later please":     "系统内部错误,请稍后再试",
		"Each user has at most two tokens":                   "每个用户至多创建两个密钥",
		"No such token":                                      "密钥不存在",
		"Username is blank":                                  "用户名不能为空",
		"Username has invalid characters":                    "用户名含有非法字符",
		"Nickname has invalid characters":                    "用户昵称含有非法字符",
		"Phone invalid":                                      "手机号格式有误",
		"Email invalid":                                      "邮箱格式有误",
		"Incorrect old password":                             "旧密码错误",
		"Username %s already exists":                         "用户名(%s)已存在",
		"No such user":                                       "用户不存在",
		"UserGroup %s already exists":                        "用户组(%s)已存在",
		"Group name has invalid characters":                  "分组名称含有非法字符",
		"Group note has invalid characters":                  "分组备注含有非法字符",
		"No such user group":                                 "用户组不存在",
		"Classpath path has invalid characters":              "机器分组路径含有非法字符",
		"Classpath note has invalid characters":              "机器分组路径备注含有非法字符",
		"There are still resources under the classpath":      "机器分组路径下仍然挂有资源",
		"There are still collect rules under the classpath":  "机器分组路径下仍然存在采集策略",
		"No such classpath":                                  "机器分组路径不存在",
		"Classpath %s already exists":                        "机器分组路径(%s)已存在",
		"Preset classpath %s cannot delete":                  "内置机器分组(%s)不允许删除",
		"No such mute config":                                "此屏蔽配置不存在",
		"DashboardGroup name has invalid characters":         "大盘分组名称含有非法字符",
		"DashboardGroup name is blank":                       "大盘分组名称为空",
		"DashboardGroup %s already exists":                   "大盘分组(%s)已存在",
		"No such dashboard group":                            "大盘分组不存在",
		"Dashboard name has invalid characters":              "大盘名称含有非法字符",
		"Dashboard %s already exists":                        "监控大盘(%s)已存在",
		"ChartGroup name has invalid characters":             "图表分组名称含有非法字符",
		"No such dashboard":                                  "监控大盘不存在",
		"No such chart group":                                "图表分组不存在",
		"No such chart":                                      "图表不存在",
		"There are still dashboards under the group":         "分组下面仍然存在监控大盘,请先从组内移出",
		"AlertRuleGroup name has invalid characters":         "告警规则分组含有非法字符",
		"AlertRuleGroup %s already exists":                   "告警规则分组(%s)已存在",
		"There are still alert rules under the group":        "分组下面仍然存在告警规则",
		"AlertRule name has invalid characters":              "告警规则含有非法字符",
		"No such alert rule":                                 "告警规则不存在",
		"No such alert rule group":                           "告警规则分组不存在",
		"No such alert event":                                "告警事件不存在",
		"No such collect rule":                               "采集规则不存在",
		"Decoded metric description empty":                   "导入的指标释义列表为空",
		"User disabled":                                      "用户已被禁用",
		"Tags(%s) invalid":                                   "标签(%s)格式不合法",
		"Resource filter(Func:%s)'s param invalid":           "资源过滤条件(函数:%s)参数不合法(为空或包含空格都不合法)",
		"Tags filter(Func:%s)'s param invalid":               "标签过滤条件(函数:%s)参数不合法(为空或包含空格都不合法)",
		"Regexp: %s cannot be compiled":                      "正则表达式(%s)不合法,无法编译",
		"AppendTags(%s) invalid":                             "附件标签(%s)格式不合法",
	}
	langDict = map[string]map[string]string{
		"zh": dict,
	}
)

func init() {
	i18n.DictRegister(langDict)
}
Some files were not shown because too many files have changed in this diff