Support sending notifications when alert is resolved
Add debug parameter for those wishing to filter some noise from the logs
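For context, the gate this diff introduces for resolution notices boils down to: an alert only sends a "resolved" notification if it is enabled, opted into SendOnResolved, and its failure threshold had actually been reached (note the corrected `>` comparison in the diff below). The following is a minimal standalone sketch of that check; the field names mirror the diff, but the struct and helper function here are illustrative only, not part of the gatus codebase.

```go
package main

import "fmt"

// Alert mirrors the fields referenced by the diff below; the struct itself is illustrative.
type Alert struct {
	Enabled        bool
	SendOnResolved bool
	Threshold      int
	Description    string
}

// shouldNotifyResolved reproduces the gating condition added in handleAlerting:
// skip the alert unless it is enabled, wants resolution notices, and the number of
// consecutive failures reached its threshold (i.e. the alert had actually triggered).
func shouldNotifyResolved(alert Alert, numberOfFailuresInARow int) bool {
	if !alert.Enabled || !alert.SendOnResolved || alert.Threshold > numberOfFailuresInARow {
		return false
	}
	return true
}

func main() {
	alert := Alert{Enabled: true, SendOnResolved: true, Threshold: 3, Description: "healthcheck failed"}
	fmt.Println(shouldNotifyResolved(alert, 5)) // true: the alert had triggered, so send the resolution notice
	fmt.Println(shouldNotifyResolved(alert, 1)) // false: the threshold was never reached, nothing to resolve
}
```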
@@ -1,25 +1,34 @@
 package watchdog
 
 import (
-	"encoding/base64"
+	"encoding/json"
 	"fmt"
 	"github.com/TwinProduction/gatus/config"
 	"github.com/TwinProduction/gatus/core"
 	"github.com/TwinProduction/gatus/metric"
 	"log"
-	"net/url"
 	"sync"
 	"time"
 )
 
 var (
 	serviceResults = make(map[string][]*core.Result)
-	rwLock sync.RWMutex
+
+	// serviceResultsMutex is used to prevent concurrent map access
+	serviceResultsMutex sync.RWMutex
+
+	// monitoringMutex is used to prevent multiple services from being evaluated at the same time.
+	// Without this, conditions using response time may become inaccurate.
+	monitoringMutex sync.Mutex
 )
 
-// GetServiceResults returns a list of the last 20 results for each services
-func GetServiceResults() *map[string][]*core.Result {
-	return &serviceResults
+// GetJsonEncodedServiceResults returns a list of the last 20 results for each services encoded using json.Marshal.
+// The reason why the encoding is done here is because we use a mutex to prevent concurrent map access.
+func GetJsonEncodedServiceResults() ([]byte, error) {
+	serviceResultsMutex.RLock()
+	data, err := json.Marshal(serviceResults)
+	serviceResultsMutex.RUnlock()
+	return data, err
 }
 
 // Monitor loops over each services and starts a goroutine to monitor each services separately
@@ -33,33 +42,39 @@ func Monitor(cfg *config.Config) {
 
 // monitor monitors a single service in a loop
 func monitor(service *core.Service) {
+	cfg := config.Get()
 	for {
-		// By placing the lock here, we prevent multiple services from being monitored at the exact same time, which
-		// could cause performance issues and return inaccurate results
-		rwLock.Lock()
-		log.Printf("[watchdog][monitor] Monitoring serviceName=%s", service.Name)
+		monitoringMutex.Lock()
+		if cfg.Debug {
+			log.Printf("[watchdog][monitor] Monitoring serviceName=%s", service.Name)
+		}
 		result := service.EvaluateConditions()
 		metric.PublishMetricsForService(service, result)
+		serviceResultsMutex.Lock()
 		serviceResults[service.Name] = append(serviceResults[service.Name], result)
 		if len(serviceResults[service.Name]) > 20 {
 			serviceResults[service.Name] = serviceResults[service.Name][1:]
 		}
-		rwLock.Unlock()
+		serviceResultsMutex.Unlock()
 		var extra string
 		if !result.Success {
			extra = fmt.Sprintf("responseBody=%s", result.Body)
 		}
 		log.Printf(
-			"[watchdog][monitor] Finished monitoring serviceName=%s; errors=%d; requestDuration=%s; %s",
+			"[watchdog][monitor] Monitored serviceName=%s; success=%v; errors=%d; requestDuration=%s; %s",
 			service.Name,
+			result.Success,
 			len(result.Errors),
 			result.Duration.Round(time.Millisecond),
 			extra,
 		)
 
 		handleAlerting(service, result)
 
-		log.Printf("[watchdog][monitor] Waiting for interval=%s before monitoring serviceName=%s", service.Interval, service.Name)
+		if cfg.Debug {
+			log.Printf("[watchdog][monitor] Waiting for interval=%s before monitoring serviceName=%s again", service.Interval, service.Name)
+		}
+		monitoringMutex.Unlock()
 		time.Sleep(service.Interval)
 	}
 }
@@ -72,10 +87,43 @@ func handleAlerting(service *core.Service, result *core.Result) {
 	if result.Success {
 		if service.NumberOfFailuresInARow > 0 {
 			for _, alert := range service.Alerts {
-				if !alert.Enabled || !alert.SendOnResolved || alert.Threshold < service.NumberOfFailuresInARow {
+				if !alert.Enabled || !alert.SendOnResolved || alert.Threshold > service.NumberOfFailuresInARow {
 					continue
 				}
-				// TODO
+				var alertProvider *core.CustomAlertProvider
+				if alert.Type == core.SlackAlert {
+					if len(cfg.Alerting.Slack) > 0 {
+						log.Printf("[watchdog][monitor] Sending Slack alert because alert with description=%s has been resolved", alert.Description)
+						alertProvider = core.CreateSlackCustomAlertProvider(cfg.Alerting.Slack, service, alert, result, true)
+					} else {
+						log.Printf("[watchdog][monitor] Not sending Slack alert despite being triggered, because there is no Slack webhook configured")
+					}
+				} else if alert.Type == core.TwilioAlert {
+					if cfg.Alerting.Twilio != nil && cfg.Alerting.Twilio.IsValid() {
+						log.Printf("[watchdog][monitor] Sending Twilio alert because alert with description=%s has been triggered", alert.Description)
+						alertProvider = core.CreateTwilioCustomAlertProvider(cfg.Alerting.Twilio, fmt.Sprintf("%s - %s", service.Name, alert.Description))
+					} else {
+						log.Printf("[watchdog][monitor] Not sending Twilio alert despite being triggered, because Twilio isn't configured properly'")
+					}
+				} else if alert.Type == core.CustomAlert {
+					if cfg.Alerting.Custom != nil && cfg.Alerting.Custom.IsValid() {
+						log.Printf("[watchdog][monitor] Sending custom alert because alert with description=%s has been triggered", alert.Description)
+						alertProvider = &core.CustomAlertProvider{
+							Url: cfg.Alerting.Custom.Url,
+							Method: cfg.Alerting.Custom.Method,
+							Body: cfg.Alerting.Custom.Body,
+							Headers: cfg.Alerting.Custom.Headers,
+						}
+					} else {
+						log.Printf("[watchdog][monitor] Not sending custom alert despite being triggered, because there is no custom url configured")
+					}
+				}
+				if alertProvider != nil {
+					err := alertProvider.Send(service.Name, alert.Description)
+					if err != nil {
+						log.Printf("[watchdog][monitor] Ran into error sending an alert: %s", err.Error())
+					}
+				}
 			}
 		}
 		service.NumberOfFailuresInARow = 0
@@ -90,33 +138,16 @@ func handleAlerting(service *core.Service, result *core.Result) {
 			if alert.Type == core.SlackAlert {
 				if len(cfg.Alerting.Slack) > 0 {
 					log.Printf("[watchdog][monitor] Sending Slack alert because alert with description=%s has been triggered", alert.Description)
-					alertProvider = &core.CustomAlertProvider{
-						Url: cfg.Alerting.Slack,
-						Method: "POST",
-						Body: fmt.Sprintf(`{"text":"*[Gatus]*\n*service:* %s\n*description:* %s"}`, service.Name, alert.Description),
-						Headers: map[string]string{"Content-Type": "application/json"},
-					}
+					alertProvider = core.CreateSlackCustomAlertProvider(cfg.Alerting.Slack, service, alert, result, false)
 				} else {
 					log.Printf("[watchdog][monitor] Not sending Slack alert despite being triggered, because there is no Slack webhook configured")
 				}
 			} else if alert.Type == core.TwilioAlert {
 				if cfg.Alerting.Twilio != nil && cfg.Alerting.Twilio.IsValid() {
 					log.Printf("[watchdog][monitor] Sending Twilio alert because alert with description=%s has been triggered", alert.Description)
-					alertProvider = &core.CustomAlertProvider{
-						Url: fmt.Sprintf("https://api.twilio.com/2010-04-01/Accounts/%s/Messages.json", cfg.Alerting.Twilio.SID),
-						Method: "POST",
-						Body: url.Values{
-							"To": {cfg.Alerting.Twilio.To},
-							"From": {cfg.Alerting.Twilio.From},
-							"Body": {fmt.Sprintf("%s - %s", service.Name, alert.Description)},
-						}.Encode(),
-						Headers: map[string]string{
-							"Content-Type": "application/x-www-form-urlencoded",
-							"Authorization": fmt.Sprintf("Basic %s", base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", cfg.Alerting.Twilio.SID, cfg.Alerting.Twilio.Token)))),
-						},
-					}
+					alertProvider = core.CreateTwilioCustomAlertProvider(cfg.Alerting.Twilio, fmt.Sprintf("%s - %s", service.Name, alert.Description))
 				} else {
-					log.Printf("[watchdog][monitor] Not sending Twilio alert despite being triggered, because twilio config settings missing")
+					log.Printf("[watchdog][monitor] Not sending Twilio alert despite being triggered, because Twilio config settings missing")
 				}
 			} else if alert.Type == core.CustomAlert {
 				if cfg.Alerting.Custom != nil && cfg.Alerting.Custom.IsValid() {