refactor: Break core package into multiple packages under config/endpoint (#759)
* refactor: Partially break core package into dns, result and ssh packages
* refactor: Move core package to config/endpoint
* refactor: Fix warning about overlapping imported package name with endpoint variable
* refactor: Rename EndpointStatus to Status
* refactor: Merge result pkg back into endpoint pkg, because it makes more sense
* refactor: Rename parameter r to result in Condition.evaluate
* refactor: Rename parameter r to result
* refactor: Revert accidental change to endpoint.TypeDNS
* refactor: Rename parameter r to result
* refactor: Merge util package into endpoint package
* refactor: Rename parameter r to result
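In practice, what consumers of these packages see is the new import path and the renamed parameter: endpoint-related types now come from config/endpoint instead of core, and the parameter is called ep so it no longer shadows the imported endpoint package. Below is a minimal sketch written against the new layout; the package name example and the describeAlert helper are hypothetical, while the import path, types, fields, and methods are the ones appearing in the diff that follows.

```go
// Hypothetical standalone sketch (not part of this commit): it only illustrates
// the post-refactor import path and types; "example" and describeAlert are
// invented names for this illustration.
package example

import (
	"fmt"
	"time"

	"github.com/TwiN/gatus/v5/alerting/alert"
	"github.com/TwiN/gatus/v5/config/endpoint" // previously github.com/TwiN/gatus/v5/core
)

// describeAlert mirrors the updated provider signatures: *core.Endpoint and
// *core.Result become *endpoint.Endpoint and *endpoint.Result, and the endpoint
// parameter is named ep so it does not shadow the imported endpoint package.
func describeAlert(ep *endpoint.Endpoint, a *alert.Alert, result *endpoint.Result, resolved bool) string {
	if resolved {
		return fmt.Sprintf("%s resolved after %d success(es), last result at %s",
			ep.DisplayName(), a.SuccessThreshold, result.Timestamp.Format(time.RFC3339))
	}
	return fmt.Sprintf("%s triggered after %d failure(s), last result at %s",
		ep.DisplayName(), a.FailureThreshold, result.Timestamp.Format(time.RFC3339))
}
```

The diff below applies the same rename to the GitLab alert provider and its tests.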
@@ -11,7 +11,7 @@ import (
 
 	"github.com/TwiN/gatus/v5/alerting/alert"
 	"github.com/TwiN/gatus/v5/client"
-	"github.com/TwiN/gatus/v5/core"
+	"github.com/TwiN/gatus/v5/config/endpoint"
 	"github.com/google/uuid"
 )
 
@@ -51,11 +51,11 @@ func (provider *AlertProvider) IsValid() bool {
 
 // Send creates an issue in the designed RepositoryURL if the resolved parameter passed is false,
 // or closes the relevant issue(s) if the resolved parameter passed is true.
-func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
+func (provider *AlertProvider) Send(ep *endpoint.Endpoint, alert *alert.Alert, result *endpoint.Result, resolved bool) error {
 	if len(alert.ResolveKey) == 0 {
 		alert.ResolveKey = uuid.NewString()
 	}
-	buffer := bytes.NewBuffer(provider.buildAlertBody(endpoint, alert, result, resolved))
+	buffer := bytes.NewBuffer(provider.buildAlertBody(ep, alert, result, resolved))
 	request, err := http.NewRequest(http.MethodPost, provider.WebhookURL, buffer)
 	if err != nil {
 		return err
@@ -94,21 +94,21 @@ func (provider *AlertProvider) monitoringTool() string {
 	return "gatus"
 }
 
-func (provider *AlertProvider) service(endpoint *core.Endpoint) string {
+func (provider *AlertProvider) service(ep *endpoint.Endpoint) string {
 	if len(provider.Service) > 0 {
 		return provider.Service
 	}
-	return endpoint.DisplayName()
+	return ep.DisplayName()
 }
 
 // buildAlertBody builds the body of the alert
-func (provider *AlertProvider) buildAlertBody(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) []byte {
+func (provider *AlertProvider) buildAlertBody(ep *endpoint.Endpoint, alert *alert.Alert, result *endpoint.Result, resolved bool) []byte {
 	body := AlertBody{
-		Title:                 fmt.Sprintf("alert(%s): %s", provider.monitoringTool(), provider.service(endpoint)),
+		Title:                 fmt.Sprintf("alert(%s): %s", provider.monitoringTool(), provider.service(ep)),
 		StartTime:             result.Timestamp.Format(time.RFC3339),
-		Service:               provider.service(endpoint),
+		Service:               provider.service(ep),
 		MonitoringTool:        provider.monitoringTool(),
-		Hosts:                 endpoint.URL,
+		Hosts:                 ep.URL,
 		GitlabEnvironmentName: provider.EnvironmentName,
 		Severity:              provider.Severity,
 		Fingerprint:           alert.ResolveKey,
@@ -135,9 +135,9 @@ func (provider *AlertProvider) buildAlertBody(endpoint *core.Endpoint, alert *al
 	}
 	var message string
 	if resolved {
-		message = fmt.Sprintf("An alert for *%s* has been resolved after passing successfully %d time(s) in a row", endpoint.DisplayName(), alert.SuccessThreshold)
+		message = fmt.Sprintf("An alert for *%s* has been resolved after passing successfully %d time(s) in a row", ep.DisplayName(), alert.SuccessThreshold)
 	} else {
-		message = fmt.Sprintf("An alert for *%s* has been triggered due to having failed %d time(s) in a row", endpoint.DisplayName(), alert.FailureThreshold)
+		message = fmt.Sprintf("An alert for *%s* has been triggered due to having failed %d time(s) in a row", ep.DisplayName(), alert.FailureThreshold)
 	}
 	body.Description = message + description + formattedConditionResults
 	bodyAsJSON, _ := json.Marshal(body)
@@ -7,7 +7,7 @@ import (
 
 	"github.com/TwiN/gatus/v5/alerting/alert"
 	"github.com/TwiN/gatus/v5/client"
-	"github.com/TwiN/gatus/v5/core"
+	"github.com/TwiN/gatus/v5/config/endpoint"
 	"github.com/TwiN/gatus/v5/test"
 )
 
@@ -84,10 +84,10 @@ func TestAlertProvider_Send(t *testing.T) {
 		t.Run(scenario.Name, func(t *testing.T) {
 			client.InjectHTTPClient(&http.Client{Transport: scenario.MockRoundTripper})
 			err := scenario.Provider.Send(
-				&core.Endpoint{Name: "endpoint-name", Group: "endpoint-group"},
+				&endpoint.Endpoint{Name: "endpoint-name", Group: "endpoint-group"},
 				&scenario.Alert,
-				&core.Result{
-					ConditionResults: []*core.ConditionResult{
+				&endpoint.Result{
+					ConditionResults: []*endpoint.ConditionResult{
 						{Condition: "[CONNECTED] == true", Success: scenario.Resolved},
 						{Condition: "[STATUS] == 200", Success: scenario.Resolved},
 					},
@@ -108,21 +108,21 @@ func TestAlertProvider_buildAlertBody(t *testing.T) {
 	firstDescription := "description-1"
 	scenarios := []struct {
 		Name         string
-		Endpoint     core.Endpoint
+		Endpoint     endpoint.Endpoint
 		Provider     AlertProvider
 		Alert        alert.Alert
 		ExpectedBody string
 	}{
 		{
 			Name:         "triggered",
-			Endpoint:     core.Endpoint{Name: "endpoint-name", URL: "https://example.org"},
+			Endpoint:     endpoint.Endpoint{Name: "endpoint-name", URL: "https://example.org"},
 			Provider:     AlertProvider{},
 			Alert:        alert.Alert{Description: &firstDescription, FailureThreshold: 3},
 			ExpectedBody: "{\"title\":\"alert(gatus): endpoint-name\",\"description\":\"An alert for *endpoint-name* has been triggered due to having failed 3 time(s) in a row:\\n\\u003e description-1\\n\\n## Condition results\\n- :white_check_mark: - `[CONNECTED] == true`\\n- :x: - `[STATUS] == 200`\\n\",\"start_time\":\"0001-01-01T00:00:00Z\",\"service\":\"endpoint-name\",\"monitoring_tool\":\"gatus\",\"hosts\":\"https://example.org\"}",
 		},
 		{
 			Name:         "no-description",
-			Endpoint:     core.Endpoint{Name: "endpoint-name", URL: "https://example.org"},
+			Endpoint:     endpoint.Endpoint{Name: "endpoint-name", URL: "https://example.org"},
 			Provider:     AlertProvider{},
 			Alert:        alert.Alert{FailureThreshold: 10},
 			ExpectedBody: "{\"title\":\"alert(gatus): endpoint-name\",\"description\":\"An alert for *endpoint-name* has been triggered due to having failed 10 time(s) in a row\\n\\n## Condition results\\n- :white_check_mark: - `[CONNECTED] == true`\\n- :x: - `[STATUS] == 200`\\n\",\"start_time\":\"0001-01-01T00:00:00Z\",\"service\":\"endpoint-name\",\"monitoring_tool\":\"gatus\",\"hosts\":\"https://example.org\"}",
@@ -133,8 +133,8 @@ func TestAlertProvider_buildAlertBody(t *testing.T) {
 			body := scenario.Provider.buildAlertBody(
 				&scenario.Endpoint,
 				&scenario.Alert,
-				&core.Result{
-					ConditionResults: []*core.ConditionResult{
+				&endpoint.Result{
+					ConditionResults: []*endpoint.ConditionResult{
 						{Condition: "[CONNECTED] == true", Success: true},
 						{Condition: "[STATUS] == 200", Success: false},
 					},