mirror of https://github.com/TwiN/gatus.git synced 2024-12-14 11:58:04 +00:00

use last reminder based upon alert type

Jackson Sabey 2024-08-31 15:56:23 -07:00
parent 7e63c00bc6
commit 45bfd47920
3 changed files with 13 additions and 7 deletions
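The change replaces the endpoint's single LastReminderSent timestamp with a map keyed by alert.Type, so each alert type configured on an endpoint keeps its own reminder clock. A minimal before/after sketch of the field (simplified stand-in types, not the full gatus Endpoint struct):

package endpoint

import "time"

// AlertType is a stand-in for the real alert.Type; the names here are illustrative only.
type AlertType string

// endpointBefore: a single timestamp shared by every alert on the endpoint, so sending
// any alert or reminder pushed back the reminder clock for all other alert types.
type endpointBefore struct {
	LastReminderSent time.Time
}

// endpointAfter: one timestamp per alert type, so each alert's RepeatInterval is honored
// independently of the other alerts configured on the same endpoint.
type endpointAfter struct {
	LastReminderSent map[AlertType]time.Time
}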


@@ -123,7 +123,7 @@ type Endpoint struct {
 	NumberOfSuccessesInARow int `yaml:"-"`

 	// LastReminderSent is the time at which the last reminder was sent for this endpoint.
-	LastReminderSent time.Time `yaml:"-"`
+	LastReminderSent map[alert.Type]time.Time `yaml:"-"`
 }

 // IsEnabled returns whether the endpoint is enabled or not
@@ -193,6 +193,9 @@ func (e *Endpoint) ValidateAndSetDefaults() error {
 	if len(e.Headers) == 0 {
 		e.Headers = make(map[string]string)
 	}
+	if len(e.LastReminderSent) == 0 {
+		e.LastReminderSent = make(map[alert.Type]time.Time)
+	}
 	// Automatically add user agent header if there isn't one specified in the endpoint configuration
 	if _, userAgentHeaderExists := e.Headers[UserAgentHeader]; !userAgentHeaderExists {
 		e.Headers[UserAgentHeader] = GatusUserAgent
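The initialization added above matters because of how Go maps behave: reading a missing key from a nil map returns the zero value, but assigning to a nil map panics, and the watchdog now writes ep.LastReminderSent[endpointAlert.Type] = time.Now(). A tiny standalone sketch of that behavior (hypothetical names):

package main

import "time"

type alertType string

func main() {
	var lastReminderSent map[alertType]time.Time

	_ = lastReminderSent["http"] // safe: reading a missing key from a nil map returns the zero time.Time

	// Without this make, the assignment below would panic with "assignment to entry in nil map".
	lastReminderSent = make(map[alertType]time.Time)
	lastReminderSent["http"] = time.Now()
}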


@@ -2,6 +2,7 @@ package endpoint

 import (
 	"errors"
 	"time"
+	"github.com/TwiN/gatus/v5/alerting/alert"
 )

@@ -75,6 +76,8 @@ func (externalEndpoint *ExternalEndpoint) ToEndpoint() *Endpoint {
 		Enabled: externalEndpoint.Enabled,
 		Name: externalEndpoint.Name,
 		Group: externalEndpoint.Group,
+		Headers: make(map[string]string),
+		LastReminderSent: make(map[alert.Type]time.Time),
 		Alerts: externalEndpoint.Alerts,
 		NumberOfFailuresInARow: externalEndpoint.NumberOfFailuresInARow,
 		NumberOfSuccessesInARow: externalEndpoint.NumberOfSuccessesInARow,


@@ -35,16 +35,16 @@ func handleAlertsToTrigger(ep *endpoint.Endpoint, result *endpoint.Result, alert
 		sendInitialAlert := !endpointAlert.Triggered
 		// Determine if a reminder should be sent
 		var lastReminder time.Duration
-		if !ep.LastReminderSent.IsZero() {
-			lastReminder = time.Since(ep.LastReminderSent)
+		if lr, ok := ep.LastReminderSent[endpointAlert.Type]; ok && !lr.IsZero() {
+			lastReminder = time.Since(lr)
 		}
 		sendReminder := endpointAlert.Triggered && endpointAlert.RepeatInterval > 0 &&
 			(lastReminder == 0 || lastReminder >= endpointAlert.RepeatInterval)
 		// If neither initial alert nor reminder needs to be sent, skip to the next alert
 		if !sendInitialAlert && !sendReminder {
 			if debug {
-				log.Printf("[watchdog.handleAlertsToTrigger] Alert for endpoint=%s with description='%s' is not due for triggering or reminding (interval: %s last: %s), skipping",
-					ep.Name, endpointAlert.GetDescription(), endpointAlert.RepeatInterval, lastReminder)
+				log.Printf("[watchdog.handleAlertsToTrigger] Alert %s for endpoint=%s with description='%s' is not due for triggering (interval: %s last: %s), skipping",
+					endpointAlert.Type, ep.Name, endpointAlert.GetDescription(), endpointAlert.RepeatInterval, lastReminder)
 			}
 			continue
 		}

@@ -70,7 +70,7 @@ func handleAlertsToTrigger(ep *endpoint.Endpoint, result *endpoint.Result, alert
 		if sendInitialAlert {
 			endpointAlert.Triggered = true
 		}
-		ep.LastReminderSent = time.Now()
+		ep.LastReminderSent[endpointAlert.Type] = time.Now()
 		if err := store.Get().UpsertTriggeredEndpointAlert(ep, endpointAlert); err != nil {
 			log.Printf("[watchdog.handleAlertsToTrigger] Failed to persist triggered endpoint alert for endpoint with key=%s: %s", ep.Key(), err.Error())
 		}

@@ -113,7 +113,7 @@ func handleAlertsToResolve(ep *endpoint.Endpoint, result *endpoint.Result, alert
 		} else {
 			log.Printf("[watchdog.handleAlertsToResolve] Not sending alert of type=%s despite being RESOLVED, because the provider wasn't configured properly", endpointAlert.Type)
 		}
+		ep.LastReminderSent[endpointAlert.Type] = time.Now()
 	}
 	ep.NumberOfFailuresInARow = 0
-	ep.LastReminderSent = time.Now()
 }
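To illustrate the effect of the per-type map, a hedged, self-contained sketch (simplified stand-in types, not the gatus API) of the reminder gating used in handleAlertsToTrigger above, with two alerts on one endpoint reminded on independent schedules:

package main

import (
	"fmt"
	"time"
)

// Alert is a simplified stand-in for gatus' alert configuration.
type Alert struct {
	Type           string
	Triggered      bool
	RepeatInterval time.Duration
}

func main() {
	// One last-reminder timestamp per alert type, as in the new Endpoint field.
	lastReminderSent := map[string]time.Time{
		"slack":     time.Now().Add(-10 * time.Minute), // last slack reminder was 10 minutes ago
		"pagerduty": time.Now().Add(-2 * time.Minute),  // last pagerduty reminder was 2 minutes ago
	}
	alerts := []Alert{
		{Type: "slack", Triggered: true, RepeatInterval: 5 * time.Minute},
		{Type: "pagerduty", Triggered: true, RepeatInterval: 30 * time.Minute},
	}
	for _, a := range alerts {
		// Same gating as handleAlertsToTrigger above: only an already-triggered alert
		// with a positive RepeatInterval gets a reminder, judged by its own type's timestamp.
		var lastReminder time.Duration
		if lr, ok := lastReminderSent[a.Type]; ok && !lr.IsZero() {
			lastReminder = time.Since(lr)
		}
		sendReminder := a.Triggered && a.RepeatInterval > 0 &&
			(lastReminder == 0 || lastReminder >= a.RepeatInterval)
		fmt.Printf("%s: sendReminder=%v\n", a.Type, sendReminder)
	}
	// Prints: slack: sendReminder=true (10m >= 5m), pagerduty: sendReminder=false (2m < 30m)
}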