#!/usr/bin/env bash
#shellcheck source=/dev/null disable=SC2086,SC2154

# netdata
# real-time performance and health monitoring, done right!
# (C) 2023 Netdata Inc.
# SPDX-License-Identifier: GPL-3.0-or-later
#
# Script to send alarm notifications for netdata
#
# Features:
# - multiple notification methods
# - multiple roles per alarm
# - multiple recipients per role
# - severity filtering per recipient
#
# Supported notification methods:
# - emails by @ktsaou
# - slack.com notifications by @ktsaou
# - alerta.io notifications by @kattunga
# - discord.com notifications by @lowfive
# - pushover.net notifications by @ktsaou
# - pushbullet.com push notifications by Tiago Peralta @tperalta82 #1070
# - telegram.org notifications by @hashworks #1002
# - twilio.com notifications by Levi Blaney @shadycuz #1211
# - kafka notifications by @ktsaou #1342
# - pagerduty.com notifications by Jim Cooley @jimcooley #1373
# - messagebird.com notifications by @tech_no_logical #1453
# - hipchat notifications by @ktsaou #1561
# - fleep notifications by @Ferroin
# - prowlapp.com notifications by @Ferroin
# - irc notifications by @manosf
# - custom notifications by @ktsaou
# - syslog messages by @Ferroin
# - Microsoft Teams notifications by @tioumen
# - RocketChat notifications by @Hermsi1337 #3777
# - Dynatrace Event by @illumine
# - Opsgenie by @thiaoftsm #9858
# - Gotify by @coffeegrind123
# - ntfy.sh by @Dim-P

# -----------------------------------------------------------------------------
# testing notifications

cmd_line="'${0}' $(printf "'%s' " "${@}")"

if { [ "${1}" = "test" ] || [ "${2}" = "test" ]; } && [ "${#}" -le 2 ]; then
  if [ "${2}" = "test" ]; then
    recipient="${1}"
  else
    recipient="${2}"
  fi

  [ -z "${recipient}" ] && recipient="sysadmin"

  id=1
  last="CLEAR"
  test_res=0
  for x in "WARNING" "CRITICAL" "CLEAR"; do
    echo >&2
    echo >&2 "# SENDING TEST ${x} ALARM TO ROLE: ${recipient}"

    "${0}" "${recipient}" "$(hostname)" 1 1 "${id}" "$(date +%s)" "test_alarm" "test.chart" "${x}" "${last}" 100 90 "${0}" 1 $((0 + id)) "units" "this is a test alarm to verify notifications work" "new value" "old value" "evaluated expression" "expression variable values" 0 0 "" "" "Test" "command to edit the alarm=0=$(hostname)" "" "" "a test alarm"
    #shellcheck disable=SC2181
    if [ $? -ne 0 ]; then
      echo >&2 "# FAILED"
      test_res=1
    else
      echo >&2 "# OK"
    fi

    last="${x}"
    id=$((id + 1))
  done

  exit $test_res
fi

export PATH="${PATH}:/sbin:/usr/sbin:/usr/local/sbin:@sbindir_POST@"
export LC_ALL=C

# -----------------------------------------------------------------------------
# logging

PROGRAM_NAME="$(basename "${0}")"

# these should be the same as syslog() priorities
NDLP_EMERG=0  # system is unusable
NDLP_ALERT=1  # action must be taken immediately
NDLP_CRIT=2   # critical conditions
NDLP_ERR=3    # error conditions
NDLP_WARN=4   # warning conditions
NDLP_NOTICE=5 # normal but significant condition
NDLP_INFO=6   # informational
NDLP_DEBUG=7  # debug-level messages

# the max (numerically) log level we will log
LOG_LEVEL=$NDLP_INFO

set_log_min_priority() {
  case "${NETDATA_LOG_LEVEL,,}" in
    "emerg" | "emergency") LOG_LEVEL=$NDLP_EMERG ;;
    "alert") LOG_LEVEL=$NDLP_ALERT ;;
    "crit" | "critical") LOG_LEVEL=$NDLP_CRIT ;;
    "err" | "error") LOG_LEVEL=$NDLP_ERR ;;
    "warn" | "warning") LOG_LEVEL=$NDLP_WARN ;;
    "notice") LOG_LEVEL=$NDLP_NOTICE ;;
    "info") LOG_LEVEL=$NDLP_INFO ;;
    "debug") LOG_LEVEL=$NDLP_DEBUG ;;
  esac
}

set_log_min_priority

log() {
  local level="${1}"
  shift 1

  [[ -n "$level" && -n "$LOG_LEVEL" && "$level" -gt "$LOG_LEVEL" ]] && return

  systemd-cat-native --log-as-netdata --newline="--NEWLINE--" <&2 "BASH version 4 or later is required (this is ${BASH_VERSION})."
  exit 1
fi

# -----------------------------------------------------------------------------

docurl() {
  if [ -z "${curl}" ]; then
    error "\${curl} is unset."
    return 1
  fi

  if [ "${debug}" = "1" ]; then
    echo >&2 "--- BEGIN curl command ---"
    printf >&2 "%q " ${curl} "${@}"
    echo >&2
    echo >&2 "--- END curl command ---"

    local out code ret
    out=$(mktemp /tmp/netdata-health-alarm-notify-XXXXXXXX)
    code=$(${curl} ${curl_options} --write-out "%{http_code}" --output "${out}" --silent --show-error "${@}")
    ret=$?
    echo >&2 "--- BEGIN received response ---"
    cat >&2 "${out}"
    echo >&2
    echo >&2 "--- END received response ---"
    echo >&2 "RECEIVED HTTP RESPONSE CODE: ${code}"
    rm "${out}"
    echo "${code}"
    return ${ret}
  fi

  ${curl} ${curl_options} --write-out "%{http_code}" --output /dev/null --silent --show-error "${@}"
  return $?
}

# -----------------------------------------------------------------------------
# List of all the notification mechanisms we support.
# Used in a couple of places to write more compact code.
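# For every method named in method_names below, the rest of this script derives
# a matching set of variables: SEND_<METHOD>, DEFAULT_RECIPIENT_<METHOD> and the
# associative array role_recipients_<method>. A minimal sketch of what a
# health_alarm_notify.conf entry for one such method looks like (the webhook URL
# and channel names are illustrative placeholders only):
#
#   SEND_SLACK="YES"
#   SLACK_WEBHOOK_URL="https://hooks.slack.com/services/XXXX/XXXX/XXXXXXXX"
#   DEFAULT_RECIPIENT_SLACK="#alarms"
#   role_recipients_slack[sysadmin]="#alarms #ops"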
method_names=" email pushover pushbullet telegram slack alerta flock discord hipchat twilio messagebird pd fleep syslog custom msteams kavenegar prowl irc awssns rocketchat sms dynatrace matrix ntfy " # ----------------------------------------------------------------------------- # this is to be overwritten by the config file custom_sender() { info "custom notification mechanism is not configured; not sending ${notification_description}" } # ----------------------------------------------------------------------------- # defaults to allow running this script by hand [ -z "${NETDATA_USER_CONFIG_DIR}" ] && NETDATA_USER_CONFIG_DIR="@configdir_POST@" [ -z "${NETDATA_STOCK_CONFIG_DIR}" ] && NETDATA_STOCK_CONFIG_DIR="@libconfigdir_POST@" [ -z "${NETDATA_CACHE_DIR}" ] && NETDATA_CACHE_DIR="@cachedir_POST@" [ -z "${NETDATA_REGISTRY_URL}" ] && NETDATA_REGISTRY_URL="https://registry.my-netdata.io" [ -z "${NETDATA_REGISTRY_CLOUD_BASE_URL}" ] && NETDATA_REGISTRY_CLOUD_BASE_URL="https://app.netdata.cloud" # ----------------------------------------------------------------------------- # parse command line parameters if [[ ${1} = "unittest" ]]; then unittest=1 # enable unit testing mode roles="${2}" # the role that should be used for unit testing cfgfile="${3}" # the location of the config file to use for unit testing status="${4}" # the current status : REMOVED, UNINITIALIZED, UNDEFINED, CLEAR, WARNING, CRITICAL old_status="${5}" # the previous status: REMOVED, UNINITIALIZED, UNDEFINED, CLEAR, WARNING, CRITICAL elif [[ ${1} = "dump_methods" ]]; then dump_methods=1 status="WARNING" else roles="${1}" # the roles that should be notified for this event args_host="${2}" # the host generated this event unique_id="${3}" # the unique id of this event alarm_id="${4}" # the unique id of the alarm that generated this event event_id="${5}" # the incremental id of the event, for this alarm id when="${6}" # the timestamp this event occurred name="${7}" # the name of the alarm, as given in netdata health.d entries chart="${8}" # the name of the chart (type.id) status="${9}" # the current status : REMOVED, UNINITIALIZED, UNDEFINED, CLEAR, WARNING, CRITICAL old_status="${10}" # the previous status: REMOVED, UNINITIALIZED, UNDEFINED, CLEAR, WARNING, CRITICAL value="${11}" # the current value of the alarm old_value="${12}" # the previous value of the alarm src="${13}" # the line number and file the alarm has been configured duration="${14}" # the duration in seconds of the previous alarm state non_clear_duration="${15}" # the total duration in seconds this is/was non-clear units="${16}" # the units of the value info="${17}" # a short description of the alarm value_string="${18}" # friendly value (with units) # shellcheck disable=SC2034 # variable is unused, but https://github.com/netdata/netdata/pull/5164#discussion_r255572947 old_value_string="${19}" # friendly old value (with units), previously named "old_value_string" calc_expression="${20}" # contains the expression that was evaluated to trigger the alarm calc_param_values="${21}" # the values of the parameters in the expression, at the time of the evaluation total_warnings="${22}" # Total number of alarms in WARNING state total_critical="${23}" # Total number of alarms in CRITICAL state total_warn_alarms="${24}" # List of alarms in warning state total_crit_alarms="${25}" # List of alarms in critical state classification="${26}" # The class field from .conf files edit_command_line="${27}" # The command to edit the alarm, with the line number child_machine_guid="${28}" 
# the machine_guid of the child transition_id="${29}" # the transition_id of the alert summary="${30}" # the summary text field of the alert context="${31}" # the context of the chart component="${32}" type="${33}" fi # ----------------------------------------------------------------------------- # find a suitable hostname to use, if netdata did not supply a hostname if [ -z "${args_host}" ]; then this_host=$(hostname -s 2>/dev/null) host="${this_host}" args_host="${this_host}" else host="${args_host}" fi notification_description="notification to '${roles}' for transition from ${old_status} to ${status}, of alert '${name}' = '${value_string}', of instance '${chart}', context '${context}' on host '${host}'" # ----------------------------------------------------------------------------- # screen statuses we don't need to send a notification # don't do anything if this is not WARNING, CRITICAL or CLEAR if [ "${status}" != "WARNING" ] && [ "${status}" != "CRITICAL" ] && [ "${status}" != "CLEAR" ]; then debug "not sending ${notification_description}" exit 1 fi # don't do anything if this is CLEAR, but it was not WARNING or CRITICAL if [ "${clear_alarm_always}" != "YES" ] && [ "${old_status}" != "WARNING" ] && [ "${old_status}" != "CRITICAL" ] && [ "${status}" = "CLEAR" ]; then debug "not sending ${notification_description}" exit 1 fi # ----------------------------------------------------------------------------- # load configuration # By default fetch images from the global public registry. # This is required by default, since all notification methods need to download # images via the Internet, and private registries might not be reachable. # This can be overwritten at the configuration file. images_base_url="https://registry.my-netdata.io" # curl options to use curl_options="" # hostname handling use_fqdn="NO" # needed commands # if empty they will be searched in the system path curl= sendmail= # enable / disable features for method_name in ${method_names^^}; do declare SEND_${method_name}="YES" declare DEFAULT_RECIPIENT_${method_name} done for method_name in ${method_names}; do declare -A role_recipients_${method_name} done # slack configs SLACK_WEBHOOK_URL= # Microsoft Teams configs MSTEAMS_WEBHOOK_URL= # Legacy Microsoft Teams configs for backwards compatibility: declare -A role_recipients_msteam # rocketchat configs ROCKETCHAT_WEBHOOK_URL= # alerta configs ALERTA_WEBHOOK_URL= ALERTA_API_KEY= # flock configs FLOCK_WEBHOOK_URL= # discord configs DISCORD_WEBHOOK_URL= # pushover configs PUSHOVER_APP_TOKEN= # pushbullet configs PUSHBULLET_ACCESS_TOKEN= PUSHBULLET_SOURCE_DEVICE= # twilio configs TWILIO_ACCOUNT_SID= TWILIO_ACCOUNT_TOKEN= TWILIO_NUMBER= # hipchat configs HIPCHAT_SERVER= HIPCHAT_AUTH_TOKEN= # messagebird configs MESSAGEBIRD_ACCESS_KEY= MESSAGEBIRD_NUMBER= # kavenegar configs KAVENEGAR_API_KEY= KAVENEGAR_SENDER= # telegram configs TELEGRAM_BOT_TOKEN= # kafka configs SEND_KAFKA="YES" KAFKA_URL= KAFKA_SENDER_IP= # pagerduty.com configs PD_SERVICE_KEY= USE_PD_VERSION= # fleep.io configs FLEEP_SENDER="${host}" # Amazon SNS configs AWSSNS_MESSAGE_FORMAT= # Matrix configs MATRIX_HOMESERVER= MATRIX_ACCESSTOKEN= # syslog configs SYSLOG_FACILITY= # email configs EMAIL_SENDER= EMAIL_CHARSET=$(locale charmap 2>/dev/null) EMAIL_THREADING= EMAIL_PLAINTEXT_ONLY= # irc configs IRC_NICKNAME= IRC_REALNAME= IRC_NETWORK= IRC_PORT=6667 # dynatrace configs DYNATRACE_SPACE= DYNATRACE_SERVER= DYNATRACE_TOKEN= DYNATRACE_TAG_VALUE= DYNATRACE_ANNOTATION_TYPE= DYNATRACE_EVENT= SEND_DYNATRACE= # gotify configs 
GOTIFY_APP_URL=
GOTIFY_APP_TOKEN=

# opsgenie configs
OPSGENIE_API_KEY=

# load the stock and user configuration files
# these will overwrite the variables above

if [ ${unittest} ]; then
  if ! source "${cfgfile}"; then
    error "Failed to load requested config file."
    exit 1
  fi
else
  for CONFIG in "${NETDATA_STOCK_CONFIG_DIR}/health_alarm_notify.conf" "${NETDATA_USER_CONFIG_DIR}/health_alarm_notify.conf"; do
    if [ -f "${CONFIG}" ]; then
      debug "Loading config file '${CONFIG}'..."
      source "${CONFIG}" || error "Failed to load config file '${CONFIG}'."
    else
      debug "Cannot find file '${CONFIG}'."
    fi
  done
fi

if [[ ! $curl_options =~ .*\--connect-timeout ]]; then
  curl_options+=" --connect-timeout 5"
fi

OPSGENIE_API_URL=${OPSGENIE_API_URL:-"https://api.opsgenie.com"}

# If we didn't autodetect the character set for e-mail and it wasn't
# set by the user, we need to set it to a reasonable default. UTF-8
# should be correct for almost all modern UNIX systems.
if [ -z ${EMAIL_CHARSET} ]; then
  EMAIL_CHARSET="UTF-8"
fi

# If we've been asked to use FQDN's for the URL's in the alarm, do so,
# unless we're sending an alarm for a child system which we can't get the
# FQDN of easily.
if [ "${use_fqdn}" = "YES" ] && [ "${host}" = "$(hostname -s 2>/dev/null)" ]; then
  host="$(hostname -f 2>/dev/null)"
fi

# -----------------------------------------------------------------------------
# migrate old Microsoft Teams configuration keys after loading configuration

msteams_migration() {
  SEND_MSTEAMS=${SEND_MSTEAM:-$SEND_MSTEAMS}
  unset -v SEND_MSTEAM
  DEFAULT_RECIPIENT_MSTEAMS=${DEFAULT_RECIPIENT_MSTEAM:-$DEFAULT_RECIPIENT_MSTEAMS}
  MSTEAMS_WEBHOOK_URL=${MSTEAM_WEBHOOK_URL:-$MSTEAMS_WEBHOOK_URL}
  MSTEAMS_ICON_DEFAULT=${MSTEAM_ICON_DEFAULT:-$MSTEAMS_ICON_DEFAULT}
  MSTEAMS_ICON_CLEAR=${MSTEAM_ICON_CLEAR:-$MSTEAMS_ICON_CLEAR}
  MSTEAMS_ICON_WARNING=${MSTEAM_ICON_WARNING:-$MSTEAMS_ICON_WARNING}
  MSTEAMS_ICON_CRITICAL=${MSTEAM_ICON_CRITICAL:-$MSTEAMS_ICON_CRITICAL}
  MSTEAMS_COLOR_DEFAULT=${MSTEAM_COLOR_DEFAULT:-$MSTEAMS_COLOR_DEFAULT}
  MSTEAMS_COLOR_CLEAR=${MSTEAM_COLOR_CLEAR:-$MSTEAMS_COLOR_CLEAR}
  MSTEAMS_COLOR_WARNING=${MSTEAM_COLOR_WARNING:-$MSTEAMS_COLOR_WARNING}
  MSTEAMS_COLOR_CRITICAL=${MSTEAM_COLOR_CRITICAL:-$MSTEAMS_COLOR_CRITICAL}

  # migrate role specific recipients:
  for key in "${!role_recipients_msteam[@]}"; do
    # Disable check, if role_recipients_msteams is ever used:
    # The role_recipients_$method are created and used programmatically
    # by iterating over $methods. shellcheck therefore doesn't realize
    # that role_recipients_msteams is actually used in the block
    # "find the recipients' addresses per method".
    # shellcheck disable=SC2034
    role_recipients_msteams["$key"]="${role_recipients_msteam["$key"]}"
  done
}
msteams_migration

# -----------------------------------------------------------------------------
# filter a recipient based on alarm event severity

filter_recipient_by_criticality() {
  local method="${1}" recipient_arg="${2}"
  local tracking_dir tracking_file modifier modifiers recipient="${recipient_arg/|*/}"
  local mod_critical=0 mod_noclear=0 mod_nowarn=0

  # no severity filtering for this person
  [ "${recipient}" = "${recipient_arg}" ] && return 0

  # find out which modifiers are set
  modifiers="${recipient_arg#*|}"
  modifiers="${modifiers//|/ }" # replace pipes with spaces
  modifiers="${modifiers,,}"    # lowercase

  for modifier in ${modifiers}; do
    case "${modifier}" in
      critical) mod_critical=1 ;;
      noclear) mod_noclear=1 ;;
      nowarn) mod_nowarn=1 ;;
      *)
        error "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: invalid modifier '${modifier}'."
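        # For reference, recipient entries reach this filter as "name|modifier|..."
        # strings. The examples below are illustrative only; such entries could
        # appear in any DEFAULT_RECIPIENT_* or role_recipients_* setting:
        #
        #   role_recipients_email[sysadmin]="alice bob|critical carol|nowarn dave|noclear"
        #
        #   - "alice"          no filtering, receives every notification
        #   - "bob|critical"   starts receiving notifications only once the
        #                      alert has gone CRITICAL
        #   - "carol|nowarn"   receives no WARNING notifications
        #   - "dave|noclear"   receives no CLEAR notifications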
# invalid modifier, always send notification return 0 ;; esac done # set status tracking directory/file var tracking_dir="${NETDATA_CACHE_DIR}/alarm-notify/${method}/${recipient}" tracking_file="${tracking_dir}/${alarm_id}" # create the status tracking directory for this user if "critical" modifier is set [ "${mod_critical}" == "1" ] && [ ! -d "${tracking_dir}" ] && mkdir -p "${tracking_dir}" case "${status}" in CRITICAL) # "critical" modifier set, create tracking file for future status changes if [ "${mod_critical}" == "1" ]; then touch "${tracking_file}" debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: ALLOW: the alarm is CRITICAL (will now receive next status change)" return 0 fi # always send CRITICAL notification debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: ALLOW: the alarm is CRITICAL" return 0 ;; WARNING) # "nowarn" modifier set, block notification if [ "${mod_nowarn}" == "1" ]; then debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: BLOCK: recipient should not receive this notification (nowarn modifier set)" return 1 fi # "critical" modifier not set, send notification if [ "${mod_critical}" == "0" ]; then debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: ALLOW: the alarm is WARNING" return 0 fi # "critical" modifier set, send notification if tracking file exists if [ "${mod_critical}" == "1" ] && [ -f "${tracking_file}" ]; then debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: ALLOW: recipient has been notified for this alarm in the past (will still receive next status change)" return 0 fi ;; CLEAR) # remove tracking file [ -f "${tracking_file}" ] && rm "${tracking_file}" # "noclear" modifier set, block notification if [ "${mod_noclear}" == "1" ]; then debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: BLOCK: recipient should not receive this notification (noclear modifier set)" return 1 fi # "critical" modifier not set, send notification if [ "${mod_critical}" == "0" ]; then debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: ALLOW: the alarm is CLEAR" return 0 fi # "critical" modifier set, send notification if tracking file exists if [ "${mod_critical}" == "1" ] && [ -f "${tracking_file}" ]; then debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: ALLOW: recipient has been notified for this alarm in the past (no status change will be sent from now)" return 0 fi ;; *) # "critical" modifier set, send notification if tracking file exists if [ "${mod_critical}" == "1" ] && [ -f "${tracking_file}" ]; then debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: ALLOW: recipient has been notified for this alarm in the past (will still receive next status change)" return 0 fi ;; esac debug "SEVERITY FILTERING for ${recipient_arg} VIA ${method}: BLOCK: recipient should not receive this notification" return 1 } # ----------------------------------------------------------------------------- # check the configured targets # check email if [ "${SEND_EMAIL}" = "AUTO" ]; then if command -v curl >/dev/null 2>&1; then SEND_EMAIL="YES" else SEND_EMAIL="NO" fi fi # check slack [ -z "${SLACK_WEBHOOK_URL}" ] && SEND_SLACK="NO" # check rocketchat [ -z "${ROCKETCHAT_WEBHOOK_URL}" ] && SEND_ROCKETCHAT="NO" # check alerta [ -z "${ALERTA_WEBHOOK_URL}" ] && SEND_ALERTA="NO" # check flock [ -z "${FLOCK_WEBHOOK_URL}" ] && SEND_FLOCK="NO" # check discord [ -z "${DISCORD_WEBHOOK_URL}" ] && SEND_DISCORD="NO" # check pushover [ -z "${PUSHOVER_APP_TOKEN}" ] && SEND_PUSHOVER="NO" # check pushbullet [ -z 
"${PUSHBULLET_ACCESS_TOKEN}" ] && SEND_PUSHBULLET="NO" # check twilio { [ -z "${TWILIO_ACCOUNT_TOKEN}" ] || [ -z "${TWILIO_ACCOUNT_SID}" ] || [ -z "${TWILIO_NUMBER}" ]; } && SEND_TWILIO="NO" # check hipchat [ -z "${HIPCHAT_AUTH_TOKEN}" ] && SEND_HIPCHAT="NO" # check messagebird { [ -z "${MESSAGEBIRD_ACCESS_KEY}" ] || [ -z "${MESSAGEBIRD_NUMBER}" ]; } && SEND_MESSAGEBIRD="NO" # check kavenegar { [ -z "${KAVENEGAR_API_KEY}" ] || [ -z "${KAVENEGAR_SENDER}" ]; } && SEND_KAVENEGAR="NO" # check telegram [ -z "${TELEGRAM_BOT_TOKEN}" ] && SEND_TELEGRAM="NO" # check kafka { [ -z "${KAFKA_URL}" ] || [ -z "${KAFKA_SENDER_IP}" ]; } && SEND_KAFKA="NO" # check irc [ -z "${IRC_NETWORK}" ] && SEND_IRC="NO" # check fleep #shellcheck disable=SC2153 { [ -z "${FLEEP_SERVER}" ] || [ -z "${FLEEP_SENDER}" ]; } && SEND_FLEEP="NO" # check dynatrace { [ -z "${DYNATRACE_SPACE}" ] || [ -z "${DYNATRACE_SERVER}" ] || [ -z "${DYNATRACE_TOKEN}" ] || [ -z "${DYNATRACE_TAG_VALUE}" ] || [ -z "${DYNATRACE_EVENT}" ]; } && SEND_DYNATRACE="NO" # check opsgenie [ -z "${OPSGENIE_API_KEY}" ] && SEND_OPSGENIE="NO" # check matrix { [ -z "${MATRIX_HOMESERVER}" ] || [ -z "${MATRIX_ACCESSTOKEN}" ]; } && SEND_MATRIX="NO" # check gotify { [ -z "${GOTIFY_APP_TOKEN}" ] || [ -z "${GOTIFY_APP_URL}" ]; } && SEND_GOTIFY="NO" # check ntfy [ -z "${DEFAULT_RECIPIENT_NTFY}" ] && SEND_NTFY="NO" # check msteams [ -z "${MSTEAMS_WEBHOOK_URL}" ] && SEND_MSTEAMS="NO" # check pd [ -z "${DEFAULT_RECIPIENT_PD}" ] && SEND_PD="NO" # check prowl [ -z "${DEFAULT_RECIPIENT_PROWL}" ] && SEND_PROWL="NO" # check custom [ -z "${DEFAULT_RECIPIENT_CUSTOM}" ] && SEND_CUSTOM="NO" # ----------------------------------------------------------------------------- # check the availability of targets check_supported_targets() { local log=${1} shift if [ "${SEND_PUSHOVER}" = "YES" ] || [ "${SEND_SLACK}" = "YES" ] || [ "${SEND_ROCKETCHAT}" = "YES" ] || [ "${SEND_ALERTA}" = "YES" ] || [ "${SEND_PD}" = "YES" ] || [ "${SEND_FLOCK}" = "YES" ] || [ "${SEND_DISCORD}" = "YES" ] || [ "${SEND_HIPCHAT}" = "YES" ] || [ "${SEND_TWILIO}" = "YES" ] || [ "${SEND_MESSAGEBIRD}" = "YES" ] || [ "${SEND_KAVENEGAR}" = "YES" ] || [ "${SEND_TELEGRAM}" = "YES" ] || [ "${SEND_PUSHBULLET}" = "YES" ] || [ "${SEND_KAFKA}" = "YES" ] || [ "${SEND_FLEEP}" = "YES" ] || [ "${SEND_PROWL}" = "YES" ] || [ "${SEND_MATRIX}" = "YES" ] || [ "${SEND_CUSTOM}" = "YES" ] || [ "${SEND_MSTEAMS}" = "YES" ] || [ "${SEND_DYNATRACE}" = "YES" ] || [ "${SEND_OPSGENIE}" = "YES" ] || [ "${SEND_GOTIFY}" = "YES" ] || [ "${SEND_NTFY}" = "YES" ]; then # if we need curl, check for the curl command if [ -z "${curl}" ]; then curl="$(command -v curl 2>/dev/null)" fi if [ -z "${curl}" ]; then $log "Cannot find curl command in the system path. Disabling all curl based notifications." 
SEND_PUSHOVER="NO" SEND_PUSHBULLET="NO" SEND_TELEGRAM="NO" SEND_SLACK="NO" SEND_MSTEAMS="NO" SEND_ROCKETCHAT="NO" SEND_ALERTA="NO" SEND_PD="NO" SEND_FLOCK="NO" SEND_DISCORD="NO" SEND_TWILIO="NO" SEND_HIPCHAT="NO" SEND_MESSAGEBIRD="NO" SEND_KAVENEGAR="NO" SEND_KAFKA="NO" SEND_FLEEP="NO" SEND_PROWL="NO" SEND_MATRIX="NO" SEND_CUSTOM="NO" SEND_DYNATRACE="NO" SEND_OPSGENIE="NO" SEND_GOTIFY="NO" SEND_NTFY="NO" fi fi if [ "${SEND_SMS}" = "YES" ]; then if [ -z "${sendsms}" ]; then sendsms="$(command -v sendsms 2>/dev/null)" fi if [ -z "${sendsms}" ]; then SEND_SMS="NO" fi fi # if we need sendmail, check for the sendmail command if [ "${SEND_EMAIL}" = "YES" ] && [ -z "${sendmail}" ]; then sendmail="$(command -v sendmail 2>/dev/null)" if [ -z "${sendmail}" ]; then $log "Cannot find sendmail command in the system path. Disabling email notifications." SEND_EMAIL="NO" fi fi # if we need logger, check for the logger command if [ "${SEND_SYSLOG}" = "YES" ] && [ -z "${logger}" ]; then logger="$(command -v logger 2>/dev/null)" if [ -z "${logger}" ]; then $log "Cannot find logger command in the system path. Disabling syslog notifications." SEND_SYSLOG="NO" fi fi # if we need aws, check for the aws command if [ "${SEND_AWSSNS}" = "YES" ] && [ -z "${aws}" ]; then aws="$(command -v aws 2>/dev/null)" if [ -z "${aws}" ]; then $log "Cannot find aws command in the system path. Disabling Amazon SNS notifications." SEND_AWSSNS="NO" fi fi # if we need nc, check for the nc command if [ "${SEND_IRC}" = "YES" ] && [ -z "${nc}" ]; then nc="$(command -v nc 2>/dev/null)" if [ -z "${nc}" ]; then $log "Cannot find nc command in the system path. Disabling IRC notifications." SEND_IRC="NO" fi fi } if [ ${dump_methods} ]; then check_supported_targets debug for name in "${!SEND_@}"; do if [ "${!name}" = "YES" ]; then echo "$name" fi done exit 0 fi # ----------------------------------------------------------------------------- # find the recipients' addresses per method # netdata may call us with multiple roles, and roles may have multiple but # overlapping recipients - so, here we find the unique recipients. have_to_send_something="NO" for method_name in ${method_names}; do send_var="SEND_${method_name^^}" if [ "${!send_var}" = "NO" ]; then continue fi declare -A arr_var=() for x in ${roles//,/ }; do # the roles 'silent' and 'disabled' mean: # don't send a notification for this role if [ "${x}" = "silent" ] || [ "${x}" = "disabled" ]; then continue fi role_recipients="role_recipients_${method_name}[$x]" default_recipient_var="DEFAULT_RECIPIENT_${method_name^^}" a="${!role_recipients}" [ -z "${a}" ] && a="${!default_recipient_var}" for r in ${a//,/ }; do [ "${r}" != "disabled" ] && filter_recipient_by_criticality ${method_name} "${r}" && arr_var[${r/|*/}]="1" done done # build the list of recipients to_var="to_${method_name}" declare to_${method_name}="${!arr_var[*]}" if [ -z "${!to_var}" ]; then declare ${send_var}="NO" else have_to_send_something="YES" fi done # ----------------------------------------------------------------------------- # handle fixup of the email recipient list. 
fix_to_email() { to_email= while [ -n "${1}" ]; do [ -n "${to_email}" ] && to_email="${to_email}, " to_email="${to_email}${1}" shift 1 done } # ${to_email} without quotes here fix_to_email ${to_email} # ----------------------------------------------------------------------------- # handle output if we're running in unit test mode if [ ${unittest} ]; then for method_name in ${method_names}; do to_var="to_${method_name}" echo "results: ${method_name}: ${!to_var}" done exit 0 fi # ----------------------------------------------------------------------------- # check that we have at least a method enabled proceed=0 for method in "${SEND_EMAIL}" \ "${SEND_PUSHOVER}" \ "${SEND_TELEGRAM}" \ "${SEND_SLACK}" \ "${SEND_ROCKETCHAT}" \ "${SEND_ALERTA}" \ "${SEND_FLOCK}" \ "${SEND_DISCORD}" \ "${SEND_TWILIO}" \ "${SEND_HIPCHAT}" \ "${SEND_MESSAGEBIRD}" \ "${SEND_KAVENEGAR}" \ "${SEND_PUSHBULLET}" \ "${SEND_KAFKA}" \ "${SEND_PD}" \ "${SEND_FLEEP}" \ "${SEND_PROWL}" \ "${SEND_MATRIX}" \ "${SEND_CUSTOM}" \ "${SEND_IRC}" \ "${SEND_AWSSNS}" \ "${SEND_SYSLOG}" \ "${SEND_SMS}" \ "${SEND_MSTEAMS}" \ "${SEND_DYNATRACE}" \ "${SEND_OPSGENIE}" \ "${SEND_GOTIFY}" \ "${SEND_NTFY}" ; do if [ "${method}" == "YES" ]; then proceed=1 break fi done if [ "$proceed" -eq 0 ]; then if [ "${have_to_send_something}" = "NO" ]; then debug "All notification methods are disabled; not sending ${notification_description}." exit 0 else fatal "All notification methods are disabled; not sending ${notification_description}." fi fi check_supported_targets error # ----------------------------------------------------------------------------- # get the date the alarm happened date=$(date --date=@${when} "${date_format}" 2>/dev/null) [ -z "${date}" ] && date=$(date "${date_format}" 2>/dev/null) [ -z "${date}" ] && date=$(date --date=@${when} 2>/dev/null) [ -z "${date}" ] && date=$(date 2>/dev/null) # ----------------------------------------------------------------------------- # get the date in utc the alarm happened date_utc=$(date --date=@${when} "${date_format}" -u 2>/dev/null) [ -z "${date_utc}" ] && date_utc=$(date -u "${date_format}" 2>/dev/null) [ -z "${date_utc}" ] && date_utc=$(date -u --date=@${when} 2>/dev/null) [ -z "${date_utc}" ] && date_utc=$(date -u 2>/dev/null) # ---------------------------------------------------------------------------- # prepare some extra headers if we've been asked to thread e-mails if [ "${SEND_EMAIL}" == "YES" ] && [ "${EMAIL_THREADING}" != "NO" ]; then email_thread_headers="In-Reply-To: <${chart}-${name}@${host}>\\r\\nReferences: <${chart}-${name}@${host}>" else email_thread_headers= fi # ----------------------------------------------------------------------------- # function to URL encode a string urlencode() { local string="${1}" strlen encoded pos c o strlen=${#string} for ((pos = 0; pos < strlen; pos++)); do c=${string:pos:1} case "${c}" in [-_.~a-zA-Z0-9]) o="${c}" ;; *) printf -v o '%%%02x' "'${c}" ;; esac encoded+="${o}" done REPLY="${encoded}" echo "${REPLY}" } # ----------------------------------------------------------------------------- # function to convert a duration in seconds, to a human readable duration # using DAYS, MINUTES, SECONDS duration4human() { local s="${1}" d=0 h=0 m=0 ds="day" hs="hour" ms="minute" ss="second" ret d=$((s / 86400)) s=$((s - (d * 86400))) h=$((s / 3600)) s=$((s - (h * 3600))) m=$((s / 60)) s=$((s - (m * 60))) if [ ${d} -gt 0 ]; then [ ${m} -ge 30 ] && h=$((h + 1)) [ ${d} -gt 1 ] && ds="days" [ ${h} -gt 1 ] && hs="hours" if [ ${h} -gt 0 ]; then ret="${d} ${ds} 
and ${h} ${hs}" else ret="${d} ${ds}" fi elif [ ${h} -gt 0 ]; then [ ${s} -ge 30 ] && m=$((m + 1)) [ ${h} -gt 1 ] && hs="hours" [ ${m} -gt 1 ] && ms="minutes" if [ ${m} -gt 0 ]; then ret="${h} ${hs} and ${m} ${ms}" else ret="${h} ${hs}" fi elif [ ${m} -gt 0 ]; then [ ${m} -gt 1 ] && ms="minutes" [ ${s} -gt 1 ] && ss="seconds" if [ ${s} -gt 0 ]; then ret="${m} ${ms} and ${s} ${ss}" else ret="${m} ${ms}" fi else [ ${s} -gt 1 ] && ss="seconds" ret="${s} ${ss}" fi REPLY="${ret}" echo "${REPLY}" } # ----------------------------------------------------------------------------- # email sender send_email() { local ret opts=() sender_email="${EMAIL_SENDER}" sender_name= if [ "${SEND_EMAIL}" = "YES" ]; then if [ -n "${EMAIL_SENDER}" ]; then if [[ ${EMAIL_SENDER} =~ ^\".*\"\ \<.*\>$ ]]; then # the name includes double quotes sender_email="$(echo "${EMAIL_SENDER}" | cut -d '<' -f 2 | cut -d '>' -f 1)" sender_name="$(echo "${EMAIL_SENDER}" | cut -d '"' -f 2)" elif [[ ${EMAIL_SENDER} =~ ^\'.*\'\ \<.*\>$ ]]; then # the name includes single quotes sender_email="$(echo "${EMAIL_SENDER}" | cut -d '<' -f 2 | cut -d '>' -f 1)" sender_name="$(echo "${EMAIL_SENDER}" | cut -d "'" -f 2)" elif [[ ${EMAIL_SENDER} =~ ^.*\ \<.*\>$ ]]; then # the name does not have any quotes sender_email="$(echo "${EMAIL_SENDER}" | cut -d '<' -f 2 | cut -d '>' -f 1)" sender_name="$(echo "${EMAIL_SENDER}" | cut -d '<' -f 1)" fi fi [ -n "${sender_email}" ] && opts+=(-f "${sender_email}") [ -n "${sender_name}" ] && ${sendmail} -F 2>&1 | head -1 | grep -qv "sendmail: unrecognized option: F" && opts+=(-F "${sender_name}") if [ "${debug}" = "1" ]; then echo >&2 "--- BEGIN sendmail command ---" printf >&2 "%q " "${sendmail}" -t "${opts[@]}" echo >&2 echo >&2 "--- END sendmail command ---" fi local cmd_output cmd_output=$("${sendmail}" -t "${opts[@]}" 2>&1) ret=$? if [ ${ret} -eq 0 ]; then info "sent email to '${to_email}' for ${notification_description}" return 0 else error "failed to send email to '${to_email}' for ${notification_description}, with error code ${ret} (${cmd_output})." return 1 fi fi return 1 } # ----------------------------------------------------------------------------- # pushover sender send_pushover() { local apptoken="${1}" usertokens="${2}" when="${3}" url="${4}" status="${5}" title="${6}" message="${7}" httpcode sent=0 user priority if [ "${SEND_PUSHOVER}" = "YES" ] && [ -n "${apptoken}" ] && [ -n "${usertokens}" ] && [ -n "${title}" ] && [ -n "${message}" ]; then # https://pushover.net/api priority=-2 case "${status}" in CLEAR) priority=-1 ;; # low priority: no sound or vibration WARNING) priority=0 ;; # normal priority: respect quiet hours CRITICAL) priority=1 ;; # high priority: bypass quiet hours *) priority=-2 ;; # lowest priority: no notification at all esac for user in ${usertokens}; do httpcode=$(docurl \ --form-string "token=${apptoken}" \ --form-string "user=${user}" \ --form-string "html=1" \ --form-string "title=${title}" \ --form-string "message=${message}" \ --form-string "timestamp=${when}" \ --form-string "url=${url}" \ --form-string "url_title=Open netdata dashboard to view the alarm" \ --form-string "priority=${priority}" \ https://api.pushover.net/1/messages.json) if [ "${httpcode}" = "200" ]; then info "sent pushover notification to '${user}' for ${notification_description}" sent=$((sent + 1)) else error "failed to send pushover notification to '${user}' for ${notification_description}, with HTTP response status code ${httpcode}." 
fi done [ ${sent} -gt 0 ] && return 0 fi return 1 } # ----------------------------------------------------------------------------- # pushbullet sender send_pushbullet() { local userapikey="${1}" source_device="${2}" recipients="${3}" url="${4}" title="${5}" message="${6}" httpcode sent=0 userOrChannelTag if [ "${SEND_PUSHBULLET}" = "YES" ] && [ -n "${userapikey}" ] && [ -n "${recipients}" ] && [ -n "${message}" ] && [ -n "${title}" ]; then # https://docs.pushbullet.com/#create-push # Accept specification of user(s) (PushBullet account email address) and/or channel tag(s), separated by spaces. # If recipient begins with a "#" then send to channel tag, otherwise send to email recipient. for userOrChannelTag in ${recipients}; do if [ "${userOrChannelTag::1}" = "#" ]; then userOrChannelTag_type="channel_tag" userOrChannelTag="${userOrChannelTag:1}" # Remove hash from start of channel tag (required by pushbullet API) else userOrChannelTag_type="email" fi httpcode=$(docurl \ --header 'Access-Token: '${userapikey}'' \ --header 'Content-Type: application/json' \ --data-binary @<( cat < from the message message="${message///}" message="${message//<\/small>/}" if [ "${SEND_HIPCHAT}" = "YES" ] && [ -n "${HIPCHAT_SERVER}" ] && [ -n "${authtoken}" ] && [ -n "${recipients}" ] && [ -n "${message}" ]; then # Valid values: html, text. # Defaults to 'html'. msg_format="html" # Background color for message. Valid values: yellow, green, red, purple, gray, random. Defaults to 'yellow'. case "${status}" in WARNING) color="yellow" ;; CRITICAL) color="red" ;; CLEAR) color="green" ;; *) color="gray" ;; esac # Whether this message should trigger a user notification (change the tab color, play a sound, notify mobile phones, etc). # Each recipient's notification preferences are taken into account. # Defaults to false. notify="true" for room in ${recipients}; do httpcode=$(docurl -X POST \ -H "Content-type: application/json" \ -H "Authorization: Bearer ${authtoken}" \ -d "{\"color\": \"${color}\", \"from\": \"${host}\", \"message_format\": \"${msg_format}\", \"message\": \"${message}\", \"notify\": \"${notify}\"}" \ "https://${HIPCHAT_SERVER}/v2/room/${room}/notification") if [ "${httpcode}" = "204" ]; then info "sent HipChat notification to '${room}' for ${notification_description}" sent=$((sent + 1)) else error "failed to send HipChat notification to '${room}' for ${notification_description}, with HTTP response status code ${httpcode}." fi done [ ${sent} -gt 0 ] && return 0 fi return 1 } # ----------------------------------------------------------------------------- # messagebird sender send_messagebird() { local accesskey="${1}" messagebirdnumber="${2}" recipients="${3}" title="${4}" message="${5}" httpcode sent=0 user if [ "${SEND_MESSAGEBIRD}" = "YES" ] && [ -n "${accesskey}" ] && [ -n "${messagebirdnumber}" ] && [ -n "${recipients}" ] && [ -n "${message}" ] && [ -n "${title}" ]; then #https://developers.messagebird.com/docs/messaging for user in ${recipients}; do httpcode=$(docurl -X POST \ --data-urlencode "originator=${messagebirdnumber}" \ --data-urlencode "recipients=${user}" \ --data-urlencode "body=${title} ${message}" \ --data-urlencode "datacoding=auto" \ -H "Authorization: AccessKey ${accesskey}" \ "https://rest.messagebird.com/messages") if [ "${httpcode}" = "201" ]; then info "sent Messagebird SMS to '${user}' for ${notification_description}" sent=$((sent + 1)) else error "failed to send Messagebird SMS to '${user}' for ${notification_description}, with HTTP response status code ${httpcode}." 
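      # MessageBird needs an access key and an originator number. A sketch of
      # the corresponding health_alarm_notify.conf entries (all values are
      # placeholders only):
      #
      #   MESSAGEBIRD_ACCESS_KEY="XXXXXXXXXXXXXXXXXXXX"
      #   MESSAGEBIRD_NUMBER="+31612345678"
      #   DEFAULT_RECIPIENT_MESSAGEBIRD="+4917012345678"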
fi done [ ${sent} -gt 0 ] && return 0 fi return 1 } # ----------------------------------------------------------------------------- # kavenegar sender send_kavenegar() { local API_KEY="${1}" kavenegarsender="${2}" recipients="${3}" title="${4}" message="${5}" httpcode sent=0 user if [ "${SEND_KAVENEGAR}" = "YES" ] && [ -n "${API_KEY}" ] && [ -n "${kavenegarsender}" ] && [ -n "${recipients}" ] && [ -n "${message}" ] && [ -n "${title}" ]; then # http://api.kavenegar.com/v1/{API-KEY}/sms/send.json for user in ${recipients}; do httpcode=$(docurl -X POST http://api.kavenegar.com/v1/${API_KEY}/sms/send.json \ --data-urlencode "sender=${kavenegarsender}" \ --data-urlencode "receptor=${user}" \ --data-urlencode "message=${title} ${message}") if [ "${httpcode}" = "200" ]; then info "sent Kavenegar SMS to '${user}' for ${notification_description}" sent=$((sent + 1)) else error "failed to send Kavenegar SMS to '${user}' for ${notification_description}, with HTTP response status code ${httpcode}." fi done [ ${sent} -gt 0 ] && return 0 fi return 1 } # ----------------------------------------------------------------------------- # telegram sender send_telegram() { local bottoken="${1}" chatids="${2}" message="${3}" httpcode sent=0 chatid emoji disableNotification="" if [ "${status}" = "CLEAR" ]; then disableNotification="--data-urlencode disable_notification=true"; fi case "${status}" in WARNING) emoji="⚠️" ;; CRITICAL) emoji="🔴" ;; CLEAR) emoji="✅" ;; *) emoji="⚪️" ;; esac if [ "${SEND_TELEGRAM}" = "YES" ] && [ -n "${bottoken}" ] && [ -n "${chatids}" ] && [ -n "${message}" ]; then for chatid in ${chatids}; do notify_telegram=1 notify_retries=${TELEGRAM_RETRIES_ON_LIMIT:-0} while [ ${notify_telegram} -eq 1 ]; do # https://core.telegram.org/bots/api#sendmessage httpcode=$(docurl ${disableNotification} \ --data-urlencode "parse_mode=HTML" \ --data-urlencode "disable_web_page_preview=true" \ --data-urlencode "text=${emoji} ${message}" \ "https://api.telegram.org/bot${bottoken}/sendMessage?chat_id=${chatid}") notify_telegram=0 if [ "${httpcode}" = "200" ]; then info "sent telegram notification to '${chatid}' for ${notification_description}" sent=$((sent + 1)) elif [ "${httpcode}" = "401" ]; then error "failed to send telegram notification to '${chatid}' for ${notification_description}, wrong bot token." elif [ "${httpcode}" = "429" ]; then if [ "$notify_retries" -gt 0 ]; then error "failed to send telegram notification to '${chatid}' for ${notification_description}, rate limit exceeded, retrying after 1s." notify_retries=$((notify_retries - 1)) notify_telegram=1 sleep 1 else error "failed to send telegram notification to '${chatid}' for ${notification_description}, rate limit exceeded." fi else error "failed to send telegram notification to '${chatid}' for ${notification_description}, with HTTP response status code ${httpcode}." 
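          # The 429 branch above honours TELEGRAM_RETRIES_ON_LIMIT, which is
          # read with a default of 0 (no retries) and can be set from
          # health_alarm_notify.conf, for example:
          #
          #   TELEGRAM_RETRIES_ON_LIMIT=3   # retry up to 3 times, one second apart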
fi done done [ ${sent} -gt 0 ] && return 0 fi return 1 } # ----------------------------------------------------------------------------- # Microsoft Team sender send_msteams() { local webhook="${1}" channels="${2}" httpcode sent=0 channel color payload [ "${SEND_MSTEAMS}" != "YES" ] && return 1 case "${status}" in WARNING) icon="${MSTEAMS_ICON_WARNING}" && color="${MSTEAMS_COLOR_WARNING}" ;; CRITICAL) icon="${MSTEAMS_ICON_CRITICAL}" && color="${MSTEAMS_COLOR_CRITICAL}" ;; CLEAR) icon="${MSTEAMS_ICON_CLEAR}" && color="${MSTEAMS_COLOR_CLEAR}" ;; *) icon="${MSTEAMS_ICON_DEFAULT}" && color="${MSTEAMS_COLOR_DEFAULT}" ;; esac for channel in ${channels}; do ## More details are available here regarding the payload syntax options : https://docs.microsoft.com/en-us/outlook/actionable-messages/message-card-reference ## Online designer : https://adaptivecards.io/designer/ payload="$( cat <View Netdata" }, "origin": "netdata/${host}", "type": "netdataAlarm", "rawData": "${BASH_ARGV[@]}" } EOF )" if [ -n "${ALERTA_API_KEY}" ]; then auth="Key ${ALERTA_API_KEY}" fi httpcode=$(docurl -X POST "${webhook}/alert" -H "Content-Type: application/json" -H "Authorization: $auth" --data "${payload}") if [ "${httpcode}" = "200" ] || [ "${httpcode}" = "201" ]; then info "sent alerta notification to '${channel}' for ${notification_description}" sent=$((sent + 1)) elif [ "${httpcode}" = "202" ]; then info "suppressed alerta notification to '${channel}' for ${notification_description}" else error "failed to send alerta notification to '${channel}' for ${notification_description}, with HTTP response status code ${httpcode}." fi done [ ${sent} -gt 0 ] && return 0 return 1 } # ----------------------------------------------------------------------------- # flock sender send_flock() { local webhook="${1}" channels="${2}" httpcode sent=0 channel color payload [ "${SEND_FLOCK}" != "YES" ] && return 1 case "${status}" in WARNING) color="warning" ;; CRITICAL) color="danger" ;; CLEAR) color="good" ;; *) color="#777777" ;; esac for channel in ${channels}; do httpcode=$(docurl -X POST "${webhook}" -H "Content-Type: application/json" -d "{ \"sendAs\": { \"name\" : \"netdata on ${host}\", \"profileImage\" : \"${images_base_url}/images/banner-icon-144x144.png\" }, \"text\": \"${host} *${status_message}*\", \"timestamp\": \"${when}\", \"attachments\": [ { \"description\": \"${chart} - ${info}\", \"color\": \"${color}\", \"title\": \"${alarm}\", \"url\": \"${goto_url}\", \"text\": \"${info}\", \"views\": { \"image\": { \"original\": { \"src\": \"${image}\", \"width\": 400, \"height\": 400 }, \"thumbnail\": { \"src\": \"${image}\", \"width\": 50, \"height\": 50 }, \"filename\": \"${image}\" } } } ] }") if [ "${httpcode}" = "200" ]; then info "sent flock notification to '${channel}' for ${notification_description}" sent=$((sent + 1)) else error "failed to send flock notification to '${channel}' for ${notification_description}, with HTTP response status code ${httpcode}." fi done [ ${sent} -gt 0 ] && return 0 return 1 } # ----------------------------------------------------------------------------- # discord sender send_discord() { local webhook="${1}/slack" channels="${2}" httpcode sent=0 channel color payload username [ "${SEND_DISCORD}" != "YES" ] && return 1 case "${status}" in WARNING) color="warning" ;; CRITICAL) color="danger" ;; CLEAR) color="good" ;; *) color="#777777" ;; esac for channel in ${channels}; do username="netdata on ${host}" [ ${#username} -gt 32 ] && username="${username:0:29}..." 
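    # The 32 character cap above presumably matches Discord's username length
    # limit; the payload itself is Slack compatible because the webhook is
    # called through its /slack endpoint (see the "${1}/slack" assignment above).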
payload="$( cat </dev/null; then info "sent Amazon SNS notification to '${target}' for ${notification_description}" sent=$((sent + 1)) else error "failed to send Amazon SNS notification to '${target}' for ${notification_description}" fi done [ ${sent} -gt 0 ] && return 0 return 1 } # ----------------------------------------------------------------------------- # Matrix sender send_matrix() { local homeserver="${1}" webhook accesstoken rooms="${2}" httpcode sent=0 payload [ "${SEND_MATRIX}" != "YES" ] && return 1 [ -z "${MATRIX_ACCESSTOKEN}" ] && return 1 accesstoken="${MATRIX_ACCESSTOKEN}" case "${status}" in WARNING) emoji="⚠️" ;; CRITICAL) emoji="🔴" ;; CLEAR) emoji="✅" ;; *) emoji="⚪️" ;; esac for room in ${rooms}; do webhook="$homeserver/_matrix/client/r0/rooms/$(urlencode $room)/send/m.room.message?access_token=$accesstoken" payload="$( cat <${name//_/ }
${chart}
${alarm}
${info}", "body": "${emoji} ${host} ${status_message} - ${name//_/ } ${chart} ${goto_url} ${alarm} ${info}" } EOF )" httpcode=$(docurl -X POST --data "${payload}" "${webhook}") if [ "${httpcode}" == "200" ]; then info "sent Matrix notification to '${room}' for ${notification_description}" sent=$((sent + 1)) else error "failed to send Matrix notification to '${room}' for ${notification_description}, with HTTP response status code ${httpcode}." fi done [ ${sent} -gt 0 ] && return 0 return 1 } # ----------------------------------------------------------------------------- # syslog sender send_syslog() { local facility=${SYSLOG_FACILITY:-"local6"} level='info' targets="${1}" local priority='' message='' server='' port='' prefix='' local temp1='' temp2='' [ "${SEND_SYSLOG}" = "YES" ] || return 1 if [ "${status}" = "CRITICAL" ]; then level='crit' elif [ "${status}" = "WARNING" ]; then level='warning' fi for target in ${targets}; do priority="${facility}.${level}" message='' server='' port='' prefix='' temp1='' temp2='' prefix=$(echo ${target} | cut -d '/' -f 2) temp1=$(echo ${target} | cut -d '/' -f 1) if [ ${prefix} != ${temp1} ]; then if (echo ${temp1} | grep -q '@'); then temp2=$(echo ${temp1} | cut -d '@' -f 1) server=$(echo ${temp1} | cut -d '@' -f 2) if [ ${temp2} != ${server} ]; then priority=${temp2} fi port=$(echo ${server} | rev | cut -d ':' -f 1 | rev) if (echo ${server} | grep -E -q '\[.*\]'); then if (echo ${port} | grep -q ']'); then port='' else server=$(echo ${server} | rev | cut -d ':' -f 2- | rev) fi else if [ ${port} = ${server} ]; then port='' else server=$(echo ${server} | cut -d ':' -f 1) fi fi else priority=${temp1} fi fi message="${prefix} ${status} on ${host} at ${date}: ${chart} ${value_string}" if [ ${server} ]; then logger_options="${logger_options} -n ${server}" if [ ${port} ]; then logger_options="${logger_options} -P ${port}" fi fi ${logger} -p ${priority} ${logger_options} "${message}" done return $? } # ----------------------------------------------------------------------------- # SMS sender send_sms() { local recipients="${1}" errcode errmessage sent=0 # Human readable SMS local msg="${host} ${status_message}: ${chart}, ${alarm}" # limit it to 160 characters msg="${msg:0:160}" if [ "${SEND_SMS}" = "YES" ] && [ -n "${sendsms}" ] && [ -n "${recipients}" ] && [ -n "${msg}" ]; then # http://api.kavenegar.com/v1/{API-KEY}/sms/send.json for phone in ${recipients}; do errmessage=$($sendsms $phone "$msg" 2>&1) errcode=$? if [ ${errcode} -eq 0 ]; then info "sent smstools3 SMS to '${user}' for ${notification_description}" sent=$((sent + 1)) else error "failed to send smstools3 SMS to '${user}' for ${notification_description}, with error code ${errcode}: ${errmessage}." 
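        # send_sms() relies on the smstools3 "sendsms" helper located earlier
        # via command -v, invoked as "sendsms <phone> <message>" just like the
        # call above. A manual test could look like this (number is illustrative):
        #
        #   sendsms 491701234567 "test message from netdata"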
fi done [ ${sent} -gt 0 ] && return 0 fi return 1 } # ----------------------------------------------------------------------------- # Dynatrace sender send_dynatrace() { [ "${SEND_DYNATRACE}" != "YES" ] && return 1 local dynatrace_url="${DYNATRACE_SERVER}/e/${DYNATRACE_SPACE}/api/v1/events" local description="Netdata Notification for: ${host} ${chart}.${name} is ${status}" local payload="" payload=$(cat </dev/null url_host="${REPLY}" urlencode "${chart}" >/dev/null url_chart="${REPLY}" urlencode "${name}" >/dev/null url_name="${REPLY}" urlencode "${value_string}" >/dev/null url_value_string="${REPLY}" redirect_params="host=${url_host}&chart=${url_chart}&alarm=${url_name}&alarm_unique_id=${unique_id}&alarm_id=${alarm_id}&alarm_event_id=${event_id}&alarm_when=${when}&alarm_status=${status}&alarm_chart=${chart}&alarm_value=${url_value_string}" if [ -z "${NETDATA_REGISTRY_UNIQUE_ID}" ]; then if [ -f "@registrydir_POST@/netdata.public.unique.id" ]; then NETDATA_REGISTRY_UNIQUE_ID="$(cat "@registrydir_POST@/netdata.public.unique.id")" else error "failed to identify this agent via its NETDATA_REGISTRY_UNIQUE_ID." fi fi goto_url="${NETDATA_REGISTRY_URL}/registry-alert-redirect.html?agent_machine_guid=${NETDATA_REGISTRY_UNIQUE_ID}&host_machine_guid=${child_machine_guid}&transition_id=${transition_id}&${redirect_params}" # the severity of the alarm severity="${status}" # the time the alarm was raised duration4human ${duration} >/dev/null duration_txt="${REPLY}" duration4human ${non_clear_duration} >/dev/null non_clear_duration_txt="${REPLY}" raised_for="(was ${old_status,,} for ${duration_txt})" # the key status message status_message="status unknown" # the color of the alarm color="grey" # the alarm value alarm="${summary//_/ } = ${value_string}" # the image of the alarm image="${images_base_url}/images/banner-icon-144x144.png" # have a default email status, in case the following case does not catch it status_email_subject="${status}" # prepare the title based on status case "${status}" in CRITICAL) image="${images_base_url}/images/alert-128-red.png" alarm_badge="https://app.netdata.cloud/static/email/img/label_critical.png" status_message="is critical" status_email_subject="Critical" color="#ca414b" rich_status_raised_for="Raised to critical, for ${non_clear_duration_txt}" background_color="#FFEBEF" border_color="#FF4136" text_color="#FF4136" action_text_color="#FFFFFF" ;; WARNING) image="${images_base_url}/images/alert-128-orange.png" alarm_badge="https://app.netdata.cloud/static/email/img/label_warning.png" status_message="needs attention" status_email_subject="Warning" color="#ffc107" rich_status_raised_for="Raised to warning, for ${non_clear_duration_txt}" background_color="#FFF8E1" border_color="#FFC300" text_color="#536775" action_text_color="#35414A" ;; CLEAR) image="${images_base_url}/images/check-mark-2-128-green.png" alarm_badge="https://app.netdata.cloud/static/email/img/label_recovered.png" status_message="recovered" status_email_subject="Clear" color="#77ca6d" rich_status_raised_for= background_color="#E5F5E8" border_color="#68C47D" text_color="#00AB44" action_text_color="#FFFFFF" ;; esac # the html email subject html_email_subject="${status_email_subject}, ${summary} = ${value_string}, on ${host}" if [ "${status}" = "CLEAR" ]; then severity="Recovered from ${old_status}" if [ ${non_clear_duration} -gt ${duration} ]; then raised_for="(alarm was raised for ${non_clear_duration_txt})" fi rich_status_raised_for="Recovered from ${old_status,,}, ${raised_for}" # don't show the value when 
the status is CLEAR # for certain alarms, this value might not have any meaning alarm="${summary//_/ } ${raised_for}" html_email_subject="${status_email_subject}, ${summary} ${raised_for}, on ${host}" elif { [ "${old_status}" = "WARNING" ] && [ "${status}" = "CRITICAL" ]; }; then severity="Escalated to ${status}" if [ ${non_clear_duration} -gt ${duration} ]; then raised_for="(alarm is raised for ${non_clear_duration_txt})" fi rich_status_raised_for="Escalated to critical, ${raised_for}" elif { [ "${old_status}" = "CRITICAL" ] && [ "${status}" = "WARNING" ]; }; then severity="Demoted to ${status}" if [ ${non_clear_duration} -gt ${duration} ]; then raised_for="(alarm is raised for ${non_clear_duration_txt})" fi rich_status_raised_for="Demoted to warning, ${raised_for}" else raised_for= fi # prepare HTML versions of elements info_html= [ -n "${info}" ] && info_html="
${info}
" raised_for_html= [ -n "${raised_for}" ] && raised_for_html="
${raised_for}" # ----------------------------------------------------------------------------- # send the slack notification # slack aggregates posts from the same username # so we use "${host} ${status}" as the bot username, to make them diff send_slack "${SLACK_WEBHOOK_URL}" "${to_slack}" SENT_SLACK=$? # ----------------------------------------------------------------------------- # send the Microsoft Teams notification # Microsoft teams aggregates posts from the same username # so we use "${host} ${status}" as the bot username, to make them diff send_msteams "${MSTEAMS_WEBHOOK_URL}" "${to_msteams}" SENT_MSTEAMS=$? # ----------------------------------------------------------------------------- # send the rocketchat notification # rocketchat aggregates posts from the same username # so we use "${host} ${status}" as the bot username, to make them diff send_rocketchat "${ROCKETCHAT_WEBHOOK_URL}" "${to_rocketchat}" SENT_ROCKETCHAT=$? # ----------------------------------------------------------------------------- # send the alerta notification # alerta aggregates posts from the same username # so we use "${host} ${status}" as the bot username, to make them diff send_alerta "${ALERTA_WEBHOOK_URL}" "${to_alerta}" SENT_ALERTA=$? # ----------------------------------------------------------------------------- # send the flock notification # flock aggregates posts from the same username # so we use "${host} ${status}" as the bot username, to make them diff send_flock "${FLOCK_WEBHOOK_URL}" "${to_flock}" SENT_FLOCK=$? # ----------------------------------------------------------------------------- # send the discord notification # discord aggregates posts from the same username # so we use "${host} ${status}" as the bot username, to make them diff send_discord "${DISCORD_WEBHOOK_URL}" "${to_discord}" SENT_DISCORD=$? # ----------------------------------------------------------------------------- # send the pushover notification send_pushover "${PUSHOVER_APP_TOKEN}" "${to_pushover}" "${when}" "${goto_url}" "${status}" "${host} ${status_message} - ${name//_/ } - ${chart}" " ${alarm}${info_html}
  ${chart}
Chart
 
${severity}
Severity
 
${date}${raised_for_html}
Time
 
View Netdata
  The source of this alarm is line ${src} " SENT_PUSHOVER=$? # ----------------------------------------------------------------------------- # send the pushbullet notification send_pushbullet "${PUSHBULLET_ACCESS_TOKEN}" "${PUSHBULLET_SOURCE_DEVICE}" "${to_pushbullet}" "${goto_url}" "${host} ${status_message} - ${name//_/ } - ${chart}" "${alarm}\\n Severity: ${severity}\\n Chart: ${chart}\\n ${date}\\n The source of this alarm is line ${src}" SENT_PUSHBULLET=$? # ----------------------------------------------------------------------------- # send the twilio SMS send_twilio "${TWILIO_ACCOUNT_SID}" "${TWILIO_ACCOUNT_TOKEN}" "${TWILIO_NUMBER}" "${to_twilio}" "${host} ${status_message} - ${name//_/ } - ${chart}" "${alarm} Severity: ${severity} Chart: ${chart} ${info}" SENT_TWILIO=$? # ----------------------------------------------------------------------------- # send the messagebird SMS send_messagebird "${MESSAGEBIRD_ACCESS_KEY}" "${MESSAGEBIRD_NUMBER}" "${to_messagebird}" "${host} ${status_message} - ${name//_/ } - ${chart}" "${alarm} Severity: ${severity} Chart: ${chart} ${info}" SENT_MESSAGEBIRD=$? # ----------------------------------------------------------------------------- # send the kavenegar SMS send_kavenegar "${KAVENEGAR_API_KEY}" "${KAVENEGAR_SENDER}" "${to_kavenegar}" "${host} ${status_message} - ${name//_/ } - ${chart}" "${alarm} Severity: ${severity} Chart: ${chart} ${info}" SENT_KAVENEGAR=$? # ----------------------------------------------------------------------------- # send the telegram.org message # https://core.telegram.org/bots/api#formatting-options send_telegram "${TELEGRAM_BOT_TOKEN}" "${to_telegram}" "${host} ${status_message} - ${name//_/ } ${chart} ${alarm} ${info}" SENT_TELEGRAM=$? # ----------------------------------------------------------------------------- # send the kafka message send_kafka SENT_KAFKA=$? # ----------------------------------------------------------------------------- # send the pagerduty.com message send_pd "${to_pd}" SENT_PD=$? # ----------------------------------------------------------------------------- # send the fleep message send_fleep "${to_fleep}" SENT_FLEEP=$? # ----------------------------------------------------------------------------- # send the Prowl message send_prowl "${to_prowl}" SENT_PROWL=$? # ----------------------------------------------------------------------------- # send the irc message send_irc "${IRC_NICKNAME}" "${IRC_REALNAME}" "${to_irc}" "${IRC_NETWORK}" "${IRC_PORT}" "${host}" "${host} ${status_message} - ${name//_/ } - ${chart} ----- ${alarm} Severity: ${severity} Chart: ${chart} ${info}" SENT_IRC=$? # ----------------------------------------------------------------------------- # send the SMS message with smstools3 send_sms "${to_sms}" SENT_SMS=$? # ----------------------------------------------------------------------------- # send the custom message send_custom() { # is it enabled? [ "${SEND_CUSTOM}" != "YES" ] && return 1 # do we have any sender? [ -z "${1}" ] && return 1 # call the custom_sender function custom_sender "${@}" } send_custom "${to_custom}" SENT_CUSTOM=$? # ----------------------------------------------------------------------------- # send hipchat message send_hipchat "${HIPCHAT_AUTH_TOKEN}" "${to_hipchat}" " \ ${host} ${status_message}
\ ${alarm} ${info_html}
\ ${chart}
\ ${date}${raised_for_html}
\ View netdata dashboard \ (source of alarm ${src}) \ " SENT_HIPCHAT=$? # ----------------------------------------------------------------------------- # send the Amazon SNS message send_awssns "${to_awssns}" SENT_AWSSNS=$? # ----------------------------------------------------------------------------- # send the Matrix message send_matrix "${MATRIX_HOMESERVER}" "${to_matrix}" SENT_MATRIX=$? # ----------------------------------------------------------------------------- # send the syslog message send_syslog "${to_syslog}" SENT_SYSLOG=$? # ----------------------------------------------------------------------------- # send the email IFS='' read -r -d '' email_plaintext_part </dev/null) [ -z "${date_w}" ] && date_w=$(date "${date_format}" 2>/dev/null) [ -z "${date_w}" ] && date_w=$(date --date=@${val} 2>/dev/null) [ -z "${date_w}" ] && date_w=$(date 2>/dev/null) elapsed=$((now - val)) duration4human ${elapsed} >/dev/null elapsed_txt="${REPLY}" WARN_ALARMS+="
${key}
${date_w}
Warning for ${elapsed_txt}
" done <<<"$total_warn_alarms," fi if [ -n "$total_crit_alarms" ]; then while read -d, -r pair; do IFS='=' read -r key val <<<"$pair" date_c=$(date --date=@${val} "${date_format}" 2>/dev/null) [ -z "${date_c}" ] && date_c=$(date "${date_format}" 2>/dev/null) [ -z "${date_c}" ] && date_c=$(date --date=@${val} 2>/dev/null) [ -z "${date_c}" ] && date_c=$(date 2>/dev/null) elapsed=$((now - val)) duration4human ${elapsed} >/dev/null elapsed_txt="${REPLY}" CRIT_ALARMS+="
${key}
${date_c}
Critical for ${elapsed_txt}
" done <<<"$total_crit_alarms," fi if (( total_warnings + total_critical > 15 )); then EXTRA_ALARMS_LIST_TEXT="(Showing latest 15 alerts)" fi if [ -n "$edit_command_line" ]; then IFS='=' read -r edit_command line s_host <<<"$edit_command_line" fi IFS='' read -r -d '' email_html_part <
Netdata Logo
Notification
${summary}
on ${host}
${value_string}
Details: ${info}
Alert: ${name}
Chart: ${chart}
${rich_status_raised_for}

On ${date}
By: ${host}
Global time: ${date_utc}

Classification: ${classification}
Role: ${roles}
Want to know more about this alert?
Join the troubleshooting discussion for this alert on our community forums.
Need to configure this alert?
Edit this alert's configuration file by logging into $s_host and running the following command:
${edit_command}

The alarm to edit is at line ${line}
The node has ${total_warnings} warning and ${total_critical} critical additional active alert(s)
${EXTRA_ALARMS_LIST_TEXT}
${CRIT_ALARMS} ${WARN_ALARMS}
© Netdata $(date +'%Y') - The real-time performance and health monitoring
EOF send_email <