author     Daniel Baumann <daniel.baumann@progress-linux.org>  2019-08-04 08:57:13 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2019-08-04 08:57:13 +0000
commit     cbf70980c060bde02906a8e9de2064459bacc93c (patch)
tree       5b9ade02e0ed32a4b33f5e8647092d0c02ea586d /tests
parent     Releasing debian version 1.16.0-1. (diff)
Merging upstream version 1.16.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tests')
-rw-r--r--  tests/Makefile.am          |   9
-rw-r--r--  tests/acls/acl.sh.in       | 119
-rw-r--r--  tests/acls/netdata.cfg     |  20
-rw-r--r--  tests/acls/netdata.ssl.cfg |  24
-rw-r--r--  tests/urls/request.sh.in   | 303
5 files changed, 475 insertions, 0 deletions
diff --git a/tests/Makefile.am b/tests/Makefile.am
index b0f65456e..92e6db0f3 100644
--- a/tests/Makefile.am
+++ b/tests/Makefile.am
@@ -5,6 +5,8 @@ MAINTAINERCLEANFILES = $(srcdir)/Makefile.in
 CLEANFILES = \
 	health_mgmtapi/health-cmdapi-test.sh \
+	acls/acl.sh \
+	urls/request.sh \
 	$(NULL)
 
 include $(top_srcdir)/build/subst.inc
@@ -22,10 +24,14 @@ dist_noinst_DATA = \
 	node.d/fronius.process.spec.js \
 	node.d/fronius.validation.spec.js \
 	health_mgmtapi/health-cmdapi-test.sh.in \
+	acls/acl.sh.in \
+	urls/request.sh.in \
 	$(NULL)
 
 dist_plugins_SCRIPTS = \
 	health_mgmtapi/health-cmdapi-test.sh \
+	acls/acl.sh \
+	urls/request.sh \
 	$(NULL)
 
 dist_noinst_SCRIPTS = \
diff --git a/tests/acls/acl.sh.in b/tests/acls/acl.sh.in
new file mode 100644
index 000000000..772d66408
--- /dev/null
+++ b/tests/acls/acl.sh.in
@@ -0,0 +1,119 @@
+#!/bin/bash -x
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+BASICURL="http://127.0.0.1"
+BASICURLS="https://127.0.0.1"
+
+NETDATA_VARLIB_DIR="/var/lib/netdata"
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[0;33m'
+
+# Replace the previous ACL line with a new one
+# and store the result in a new file
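+# e.g. change_file "$CONF" " bind to = *" "$CWD" "netdata.conf.test0"
+# ($3 is unused here; it is kept for symmetry with change_ssl_file)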
+change_file(){
+ sed "s/$1/$2/g" netdata.cfg > "$4"
+}
+
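+# Rewrite netdata.ssl.cfg: fill in "ssl key = $3/key.pem" and
+# "ssl certificate = $3/cert.pem", replace the bind line $1 with $2,
+# and write the result to $4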
+change_ssl_file(){
+ KEYROW="ssl key = $3/key.pem"
+ CERTROW="ssl certificate = $3/cert.pem"
+ sed "s@ssl key =@$KEYROW@g" netdata.ssl.cfg > tmp
+ sed "s@ssl certificate =@$CERTROW@g" tmp > tmp2
+ sed "s/$1/$2/g" tmp2 > "$4"
+}
+
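+# run_acl_tests TOKEN BASE_URL EXPECTED_200S EXPECTED_301S
+# Issues five requests (index, netdata.conf, badge, info and the health
+# management API) and checks how many answered "200 OK" and how many were
+# redirected (301) in the curl logs.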
+run_acl_tests() {
+ # Give netdata time to start properly
+ sleep 2
+
+ curl -v -k --tls-max 1.2 --create-dirs -o index.html "$2" 2> log_index.txt
+ curl -v -k --tls-max 1.2 --create-dirs -o netdata.txt "$2/netdata.conf" 2> log_nc.txt
+ curl -v -k --tls-max 1.2 --create-dirs -o badge.csv "$2/api/v1/badge.svg?chart=cpu.cpu0_interrupts" 2> log_badge.txt
+ curl -v -k --tls-max 1.2 --create-dirs -o info.txt "$2/api/v1/info" 2> log_info.txt
+ curl -H "X-Auth-Token: $1" -v -k --tls-max 1.2 --create-dirs -o health.csv "$2/api/v1/manage/health?cmd=LIST" 2> log_health.txt
+
+ TOT=$(grep -c "HTTP/1.1 301" log_*.txt | cut -d: -f2 | grep -c 1)
+ if [ "$TOT" -ne "$4" ]; then
+ echo -e "${RED}Got the wrong number of redirects ($TOT) with SSL enabled; expected $4."
+ rm log_* netdata.conf.test* netdata.txt health.csv index.html badge.csv tmp* key.pem cert.pem info.txt
+ killall netdata
+ exit 1
+ elif [ "$TOT" -eq "$4" ] && [ "$4" -ne "0" ]; then
+ echo -e "${YELLOW}Got the expected number of redirects ($4) with SSL enabled and access over HTTP."
+ return
+ fi
+
+ TOT=$(grep -c "HTTP/1.1 200 OK" log_* | cut -d: -f2 | grep -c 1)
+ if [ "$TOT" -ne "$3" ]; then
+ echo -e "${RED}Got the wrong number of \"200 OK\" responses; expected $3."
+ killall netdata
+ rm log_* netdata.conf.test* netdata.txt health.csv index.html badge.csv tmp* key.pem cert.pem info.txt
+ exit 1
+ fi
+
+ echo -e "${GREEN}ACLs were applied correctly"
+}
+
+CONF=$(grep "bind" netdata.cfg)
+MUSER=$(grep run netdata.cfg | cut -d= -f2 | sed 's/^[ \t]*//')
+
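+# Generate a throw-away self-signed certificate for the SSL test runs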
+openssl req -new -newkey rsa:2048 -days 365 -nodes -x509 -sha512 -subj "/C=US/ST=Denied/L=Somewhere/O=Dis/CN=www.example.com" -keyout key.pem -out cert.pem
+chown "$MUSER" key.pem cert.pem
+CWD=$(pwd)
+
+if [ -f "${NETDATA_VARLIB_DIR}/netdata.api.key" ]; then
+ read -r TOKEN < "${NETDATA_VARLIB_DIR}/netdata.api.key"
+else
+ TOKEN="NULL"
+fi
+
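+# Test 0: plain HTTP on the default port; all five requests must answer 200 OK.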
+change_file "$CONF" " bind to = *" "$CWD" "netdata.conf.test0"
+netdata -c "netdata.conf.test0"
+run_acl_tests "$TOKEN" "$BASICURL:19999" 5 0
+killall netdata
+
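+# Test 1: SSL certificate loaded; port 20001 is SSL=optional, so plain HTTP
+# is accepted there, while HTTP requests to the other ports are redirected.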
+change_ssl_file "$CONF" " bind to = *=dashboard|registry|badges|management|netdata.conf *:20000=dashboard|registry|badges|management *:20001=dashboard|registry|netdata.conf^SSL=optional *:20002=dashboard|registry" "$CWD" "netdata.conf.test1"
+netdata -c "netdata.conf.test1"
+run_acl_tests "$TOKEN" "$BASICURL:19999" 5 5
+run_acl_tests "$TOKEN" "$BASICURLS:19999" 5 0
+
+run_acl_tests "$TOKEN" "$BASICURL:20000" 4 5
+run_acl_tests "$TOKEN" "$BASICURLS:20000" 4 0
+
+run_acl_tests "$TOKEN" "$BASICURL:20001" 4 0
+run_acl_tests "$TOKEN" "$BASICURLS:20001" 4 0
+
+run_acl_tests "$TOKEN" "$BASICURL:20002" 3 5
+run_acl_tests "$TOKEN" "$BASICURLS:20002" 3 0
+killall netdata
+
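+# Test 2: same ACLs, but port 20001 forces SSL, so HTTP is redirected there too.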
+change_ssl_file "$CONF" " bind to = *=dashboard|registry|badges|management|netdata.conf *:20000=dashboard|registry|badges|management *:20001=dashboard|registry|netdata.conf^SSL=force *:20002=dashboard|registry" "$CWD" "netdata.conf.test2"
+netdata -c "netdata.conf.test2"
+run_acl_tests "$TOKEN" "$BASICURL:19999" 5 5
+run_acl_tests "$TOKEN" "$BASICURLS:19999" 5 0
+
+run_acl_tests "$TOKEN" "$BASICURL:20000" 4 5
+run_acl_tests "$TOKEN" "$BASICURLS:20000" 4 0
+
+run_acl_tests "$TOKEN" "$BASICURL:20001" 4 5
+run_acl_tests "$TOKEN" "$BASICURLS:20001" 4 0
+
+run_acl_tests "$TOKEN" "$BASICURL:20002" 3 5
+run_acl_tests "$TOKEN" "$BASICURLS:20002" 3 0
+killall netdata
+
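+# Test 3: SSL optional on port 20000 and forced on port 20001.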
+change_ssl_file "$CONF" " bind to = *=dashboard|registry|badges|management|netdata.conf *:20000=dashboard|registry|badges|management^SSL=optional *:20001=dashboard|registry|netdata.conf^SSL=force" "$CWD" "netdata.conf.test3"
+netdata -c "netdata.conf.test3"
+run_acl_tests "$TOKEN" "$BASICURL:19999" 5 5
+run_acl_tests "$TOKEN" "$BASICURLS:19999" 5 0
+
+run_acl_tests "$TOKEN" "$BASICURL:20000" 4 0
+run_acl_tests "$TOKEN" "$BASICURLS:20000" 4 0
+
+run_acl_tests "$TOKEN" "$BASICURL:20001" 4 5
+run_acl_tests "$TOKEN" "$BASICURLS:20001" 4 0
+killall netdata
+
+rm log_* netdata.conf.test* netdata.txt health.csv index.html badge.csv tmp* key.pem cert.pem info.txt
+echo "All the tests were successful"
diff --git a/tests/acls/netdata.cfg b/tests/acls/netdata.cfg
new file mode 100644
index 000000000..1dcb4a5c6
--- /dev/null
+++ b/tests/acls/netdata.cfg
@@ -0,0 +1,20 @@
+# netdata configuration
+#
+# You can download the latest version of this file, using:
+#
+# wget -O /etc/netdata/netdata.conf http://localhost:19999/netdata.conf
+# or
+# curl -o /etc/netdata/netdata.conf http://localhost:19999/netdata.conf
+#
+# You can uncomment and change any of the options below.
+# The value shown in the commented settings is the default value.
+#
+
+[global]
+ run as user = netdata
+
+ # the default database size - 1 hour
+ history = 3600
+
+ # by default do not expose the netdata port
+ bind to = localhost
diff --git a/tests/acls/netdata.ssl.cfg b/tests/acls/netdata.ssl.cfg
new file mode 100644
index 000000000..28e0030d5
--- /dev/null
+++ b/tests/acls/netdata.ssl.cfg
@@ -0,0 +1,24 @@
+# netdata configuration
+#
+# You can download the latest version of this file, using:
+#
+# wget -O /etc/netdata/netdata.conf http://localhost:19999/netdata.conf
+# or
+# curl -o /etc/netdata/netdata.conf http://localhost:19999/netdata.conf
+#
+# You can uncomment and change any of the options below.
+# The value shown in the commented settings is the default value.
+#
+
+[global]
+ run as user = netdata
+
+ # the default database size - 1 hour
+ history = 3600
+
+ # by default do not expose the netdata port
+ bind to = localhost
+
+[web]
+ ssl key =
+ ssl certificate =
diff --git a/tests/urls/request.sh.in b/tests/urls/request.sh.in
new file mode 100644
index 000000000..fac00bc4e
--- /dev/null
+++ b/tests/urls/request.sh.in
@@ -0,0 +1,303 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+################################################################################################
+#### ####
+#### GLOBAL VARIABLES ####
+#### ####
+################################################################################################
+
+# The current time
+CT=$(date +'%s')
+
+# The time 30 seconds before now
+PT=$((CT - 30))
+
+# The output directories where results and errors will be stored
+OUTDIR="tests"
+OUTEDIR="encoded_tests"
+OUTOPTDIR="options"
+ERRDIR="etests"
+
+################################################################################################
+#### ####
+#### FUNCTIONS ####
+#### ####
+################################################################################################
+
+# Print an error message and exit
+netdata_print_error(){
+ echo "Closing due to error \"$1\" (code \"$2\")"
+ exit 1
+}
+
+# Print the header message of the function
+netdata_print_header() {
+ echo "$1"
+}
+
+# Create the main directory where the results will be stored
+netdata_create_directory() {
+ netdata_print_header "Creating directory $1"
+ if [ ! -d "$1" ]; then
+ mkdir "$1"
+ TEST=$?
+ if [ $TEST -ne 0 ]; then
+ netdata_print_error "Cannot create directory $1" "$TEST"
+ fi
+ else
+ echo "Working with directory $1"
+ fi
+}
+
+# Check that the download succeeded (a "200 OK" in the curl log)
+netdata_test_download(){
+ grep "HTTP/1.1 200 OK" "$1" 2>/dev/null 1>/dev/null
+ TEST=$?
+ if [ $TEST -ne 0 ]; then
+ netdata_print_error "Cannot download the page $2" "$TEST"
+ exit 1
+ fi
+}
+
+# Check that the request was rejected (no "200 OK" in the curl log)
+netdata_error_test(){
+ grep "HTTP/1.1 200 OK" "$1" 2>/dev/null 1>/dev/null
+ TEST=$?
+ if [ $TEST -eq 0 ]; then
+ netdata_print_error "The page $2 did not answer with an error" "$TEST"
+ exit 1
+ fi
+}
+
+
+# Download information from Netdata
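+# $1 = base URL, $2 = endpoint, $3 = basename for the .out/.err files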
+netdata_download_various() {
+ netdata_print_header "Getting $2"
+ curl -v -k --create-dirs -o "$OUTDIR/$3.out" "$1/$2" 2> "$OUTDIR/$3.err"
+ netdata_test_download "$OUTDIR/$3.err" "$1/$2"
+}
+
+netdata_download_various_with_options() {
+ netdata_print_header "Getting options for $2"
+ curl -X OPTIONS -v -k --create-dirs -o "$OUTOPTDIR/$3.out" "$1/$2" 2> "$OUTOPTDIR/$3.err"
+ netdata_test_download "$OUTOPTDIR/$3.err" "$1/$2"
+}
+
+# Send a request that is expected to be rejected by Netdata
+netdata_wrong_request_various() {
+ netdata_print_header "Getting $2"
+ curl -v -k --create-dirs -o "$ERRDIR/$3.out" "$1/$2" 2> "$ERRDIR/$3.err"
+ netdata_error_test "$ERRDIR/$3.err" "$1/$2"
+}
+
+# Download charts from Netdata
+netdata_download_charts() {
+ curl -v -k --create-dirs -o "$OUTDIR/charts.out" "$1/$2" 2> "$OUTDIR/charts.err"
+ netdata_test_download "$OUTDIR/charts.err" "$1/$2"
+
+ # Extract the chart ids from the JSON answer (TODO: rewrite this)
+ grep -w "id" "$OUTDIR/charts.out" | cut -d: -f2 | grep "\"," | sed s/,//g | sort
+}
+
+# Download a chart with every grouping method, output format and extra option
+netdata_download_chart() {
+ SEPARATOR="&"
+ EQUAL="="
+ OUTD=$OUTDIR
+ ENCODED=" "
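+ # Two passes: first with literal '&'/'=' separators, then with their
+ # percent-encoded forms (%26/%3D) to exercise the URL decoder.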
+ for I in $(seq 0 1); do
+ if [ "$I" -eq "1" ] ; then
+ SEPARATOR="%26"
+ EQUAL="%3D"
+ OUTD=$OUTEDIR
+ ENCODED="encoded"
+ fi
+
+ NAME=${3//\"/}
+ netdata_print_header "Getting data for $NAME using $4 $ENCODED"
+
+ LDIR=$OUTD"/"$4
+
+ LURL="$1/$2$EQUAL$NAME"
+
+ NAME=$NAME"_$4"
+
+ curl -v -k --create-dirs -o "$LDIR/$NAME.out" "$LURL" 2> "$LDIR/$NAME.err"
+ netdata_test_download "$LDIR/$NAME.err" "$LURL"
+
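+ # points, before and after are appended one at a time; LURL accumulates,
+ # so the last request carries all three parameters.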
+ UFILES=( "points" "before" "after" )
+ COUNTER=0
+ for OPT in "points=100" "before=$PT" "after=$CT" ;
+ do
+ LURL="$LURL$SEPARATOR$OPT"
+ LFILE=$NAME"_${UFILES[$COUNTER]}";
+
+ curl -v -k --create-dirs -o "$LDIR/$LFILE.out" "$LURL" 2> "$LDIR/$LFILE.err"
+ netdata_test_download "$LDIR/$LFILE.err" "$LURL"
+
+ COUNTER=$((COUNTER + 1))
+ done
+
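+ # For every grouping method, also request each supported output format.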
+ LURL="$LURL&group$EQUAL"
+ for OPT in "min" "max" "sum" "median" "stddev" "cv" "ses" "des" "incremental_sum" "average";
+ do
+ TURL=$LURL$OPT
+ TFILE=$NAME"_$OPT";
+ curl -v -k --create-dirs -o "$LDIR/$TFILE.out" "$TURL" 2> "$LDIR/$TFILE.err"
+ netdata_test_download "$LDIR/$TFILE.err" "$TURL"
+ for MORE in "jsonp" "json" "ssv" "csv" "datatable" "datasource" "tsv" "ssvcomma" "html" "array";
+ do
+ TURL=$TURL"&format="$MORE
+ TFILE=$NAME"_$OPT""_$MORE";
+ curl -v -k --create-dirs -o "$LDIR/$TFILE.out" "$TURL" 2> "$LDIR/$TFILE.err"
+ netdata_test_download "$LDIR/$TFILE.err" "$TURL"
+ done
+ done
+
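+ # The trailing "group=" is completed by the last grouping method ($OPT);
+ # the remaining options are appended to the same accumulated URL.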
+ LURL="$LURL$OPT&gtime=60"
+ NFILE=$NAME"_gtime"
+ curl -v -k --create-dirs -o "$LDIR/$NFILE.out" "$LURL" 2> "$LDIR/$NFILE.err"
+ netdata_test_download "$LDIR/$NFILE.err" "$LURL"
+
+ LURL="$LURL&options=percentage"
+ NFILE=$NAME"_percentage"
+ curl -v -k --create-dirs -o "$LDIR/$NFILE.out" "$LURL" 2> "$LDIR/$NFILE.err"
+ netdata_test_download "$LDIR/$NFILE.err" "$LURL"
+
+ LURL="$LURL&dimensions=system%7Cnice"
+ NFILE=$NAME"_dimension"
+ curl -v -k --create-dirs -o "$LDIR/$NFILE.out" "$LURL" 2> "$LDIR/$NFILE.err"
+ netdata_test_download "$LDIR/$NFILE.err" "$LURL"
+
+ LURL="$LURL&label=testing"
+ NFILE=$NAME"_label"
+ curl -v -k --create-dirs -o "$LDIR/$NFILE.out" "$LURL" 2> "$LDIR/$NFILE.err"
+ netdata_test_download "$LDIR/$NFILE.err" "$LURL"
+ done
+}
+
+# Download information from Netdata
+netdata_download_allmetrics() {
+ netdata_print_header "Getting all metrics"
+ LURL="$1/api/v1/allmetrics?format="
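+ # Request every allmetrics format, toggling the boolean options both ways.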
+ for FMT in "shell" "prometheus" "prometheus_all_hosts" "json" ;
+ do
+ TURL=$LURL$FMT
+ for OPT in "yes" "no";
+ do
+ if [ "$FMT" == "prometheus" ]; then
+ TURL="$TURL&help=$OPT&types=$OPT&timestamps=$OPT"
+ fi
+ TURL="$TURL&names=$OPT&oldunits=$OPT&hideunits=$OPT&prefix=ND"
+
+ NAME="allmetrics_$FMT"
+ echo "$OUTDIR/$2/$NAME.out"
+ curl -v -k --create-dirs -o "$OUTDIR/$2/$NAME.out" "$TURL" 2> "$OUTDIR/$2/$NAME.err"
+ netdata_test_download "$OUTDIR/$2/$NAME.err" "$TURL"
+ done
+ done
+}
+
+
+################################################################################################
+#### ####
+#### MAIN ROUTINE ####
+#### ####
+################################################################################################
+MURL="http://127.0.0.1:19999"
+
+netdata_create_directory $OUTDIR
+netdata_create_directory $OUTEDIR
+netdata_create_directory $OUTOPTDIR
+netdata_create_directory $ERRDIR
+
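+# Mirror the dashboard first; a failure here means the agent is unreachable.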
+wget --execute="robots = off" --mirror --convert-links --no-parent http://127.0.0.1:19999
+TEST=$?
+if [ $TEST -ne 0 ]; then
+ echo "Cannot connect to Netdata"
+ exit 1
+fi
+
+netdata_download_various $MURL "netdata.conf" "netdata.conf"
+
+netdata_download_various_with_options $MURL "netdata.conf" "netdata.conf"
+
+netdata_wrong_request_various $MURL "api/v15/info?this%20could%20not%20be%20here" "err_version"
+
+netdata_wrong_request_various $MURL "api/v1/\(*@&$\!$%%5E\)\!$*%&\)\!$*%%5E*\!%5E%\!%5E$%\!%5E%\(\!*%5E*%5E%\(*@&$%5E%\(\!%5E#*&\!^#$*&\!^%\)@\($%^\)\!*&^\(\!*&^#$&#$\)\!$%^\)\!$*%&\)#$\!^#*$^\!\(*#^#\)\!%^\!\)$*%&\!\(*&$\!^#$*&^\!*#^$\!*^\)%\(\!*&$%\)\(\!&#$\!^*#&$^\!*^%\)\!$%\)\!\(&#$\!^#*&^$" "err_version2"
+
+netdata_download_various $MURL "api/v1/info" "info"
+netdata_download_various_with_options $MURL "api/v1/info" "info"
+netdata_download_various $MURL "api/v1/info?this%20could%20not%20be%20here" "err_info"
+
+netdata_print_header "Getting all the netdata charts"
+CHARTS=$( netdata_download_charts "http://127.0.0.1:19999" "api/v1/charts" )
+WCHARTS=$( netdata_download_charts "http://127.0.0.1:19999" "api/v1/charts?this%20could%20not%20be%20here" )
+WCHARTS2=$( netdata_download_charts "http://127.0.0.1:19999" "api/v1/charts%3fthis%20could%20not%20be%20here" )
+
+if [ "$(echo "$CHARTS" | wc -l)" -ne "$(echo "$WCHARTS" | wc -l)" ]; then
+ echo "The number of charts does not match when the query string is not encoded.";
+ exit 2;
+elif [ "$(echo "$CHARTS" | wc -l)" -ne "$(echo "$WCHARTS2" | wc -l)" ]; then
+ echo "The number of charts does not match when everything is encoded.";
+ exit 3;
+fi
+
+netdata_wrong_request_various $MURL "api/v1/chart" "err_chart_without_chart"
+netdata_wrong_request_various $MURL "api/v1/chart?_=234231424242" "err_chart_arg"
+
+netdata_download_various $MURL "api/v1/chart?chart=cpu.cpu0_interrupts&_=234231424242" "chart_cpu_with_more_args"
+netdata_download_various_with_options $MURL "api/v1/chart?chart=cpu.cpu0_interrupts&_=234231424242" "chart_cpu_with_more_args"
+
+netdata_download_various $MURL "api/v1/chart%3Fchart=cpu.cpu0_interrupts&_=234231424242" "chart_cpu_with_more_args_encoded"
+netdata_download_various_with_options $MURL "api/v1/chart%3Fchart=cpu.cpu0_interrupts&_=234231424242" "chart_cpu_with_more_args_encoded"
+netdata_download_various $MURL "api/v1/chart%3Fchart=cpu.cpu0_interrupts%26_=234231424242" "chart_cpu_with_more_args_encoded2"
+netdata_download_various $MURL "api/v1/chart%3Fchart%3Dcpu.cpu0_interrupts%26_%3D234231424242" "chart_cpu_with_more_args_encoded3"
+
+netdata_create_directory "$OUTDIR/chart"
+for I in $CHARTS ; do
+ NAME=${I//\"/}
+ netdata_download_various $MURL "api/v1/chart?chart=$NAME" "chart/$NAME"
+done
+
+netdata_wrong_request_various $MURL "api/v1/alarm_variables" "err_alarm_variables_without_chart"
+netdata_wrong_request_various $MURL "api/v1/alarm_variables?_=234231424242" "err_alarm_variables_arg"
+netdata_download_various $MURL "api/v1/alarm_variables?chart=cpu.cpu0_interrupts&_=234231424242" "alarm_cpu_with_more_args"
+
+netdata_create_directory "$OUTDIR/alarm_variables"
+for I in $CHARTS ; do
+ NAME=${I//\"/}
+ netdata_download_various $MURL "api/v1/alarm_variables?chart=$NAME" "alarm_variables/$NAME"
+done
+
+netdata_create_directory "$OUTDIR/badge"
+netdata_create_directory "$OUTEDIR/badge"
+for I in $CHARTS ; do
+ netdata_download_chart $MURL "api/v1/badge.svg?chart" "$I" "badge"
+done
+
+netdata_create_directory "$OUTDIR/allmetrics"
+netdata_download_allmetrics $MURL "allmetrics"
+
+netdata_download_various $MURL "api/v1/alarms?all" "alarms_all"
+netdata_download_various $MURL "api/v1/alarms?active" "alarms_active"
+netdata_download_various $MURL "api/v1/alarms" "alarms_nothing"
+
+netdata_download_various $MURL "api/v1/alarm_log?after" "alarm_without"
+netdata_download_various $MURL "api/v1/alarm_log" "alarm_nothing"
+netdata_download_various $MURL "api/v1/alarm_log?after&_=$PT" "alarm_log"
+
+netdata_create_directory "$OUTDIR/data"
+netdata_create_directory "$OUTEDIR/data"
+for I in $CHARTS ; do
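+ # Note: the break below means only the first chart is exercised here.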
+ netdata_download_chart $MURL "api/v1/data?chart" "$I" "data"
+ break;
+done
+
+#http://arch-esxi:19999/api/v1/(*@&$!$%%5E)!$*%&)!$*%%5E*!%5E%!%5E$%!%5E%(!*%5E*%5E%(*@&$%5E%(!%5E#*&!^#$*&!^%)@($%^)!*&^(!*&^#$&#$)!$%^)!$*%&)#$!^#*$^!(*#^#)!%^!)$*%&!(*&$!^#$*&^!*#^$!*^)%(!*&$%)(!&#$!^*#&$^!*^%)!$%)!(&#$!^#*&^$
+
+WHITE='\033[0;37m'
+echo -e "${WHITE}All the URLs answered with 200 OK!"
+
+exit 0