Repository
Munin (2.0)
Last change
2020-08-17
Graph Categories
network
Family
auto
Capabilities
autoconf
Language
Shell
License
GPL-2.0-only
Authors
Axel Huebl

http_loadtime

Name

http_loadtime - Plugin to graph the HTTP response times of specific pages

Configuration

The following environment variables are used by this plugin:

target - comma-separated list of URLs to fetch (default: "http://localhost/")
requisites - also download page requisites such as images and stylesheets (default: "false")
warning, critical - optional load time thresholds in seconds
example:
  [http_loadtime]
  env.target http://localhost.de,http://localhost.de/some-site.html
  env.requisites true
  env.warning 5
  env.critical 30

Do not enable the download of page requisites (env.requisites) for HTTPS
sites, since wget can take an extremely long time to fetch them on big sites.
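
To verify the setup, the plugin can be run manually with Munin's munin-run
tool (this assumes the plugin is installed and linked as "http_loadtime";
depending on the installation, munin-run may need to be run as root):

  munin-run http_loadtime config
  munin-run http_loadtime

The second call prints one line per configured URL in the form
<escaped_url>.value <seconds>, e.g. "http___localhost_.value 0.042"
(the numeric value is only illustrative).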

Author

Unknown authors; Axel Huebl (2013)

License

GPLv2

Magic Markers

#%# family=auto
#%# capabilities=autoconf

Source

#!@@GOODSH@@

: << =cut

=head1 NAME

http_loadtime - Plugin to graph the HTTP response times of specific pages

=head1 CONFIGURATION

The following environment variables are used by this plugin:

 target - comma-separated list of URLs to fetch (default: "http://localhost/")
 requisites - also download page requisites such as images and stylesheets (default: "false")
 warning, critical - optional load time thresholds in seconds
 example:
   [http_loadtime]
   env.target http://localhost.de,http://localhost.de/some-site.html
   env.requisites true
   env.warning 5
   env.critical 30

 Do not enable the download of page requisites (env.requisites) for HTTPS
 sites, since wget can take an extremely long time to fetch them on big sites.

=head1 AUTHOR

Unknown authors
(2013) Axel Huebl

=head1 LICENSE

GPLv2

=head1 MAGIC MARKERS

 #%# family=auto
 #%# capabilities=autoconf

=cut

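# plugin.sh ships with Munin and provides shared shell helpers,
# e.g. print_thresholds used in the config section below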
. "$MUNIN_LIBDIR/plugins/plugin.sh"

target=${target:-"http://localhost/"}
requisites=${requisites:-"false"}

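# split the comma separated target list into one URL per line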
urls=$(echo "$target" | tr ',' '\n')


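# fetch a URL quietly, discarding the response body; any extra wget
# options (e.g. --spider, --page-requisites) are passed through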
request_url() {
    wget --user-agent "Munin - http_loadtime" --no-cache --quiet --output-document=/dev/null "$@" 2>/dev/null
}


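# derive a Munin field name from a URL by replacing ':', '/', '.' and '-'
# with underscores, e.g. "http://localhost/" becomes "http___localhost_"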
escapeUri() {
    echo "$1" | sed 's/[:/.-]/_/g'
}


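# autoconf: report whether the required tools are available and
# every configured URL responds to a wget --spider request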
if [ "$1" = "autoconf" ]; then
    result="yes"
    command -v tr   >/dev/null 2>&1 || result=1
    command -v wget >/dev/null 2>&1 || result=1
    if [ "$result" != "yes" ]; then
       echo "no (programs wget and tr required)"
       exit 0
    fi

    # check if urls respond
    #
    for uri in $urls
    do
        if ! request_url --spider "$uri"; then
            echo "no (Cannot run wget against \"$uri\")"
            exit 0
        fi
    done

    echo yes
    exit 0
fi


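# config: emit the graph definition plus one data series
# (label, info, optional thresholds) per configured URL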
if [ "$1" = "config" ]; then
    echo "graph_title HTTP loadtime of a page"
    echo "graph_args --base 1000 -l 0"
    echo "graph_vlabel Load time in seconds"
    echo "graph_category network"
    echo "graph_info This graph shows the load time in seconds"
    for uri in $urls
    do
        uri_short=$(echo "$uri" | cut -c 1-30)
        if [ "$uri_short" != "$uri" ]; then uri_short="${uri_short}..."; fi
        echo "$(escapeUri "$uri").label $uri_short"
        echo "$(escapeUri "$uri").info page load time"
        print_thresholds "$(escapeUri "$uri")"
    done
    exit 0
fi


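# fetch each URL (optionally including its page requisites) and report
# the elapsed wall-clock time; sub-second resolution relies on date's
# %N nanosecond format (a GNU extension)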
for uri in $urls
do
    start=$(date +%s.%N)
    if [ "$requisites" = "true" ]; then
        request_url --page-requisites "$uri"
    else
        request_url "$uri"
    fi
    loadtime=$(echo "$start" "$(date +%s.%N)" | awk '{ print($2 - $1); }')

    echo "$(escapeUri "$uri").value $loadtime"
done