author      Sven Nierlein <sven@nierlein.de>        2022-12-22 12:51:18 +0100
committer   GitHub <noreply@github.com>             2022-12-22 12:51:18 +0100
commit      765b29f09bd3bc2a938260caa5f263343aafadb7
tree        5a93e4d22b00e8ad34a71ca2461701701e3cdb26
parent      763862a61cf5a7ba1a10f607022aac2434c79f57
check_curl: fix checking large bodies (#1823)
check_curl fails on large pages:
HTTP CRITICAL - Invalid HTTP response received from host on port 5080: cURL returned 23 - Failure writing output to destination
This happens, for example, when running check_curl against the test from #1822.

I guess the idea is to double the buffer size each time it is too small. But the code multiplies the buffer size by itself (and by two), i.e. it squares the size on every pass; that works for the first 2-3 rounds, after which the requested size becomes absurdly large and the allocation fails.
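To see why the old expression blows up so quickly, here is a minimal standalone sketch (not part of the patch; the 1024-byte starting size is a made-up example, not check_curl's actual default) that prints the sizes produced by the old and the new growth formulas side by side:

#include <stdio.h>

int main (void)
{
  size_t buggy = 1024;   /* old code: bufsize *= bufsize * 2, i.e. the size gets squared */
  size_t fixed = 1024;   /* patched code: bufsize = bufsize * 2, plain doubling */

  for (int i = 1; i <= 4; i++) {
    buggy *= buggy * 2;
    fixed = fixed * 2;
    /* round 1: buggy is already 2 MiB, round 2: roughly 8 TiB, round 3: the
       multiplication wraps around a 64-bit size_t, so realloc has no chance */
    printf ("round %d: buggy=%zu fixed=%zu\n", i, buggy, fixed);
  }
  return 0;
}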
plugins/check_curl.c | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/plugins/check_curl.c b/plugins/check_curl.c
index 2ad373c0..55de22fd 100644
--- a/plugins/check_curl.c
+++ b/plugins/check_curl.c
@@ -2024,9 +2024,12 @@ curlhelp_buffer_write_callback (void *buffer, size_t size, size_t nmemb, void *stream)
   curlhelp_write_curlbuf *buf = (curlhelp_write_curlbuf *)stream;
 
   while (buf->bufsize < buf->buflen + size * nmemb + 1) {
-    buf->bufsize *= buf->bufsize * 2;
+    buf->bufsize = buf->bufsize * 2;
     buf->buf = (char *)realloc (buf->buf, buf->bufsize);
-    if (buf->buf == NULL) return -1;
+    if (buf->buf == NULL) {
+      fprintf(stderr, "malloc failed (%d) %s\n", errno, strerror(errno));
+      return -1;
+    }
   }
 
   memcpy (buf->buf + buf->buflen, buffer, size * nmemb);
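A write callback like the one above is registered with libcurl via CURLOPT_WRITEFUNCTION. libcurl treats any return value other than size * nmemb as a write failure and aborts the transfer with CURLE_WRITE_ERROR, which is error code 23, exactly the "cURL returned 23 - Failure writing output to destination" shown in the report. The following is a self-contained sketch of that wiring under stated assumptions: the struct, variable names, initial buffer size, and URL are illustrative and not copied from check_curl.c.

#include <curl/curl.h>
#include <stdlib.h>
#include <string.h>

typedef struct {
  char *buf;
  size_t buflen;   /* bytes currently stored */
  size_t bufsize;  /* bytes allocated */
} membuf;

static size_t write_cb (void *ptr, size_t size, size_t nmemb, void *stream)
{
  membuf *buf = (membuf *)stream;

  /* Double the allocation until the incoming chunk (plus a trailing NUL) fits. */
  while (buf->bufsize < buf->buflen + size * nmemb + 1) {
    buf->bufsize = buf->bufsize * 2;
    buf->buf = (char *)realloc (buf->buf, buf->bufsize);
    if (buf->buf == NULL)
      return 0;  /* anything != size * nmemb makes libcurl abort with CURLE_WRITE_ERROR (23) */
  }

  memcpy (buf->buf + buf->buflen, ptr, size * nmemb);
  buf->buflen += size * nmemb;
  buf->buf[buf->buflen] = '\0';

  return size * nmemb;  /* success: every byte was consumed */
}

int main (void)
{
  membuf buf = { malloc (1024), 0, 1024 };  /* illustrative starting size */
  CURL *curl = curl_easy_init ();

  curl_easy_setopt (curl, CURLOPT_URL, "http://localhost:5080/");  /* port taken from the report */
  curl_easy_setopt (curl, CURLOPT_WRITEFUNCTION, write_cb);
  curl_easy_setopt (curl, CURLOPT_WRITEDATA, &buf);

  CURLcode res = curl_easy_perform (curl);  /* returns CURLE_WRITE_ERROR (23) if write_cb bails out */

  curl_easy_cleanup (curl);
  free (buf.buf);
  return (int)res;
}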