Diffstat (limited to 'NPTest.pm')
-rw-r--r-- | NPTest.pm | 554
1 files changed, 554 insertions, 0 deletions
diff --git a/NPTest.pm b/NPTest.pm
new file mode 100644
index 00000000..f3c874b8
--- /dev/null
+++ b/NPTest.pm
@@ -0,0 +1,554 @@
package NPTest;

#
# Helper Functions for testing Nagios Plugins
#

require Exporter;
@ISA = qw(Exporter);
@EXPORT = qw(getTestParameter checkCmd skipMissingCmd);
@EXPORT_OK = qw(DetermineTestHarnessDirectory TestsFrom SetCacheFilename);

use strict;
use warnings;

use Cwd;
use File::Basename;

use IO::File;
use Data::Dumper;

use Test;

use vars qw($VERSION);
$VERSION = do { my @r = (q$Revision$ =~ /\d+/g); sprintf "%d."."%02d" x $#r, @r }; # must be all one line, for MakeMaker

=head1 NAME

NPTest - Simplify the testing of Nagios Plugins

=head1 DESCRIPTION

This module provides convenience functions to assist in the testing
of Nagios Plugins, making the testing code easier to read and write;
hopefully encouraging the development of a more complete test suite for
the Nagios Plugins. It is based on the patterns of testing seen in the
1.4.0 release, and continues to use the L<Test> module as the basis of
testing.

=head1 FUNCTIONS

This module defines three public functions, C<getTestParameter(...)>,
C<checkCmd(...)> and C<skipMissingCmd(...)>. These are exported by
default via the C<use NPTest;> statement.
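
For illustration, a test harness built on these functions might look
something like the following sketch (the plugin invocations, test count
and expected exit statuses are illustrative only, not taken from an
actual harness):

  use strict;
  use Test;
  use NPTest;

  BEGIN { plan tests => 2 }

  my $t;

  # check_dummy <n> is assumed here simply to exit with status <n>
  $t += checkCmd( "./check_dummy 0", 0 );
  $t += checkCmd( "./check_dummy 1", 1 );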

=over

=item C<getTestParameter(...)>

A flexible and user-overridable method of collecting, storing and
retrieving test parameters. This function allows the test harness
developer to interactively request test parameter information from the
user, when no means of obtaining the information automatically has
been successful. The user is given the option of accepting the
test harness developer's default value for the parameter, if a suggested
default is provided.

User-supplied responses are stored in an external (file-based)
cache. These values are retrieved on subsequent runs, relieving the
user of having to reconfirm previously entered responses. The user is able
to override the value of a parameter on any given run by setting the
associated environment variable. These environment-variable-based
overrides are not stored in the cache, allowing one-time and what-if
based tests on the command line without polluting the cache.

The option exists to store parameters in a scoped manner, allowing a
test harness to localise a parameter should the need arise. This
allows a parameter of the same name to exist in a test harness
specific scope, while not affecting the globally scoped parameter. The
scoping identifier is the name of the test harness sans the trailing
".t". All cache searches first look for a scoped parameter before
looking for the parameter at global scope. Thus for a test harness
called "check_disk.t" requesting the parameter "mountpoint_valid", the
cache is first searched for "check_disk"/"mountpoint_valid"; if this
fails, then a search is conducted for "mountpoint_valid".

To facilitate quick testing setup, it is possible to accept all the
developer-provided defaults by setting the environment variable
"NPTEST_ACCEPTDEFAULT" to "1" (or any other Perl truth value). Note
that such defaults are not stored in the cache, as there is currently
no mechanism to edit existing cache entries, save the use of a text
editor or removing the cache file completely.

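
For example, a harness might collect a mount point to test against as
follows (the parameter name, environment variable and default value
shown here are illustrative only):

  my $mountpoint_valid = getTestParameter(
      "mountpoint_valid",                       # parameter name
      "NPTEST_MOUNTPOINT_VALID",                # environment variable override
      "/",                                      # suggested default
      "The path to a valid mounted filesystem"  # brief description
      );
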
=item C<checkCmd(...)>

This function attempts to encompass the majority of test styles used
in testing Nagios Plugins. As each plug-in is a separate command, the
typical tests we wish to perform are against the exit status of the
command and the output (if any) it generated. Simplifying these tests
into a single function call makes the test harness easier to read and
maintain and allows additional functionality (such as debugging) to be
provided without additional effort on the part of the test harness
developer.

It is possible to enable debugging via the environment variable
C<NPTEST_DEBUG>. If this environment variable exists and its value in Perl's
boolean context evaluates to true, debugging is enabled.

The function prototype can be expressed as follows:

  Parameter 1 : command => DEFINED SCALAR(string)
  Parameter 2 : desiredExitStatus => ONE OF
                  SCALAR(integer)
                  ARRAYREF(integer)
                  HASHREF(integer,string)
                  UNDEFINED
  Parameter 3 : desiredOutput => SCALAR(string) OR UNDEFINED
  Parameter 4 : exceptions => HASH(integer,string) OR UNDEFINED
  Returns     : SCALAR(integer) as defined by Test::ok(...)

The function treats the first parameter C<$command> as a command line
to execute as part of the test; it is executed only once and its exit
status (C<$?E<gt>E<gt>8>) and output are captured.

At this point, if debugging is enabled, the command, its exit status and
output are displayed to the tester.

C<checkCmd(...)> allows the testing of either the exit status or the
generated output or both; testing neither will result in neither
the C<Test::ok(...)> nor C<Test::skip(...)> functions being called,
something you probably don't want. Note that each defined test
(C<$desiredExitStatus> and C<$desiredOutput>) results in an invocation
of either C<Test::ok(...)> or C<Test::skip(...)>, so remember this
when counting the number of tests to place in the C<Test::plan(...)>
call.

Many Nagios Plugins test network services, some of which may not be
present on all systems. To cater for this, C<checkCmd(...)> allows the
tester to define exceptions based on the command's exit status. These
exceptions are provided to skip tests if the test case developer
believes the service is not available. For example, if a site
does not have a POP3 server, the test harness could map the
appropriate exit status to a useful message for the person running the
tests, explaining why the test is being skipped.

Example:

my %exceptions = ( 2 =E<gt> "No POP Server present?" );

$t += checkCmd( "./check_pop I<some args>", 0, undef, %exceptions );

Thus, in the above example, an exit status of 2 does not result in a
failed test case (as the exit status is not the desired value of 0),
but a skipped test case with the message "No POP Server present?"
given as the reason.

Sometimes the exit status of a command should be tested against a set
of possible values, rather than a single value; this could especially
be the case in failure testing. C<checkCmd(...)> supports two methods
of testing against a set of desired exit status values, as the sketch
following this list illustrates.

=over

=item *

Firstly, if C<$desiredExitStatus> is a reference to an array of exit
statuses and the actual exit status of the command is present in the
array, it is used in the call to C<Test::ok(...)> when testing the
exit status.

=item *

Alternatively, if C<$desiredExitStatus> is a reference to a hash of
exit statuses (mapped to the strings "continue" or "skip"), similar
processing to the above occurs, with the side effect of determining
whether any generated output testing should proceed. Note: only the
string "skip" will result in generated output testing being skipped.

=back

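
For instance (a sketch only; the commands, exit statuses and expected
output used here are illustrative):

  # Accept either exit status 0 or 1 as a pass
  $t += checkCmd( "./check_procs -w 100000 -c 100000", [ 0, 1 ] );

  # Accept 0 or 2, but only test the output when the status was 0
  $t += checkCmd( "./check_pop mailhost",
                  { 0 => "continue", 2 => "skip" },
                  "POP OK" );
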
=item C<skipMissingCmd(...)>

If a command is missing and the test harness must C<Test::skip()> some
or all of the tests in a given test harness, this function provides a
simple iterator to issue an appropriate message the requested number
of times.

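
For example (a sketch only; the command name and test count are
illustrative):

  if ( -x "./check_snmp" )
  {
    $t += checkCmd( "./check_snmp -H localhost -o sysDescr.0", 0 );
  }
  else
  {
    $t += skipMissingCmd( "./check_snmp", 1 );
  }
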
=back

=head1 SEE ALSO

L<Test>

The rest of the code is also worth reading, as I have only commented on
the major public functions that test harness writers will use, not on
all the code present in this helper module.

=head1 AUTHOR

Copyright (c) 2005 Peter Bray. All rights reserved.

This package is free software and is provided "as is" without express
or implied warranty. It may be used, redistributed and/or modified
under the same terms as the Nagios Plugins release.

=cut

#
# Package Scope Variables
#

my( %CACHE ) = ();

# I'm not really sure whether to house a site-specific cache inside
# or outside of the extracted source / build tree - let's default to outside
my( $CACHEFILENAME ) = ( exists( $ENV{'NPTESTCACHE'} ) && $ENV{'NPTESTCACHE'} )
                         ? $ENV{'NPTESTCACHE'} : "/var/tmp/NPTest.cache"; # "../Cache.pdd";

#
# Testing Functions
#

sub checkCmd
{
  my( $command, $desiredExitStatus, $desiredOutput, %exceptions ) = @_;

  my $output = `${command}`;
  my $exitStatus = $? >> 8;

  $output = "" unless defined( $output );
  chomp( $output );

  if ( exists( $ENV{'NPTEST_DEBUG'} ) && $ENV{'NPTEST_DEBUG'} )
  {
    my( $pkg, $file, $line ) = caller(0);

    print "checkCmd: Called from line $line in $file\n";
    print "Testing : ${command}\n";
    print "Result : ${exitStatus} AND '${output}'\n";
  }

  my $testStatus;

  my $testOutput = "continue";

  if ( defined( $desiredExitStatus ) )
  {
    if ( ref $desiredExitStatus eq "ARRAY" )
    {
      if ( scalar( grep { $_ == $exitStatus } @{$desiredExitStatus} ) )
      {
        $desiredExitStatus = $exitStatus;
      }
      else
      {
        $desiredExitStatus = -1;
      }
    }
    elsif ( ref $desiredExitStatus eq "HASH" )
    {
      if ( exists( ${$desiredExitStatus}{$exitStatus} ) )
      {
        if ( defined( ${$desiredExitStatus}{$exitStatus} ) )
        {
          $testOutput = ${$desiredExitStatus}{$exitStatus};
        }
        $desiredExitStatus = $exitStatus;
      }
      else
      {
        $desiredExitStatus = -1;
      }
    }

    if ( %exceptions && exists( $exceptions{$exitStatus} ) )
    {
      $testStatus += skip( $exceptions{$exitStatus}, $exitStatus, $desiredExitStatus );
    }
    else
    {
      $testStatus += ok( $exitStatus, $desiredExitStatus );
    }
  }

  if ( defined( $desiredOutput ) )
  {
    if ( $testOutput ne "skip" )
    {
      $testStatus += ok( $output, $desiredOutput );
    }
    else
    {
      $testStatus += skip( "Skipping output test as requested", $output, $desiredOutput );
    }
  }

  return $testStatus;
}


sub skipMissingCmd
{
  my( $command, $count ) = @_;

  my $testStatus;

  for ( 1 .. $count )
  {
    $testStatus += skip( "Missing ${command} - tests skipped", 1 );
  }

  return $testStatus;
}

sub getTestParameter
{
  my( $param, $envvar, $default, $brief, $scoped ) = @_;

  # Apply default values for optional arguments
  $scoped = ( defined( $scoped ) && $scoped );

  my $testharness = basename( (caller(0))[1], ".t" ); # used for scoping

  if ( defined( $envvar ) && exists( $ENV{$envvar} ) && $ENV{$envvar} )
  {
    return $ENV{$envvar};
  }

  my $cachedValue = SearchCache( $param, $testharness );
  if ( defined( $cachedValue ) && $cachedValue )
  {
    return $cachedValue;
  }

  my $defaultValid = ( defined( $default ) && $default );
  my $autoAcceptDefault = ( exists( $ENV{'NPTEST_ACCEPTDEFAULT'} ) && $ENV{'NPTEST_ACCEPTDEFAULT'} );

  if ( $autoAcceptDefault && $defaultValid )
  {
    return $default;
  }

  my $userResponse = "";

  while ( $userResponse eq "" )
  {
    print STDERR "\n";
    print STDERR "Test Harness : $testharness\n";
    print STDERR "Test Parameter : $param\n";
    print STDERR "Environment Variable : $envvar\n";
    print STDERR "Brief Description : $brief\n";
    print STDERR "Enter value ", ($defaultValid ? "[${default}]" : "[]"), " => ";
    $userResponse = <STDIN>;
    $userResponse = "" if ! defined( $userResponse ); # Handle EOF
    chomp( $userResponse );
    if ( $defaultValid && $userResponse eq "" )
    {
      $userResponse = $default;
    }
  }

  print STDERR "\n";

  # store the user response in the cache (scoped to this harness if requested)
  SetCacheParameter( $param, ( $scoped ? $testharness : undef ), $userResponse );

  return $userResponse;
}

#
# Internal Cache Management Functions
#

sub SearchCache
{
  my( $param, $scope ) = @_;

  LoadCache();

  if ( exists( $CACHE{$scope} ) && exists( $CACHE{$scope}{$param} ) )
  {
    return $CACHE{$scope}{$param};
  }

  if ( exists( $CACHE{$param} ) )
  {
    return $CACHE{$param};
  }
}

sub SetCacheParameter
{
  my( $param, $scope, $value ) = @_;

  if ( defined( $scope ) )
  {
    $CACHE{$scope}{$param} = $value;
  }
  else
  {
    $CACHE{$param} = $value;
  }

  SaveCache();
}

sub LoadCache
{
  return if exists( $CACHE{'_cache_loaded_'} );

  if ( -f $CACHEFILENAME )
  {
    my( $fileHandle ) = new IO::File;

    if ( ! $fileHandle->open( "< ${CACHEFILENAME}" ) )
    {
      print STDERR "NPTest::LoadCache() : Problem opening ${CACHEFILENAME} : $!\n";
      return;
    }

    my( $fileContents ) = join( "\n", <$fileHandle> );

    $fileHandle->close();

    my( $contentsRef ) = eval $fileContents;
    %CACHE = %{$contentsRef};

  }

  $CACHE{'_cache_loaded_'} = 1;
}


sub SaveCache
{
  delete $CACHE{'_cache_loaded_'};

  my( $fileHandle ) = new IO::File;

  if ( ! $fileHandle->open( "> ${CACHEFILENAME}" ) )
  {
    print STDERR "NPTest::SaveCache() : Problem saving ${CACHEFILENAME} : $!\n";
    return;
  }

  my( $dataDumper ) = new Data::Dumper( [ \%CACHE ] );

  $dataDumper->Terse(1);

  print $fileHandle $dataDumper->Dump();

  $fileHandle->close();

  $CACHE{'_cache_loaded_'} = 1;
}

#
# (Questionable) Public Cache Management Functions
#

sub SetCacheFilename
{
  my( $filename ) = @_;

  # Unfortunately we can not validate the filename
  # in any meaningful way, as it may not yet exist
  $CACHEFILENAME = $filename;
}


#
# Test Harness Wrapper Functions
#

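
# A top-level driver (a test.pl, for example) might use these wrapper
# functions roughly as follows (a sketch only, not taken from the actual
# driver script):
#
#   use Test::Harness;
#   use NPTest qw(DetermineTestHarnessDirectory TestsFrom);
#
#   my $directory = DetermineTestHarnessDirectory()
#     or die "Unable to determine the test harness directory\n";
#   runtests( TestsFrom( $directory, 1 ) );
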
sub DetermineTestHarnessDirectory
{
  my( $userSupplied ) = @_;

  # User Supplied
  if ( defined( $userSupplied ) && $userSupplied )
  {
    if ( -d $userSupplied )
    {
      return $userSupplied;
    }
    else
    {
      return undef; # userSupplied is invalid -> FAIL
    }
  }

  # Simple Case : "t" is a subdirectory of the current directory
  if ( -d "./t" )
  {
    return "./t";
  }

  # To be honest I don't understand which case satisfies the
  # original code in test.pl : when $tstdir == `pwd` w.r.t.
  # $tstdir =~ s|^(.*)/([^/]+)/?$|$1/$2|; and if (-d "../../$2/t")
  # Assuming pwd is "/a/b/c/d/e" then we are testing for "/a/b/c/e/t"
  # if I understand the code correctly (a big assumption)

  # Simple Case : the current directory is "t"
  my $pwd = cwd();

  if ( $pwd =~ m|/t$| )
  {
    return $pwd;

    # The alternate that might work better is
    # chdir( ".." );
    # return "./t";
    # As the current test harnesses assume the application
    # to be tested is in the current directory (ie "./check_disk ....")
  }

  return undef;
}

sub TestsFrom
{
  my( $directory, $excludeIfAppMissing ) = @_;

  $excludeIfAppMissing = 0 unless defined( $excludeIfAppMissing );

  if ( ! opendir( DIR, $directory ) )
  {
    print STDERR "NPTest::TestsFrom() - Failed to open ${directory} : $!\n";
    return ();
  }

  my( @tests ) = ();

  my $filename;
  my $application;

  while ( $filename = readdir( DIR ) )
  {
    if ( $filename =~ m/\.t$/ )
    {
      if ( $excludeIfAppMissing )
      {
        $application = basename( $filename, ".t" );
        if ( ! -e $application )
        {
          print STDERR "No application (${application}) found for test harness (${filename})\n";
          next;
        }
      }
      push @tests, "${directory}/${filename}";
    }
  }

  closedir( DIR );

  return @tests;
}



1;
#
# End of File
#