#!/usr/bin/env perl
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2004, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://curl.haxx.se/docs/copyright.html.
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# $Id$
###########################################################################
# These should be the only variables that might need to be edited:

require "getpart.pm"; # array functions
my $HOSTIP="127.0.0.1";
my $HOST6IP="[::1]";

my $base = 8990; # base port number

my $HTTPPORT; # HTTP server port
my $HTTP6PORT; # HTTP IPv6 server port
my $HTTPSPORT; # HTTPS server port
my $FTPPORT; # FTP server port
my $FTPSPORT; # FTPS server port

my $CURL="../src/curl"; # what curl executable to run on the tests
my $DBGCURL=$CURL; #"../src/.libs/curl";  # alternative for debugging
my $LOGDIR="log";
my $LIBDIR="./libtest";
my $SERVERIN="$LOGDIR/server.input"; # what curl sent the server
my $CURLLOG="$LOGDIR/curl.log"; # all command lines run
my $FTPDCMD="$LOGDIR/ftpserver.cmd"; # copy ftp server instructions here

# Normally, all test cases should be run, but at times it is handy to
# simply run a particular one:
my $TESTCASES="all";

# To run specific test cases, set them like:
# $TESTCASES="1 2 3 7 8";

#######################################################################
# No variables below this point should need to be modified
#

my $HTTPPIDFILE=".http.pid";
my $HTTP6PIDFILE=".http6.pid";
my $HTTPSPIDFILE=".https.pid";
my $FTPPIDFILE=".ftp.pid";
my $FTPSPIDFILE=".ftps.pid";
my $srcdir = $ENV{'srcdir'} || "."; # where the test suite scripts live
# invoke perl like this:
my $perl="perl -I$srcdir";

# this gets set if curl is compiled with debugging:
my $curl_debug=0;
# name of the file that the memory debugging creates:
my $memdump="memdump";

# the path to the script that analyzes the memory debug output file:
my $memanalyze="./memanalyze.pl";
my $stunnel = checkcmd("stunnel");
my $valgrind = checkcmd("valgrind");

my $valgrind_tool;
if($valgrind) {
    # since valgrind 2.1.x, the '--tool' option is mandatory;
    # use it if the version installed on the system supports it
    system("valgrind --help 2>&1 | grep -- --tool > /dev/null 2>&1");
    if (($? >> 8)==0) {
        $valgrind_tool="--tool=memcheck ";
    }
}
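
# A minimal illustrative sketch (not part of the original script, and the
# helper name is an assumption): how the detected $valgrind path and the
# $valgrind_tool option could be combined to wrap a test command line.
# The real command-line assembly happens further down in the full file.
sub valgrindify {
    my ($cmdline) = @_;
    return $cmdline unless($valgrind);              # valgrind not in PATH
    my $tool = $valgrind_tool ? $valgrind_tool : ""; # "--tool=memcheck " when supported
    # prepend valgrind with leak checking enabled to the given command
    return "$valgrind $tool--leak-check=yes $cmdline";
}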

my $ssl_version; # set if libcurl is built with SSL support
my $large_file;  # set if libcurl is built with large file support
my $has_idn;     # set if libcurl is built with IDN support
my $http_ipv6;   # set if HTTP server has IPv6 support
my $has_ipv6;    # set if libcurl is built with IPv6 support
my $has_libz;    # set if libcurl is built with libz support
my $has_getrlimit;  # set if system has getrlimit()
my $skipped=0;  # number of tests skipped; reported in main loop
my %skipped;    # skipped{reason}=counter, reasons for skip
my @teststat;   # teststat[testnum]=reason, reasons for skip
#######################################################################
# variables the command line options may set
#

my $short;
my $verbose;
my $anyway;
my $gdbthis;      # run test case with gdb debugger
my $keepoutfiles; # keep stdout and stderr files after tests
my $listonly;     # only list the tests
my $postmortem;   # display detailed info about failed tests
my $pwd;          # current working directory

my %run;	  # running server

# torture test variables
my $torture;
my $tortnum;
my $tortalloc;

# enable memory debugging if curl is compiled with it
$ENV{'CURL_MEMDEBUG'} = 1;
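
# A hedged sketch (an assumption, not lifted from the original file) of how a
# single torture iteration further down drives this memory debug layer: the
# CURL_MEMLIMIT environment variable tells a debug build of curl to make
# allocation number $limit fail, after which the test command is re-run and
# its outcome inspected.  This helper is illustrative only and never called.
sub memlimit_run_sketch {
    my ($testcmd, $limit) = @_;
    $ENV{'CURL_MEMLIMIT'} = $limit; # fail the $limit:th allocation
    my $ret = system($testcmd);
    delete $ENV{'CURL_MEMLIMIT'};   # don't leak the limit into later runs
    return $ret >> 8;               # exit code of the curl command
}
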
##########################################################################
# Clear all possible '*_proxy' environment variables for various protocols
# to prevent them from interfering with our testing!

foreach my $protocol ('ftp', 'http', 'ftps', 'https', 'gopher', 'no') {
    my $proxy = "${protocol}_proxy";
    # clear the lowercase version
    delete $ENV{$proxy};
    # clear the uppercase version
    delete $ENV{uc($proxy)};
}

#######################################################################
# Check for a command in the PATH; returns its full path when found.
#
sub checkcmd {
    my ($cmd)=@_;
    my @paths=("/usr/sbin", "/usr/local/sbin", "/sbin", "/usr/bin",
               "/usr/local/bin", split(":", $ENV{'PATH'}));
    for(@paths) {
        if( -x "$_/$cmd") {
            return "$_/$cmd";
        }
    }
}

#######################################################################
# Return the pid of the server as found in the given pid file
#
sub serverpid {
    my $PIDFILE = $_[0];
    open(PFILE, "<$PIDFILE") || return 0; # no pid file, no pid
    my $PID = 0 + <PFILE>; # force numeric conversion of the pid
    close(PFILE);
    return $PID;
}
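
# A hedged usage sketch (not from the original file; the helper name and the
# signal choice are illustrative assumptions): a pid read back with
# serverpid() would typically be used to shut a lingering test server down.
sub stopserver_sketch {
    my ($pidfile) = @_;
    my $pid = serverpid($pidfile);
    if($pid > 0) {
        kill(9, $pid);    # forcibly terminate the server process
        unlink($pidfile); # and remove the stale pid file
    }
}
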
#######################################################################
# Memory allocation test and failure torture testing.
#
sub torture {
    my $testcmd = shift;
    my $gdbline = shift;
    # remove memdump first to be sure we get a new nice and clean one
    unlink($memdump);
    # First get URL from test server, ignore the output/result
    system($testcmd);
    print " CMD: $testcmd\n" if($verbose);
    # memanalyze -v is our friend, get the number of allocations made
    my $count = 0;
    my @out = `$memanalyze -v $memdump`;
    for(@out) {
        if(/^Allocations: (\d+)/) {
            $count = $1;
            last;