git.saurik.com Git - apt.git/commitdiff
handle complete responses to https range requests
author     David Kalnischkies <kalnischkies@gmail.com>
           Mon, 30 Sep 2013 14:41:16 +0000 (16:41 +0200)
committer  David Kalnischkies <kalnischkies@gmail.com>
           Tue, 1 Oct 2013 09:17:02 +0000 (11:17 +0200)
Servers might respond with a complete file either because they don't
support Ranges at all or because the If-Range condition isn't satisfied,
so we have to parse the headers curl receives ourselves in order to seek
or truncate the file we have so far.

This also finally adds the testcase exercising a bunch of partial-download
situations for both http and https - which is now all green.

Closes: 617643, 667699
LP: 1157943
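
Purely for illustration (none of the following is part of this commit; the
URL, port and filename are placeholders borrowed from the testcase below),
the underlying idea boils down to a small libcurl program: send the Range
request yourself, read the status code in a header callback, and truncate
the partially downloaded file if the server answers 200 with the complete
body anyway:

#include <curl/curl.h>
#include <unistd.h>     // ftruncate(), fileno()
#include <cstdio>
#include <string>

struct State { FILE *out; long offset; int code; };

// libcurl hands us one header line per call, the status line and the
// terminating blank line included.
static size_t on_header(char *buf, size_t size, size_t nitems, void *userp)
{
   State *s = static_cast<State *>(userp);
   std::string line(buf, size * nitems);
   if (line.compare(0, 5, "HTTP/") == 0)
      sscanf(line.c_str(), "HTTP/%*s %d", &s->code);
   else if (line == "\r\n" || line == "\n")   // end of headers: decide now
   {
      if (s->code == 200 && s->offset > 0)
      {
         // Range ignored (or If-Range not met): the full body follows, so
         // drop the partial data before the appending writes start at zero.
         fflush(s->out);
         if (ftruncate(fileno(s->out), 0) != 0)
            return 0;                         // abort the transfer
      }
      // 206: range honoured, keep appending.
      // 416: range not satisfiable - often means the local copy is complete.
   }
   return size * nitems;
}

int main()
{
   State s = { fopen("partial.file", "ab"), 0, 0 };
   if (s.out == NULL)
      return 1;
   fseek(s.out, 0, SEEK_END);
   s.offset = ftell(s.out);

   curl_global_init(CURL_GLOBAL_DEFAULT);
   CURL *curl = curl_easy_init();
   struct curl_slist *hdrs = NULL;
   if (s.offset > 0)
   {
      char range[64];
      snprintf(range, sizeof(range), "Range: bytes=%ld-", s.offset);
      hdrs = curl_slist_append(hdrs, range);
      // a real client would also send If-Range with the mtime of the file
   }
   // (against the testcase's self-signed cert one would also set CURLOPT_CAINFO)
   curl_easy_setopt(curl, CURLOPT_URL, "https://localhost:4433/testfile");
   curl_easy_setopt(curl, CURLOPT_HTTPHEADER, hdrs);
   curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, on_header);
   curl_easy_setopt(curl, CURLOPT_HEADERDATA, &s);
   curl_easy_setopt(curl, CURLOPT_WRITEDATA, s.out);  // default sink is fwrite()
   CURLcode rc = curl_easy_perform(curl);
   curl_slist_free_all(hdrs);
   curl_easy_cleanup(curl);
   curl_global_cleanup();
   fclose(s.out);
   return rc == CURLE_OK ? 0 : 1;
}

The patch itself does this inside HttpsMethod::parse_header() and reuses
ServerState::HeaderLine() for the parsing, which is also why methods/makefile
now links server.cc into the https method.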

methods/http.cc
methods/http.h
methods/https.cc
methods/https.h
methods/makefile
methods/server.h
test/integration/apt.pem [new file with mode: 0644]
test/integration/framework
test/integration/test-partial-file-support [new file with mode: 0755]

diff --git a/methods/http.cc b/methods/http.cc
index d2f084b04afea3e4fb84db7df8d9f92fc3e3e1f5..71a02e53a062e3a7bae2f22dd63dbfea34bdf2c0 100644 (file)
@@ -863,3 +863,8 @@ ServerState * HttpMethod::CreateServerState(URI uri)                        /*{{{*/
    return new HttpServerState(uri, this);
 }
                                                                        /*}}}*/
+void HttpMethod::RotateDNS()                                           /*{{{*/
+{
+   ::RotateDNS();
+}
+                                                                       /*}}}*/
diff --git a/methods/http.h b/methods/http.h
index 112ce171d6aef319be655a10488033144e25085c..02c04e8ae8375befbb6bfb4a8cd8cc7e7fcedfc1 100644 (file)
@@ -126,6 +126,7 @@ class HttpMethod : public ServerMethod
    virtual bool Configuration(std::string Message);
 
    virtual ServerState * CreateServerState(URI uri);
+   virtual void RotateDNS();
 
    protected:
    std::string AutoDetectProxyCmd;
diff --git a/methods/https.cc b/methods/https.cc
index 4f00842ba8c1f67a1d4592885a39d64734bdb0c3..2a562434b42dab471ae732be2e045a607a6b1957 100644 (file)
                                                                        /*}}}*/
 using namespace std;
 
+size_t
+HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
+{
+   size_t len = size * nmemb;
+   HttpsMethod *me = (HttpsMethod *)userp;
+   std::string line((char*) buffer, len);
+   for (--len; len > 0; --len)
+      if (isspace(line[len]) == 0)
+      {
+        ++len;
+        break;
+      }
+   line.erase(len);
+
+   if (line.empty() == true)
+   {
+      if (me->Server->Result != 416 && me->Server->StartPos != 0)
+        ;
+      else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize())
+      {
+         me->Server->Result = 200;
+        me->Server->StartPos = me->Server->Size;
+      }
+      else
+        me->Server->StartPos = 0;
+
+      me->File->Truncate(me->Server->StartPos);
+      me->File->Seek(me->Server->StartPos);
+   }
+   else if (me->Server->HeaderLine(line) == false)
+      return 0;
+
+   return size*nmemb;
+}
+
 size_t 
 HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
 {
@@ -59,6 +94,14 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double dlnow,
    return 0;
 }
 
+// HttpsServerState::HttpsServerState - Constructor                    /*{{{*/
+HttpsServerState::HttpsServerState(URI Srv,HttpsMethod *Owner) : ServerState(Srv, NULL)
+{
+   TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
+   Reset();
+}
+                                                                       /*}}}*/
+
 void HttpsMethod::SetupProxy()                                         /*{{{*/
 {
    URI ServerName = Queue->Uri;
@@ -136,6 +179,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
 
    // callbacks
    curl_easy_setopt(curl, CURLOPT_URL, static_cast<string>(Uri).c_str());
+   curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header);
+   curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this);
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, this);
    curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback);
@@ -289,16 +334,13 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
 
    // go for it - if the file exists, append on it
    File = new FileFd(Itm->DestFile, FileFd::WriteAny);
-   if (File->Size() > 0)
-      File->Seek(File->Size());
+   Server = new HttpsServerState(Itm->Uri, this);
 
    // keep apt updated
    Res.Filename = Itm->DestFile;
 
    // get it!
    CURLcode success = curl_easy_perform(curl);
-   long curl_responsecode;
-   curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &curl_responsecode);
 
    // If the server returns 200 OK but the If-Modified-Since condition is not
    // met, CURLINFO_CONDITION_UNMET will be set to 1
@@ -317,7 +359,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    }
 
    // server says file not modified
-   if (curl_responsecode == 304 || curl_condition_unmet == 1)
+   if (Server->Result == 304 || curl_condition_unmet == 1)
    {
       unlink(File->Name().c_str());
       Res.IMSHit = true;
@@ -326,13 +368,14 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
       URIDone(Res);
       return true;
    }
+   Res.IMSHit = false;
 
-   if (curl_responsecode != 200 && // OK
-        curl_responsecode != 206 && // Partial
-        curl_responsecode != 416) // invalid Range
+   if (Server->Result != 200 && // OK
+        Server->Result != 206 && // Partial
+        Server->Result != 416) // invalid Range
    {
       char err[255];
-      snprintf(err, sizeof(err) - 1, "HttpError%ld", curl_responsecode);
+      snprintf(err, sizeof(err) - 1, "HttpError%i", Server->Result);
       SetFailReason(err);
       _error->Error("%s", err);
       // unlink, no need keep 401/404 page content in partial/
@@ -349,7 +392,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    Res.Size = resultStat.st_size;
 
    // invalid range-request
-   if (curl_responsecode == 416)
+   if (Server->Result == 416)
    {
       unlink(File->Name().c_str());
       Res.Size = 0;
@@ -358,18 +401,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
       return true;
    }
 
-   // check the downloaded result
-   if (curl_responsecode == 304 || curl_condition_unmet)
-   {
-      unlink(File->Name().c_str());
-      Res.IMSHit = true;
-      Res.LastModified = Itm->LastModified;
-      Res.Size = 0;
-      URIDone(Res);
-      return true;
-   }
-   Res.IMSHit = false;
-
    // Timestamp
    curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
    if (Res.LastModified != -1)
@@ -408,4 +439,3 @@ int main()
    return Mth.Run();
 }
 
-
diff --git a/methods/https.h b/methods/https.h
index 293e288e06f8d6fe1872bbcf4769ff4d2fc48ae5..8632d6d02b9ff105350ad7ea6b8c12e5c9b1c5e4 100644 (file)
 #include <iostream>
 #include <curl/curl.h>
 
+#include "server.h"
+
 using std::cout;
 using std::endl;
 
 class HttpsMethod;
 class FileFd;
 
+class HttpsServerState : public ServerState
+{
+   protected:
+   virtual bool ReadHeaderLines(std::string &Data) { return false; }
+   virtual bool LoadNextResponse(bool const ToFile, FileFd * const File) { return false; }
+
+   public:
+   virtual bool WriteResponse(std::string const &Data) { return false; }
+
+   /** \brief Transfer the data from the socket */
+   virtual bool RunData(FileFd * const File) { return false; }
+
+   virtual bool Open() { return false; }
+   virtual bool IsOpen() { return false; }
+   virtual bool Close() { return false; }
+   virtual bool InitHashes(FileFd &File) { return false; }
+   virtual Hashes * GetHashes() { return NULL; }
+   virtual bool Die(FileFd &File) { return false; }
+   virtual bool Flush(FileFd * const File) { return false; }
+   virtual bool Go(bool ToFile, FileFd * const File) { return false; }
+
+   HttpsServerState(URI Srv, HttpsMethod *Owner);
+   virtual ~HttpsServerState() {Close();};
+};
+
 class HttpsMethod : public pkgAcqMethod
 {
    // minimum speed in bytes/se that triggers download timeout handling
    static const int DL_MIN_SPEED = 10;
 
    virtual bool Fetch(FetchItem *);
+   static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp);
    static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp);
    static int progress_callback(void *clientp, double dltotal, double dlnow, 
                                double ultotal, double ulnow);
    void SetupProxy();
    CURL *curl;
    FetchResult Res;
+   HttpsServerState *Server;
 
    public:
    FileFd *File;
diff --git a/methods/makefile b/methods/makefile
index f8098de749ab4b7b3931e2f01af5c0c1bc1780c9..6b77812943cbe47a0d807eb9ce070829a7026e6d 100644 (file)
@@ -55,7 +55,7 @@ include $(PROGRAM_H)
 PROGRAM=https
 SLIBS = -lapt-pkg -lcurl $(INTLLIBS)
 LIB_MAKES = apt-pkg/makefile
-SOURCE = https.cc
+SOURCE = https.cc server.cc
 include $(PROGRAM_H)
 
 # The ftp method
diff --git a/methods/server.h b/methods/server.h
index 2d43b332fe3d1187528cae92dbbd8eeb8c64bfc4..4dc6a1f2fbef55c4e209b71a17be9d28f8d41ff7 100644 (file)
@@ -49,11 +49,12 @@ struct ServerState
    protected:
    ServerMethod *Owner;
 
-   bool HeaderLine(std::string Line);
    virtual bool ReadHeaderLines(std::string &Data) = 0;
    virtual bool LoadNextResponse(bool const ToFile, FileFd * const File) = 0;
 
    public:
+   bool HeaderLine(std::string Line);
+
    /** \brief Result of the header acquire */
    enum RunHeadersResult {
       /** \brief Header ok */
@@ -134,6 +135,7 @@ class ServerMethod : public pkgAcqMethod
 
    virtual void SendReq(FetchItem *Itm) = 0;
    virtual ServerState * CreateServerState(URI uri) = 0;
+   virtual void RotateDNS() = 0;
 
    ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), PipelineDepth(0), AllowRedirect(false), Debug(false) {};
    virtual ~ServerMethod() {};
diff --git a/test/integration/apt.pem b/test/integration/apt.pem
new file mode 100644 (file)
index 0000000..f48df05
--- /dev/null
+++ b/test/integration/apt.pem
@@ -0,0 +1,49 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCt4R1Q2oYF6utL
+19GBhnlHW8L2BI7PRFWge/ZpqIZWsaFcb30FV86Z6aXXZmgfEJ2814ZZYD1IKeCe
+JsJpns7B4vYe1v64r995ZNBQAAKIYjICkKZOBgOphV+ChBfrCctVXgfLbMP6iBdH
+J02wHzSCCdZm0sdVl9tB5l/OyJU8Mb4KB3btBhfZfY2M6lU+FOjcXs1LOduUrv7K
+fZ+DEalvVGkomLtHtD0qb2vkqFrTjVCkziUVWhhxFFflt08oQ01Clxpl+uv7rOQo
+jtkJ1LrMuv7iPfaZ/z3qLiFxZYG1BCGEwTOKCtJo6bgFzXiN3q7Q5FFlmv851x2J
+Dn8C7Qm7AgMBAAECggEAE3q6vAofJZ6Ryadd8zLLd3ESQFl2XkX7icUZb/DPS/sO
+ZrqeuPCDVr7UM3NnisNjyHoktPKRKvp2DYGuGgMOiq4QgJf5ZVten8zpgWze28SU
+cbEe0HLgCifE8Ww2+b/ZJbEpEmMW+YQxh2khzO9SBJdxi4dliXM/vvw+E35pKZsB
+s6glrz6VQAxxa9fY4fLnB2DafHy+pUvRVw8gC6PCM9jXN9tMYAqztsJu7aaanNyT
+HX2UDWa8hxVx6t5UQZuxvst9N+RcEwmVCR2qlfZt/VRBRibBm62crEKbTD00mNHQ
+4AIDn3g6Y3SXpDlgtNpjLyBL3fODPIwqwGdblaSKkQKBgQDYXecu0Eda7kbR5ciW
+IAn8XOxsBIkkh8YVl2gRiiajRVoeiYBHaW9TyuQiaWrftiDQxB/N4G2focTXy/7O
+VJn6e/SUoO/ZGRw2GbTxLUQptgvFsejYCcW9XpC8MCwE/y2swiY7JM0WR8cV2nCk
+a/Cls6f1LjL13aFO0PAorEcahQKBgQDNuth6EHZVwfDgUuqhRw4HIIpfsfiA3UOd
+b5k/NsfQIev1YUqnfucgInNPDq2Jf8eTQw3TKaszo2DCjDffCsEgM09Tym143Bd6
+AsMuqAStsE3IEC7pnmh95l29/7mh4OuG5cp5JUx0Pi5PkuJ6ywA8P1rM1MB9Zf52
+NGJCo1pnPwKBgQCx/n4i+uDYo1DLd/dN2UmdvGwaaJjR3ohMVuQcGcSzaGg82u0W
+0lvtWOnYjVSIeXIBjHaFjW1hd1lSFdWms96AO9z3MHZf6NJWh0tdZNnAXqzMlBFz
+OIbdxJ/Y0OBFtA9FIesFmL7G54GWLr+f49Ry3Jr9jmYJ8au0BRqsux07aQKBgC4q
+CT2KyCMCO/z6XjAGc71hres/UlYIUI3ZZvfqYPfxRLNxO4FOVqq9UEajMomyJKSE
+3WtO5F3YAXRmZnskPKXvHZPdzqbaLGJykD298h7PewSzrPM7WpM1yD9ETPFoOTGy
+CrcYiYlkEpxEh5GqT8k1JjjkXLVG18zKgGoXocedAoGAQyU2DCNfxwzIJfFHKZEG
+zpni72cR68Tu3AhW/38vMR2ZPca4KzXrUA52T+j7vkQC38LHm/mzNXNP7Vya0PJ3
+WoYOcLtg2uFPh0P/35ArEzuNooLsvulgg1jsamPbF8KAvJZKZHr30hlC/JGYSBbV
+bnkzJTShsKzHIUiLtQ8Ja+E=
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIJAJ39xapQo0vLMA0GCSqGSIb3DQEBBQUAMFMxCzAJBgNV
+BAYTAkRFMRMwEQYDVQQIDApTb21lLVN0YXRlMRswGQYDVQQKDBJBUFQgVGVzdGNh
+c2VzIEdtYkgxEjAQBgNVBAMMCWxvY2FsaG9zdDAgFw0xMzA5MTYwODQ4MzVaGA80
+NzUxMDgxMzA4NDgzNVowUzELMAkGA1UEBhMCREUxEzARBgNVBAgMClNvbWUtU3Rh
+dGUxGzAZBgNVBAoMEkFQVCBUZXN0Y2FzZXMgR21iSDESMBAGA1UEAwwJbG9jYWxo
+b3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAreEdUNqGBerrS9fR
+gYZ5R1vC9gSOz0RVoHv2aaiGVrGhXG99BVfOmeml12ZoHxCdvNeGWWA9SCngnibC
+aZ7OweL2Htb+uK/feWTQUAACiGIyApCmTgYDqYVfgoQX6wnLVV4Hy2zD+ogXRydN
+sB80ggnWZtLHVZfbQeZfzsiVPDG+Cgd27QYX2X2NjOpVPhTo3F7NSznblK7+yn2f
+gxGpb1RpKJi7R7Q9Km9r5Kha041QpM4lFVoYcRRX5bdPKENNQpcaZfrr+6zkKI7Z
+CdS6zLr+4j32mf896i4hcWWBtQQhhMEzigrSaOm4Bc14jd6u0ORRZZr/OdcdiQ5/
+Au0JuwIDAQABo1AwTjAdBgNVHQ4EFgQUhd26E7ykEYRTDbgMzkYtFtENhSkwHwYD
+VR0jBBgwFoAUhd26E7ykEYRTDbgMzkYtFtENhSkwDAYDVR0TBAUwAwEB/zANBgkq
+hkiG9w0BAQUFAAOCAQEAWcyMKi0Vc4beGV7w4Qft0/2P68jjMlQRdgkz+gGXbMVr
+//KhqR3PbgFmHHpUsZ718AHeerNNdfFzOUptiAiOqH2muyAGdeWCxJ8KcU0sic8x
+/h3TOzMYfEozhgMSJp9YW1z655uHcb15S7jb4zZwXwGyQzxwXT35SKj2mCqSbjIb
+G987DGI+MtyoGRXhIwnBEsGTI1ck3NoeXBJ/tS/Ma8gUUC2xldMSprtHjeUHvZV2
+iz/HTqGlMLGW96AVeZiFNiC1fJ6pvref2XW5MkkvQm8tOi2cSrwJc9CgnCpCxkLp
+liRsbwAduwkA26XzEomMR7yyYS5pm0Eu0cO9X39FKQ==
+-----END CERTIFICATE-----
diff --git a/test/integration/framework b/test/integration/framework
index 4003d932cfffa8b25ff40a547d4fd42500799abf..a2bb871ccb4693110d36e69ee19798760ff75fac 100644 (file)
@@ -795,6 +795,13 @@ signreleasefiles() {
        msgdone "info"
 }
 
+rewritesourceslist() {
+       local APTARCHIVE="file://$(readlink -f "${TMPWORKINGDIRECTORY}/aptarchive")"
+       for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do
+               sed -i $LIST -e "s#$APTARCHIVE#${1}#" -e "s#http://localhost:8080/#${1}#" -e "s#http://localhost:4433/#${1}#"
+       done
+}
+
 changetowebserver() {
        local LOG='/dev/null'
        if test -x ${BUILDDIRECTORY}/aptwebserver; then
@@ -806,31 +813,32 @@ changetowebserver() {
                fi
                addtrap "kill $PID;"
                cd - > /dev/null
-       elif [ $# -gt 0 ]; then
-               msgdie 'Need the aptwebserver when passing arguments for the webserver'
-       elif which weborf > /dev/null; then
-               weborf -xb aptarchive/ >$LOG 2>&1 &
-               addtrap "kill $!;"
-       elif which gatling > /dev/null; then
-               cd aptarchive
-               gatling -p 8080 -F -S >$LOG 2>&1 &
-               addtrap "kill $!;"
-               cd - > /dev/null
-       elif which lighttpd > /dev/null; then
-               echo "server.document-root = \"$(readlink -f ./aptarchive)\"
-server.port = 8080
-server.stat-cache-engine = \"disable\"" > lighttpd.conf
-               lighttpd -t -f lighttpd.conf >/dev/null || msgdie 'Can not change to webserver: our lighttpd config is invalid'
-               lighttpd -D -f lighttpd.conf >$LOG 2>&1 &
-               addtrap "kill $!;"
        else
                msgdie 'You have to build aptwerbserver or install a webserver'
        fi
-       local APTARCHIVE="file://$(readlink -f ./aptarchive)"
-       for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do
-               sed -i $LIST -e "s#$APTARCHIVE#http://localhost:8080/#"
-       done
-       return 0
+       if [ "$1" != '--no-rewrite' ]; then
+               rewritesourceslist 'http://localhost:8080/'
+       fi
+}
+
+changetohttpswebserver() {
+       if ! which stunnel4 >/dev/null; then
+               msgdie 'You need to install stunnel4 for https testcases'
+       fi
+       if [ ! -e "${TMPWORKINGDIRECTORY}/aptarchive/aptwebserver.pid" ]; then
+               changetowebserver --no-rewrite
+       fi
+       echo "pid = ${TMPWORKINGDIRECTORY}/aptarchive/stunnel.pid
+cert = ${TESTDIRECTORY}/apt.pem
+
+[https]
+accept = 4433
+connect = 8080
+" > ${TMPWORKINGDIRECTORY}/stunnel.conf
+       stunnel4 "${TMPWORKINGDIRECTORY}/stunnel.conf"
+       local PID="$(cat ${TMPWORKINGDIRECTORY}/aptarchive/stunnel.pid)"
+       addtrap 'prefix' "kill ${PID};"
+       rewritesourceslist 'https://localhost:4433/'
 }
 
 changetocdrom() {
@@ -848,6 +856,46 @@ changetocdrom() {
        find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list' -delete
 }
 
+downloadfile() {
+       PROTO="$(echo "$1" | cut -d':' -f 1)"
+       local DOWNLOG="${TMPWORKINGDIRECTORY}/download.log"
+       rm -f "$DOWNLOG"
+       touch "$DOWNLOG"
+       {
+               echo "601 Configuration
+Config-Item: Acquire::https::CaInfo=${TESTDIR}/apt.pem
+Config-Item: Debug::Acquire::${PROTO}=1
+
+600 Acquire URI
+URI: $1
+Filename: ${2}
+"
+               # simple worker keeping stdin open until we are done (201) or error (400)
+               # and requesting new URIs on try-agains/redirects inbetween
+               { tail -n 999 -f "$DOWNLOG" & echo "TAILPID: $!"; } | while read f1 f2; do
+                       if [ "$f1" = 'TAILPID:' ]; then
+                               TAILPID="$f2"
+                       elif [ "$f1" = 'New-URI:' ]; then
+                               echo "600 Acquire URI
+URI: $f2
+Filename: ${2}
+"
+                       elif [ "$f1" = '201' ] || [ "$f1" = '400' ]; then
+                               # tail would only die on next read – which never happens
+                               test -z "$TAILPID" || kill -s HUP "$TAILPID"
+                               break
+                       fi
+               done
+       } | LD_LIBRARY_PATH=${BUILDDIRECTORY} ${BUILDDIRECTORY}/methods/${PROTO} 2>&1 | tee "$DOWNLOG"
+       rm "$DOWNLOG"
+       # only if the file exists the download was successful
+       if [ -e "$2" ]; then
+               return 0
+       else
+               return 1
+       fi
+}
+
 checkdiff() {
        local DIFFTEXT="$($(which diff) -u $* | sed -e '/^---/ d' -e '/^+++/ d' -e '/^@@/ d')"
        if [ -n "$DIFFTEXT" ]; then
diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support
new file mode 100755 (executable)
index 0000000..8d1c51a
--- /dev/null
+++ b/test/integration/test-partial-file-support
@@ -0,0 +1,107 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+changetowebserver
+
+copysource() {
+       dd if="$1" bs=1 count="$2" of="$3" 2>/dev/null
+       touch -d "$(stat --format '%y' "${TESTFILE}")" "$3"
+}
+
+testdownloadfile() {
+       local DOWNLOG='download-testfile.log'
+       rm -f "$DOWNLOG"
+       msgtest "Testing download of file $2 with" "$1"
+       if ! downloadfile "$2" "$3" > "$DOWNLOG"; then
+               cat "$DOWNLOG"
+               msgfail
+       else
+               msgpass
+       fi
+       cat "$DOWNLOG" | while read field hash; do
+               local EXPECTED
+               case "$field" in
+               'MD5Sum-Hash:') EXPECTED="$(md5sum "$TESTFILE" | cut -d' ' -f 1)";;
+               'SHA1-Hash:') EXPECTED="$(sha1sum "$TESTFILE" | cut -d' ' -f 1)";;
+               'SHA256-Hash:') EXPECTED="$(sha256sum "$TESTFILE" | cut -d' ' -f 1)";;
+               'SHA512-Hash:') EXPECTED="$(sha512sum "$TESTFILE" | cut -d' ' -f 1)";;
+               *) continue;;
+               esac
+               if [ "$4" = '=' ]; then
+                       msgtest 'Test downloaded file for correct' "$field"
+               else
+                       msgtest 'Test downloaded file does not match in' "$field"
+               fi
+               if [ "$EXPECTED" "$4" "$hash" ]; then
+                       msgpass
+               else
+                       cat "$DOWNLOG"
+                       msgfail "expected: $EXPECTED ; got: $hash"
+               fi
+       done
+}
+
+testwebserverlaststatuscode() {
+       STATUS="$(mktemp)"
+       addtrap "rm $STATUS;"
+       msgtest 'Test last status code from the webserver was' "$1"
+       downloadfile "http://localhost:8080/_config/find/aptwebserver::last-status-code" "$STATUS" >/dev/null
+       if [ "$(cat "$STATUS")" = "$1" ]; then
+               msgpass
+       else
+               cat download-testfile.log
+               msgfail "Status was $(cat "$STATUS")"
+       fi
+}
+
+
+TESTFILE='aptarchive/testfile'
+cp -a ${TESTDIR}/framework $TESTFILE
+
+testrun() {
+       downloadfile "$1/_config/set/aptwebserver::support::range/true" '/dev/null' >/dev/null
+       testwebserverlaststatuscode '200'
+
+       copysource $TESTFILE 0 ./testfile
+       testdownloadfile 'no data' "${1}/testfile" './testfile' '='
+       testwebserverlaststatuscode '200'
+
+       copysource $TESTFILE 20 ./testfile
+       testdownloadfile 'valid partial data' "${1}/testfile" './testfile' '='
+       testwebserverlaststatuscode '206'
+
+       copysource /dev/zero 20 ./testfile
+       testdownloadfile 'invalid partial data' "${1}/testfile" './testfile' '!='
+       testwebserverlaststatuscode '206'
+
+       copysource $TESTFILE 1M ./testfile
+       testdownloadfile 'completely downloaded file' "${1}/testfile" './testfile' '='
+       testwebserverlaststatuscode '416'
+
+       copysource /dev/zero 1M ./testfile
+       testdownloadfile 'too-big partial file' "${1}/testfile" './testfile' '='
+       testwebserverlaststatuscode '200'
+
+       copysource /dev/zero 20 ./testfile
+       touch ./testfile
+       testdownloadfile 'old data' "${1}/testfile" './testfile' '='
+       testwebserverlaststatuscode '200'
+
+       downloadfile "$1/_config/set/aptwebserver::support::range/false" '/dev/null' >/dev/null
+       testwebserverlaststatuscode '200'
+
+       copysource $TESTFILE 20 ./testfile
+       testdownloadfile 'no server support' "${1}/testfile" './testfile' '='
+       testwebserverlaststatuscode '200'
+}
+
+testrun 'http://localhost:8080'
+
+changetohttpswebserver
+
+testrun 'https://localhost:4433'