]> git.saurik.com Git - apt.git/commitdiff
improve https method queue progress reporting
authorDavid Kalnischkies <david@kalnischkies.de>
Fri, 27 Mar 2015 14:53:43 +0000 (15:53 +0100)
committerDavid Kalnischkies <david@kalnischkies.de>
Sat, 18 Apr 2015 23:13:08 +0000 (01:13 +0200)
The worker expects that the methods tell it when they start or finish
downloading a file. Various information pieces are passed along in this
report including the (expected) filesize. https was using a "global"
struct for reporting which made it 'reuse' incorrect values in some
cases like a non-existent InRelease falling back to Release{,.gpg},
resulting in a size-mismatch warning. Reducing the scope and redesigning
the setting of the values we can fix this and related issues.

Closes: 777565, 781509
Thanks: Robert Edmonds and Anders Kaseorg for initial patches

methods/https.cc
methods/https.h
methods/server.h
test/integration/framework
test/integration/test-apt-https-no-redirect
test/integration/test-apt-update-expected-size
test/integration/test-bug-602412-dequote-redirect

index c69e84d3a61f24df724c9481b17c9d209d37ce68..70f6a1046d1799de2c1b2b8cabee04dbf3f9b319 100644 (file)
                                                                        /*}}}*/
 using namespace std;
 
+struct APT_HIDDEN CURLUserPointer {
+   HttpsMethod * const https;
+   HttpsMethod::FetchResult * const Res;
+   CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res) : https(https), Res(Res) {}
+};
+
 size_t
 HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
 {
    size_t len = size * nmemb;
-   HttpsMethod *me = (HttpsMethod *)userp;
+   CURLUserPointer *me = (CURLUserPointer *)userp;
    std::string line((char*) buffer, len);
    for (--len; len > 0; --len)
       if (isspace(line[len]) == 0)
@@ -53,23 +59,33 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
 
    if (line.empty() == true)
    {
-      if (me->Server->Result != 416 && me->Server->StartPos != 0)
+      if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0)
         ;
-      else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize())
+      else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize())
       {
-         me->Server->Result = 200;
-        me->Server->StartPos = me->Server->Size;
+         me->https->Server->Result = 200;
+        me->https->Server->StartPos = me->https->Server->Size;
         // the actual size is not important for https as curl will deal with it
         // by itself and e.g. doesn't bother us with transport-encoding…
-        me->Server->JunkSize = std::numeric_limits<unsigned long long>::max();
+        me->https->Server->JunkSize = std::numeric_limits<unsigned long long>::max();
       }
       else
-        me->Server->StartPos = 0;
+        me->https->Server->StartPos = 0;
+
+      me->https->File->Truncate(me->https->Server->StartPos);
+      me->https->File->Seek(me->https->Server->StartPos);
+
+      me->Res->LastModified = me->https->Server->Date;
+      me->Res->Size = me->https->Server->Size;
+      me->Res->ResumePoint = me->https->Server->StartPos;
 
-      me->File->Truncate(me->Server->StartPos);
-      me->File->Seek(me->Server->StartPos);
+      // we expect valid data, so tell our caller we get the file now
+      if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 &&
+           me->https->Server->JunkSize == 0 &&
+           me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
+        me->https->URIStart(*me->Res);
    }
-   else if (me->Server->HeaderLine(line) == false)
+   else if (me->https->Server->HeaderLine(line) == false)
       return 0;
 
    return size*nmemb;
@@ -85,12 +101,6 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
    if (me->Server->JunkSize != 0)
       return buffer_size;
 
-   if (me->Server->ReceivedData == false)
-   {
-      me->URIStart(me->Res);
-      me->Server->ReceivedData = true;
-   }
-
    if(me->File->Write(buffer, buffer_size) != true)
       return 0;
 
@@ -109,27 +119,15 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
    return buffer_size;
 }
 
-int
-HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/,
-                              double /*ultotal*/, double /*ulnow*/)
-{
-   HttpsMethod *me = (HttpsMethod *)clientp;
-   if(dltotal > 0 && me->Res.Size == 0) {
-      me->Res.Size = (unsigned long long)dltotal;
-   }
-   return 0;
-}
-
 // HttpsServerState::HttpsServerState - Constructor                    /*{{{*/
 HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner)
 {
    TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
-   ReceivedData = false;
    Reset();
 }
                                                                        /*}}}*/
 
-void HttpsMethod::SetupProxy()                                         /*{{{*/
+void HttpsMethod::SetupProxy()                                         /*{{{*/
 {
    URI ServerName = Queue->Uri;
 
@@ -207,16 +205,16 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
 
    maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc"));
 
+   FetchResult Res;
+   CURLUserPointer userp(this, &Res);
    // callbacks
    curl_easy_setopt(curl, CURLOPT_URL, static_cast<string>(Uri).c_str());
    curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header);
-   curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this);
+   curl_easy_setopt(curl, CURLOPT_WRITEHEADER, &userp);
    curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
    curl_easy_setopt(curl, CURLOPT_WRITEDATA, this);
-   curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback);
-   curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this);
    // options
-   curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false);
+   curl_easy_setopt(curl, CURLOPT_NOPROGRESS, true);
    curl_easy_setopt(curl, CURLOPT_FILETIME, true);
    // only allow curl to handle https, not the other stuff it supports
    curl_easy_setopt(curl, CURLOPT_PROTOCOLS, CURLPROTO_HTTPS);
@@ -414,24 +412,23 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
       return false;
    }
 
-   struct stat resultStat;
-   if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
-   {
-      _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
-      return false;
-   }
-   Res.Size = resultStat.st_size;
-
    // invalid range-request
    if (Server->Result == 416)
    {
       unlink(File->Name().c_str());
-      Res.Size = 0;
       delete File;
       Redirect(Itm->Uri);
       return true;
    }
 
+   struct stat resultStat;
+   if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
+   {
+      _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
+      return false;
+   }
+   Res.Size = resultStat.st_size;
+
    // Timestamp
    curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
    if (Res.LastModified != -1)
@@ -455,7 +452,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    URIDone(Res);
 
    // cleanup
-   Res.Size = 0;
    delete File;
 
    return true;
index 433a84680f4326e7db9433f770b0ae51ad185632..4cc48fc34b7ecdc595e4fd9da1b5c3f560019e04 100644 (file)
@@ -65,7 +65,6 @@ class HttpsMethod : public ServerMethod
                                 double ultotal, double ulnow);
    void SetupProxy();
    CURL *curl;
-   FetchResult Res;
    ServerState *Server;
 
    // Used by ServerMethods unused by https
@@ -77,6 +76,7 @@ class HttpsMethod : public ServerMethod
 
    virtual bool Configuration(std::string Message);
    virtual ServerState * CreateServerState(URI uri);
+   using pkgAcqMethod::FetchResult;
 
    HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL)
    {
index 3b232dcacab3bf985f1d06cd7b02f120d4c7eb35..b974ec89a1b2f6604468e8f2cecd91794024fb83 100644 (file)
@@ -37,7 +37,6 @@ struct ServerState
    unsigned long long Size; // size of the usable content (aka: the file)
    unsigned long long JunkSize; // size of junk content (aka: server error pages)
    unsigned long long StartPos;
-   bool ReceivedData;
    time_t Date;
    bool HaveContent;
    enum {Chunked,Stream,Closes} Encoding;
@@ -76,7 +75,7 @@ struct ServerState
 
    bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
    virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0;
-                StartPos = 0; ReceivedData = false; Encoding = Closes; time(&Date); HaveContent = false;
+                StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
                 State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;};
    virtual bool WriteResponse(std::string const &Data) = 0;
 
index ec23e41e6454a567e8d017d5df31739138a998af..50c027a2c8d48e87dadb0a05632f11d4557ff9b5 100644 (file)
@@ -1125,8 +1125,10 @@ acquire::cdrom::autodetect 0;" > rootdir/etc/apt/apt.conf.d/00cdrom
 
 downloadfile() {
        local PROTO="${1%%:*}"
-       apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \
-               download-file "$1" "$2" 2>&1 || true
+       if ! apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \
+               download-file "$1" "$2" 2>&1 ; then
+               return 1
+       fi
        # only if the file exists the download was successful
        if [ -r "$2" ]; then
                return 0
@@ -1407,6 +1409,20 @@ testfailureequal() {
        testfileequal "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" "$CMP"
 }
 
+testfailuremsg() {
+       local CMP="$1"
+       shift
+       testfailure "$@"
+       msgtest 'Check that the output of the previous failed command has expected' 'failures and warnings'
+       grep '^\(W\|E\):' "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" > "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailureequal.output" 2>&1 || true
+       if echo "$CMP" | checkdiff - "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailureequal.output"; then
+               msgpass
+       else
+               echo '### Complete output ###'
+               cat "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output"
+               msgfail
+       fi
+}
 
 testfilestats() {
        msgtest "Test that file $1 has $2 $3" "$4"
index bc744d6f27227d49d2ad634d2afc0fc33d71ae0d..c91c78916ee965eedad706df2f90d1800ad66fba 100755 (executable)
@@ -14,22 +14,15 @@ echo 'alright' > aptarchive/working
 changetohttpswebserver  -o 'aptwebserver::redirect::replace::/redirectme/=http://localhost:8080/'
 
 msgtest 'download of a file works via' 'http'
-downloadfile 'http://localhost:8080/working' httpfile >/dev/null 2>&1 && msgpass || msgfail
+testsuccess --nomsg downloadfile 'http://localhost:8080/working' httpfile
 testfileequal httpfile 'alright'
 
 msgtest 'download of a file works via' 'https'
-downloadfile 'https://localhost:4433/working' httpsfile >/dev/null 2>&1 && msgpass || msgfail
+testsuccess --nomsg downloadfile 'https://localhost:4433/working' httpsfile
 testfileequal httpsfile 'alright'
 
 msgtest 'download of a file does not work if' 'https redirected to http'
-downloadfile 'https://localhost:4433/redirectme/working' redirectfile >curloutput 2>&1 && msgfail || msgpass
+testfailure --nomsg downloadfile 'https://localhost:4433/redirectme/working' redirectfile
 
 msgtest 'libcurl has forbidden access in last request to' 'http resource'
-if grep -q -E -- 'Protocol "?http"? not supported or disabled in libcurl' curloutput; then
-       msgpass
-else
-       cat curloutput
-       msgfail
-fi
-
-
+testsuccess --nomsg grep -q -E -- 'Protocol "?http"? not supported or disabled in libcurl' rootdir/tmp/testfailure.output
index 9711c293a023a1b4b8254c3bebd338acfb227dac..22de13ea5ec704991eddc2a5bd78ded4641a32c3 100755 (executable)
@@ -10,35 +10,61 @@ configarchitecture "i386"
 insertpackage 'unstable' 'apt' 'all' '1.0'
 
 setupaptarchive --no-update
+cp -a aptarchive/dists aptarchive/dists.good
+
+test_inreleasetoobig() {
+       # make InRelease really big to trigger fallback
+       dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null
+       touch -d '+1hour' aptarchive/dists/unstable/InRelease
+       testsuccess aptget update -o Apt::Get::List-Cleanup=0  -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0
+       msgtest 'Check that the max write warning is triggered'
+       cp rootdir/tmp/testsuccess.output update.output
+       testsuccess --nomsg grep -q 'Writing more data than expected' update.output
+       rm -f update.output
+       # ensure the failed InRelease file got renamed
+       testsuccess ls rootdir/var/lib/apt/lists/partial/*InRelease.FAILED
+}
+
+test_packagestoobig() {
+       # append junk at the end of the Packages.gz/Packages
+       SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
+       find aptarchive/dists -name 'Packages*' | while read pkg; do
+               echo "1234567890" >> "$pkg"
+               touch -d '+1hour' "$pkg"
+       done
+       NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
+       testfailuremsg "W: Failed to fetch ${1}/dists/unstable/main/binary-i386/Packages  Writing more data than expected ($NEW_SIZE > $SIZE)
+E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -o Debug::pkgAcquire::Worker=0
+}
+
+methodtest() {
+       msgmsg 'Test with' "$1" 'and clean start'
+       rm -rf rootdir/var/lib/apt/lists rootdir/var/lib/apt/lists.good
+       # normal update works fine
+       testsuccess aptget update
+       mv rootdir/var/lib/apt/lists rootdir/var/lib/apt/lists.good
+
+       # starting fresh works
+       test_inreleasetoobig "$1"
+       rm -rf aptarchive/dists rootdir/var/lib/apt/lists
+       cp -a aptarchive/dists.good aptarchive/dists
+       test_packagestoobig "$1"
+       rm -rf aptarchive/dists rootdir/var/lib/apt/lists
+       cp -a aptarchive/dists.good aptarchive/dists
+
+       msgmsg 'Test with' "$1" 'and existing old data'
+       cp -a rootdir/var/lib/apt/lists.good rootdir/var/lib/apt/lists
+       test_inreleasetoobig "$1"
+       rm -rf aptarchive/dists rootdir/var/lib/apt/lists
+       cp -a rootdir/var/lib/apt/lists.good rootdir/var/lib/apt/lists
+       cp -a aptarchive/dists.good aptarchive/dists
+       test_packagestoobig "$1"
+       rm -rf aptarchive/dists
+       cp -a aptarchive/dists.good aptarchive/dists
+}
+
 changetowebserver
+methodtest 'http://localhost:8080'
 
-# normal update works fine
-testsuccess aptget update
-
-# make InRelease really big to trigger fallback
-mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good
-dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null
-touch -d '+1hour' aptarchive/dists/unstable/InRelease
-testsuccess aptget update -o Apt::Get::List-Cleanup=0  -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0
-msgtest 'Check that the max write warning is triggered'
-if grep -q "Writing more data than expected" rootdir/tmp/testsuccess.output; then
-    msgpass
-else
-    cat rootdir/tmp/testsuccess.output
-    msgfail
-fi
-# ensure the failed InRelease file got renamed
-testsuccess ls rootdir/var/lib/apt/lists/partial/*InRelease.FAILED
-mv aptarchive/dists/unstable/InRelease.good aptarchive/dists/unstable/InRelease
-
-
-# append junk at the end of the Packages.gz/Packages
-SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
-find aptarchive -name 'Packages*' | while read pkg; do
-       echo "1234567890" >> "$pkg"
-done
-NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
-rm -f rootdir/var/lib/apt/lists/localhost*
-testfailureequal "W: Failed to fetch http://localhost:8080/dists/unstable/main/binary-i386/Packages  Writing more data than expected ($NEW_SIZE > $SIZE)
-
-E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+changetohttpswebserver
+methodtest 'https://localhost:4433'
index d3573a79a41feb0c5de2e668924edd7ba683c6b1..384c8b1132042d584e47aca394ee8ae659eff146 100755 (executable)
@@ -8,7 +8,7 @@ configarchitecture 'amd64'
 
 buildsimplenativepackage 'unrelated' 'all' '0.5~squeeze1' 'unstable'
 
-setupaptarchive
+setupaptarchive --no-update
 changetowebserver -o aptwebserver::redirect::replace::/pool/=/newpool/ \
                  -o aptwebserver::redirect::replace::/dists/=/newdists/
 
@@ -16,6 +16,7 @@ mv aptarchive/pool aptarchive/newpool
 mv aptarchive/dists aptarchive/newdists
 
 testrun() {
+       msgmsg 'Test redirection works in method boundaries' "$1"
        msgtest 'Test redirection works in' 'apt-get update'
        testsuccess --nomsg aptget update