git.saurik.com Git - apt.git/commitdiff
don't ask server if we have entire file in partial/
author David Kalnischkies <david@kalnischkies.de>
Thu, 7 Apr 2016 15:48:17 +0000 (17:48 +0200)
committer David Kalnischkies <david@kalnischkies.de>
Mon, 25 Apr 2016 13:35:52 +0000 (15:35 +0200)
We have this situation in cases where parts of the transaction are
refused (e.g. due to a hashsum mismatch) and we rerun the update (e.g.
in the hope that we get a mirror which is synced this time).

Previously we would ask the server with an if-range and in the best case
receive a 416 in response (less featureful servers might end up giving us
the entire file again, or we get the wrong file this time, resulting in a
hashsum mismatch…), which is a waste of time if we already know, by
checking the hashsums, that we got the complete and correct file.
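
The core of the change: before (re)requesting an item, check whether the file
already sitting in partial/ has the expected size and hashes, and if so serve
it from disk without contacting the server at all. A minimal sketch of that
check as a hypothetical standalone helper built on apt's FileFd/Hashes classes
(the patch performs it inline in ServerMethod::Fetch(), see the diff below):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>
#include <string>

// Hypothetical helper: true if the file in partial/ already matches the
// size and hashes we expect, so no request needs to be sent for it.
static bool PartialFileIsComplete(std::string const &DestFile,
                                  HashStringList const &Expected)
{
   // without usable hashes we cannot prove anything about the file
   if (Expected.usable() == false || FileExists(DestFile) == false)
      return false;
   FileFd partial(DestFile, FileFd::ReadOnly);
   // cheap check first: a file of the wrong size can never match
   if (Expected.FileSize() != partial.FileSize())
      return false;
   // expensive check: hash what we have and compare with what we expect
   Hashes wehave(Expected);
   return wehave.AddFD(partial) && wehave.GetHashStringList() == Expected;
}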

methods/server.cc
test/integration/framework
test/integration/test-apt-update-transactions
test/integration/test-pdiff-usage

index 322b8d94ca1dfcdd98f20ff20715aa1659437a97..b323ef4f3cdad29e80f9b9edbec9c5095a6cde29 100644 (file)
@@ -419,36 +419,66 @@ void ServerMethod::SigTerm(int)
    depth. */
 bool ServerMethod::Fetch(FetchItem *)
 {
-   if (Server == 0)
+   if (Server == nullptr || QueueBack == nullptr)
       return true;
 
-   // Queue the requests
-   int Depth = -1;
-   for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth; 
-       I = I->Next, Depth++)
-   {
-      if (Depth >= 0)
-      {
-        // If pipelining is disabled, we only queue 1 request
-        if (Server->Pipeline == false)
-           break;
-        // if we have no hashes, do at most one such request
-        // as we can't fixup pipeling misbehaviors otherwise
-        else if (I->ExpectedHashes.usable() == false)
-           break;
-      }
-      
+   // If pipelining is disabled, we only queue 1 request
+   auto const AllowedDepth = Server->Pipeline ? PipelineDepth : 0;
+   // how deep is our pipeline currently?
+   decltype(PipelineDepth) CurrentDepth = 0;
+   for (FetchItem const *I = Queue; I != QueueBack; I = I->Next)
+      ++CurrentDepth;
+
+   do {
       // Make sure we stick with the same server
-      if (Server->Comp(I->Uri) == false)
+      if (Server->Comp(QueueBack->Uri) == false)
+        break;
+
+      bool const UsableHashes = QueueBack->ExpectedHashes.usable();
+      // if we have no hashes, do at most one such request
+      // as we can't fixup pipelining misbehaviors otherwise
+      if (CurrentDepth != 0 && UsableHashes == false)
         break;
-      if (QueueBack == I)
+
+      if (UsableHashes && FileExists(QueueBack->DestFile))
       {
-        QueueBack = I->Next;
-        SendReq(I);
-        continue;
+        FileFd partial(QueueBack->DestFile, FileFd::ReadOnly);
+        Hashes wehave(QueueBack->ExpectedHashes);
+        if (QueueBack->ExpectedHashes.FileSize() == partial.FileSize())
+        {
+           if (wehave.AddFD(partial) &&
+                 wehave.GetHashStringList() == QueueBack->ExpectedHashes)
+           {
+              FetchResult Res;
+              Res.Filename = QueueBack->DestFile;
+              Res.ResumePoint = QueueBack->ExpectedHashes.FileSize();
+              URIStart(Res);
+              // move item to the start of the queue as URIDone will
+              // always dequeue the first item in the queue
+              if (Queue != QueueBack)
+              {
+                 FetchItem *Prev = Queue;
+                 for (; Prev->Next != QueueBack; Prev = Prev->Next)
+                    /* look for the previous queue item */;
+                 Prev->Next = QueueBack->Next;
+                 QueueBack->Next = Queue;
+                 Queue = QueueBack;
+                 QueueBack = Prev->Next;
+              }
+              Res.TakeHashes(wehave);
+              URIDone(Res);
+              continue;
+           }
+           else
+              RemoveFile("Fetch-Partial", QueueBack->DestFile);
+        }
       }
-   }
-   
+      auto const Tmp = QueueBack;
+      QueueBack = QueueBack->Next;
+      SendReq(Tmp);
+      ++CurrentDepth;
+   } while (CurrentDepth <= AllowedDepth && QueueBack != nullptr);
+
    return true;
 }
                                                                        /*}}}*/
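
Since URIDone() always dequeues the first item in the queue, an item that can
be completed straight from partial/ while sitting deeper in the pipeline first
has to be moved to the head of the singly-linked queue. A minimal sketch of
that pointer surgery, using a hypothetical node type in place of apt's
FetchItem:

// hypothetical stand-in for apt's FetchItem queue node
struct Item { Item *Next = nullptr; };

static void MoveToFront(Item *&Queue, Item *&QueueBack)
{
   if (Queue == QueueBack)
      return;                     // already at the head, nothing to do
   Item *Prev = Queue;
   while (Prev->Next != QueueBack)
      Prev = Prev->Next;          // find the predecessor of QueueBack
   Prev->Next = QueueBack->Next;  // unlink QueueBack from its position ...
   QueueBack->Next = Queue;       // ... and splice it in before the old head
   Queue = QueueBack;
   QueueBack = Prev->Next;        // pipelining continues after the old spot
}
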
index 213169a98375e4373654aa31b282c0d6d4602231..2a78e619462b284c12c11a8362ab6392d45d37f3 100644 (file)
@@ -1505,6 +1505,14 @@ testmarkedmanual() {
        msggroup
 }
 
+catfile() {
+       if [ "${1##*.}" = 'deb' ]; then
+               stat >&2 "$1" || true
+               file >&2 "$1" || true
+       else
+               cat >&2 "$1" || true
+       fi
+}
 msgfailoutput() {
        msgreportheader 'msgfailoutput'
        local MSG="$1"
@@ -1514,7 +1522,7 @@ msgfailoutput() {
                echo >&2
                while [ -n "$2" ]; do shift; done
                echo "#### Complete file: $1 ####"
-               cat >&2 "$1" || true
+               catfile "$1"
                echo '#### grep output ####'
        elif [ "$1" = 'test' ]; then
                echo >&2
@@ -1529,7 +1537,7 @@ msgfailoutput() {
                                        ls >&2 "$2" || true
                                elif test -e "$2"; then
                                        echo "#### Complete file: $2 ####"
-                                       cat >&2 "$2" || true
+                                       catfile "$2"
                                fi
                        fi
                }
@@ -1543,12 +1551,12 @@ msgfailoutput() {
                echo >&2
                while [ -n "$2" ]; do
                        echo "#### Complete file: $2 ####"
-                       cat >&2 "$2" || true
+                       catfile "$2"
                        shift
                done
                echo '#### cmp output ####'
        fi
-       cat >&2 "$OUTPUT"
+       catfile "$OUTPUT"
        msgfail "$MSG"
 }
 
index 884838038906a9230d9604baf2e382daad4a4c38..d8154b2837afa49a828188966936385d0490c214 100755 (executable)
@@ -44,7 +44,8 @@ testrun() {
        signreleasefiles
 
        onehashbroken() {
-               testfailure aptget update
+               rm -rf rootdir/var/lib/apt/lists/partial
+               testfailure aptget update -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1
                # each file generates two messages with this string
                testequal '2' grep --count 'Hash Sum mismatch' rootdir/tmp/testfailure.output
                testfileequal "$1" "$(listcurrentlistsdirectory)"
index f219b9193e8804a70152a8a34277e624f4392157..9c7946083d3788b7d9616cee65ebb7d0923489f0 100755 (executable)
@@ -30,7 +30,7 @@ echo 'hacked' > aptarchive/hacked-i386
 compressfile aptarchive/hacked-i386
 
 wasmergeused() {
-       testsuccess apt update "$@" -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 -o Debug::pkgAcquire=1
+       testsuccess apt update "$@"
 
        msgtest 'No intermediate patch files' 'still exist'
        local EDS="$(find rootdir/var/lib/apt/lists -name '*.ed' -o -name '*.ed.*')"
@@ -224,6 +224,14 @@ SHA256-Download:
        testsuccessequal "$(cat "${PKGFILE}")
 " aptcache show apt oldstuff
 
+       # we reuse the entire state of the previous test here
+       msgmsg "Testcase: good files from previous fails are picked up from partial: $*"
+       wasmergeused "$@"
+       testfailure grep '^GET /Packages.diff/Index HTTP/1.1' rootdir/tmp/testsuccess.output
+       testnopackage oldstuff
+       testsuccessequal "$(cat Packages-future)
+" aptcache show apt newstuff futurestuff
+
        # we reuse the archive state of the previous test here
        msgmsg "Testcase: downloading a patch fails, but successful fallback: $*"
        rm -rf rootdir/var/lib/apt/lists
@@ -294,7 +302,7 @@ SHA256-Download:
  $(sha256sum "${PATCHFILE}.gz" | cut -d' ' -f 1) $(stat -c%s "${PATCHFILE}.gz")000 $(basename "${PATCHFILE}.gz")" > "$PATCHINDEX"
        generatereleasefiles '+1hour'
        signreleasefiles
-       testsuccess apt update -o Debug::pkgAcquire::Diffs=1 "$@"
+       testsuccess apt update "$@"
        cp -f rootdir/tmp/testsuccess.output rootdir/tmp/aptupdate.output
        testsuccess grep 'bytes (Limit is' rootdir/tmp/aptupdate.output
        testnopackage oldstuff
@@ -303,6 +311,9 @@ SHA256-Download:
 }
 echo 'Debug::pkgAcquire::Diffs "true";
 Debug::Acquire::Transaction "true";
+Debug::pkgAcquire::Worker "true";
+Debug::Acquire::http "true";
+Debug::pkgAcquire "true";
 Debug::pkgAcquire::rred "true";' > rootdir/etc/apt/apt.conf.d/rreddebug.conf
 
 testcase() {