git.saurik.com Git - apt.git/commitdiff
implement POC client-side merging of pdiffs via apt-file
author David Kalnischkies <david@kalnischkies.de>
Fri, 6 Dec 2013 11:17:48 +0000 (12:17 +0100)
committer David Kalnischkies <david@kalnischkies.de>
Fri, 13 Dec 2013 10:59:49 +0000 (11:59 +0100)
The idea of pdiffs is to avoid downloading the whole file by patching the
existing index. This works very well, but it becomes slow if a lot of
patches need to be applied to reconstruct an up-to-date index, and in
recent years more and more dinstall (or similar) runs are executed,
creating more and more pdiffs in the same amount of time, so pdiffs
became less useful.
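
For illustration, a single pdiff is just a gzipped ed-style script from the
old index to the new one, built roughly the way the integration test below
does it (file names here are placeholders):

  # generate an ed-style patch describing old -> new
  PATCHFILE="aptarchive/Packages.diff/$(date +%Y-%m-%d-%H%M.%S)"
  diff -e Packages.old Packages.new > "$PATCHFILE" || true
  # clients fetch the gzipped variant referenced from Packages.diff/Index
  gzip -c "$PATCHFILE" > "$PATCHFILE.gz"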

The solution is simple: reduce the number of patches (which are very
small) that need to be applied on top of the index we have available
(which is usually pretty big).

This can be done in two ways: either the patches are merged on the
server side so that the client has to download only one patch, or all
patches are downloaded and merged on the client side.

The first needs a client which works one step at a time and can also
skip patches if needed (APT has supported this for a long time now).

The latter is implemented by this commit, but it depends on the server
NOT merging the patches and on the patches being in a strict order in
which no patch is skipped.

This is traditionally the case for dak, but other repository creators
support merging – e.g. reprepro (which helpfully adds a flag indicating
that the patches are merged). To support both, or even mixes of the two,
a client needs more information, which isn't available for now.
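
As a sketch only (derived from the check added below; exact field placement
may differ), a server-merged Packages.diff/Index as written by reprepro would
carry the flag like this, telling the client not to attempt its own merge:

  SHA1-Current: <sha1 of current Packages> <size>
  X-Patch-Precedence: merged
  SHA1-History:
   <sha1 of old Packages> <size> <patch name>
  SHA1-Patches:
   <sha1 of patch> <size> <patch name>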

This POC uses the external diffindex-rred included in apt-file to
do the heavy lifting of merging & applying all patches in one pass,
so to test this feature apt-file needs to be installed.
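
A minimal way to try the POC against a pdiff-enabled mirror, using the
configuration options that the test below exercises (the apt.conf.d file
name is just an example):

  # diffindex-rred is shipped by apt-file
  apt-get install apt-file
  # client-side merging defaults to on; force it and enable debug output
  apt-get update -o Acquire::PDiffs::Merge=1 \
                 -o Debug::pkgAcquire::Diffs=1 -o Debug::pkgAcquire::rred=1
  # the merger binary can be pointed elsewhere if needed
  echo 'Dir::Bin::rred "/usr/bin/diffindex-rred";' > /etc/apt/apt.conf.d/99rred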

apt-pkg/acquire-item.cc
apt-pkg/acquire-item.h
methods/rred.cc
test/integration/test-pdiff-usage

index 009531c2e08a0421eedbb7ee116cbfb774fc5a91..b5b9577ef829eb4975bf0f7dfe92a6d5d407505c 100644 (file)
@@ -498,14 +498,42 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)                /*{{{*/
       }
 
       // we have something, queue the next diff
-      if(found) 
+      if(found)
       {
         // queue the diffs
         string::size_type const last_space = Description.rfind(" ");
         if(last_space != string::npos)
            Description.erase(last_space, Description.size()-last_space);
-        new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
-                             ExpectedHash, ServerSha1, available_patches);
+
+        /* decide if we should download patches one by one or in one go:
+           The first is good if the server merges patches, but many don't, so
+           client-based merging can be attempted, in which case the second is better.
+           "bad things" will happen if patches are merged on the server,
+           but client-side merging is attempted as well */
+        bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
+        if (pdiff_merge == true)
+        {
+           // this perl script is provided by apt-file
+           pdiff_merge = FileExists(_config->FindFile("Dir::Bin::rred", "/usr/bin/diffindex-rred"));
+           if (pdiff_merge == true)
+           {
+              // reprepro adds this flag if it has merged patches on the server
+              std::string const precedence = Tags.FindS("X-Patch-Precedence");
+              pdiff_merge = (precedence != "merged");
+           }
+        }
+
+        if (pdiff_merge == false)
+           new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
+                 ExpectedHash, ServerSha1, available_patches);
+        else
+        {
+           std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
+           for(size_t i = 0; i < available_patches.size(); ++i)
+              (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, RealURI, Description, Desc.ShortDesc, ExpectedHash,
+                    available_patches[i], diffs);
+        }
+
         Complete = false;
         Status = StatDone;
         Dequeue();
@@ -754,6 +782,123 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Has
    }
 }
                                                                        /*}}}*/
+// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor                        /*{{{*/
+pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
+                                  string const &URI, string const &URIDesc,
+                                  string const &ShortDesc, HashString const &ExpectedHash,
+                                  DiffInfo const &patch,
+                                  std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
+   : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+     patch(patch),allPatches(allPatches), State(StateFetchDiff)
+{
+
+   DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+   DestFile += URItoFileName(URI);
+
+   Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
+
+   Description = URIDesc;
+   Desc.Owner = this;
+   Desc.ShortDesc = ShortDesc;
+
+   Desc.URI = string(RealURI) + ".diff/" + patch.file + ".gz";
+   Desc.Description = Description + " " + patch.file + string(".pdiff");
+   DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+   DestFile += URItoFileName(RealURI + ".diff/" + patch.file);
+
+   if(Debug)
+      std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
+
+   QueueURI(Desc);
+}
+                                                                       /*}}}*/
+void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
+{
+   if(Debug)
+      std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
+   Complete = false;
+   Status = StatDone;
+   Dequeue();
+
+   // check if we are the first to fail, otherwise we are done here
+   State = StateDoneDiff;
+   for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+        I != allPatches->end(); ++I)
+      if ((*I)->State == StateErrorDiff)
+        return;
+
+   // first failure means we should fallback
+   State = StateErrorDiff;
+   std::clog << "Falling back to normal index file acquire" << std::endl;
+   new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
+                  ExpectedHash);
+}
+                                                                       /*}}}*/
+void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string Md5Hash,        /*{{{*/
+                           pkgAcquire::MethodConfig *Cnf)
+{
+   if(Debug)
+      std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
+
+   Item::Done(Message,Size,Md5Hash,Cnf);
+
+   string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+
+   if (State == StateFetchDiff)
+   {
+      // rred expects the patch as $FinalFile.ed.$patchname.gz
+      Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz");
+
+      // check if this is the last completed diff
+      State = StateDoneDiff;
+      for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+           I != allPatches->end(); ++I)
+        if ((*I)->State != StateDoneDiff)
+        {
+           if(Debug)
+              std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl;
+           return;
+        }
+
+      // this is the last completed diff, so we are ready to apply now
+      State = StateApplyDiff;
+
+      if(Debug)
+        std::clog << "Sending to rred method: " << FinalFile << std::endl;
+
+      Local = true;
+      Desc.URI = "rred:" + FinalFile;
+      QueueURI(Desc);
+      Mode = "rred";
+      return;
+   }
+   // success in download/apply all diffs, clean up
+   else if (State == StateApplyDiff)
+   {
+      // see if we really got the expected file
+      if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
+      {
+        RenameOnError(HashSumMismatch);
+        return;
+      }
+
+      // move the result into place
+      if(Debug)
+        std::clog << "Moving patched file in place: " << std::endl
+                  << DestFile << " -> " << FinalFile << std::endl;
+      Rename(DestFile, FinalFile);
+      chmod(FinalFile.c_str(), 0644);
+
+      // otherwise lists cleanup will eat the file
+      DestFile = FinalFile;
+
+      // all set and done
+      Complete = true;
+      if(Debug)
+        std::clog << "allDone: " << DestFile << "\n" << std::endl;
+   }
+}
+                                                                       /*}}}*/
 // AcqIndex::AcqIndex - Constructor                                    /*{{{*/
 // ---------------------------------------------------------------------
 /* The package file is added to the queue and a second class is 
index 6b4f737089fa240c73ed0008a6a71606324c8512..5a1c7979ccc5fed3bac11822050b6a791d9c7f25 100644 (file)
@@ -429,7 +429,105 @@ class pkgAcqDiffIndex : public pkgAcquire::Item
                   std::string ShortDesc, HashString ExpectedHash);
 };
                                                                        /*}}}*/
-/** \brief An item that is responsible for fetching all the patches    {{{
+/** \brief An item that is responsible for fetching client-merge patches {{{
+ *  that need to be applied to a given package index file.
+ *
+ *  Instead of downloading and applying each patch one by one like its
+ *  sister #pkgAcqIndexDiffs, this class will download all patches at once
+ *  and then call rred once with all the downloaded patches. Rred will
+ *  merge and apply them in one go, which should be a lot faster – but is
+ *  incompatible with server-based merging of patches as e.g. reprepro does it.
+ *
+ *  \sa pkgAcqDiffIndex, pkgAcqIndex
+ */
+class pkgAcqIndexMergeDiffs : public pkgAcquire::Item
+{
+   protected:
+
+   /** \brief If \b true, debugging output will be written to
+    *  std::clog.
+    */
+   bool Debug;
+
+   /** \brief description of the item that is currently being
+    *  downloaded.
+    */
+   pkgAcquire::ItemDesc Desc;
+
+   /** \brief URI of the package index file that is being
+    *  reconstructed.
+    */
+   std::string RealURI;
+
+   /** \brief HashSum of the package index file that is being
+    *  reconstructed.
+    */
+   HashString ExpectedHash;
+
+   /** \brief description of the file being downloaded. */
+   std::string Description;
+
+   /** \brief information about the current patch */
+   struct DiffInfo const patch;
+
+   /** \brief list of all download items for the patches */
+   std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches;
+
+   /** The current status of this patch. */
+   enum DiffState
+   {
+      /** \brief The diff is currently being fetched. */
+      StateFetchDiff,
+
+      /** \brief The diff is currently being applied. */
+      StateApplyDiff,
+
+      /** \brief the work with this diff is done */
+      StateDoneDiff,
+
+      /** \brief something bad happened and fallback was triggered */
+      StateErrorDiff
+   } State;
+
+   public:
+   /** \brief Called when the patch file failed to be downloaded.
+    *
+    *  This method will fall back to downloading the whole index file
+    *  outright; its arguments are ignored.
+    */
+   virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
+
+   virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+                    pkgAcquire::MethodConfig *Cnf);
+   virtual std::string DescURI() {return RealURI + "Index";};
+
+   /** \brief Create an index merge-diff item.
+    *
+    *  \param Owner The pkgAcquire object that owns this item.
+    *
+    *  \param URI The URI of the package index file being
+    *  reconstructed.
+    *
+    *  \param URIDesc A long description of this item.
+    *
+    *  \param ShortDesc A brief description of this item.
+    *
+    *  \param ExpectedHash The expected md5sum of the completely
+    *  reconstructed package index file; the index file will be tested
+    *  against this value when it is entirely reconstructed.
+    *
+    *  \param patch contains info about the patch this item is supposed
+    *  to download, which was read from the index
+    *
+    *  \param allPatches contains all related items so that each item can
+    *  check if it was the last one to complete the download step
+    */
+   pkgAcqIndexMergeDiffs(pkgAcquire *Owner,std::string const &URI,std::string const &URIDesc,
+                   std::string const &ShortDesc, HashString const &ExpectedHash,
+                   DiffInfo const &patch, std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches);
+};
+                                                                       /*}}}*/
+/** \brief An item that is responsible for fetching server-merge patches {{{
  *  that need to be applied to a given package index file.
  *
  *  After downloading and applying a single patch, this item will
index 7c65f8f9223a30d15d552d27d7dfb4ebdc90a852..bea8ed263c100931eda64494f38d125448b3bb35 100644 (file)
@@ -11,6 +11,8 @@
 
 #include <sys/stat.h>
 #include <sys/uio.h>
+#include <sys/types.h>
+#include <fcntl.h>
 #include <unistd.h>
 #include <utime.h>
 #include <stdio.h>
@@ -465,50 +467,112 @@ bool RredMethod::Fetch(FetchItem *Itm)                                           /*{{{*/
    } else
       URIStart(Res);
 
-   if (Debug == true) 
-      std::clog << "Patching " << Path << " with " << Path 
-         << ".ed and putting result into " << Itm->DestFile << std::endl;
-   // Open the source and destination files (the d'tor of FileFd will do 
-   // the cleanup/closing of the fds)
-   FileFd From(Path,FileFd::ReadOnly);
-   FileFd Patch(Path+".ed",FileFd::ReadOnly, FileFd::Gzip);
-   FileFd To(Itm->DestFile,FileFd::WriteAtomic);   
-   To.EraseOnFailure();
-   if (_error->PendingError() == true)
-      return false;
-   
+   std::string lastPatchName;
    Hashes Hash;
-   // now do the actual patching
-   State const result = patchMMap(Patch, From, To, &Hash);
-   if (result == MMAP_FAILED) {
-      // retry with patchFile
-      Patch.Seek(0);
-      From.Seek(0);
-      To.Open(Itm->DestFile,FileFd::WriteAtomic);
+
+   // check for a single ed file
+   if (FileExists(Path+".ed") == true)
+   {
+      if (Debug == true)
+        std::clog << "Patching " << Path << " with " << Path
+           << ".ed and putting result into " << Itm->DestFile << std::endl;
+
+      // Open the source and destination files
+      lastPatchName = Path + ".ed";
+      FileFd From(Path,FileFd::ReadOnly);
+      FileFd To(Itm->DestFile,FileFd::WriteAtomic);
+      To.EraseOnFailure();
+      FileFd Patch(lastPatchName, FileFd::ReadOnly, FileFd::Gzip);
       if (_error->PendingError() == true)
-         return false;
-      if (patchFile(Patch, From, To, &Hash) != ED_OK) {
-        return _error->WarningE("rred", _("Could not patch %s with mmap and with file operation usage - the patch seems to be corrupt."), Path.c_str());
+        return false;
+
+      // now do the actual patching
+      State const result = patchMMap(Patch, From, To, &Hash);
+      if (result == MMAP_FAILED) {
+        // retry with patchFile
+        Patch.Seek(0);
+        From.Seek(0);
+        To.Open(Itm->DestFile,FileFd::WriteAtomic);
+        if (_error->PendingError() == true)
+           return false;
+        if (patchFile(Patch, From, To, &Hash) != ED_OK) {
+           return _error->WarningE("rred", _("Could not patch %s with mmap and with file operation usage - the patch seems to be corrupt."), Path.c_str());
+        } else if (Debug == true) {
+           std::clog << "rred: finished file patching of " << Path  << " after mmap failed." << std::endl;
+        }
+      } else if (result != ED_OK) {
+        return _error->Errno("rred", _("Could not patch %s with mmap (but no mmap specific fail) - the patch seems to be corrupt."), Path.c_str());
       } else if (Debug == true) {
-        std::clog << "rred: finished file patching of " << Path  << " after mmap failed." << std::endl;
+        std::clog << "rred: finished mmap patching of " << Path << std::endl;
       }
-   } else if (result != ED_OK) {
-      return _error->Errno("rred", _("Could not patch %s with mmap (but no mmap specific fail) - the patch seems to be corrupt."), Path.c_str());
-   } else if (Debug == true) {
-      std::clog << "rred: finished mmap patching of " << Path << std::endl;
+
+      // write out the result
+      From.Close();
+      Patch.Close();
+      To.Close();
    }
+   else
+   {
+      if (Debug == true)
+        std::clog << "Patching " << Path << " with all " << Path << ".ed.*.gz files and "
+           << "putting result into " << Itm->DestFile << std::endl;
+
+      int From = open(Path.c_str(), O_RDONLY);
+      unlink(Itm->DestFile.c_str());
+      int To = open(Itm->DestFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, 0644);
+      SetCloseExec(From, false);
+      SetCloseExec(To, false);
+
+      _error->PushToStack();
+      std::vector<std::string> patches = GetListOfFilesInDir(flNotFile(Path), "gz", true, false);
+      _error->RevertToStack();
+
+      std::string externalrred = _config->Find("Dir::Bin::rred", "/usr/bin/diffindex-rred");
+      std::vector<const char *> Args;
+      Args.reserve(22);
+      Args.push_back(externalrred.c_str());
+
+      std::string const baseName = Path + ".ed.";
+      for (std::vector<std::string>::const_iterator p = patches.begin();
+           p != patches.end(); ++p)
+        if (p->compare(0, baseName.length(), baseName) == 0)
+           Args.push_back(p->c_str());
+
+      Args.push_back(NULL);
+
+      pid_t Patcher = ExecFork();
+      if (Patcher == 0) {
+        dup2(From, STDIN_FILENO);
+        dup2(To, STDOUT_FILENO);
+
+        execvp(Args[0], (char **) &Args[0]);
+        std::cerr << "Failed to execute patcher " << Args[0] << "!" << std::endl;
+        _exit(100);
+      }
+      // last is NULL, so the one before is the last patch
+      lastPatchName = Args[Args.size() - 2];
 
-   // write out the result
-   From.Close();
-   Patch.Close();
-   To.Close();
+      if (ExecWait(Patcher, "rred") == false)
+        return _error->Errno("rred", "Patching via external rred failed");
+
+      close(From);
+      close(To);
+
+      struct stat Buf;
+      if (stat(Itm->DestFile.c_str(), &Buf) != 0)
+        return _error->Errno("stat",_("Failed to stat"));
+
+      To = open(Path.c_str(), O_RDONLY);
+      Hash.AddFD(To, Buf.st_size);
+      close(To);
+   }
 
    /* Transfer the modification times from the patch file
       to be able to see in which state the file should be
       and use the access time from the "old" file */
    struct stat BufBase, BufPatch;
    if (stat(Path.c_str(),&BufBase) != 0 ||
-       stat(std::string(Path+".ed").c_str(),&BufPatch) != 0)
+        stat(lastPatchName.c_str(), &BufPatch) != 0)
       return _error->Errno("stat",_("Failed to stat"));
 
    struct utimbuf TimeBuf;
index ac0563b7f8eed739a130f793741477bef4f3bca2..5a06e0ccbffe7c2923b6eaf29aef7483540af561 100755 (executable)
@@ -5,39 +5,146 @@ TESTDIR=$(readlink -f $(dirname $0))
 . $TESTDIR/framework
 
 setupenvironment
-configarchitecture "i386"
+configarchitecture 'i386'
 
 buildaptarchive
 setupflataptarchive
 changetowebserver
-signreleasefiles
-testsuccess aptget update
 
-testnopackage newstuff
 PKGFILE="${TESTDIR}/$(echo "$(basename $0)" | sed 's#^test-#Packages-#')"
-testequal "$(cat ${PKGFILE})
+
+echo '#!/bin/sh
+touch merge-was-used
+/usr/bin/diffindex-rred "$@"' > extrred
+chmod +x extrred
+echo 'Dir::Bin::rred "./extrred";' > rootdir/etc/apt/apt.conf.d/99rred
+
+wasmergeused() {
+       testsuccess aptget update "$@"
+       msgtest 'Check if the right pdiff merger was used'
+       if [ -e ./merge-was-used ]; then
+               rm -f ./merge-was-used
+               if echo "$*" | grep -q -- '-o Acquire::PDiffs::Merge=1'; then
+                       msgpass
+               else
+                       msgfail "Merge shouldn't have been used, but was"
+               fi
+       elif echo "$*" | grep -q -- '-o Acquire::PDiffs::Merge=1'; then
+               msgfail "Merge should have been used, but wasn't"
+       else
+               msgpass
+       fi
+}
+
+testrun() {
+       # setup the base
+       find aptarchive -name 'Packages*' -type f -delete
+       cp ${PKGFILE} aptarchive/Packages
+       compressfile 'aptarchive/Packages'
+       generatereleasefiles
+       signreleasefiles
+       rm -rf aptarchive/Packages.diff rootdir/var/lib/apt/lists
+       testsuccess aptget update "$@"
+       cp -a rootdir/var/lib/apt/lists rootdir/var/lib/apt/lists-bak
+       testnopackage newstuff
+       testequal "$(cat ${PKGFILE})
 " aptcache show apt oldstuff
 
-cp ${PKGFILE}-new aptarchive/Packages
-compressfile 'aptarchive/Packages'
-rm -rf aptarchive/Packages.diff
-mkdir -p aptarchive/Packages.diff
-PATCHFILE="aptarchive/Packages.diff/$(date +%Y-%m-%d-%H%M.%S)"
-diff -e ${PKGFILE} ${PKGFILE}-new > ${PATCHFILE} || true
-cat $PATCHFILE | gzip > ${PATCHFILE}.gz
-PATCHINDEX="aptarchive/Packages.diff/Index"
-echo "SHA1-Current: $(sha1sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new)
+       # apply with one patch
+       cp ${PKGFILE}-new aptarchive/Packages
+       compressfile 'aptarchive/Packages'
+       mkdir -p aptarchive/Packages.diff
+       PATCHFILE="aptarchive/Packages.diff/$(date +%Y-%m-%d-%H%M.%S)"
+       diff -e ${PKGFILE} ${PKGFILE}-new > ${PATCHFILE} || true
+       cat $PATCHFILE | gzip > ${PATCHFILE}.gz
+       PATCHINDEX='aptarchive/Packages.diff/Index'
+       echo "SHA1-Current: $(sha1sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new)
 SHA1-History:
  9f4148e06d7faa37062994ff10d0c842d7017513 33053002 2010-08-18-2013.28
  $(sha1sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
 SHA1-Patches:
  7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-0814.28
  $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
-generatereleasefiles '+1hour'
-signreleasefiles
-find aptarchive -name 'Packages*' -type f -delete
-testsuccess aptget update
+       generatereleasefiles '+1hour'
+       signreleasefiles
+       find aptarchive -name 'Packages*' -type f -delete
+       wasmergeused "$@"
+       testnopackage oldstuff
+       testequal "$(cat ${PKGFILE}-new)
+" aptcache show apt newstuff
 
-testnopackage oldstuff
-testequal "$(cat ${PKGFILE}-new)
+       # index is already up-to-date
+       find rootdir/var/lib/apt/lists -name '*.IndexDiff' -type f -delete
+       testsuccess aptget update "$@"
+       testequal "$(cat ${PKGFILE}-new)
 " aptcache show apt newstuff
+
+       # apply with two patches
+       cp ${PKGFILE}-new aptarchive/Packages
+       echo '
+Package: futurestuff
+Version: 1.0
+Architecture: i386
+Maintainer: Joe Sixpack <joe@example.org>
+Installed-Size: 202
+Filename: pool/futurestuff_1.0_i386.deb
+Size: 202200
+MD5sum: 311aeeaaae5ba33aff1ceaf3e1f76671
+SHA1: 3c695e028f7a1ae324deeaae5ba332desa81088c
+SHA256: b46fd154615edaae5ba33c56a5cc0e7deaef23e2da3e4f129727fd660f28f050
+Description: some cool and shiny future stuff
+ This package will appear in the next next mirror update
+Description-md5: d5f89fbbc2ce34c455dfee9b67d82b6b' >> aptarchive/Packages
+
+       compressfile 'aptarchive/Packages'
+       PATCHFILE2="aptarchive/Packages.diff/$(date -d 'now + 1hour' '+%Y-%m-%d-%H%M.%S')"
+       diff -e ${PKGFILE}-new aptarchive/Packages > ${PATCHFILE2} || true
+       cat $PATCHFILE2 | gzip > ${PATCHFILE2}.gz
+       echo "SHA1-Current: $(sha1sum aptarchive/Packages | cut -d' ' -f 1) $(stat -c%s aptarchive/Packages)
+SHA1-History:
+ 9f4148e06d7faa37062994ff10d0c842d7017513 33053002 2010-08-18-2013.28
+ $(sha1sum ${PKGFILE} | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}) $(basename ${PATCHFILE})
+ $(sha1sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new) $(basename ${PATCHFILE2})
+SHA1-Patches:
+ 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-0814.28
+ $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)
+ $(sha1sum ${PATCHFILE2} | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}) $(basename ${PATCHFILE2})" > $PATCHINDEX
+       generatereleasefiles '+2hour'
+       signreleasefiles
+       cp -a aptarchive/Packages Packages-future
+       find aptarchive -name 'Packages*' -type f -delete
+       rm -rf rootdir/var/lib/apt/lists
+       cp -a rootdir/var/lib/apt/lists-bak rootdir/var/lib/apt/lists
+       wasmergeused "$@"
+       testnopackage oldstuff
+       testequal "$(cat Packages-future)
+" aptcache show apt newstuff futurestuff
+
+       # patch applying fails, but successful fallback
+       rm -rf rootdir/var/lib/apt/lists
+       cp -a rootdir/var/lib/apt/lists-bak rootdir/var/lib/apt/lists
+       cp ${PKGFILE}-new aptarchive/Packages
+       compressfile 'aptarchive/Packages'
+       mkdir -p aptarchive/Packages.diff
+       PATCHFILE="aptarchive/Packages.diff/$(date +%Y-%m-%d-%H%M.%S)"
+       diff -e ${PKGFILE} ${PKGFILE}-new > ${PATCHFILE} || true
+       PATCHINDEX='aptarchive/Packages.diff/Index'
+       echo "SHA1-Current: $(sha1sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new)
+SHA1-History:
+ 9f4148e06d7faa37062994ff10d0c842d7017513 33053002 2010-08-18-2013.28
+ $(sha1sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
+SHA1-Patches:
+ 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-0814.28
+ $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
+       echo 'I am Mallory and I change files' >> $PATCHFILE
+       cat $PATCHFILE | gzip > ${PATCHFILE}.gz
+       generatereleasefiles '+1hour'
+       signreleasefiles
+       testsuccess aptget update "$@"
+       testnopackage oldstuff
+       testequal "$(cat ${PKGFILE}-new)
+" aptcache show apt newstuff
+}
+
+testrun -o Debug::pkgAcquire::Diffs=1 -o Debug::pkgAcquire::rred=1 -o Acquire::PDiffs::Merge=0
+testrun -o Debug::pkgAcquire::Diffs=1 -o Debug::pkgAcquire::rred=1 -o Acquire::PDiffs::Merge=1