set the correct item FileSize in by-hash case
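The pkgAcqIndex::Init hunks near the end of this diff move the FileSize assignment so that the expected size is read from the hashes only after Init has settled on the compression extension and on the final download URI, which may be a by-hash one. A minimal standalone sketch of that ordering, using simplified stand-in types rather than apt's real classes:

    #include <iostream>
    #include <string>

    // Simplified stand-ins for illustration only, not apt's real types.
    struct ItemDesc { std::string URI; unsigned long long FileSize = 0; };

    ItemDesc InitIndexDownload(std::string const &dirURI, std::string const &filename,
                               bool useByHash, std::string const &sha256,
                               unsigned long long expectedSize)
    {
       ItemDesc d;
       // first decide what is actually fetched: the plain file or its by-hash variant
       d.URI = useByHash ? dirURI + "/by-hash/SHA256/" + sha256
                         : dirURI + "/" + filename;
       // only now record the expected size, so progress reporting matches the download
       d.FileSize = expectedSize;
       return d;
    }

    int main()
    {
       auto const d = InitIndexDownload("http://example.org/dists/unstable/main/binary-amd64",
                                        "Packages.xz", true, "0123abcd", 1261772);
       std::cout << d.URI << " (" << d.FileSize << " bytes expected)" << std::endl;
    }
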
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index a5a3b564b5bcbad6f61786ee8247698313a92519..38d6c72dd8ca4b518d8c42185b4b5482d81efae5 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -175,41 +175,101 @@ static void ReportMirrorFailureToCentral(pkgAcquire::Item const &I, std::string
 }
                                                                        /*}}}*/
 
-static bool MessageInsecureRepository(bool const isError, std::string const &msg)/*{{{*/
+static APT_NONNULL(2) bool MessageInsecureRepository(bool const isError, char const * const msg, std::string const &repo)/*{{{*/
 {
+   std::string m;
+   strprintf(m, msg, repo.c_str());
    if (isError)
    {
-      _error->Error("%s", msg.c_str());
+      _error->Error("%s", m.c_str());
       _error->Notice("%s", _("Updating from such a repository can't be done securely, and is therefore disabled by default."));
    }
    else
    {
-      _error->Warning("%s", msg.c_str());
+      _error->Warning("%s", m.c_str());
       _error->Notice("%s", _("Data from such a repository can't be authenticated and is therefore potentially dangerous to use."));
    }
    _error->Notice("%s", _("See apt-secure(8) manpage for repository creation and user configuration details."));
    return false;
 }
-static bool APT_NONNULL(2) MessageInsecureRepository(bool const isError, char const * const msg, std::string const &repo)
+                                                                       /*}}}*/
+// AllowInsecureRepositories                                           /*{{{*/
+enum class InsecureType { UNSIGNED, WEAK, NORELEASE };
+static bool TargetIsAllowedToBe(IndexTarget const &Target, InsecureType const type)
 {
-   std::string m;
-   strprintf(m, msg, repo.c_str());
-   return MessageInsecureRepository(isError, m);
+   if (_config->FindB("Acquire::AllowInsecureRepositories"))
+      return true;
+
+   if (Target.OptionBool(IndexTarget::ALLOW_INSECURE))
+      return true;
+
+   switch (type)
+   {
+      case InsecureType::UNSIGNED: break;
+      case InsecureType::NORELEASE: break;
+      case InsecureType::WEAK:
+        if (_config->FindB("Acquire::AllowWeakRepositories"))
+           return true;
+        if (Target.OptionBool(IndexTarget::ALLOW_WEAK))
+           return true;
+        break;
+   }
+   return false;
 }
-                                                                       /*}}}*/
-static bool APT_NONNULL(1, 3, 4, 5) AllowInsecureRepositories(char const * const msg, std::string const &repo,/*{{{*/
+static bool APT_NONNULL(3, 4, 5) AllowInsecureRepositories(InsecureType const msg, std::string const &repo,
       metaIndex const * const MetaIndexParser, pkgAcqMetaClearSig * const TransactionManager, pkgAcquire::Item * const I)
 {
+   // we skip weak downgrades as it's unlikely that a repository really gets weaker –
+   // it's more realistic that apt got pickier in a newer version
+   if (msg != InsecureType::WEAK)
+   {
+      std::string const FinalInRelease = TransactionManager->GetFinalFilename();
+      std::string const FinalReleasegpg = FinalInRelease.substr(0, FinalInRelease.length() - strlen("InRelease")) + "Release.gpg";
+      if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease))
+      {
+        char const * msgstr = nullptr;
+        switch (msg)
+        {
+           case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is no longer signed."); break;
+           case InsecureType::NORELEASE: msgstr = _("The repository '%s' no longer has a Release file."); break;
+           case InsecureType::WEAK: /* unreachable */ break;
+        }
+        if (_config->FindB("Acquire::AllowDowngradeToInsecureRepositories") ||
+              TransactionManager->Target.OptionBool(IndexTarget::ALLOW_DOWNGRADE_TO_INSECURE))
+        {
+           // meh, the user wants to take risks (we still mark the packages
+           // from this repository as unauthenticated)
+           _error->Warning(msgstr, repo.c_str());
+           _error->Warning(_("This is normally not allowed, but the option "
+                    "Acquire::AllowDowngradeToInsecureRepositories was "
+                    "given to override it."));
+        } else {
+           MessageInsecureRepository(true, msgstr, repo);
+           TransactionManager->AbortTransaction();
+           I->Status = pkgAcquire::Item::StatError;
+           return false;
+        }
+      }
+   }
+
    if(MetaIndexParser->GetTrusted() == metaIndex::TRI_YES)
       return true;
 
-   if (_config->FindB("Acquire::AllowInsecureRepositories") == true)
+   char const * msgstr = nullptr;
+   switch (msg)
+   {
+      case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is not signed."); break;
+      case InsecureType::NORELEASE: msgstr = _("The repository '%s' does not have a Release file."); break;
+      case InsecureType::WEAK: msgstr = _("The repository '%s' provides only weak security information."); break;
+   }
+
+   if (TargetIsAllowedToBe(TransactionManager->Target, msg) == true)
    {
-      MessageInsecureRepository(false, msg, repo);
+      MessageInsecureRepository(false, msgstr, repo);
       return true;
    }
 
-   MessageInsecureRepository(true, msg, repo);
+   MessageInsecureRepository(true, msgstr, repo);
    TransactionManager->AbortTransaction();
    I->Status = pkgAcquire::Item::StatError;
    return false;
@@ -239,7 +299,20 @@ APT_CONST bool pkgAcqTransactionItem::HashesRequired() const
       we can at least trust them for integrity of the download itself.
       Only repositories without a Release file can (obviously) not have
       hashes – and they are very uncommon and strongly discouraged */
-   return TransactionManager->MetaIndexParser->GetLoadedSuccessfully() == metaIndex::TRI_YES;
+   if (TransactionManager->MetaIndexParser->GetLoadedSuccessfully() != metaIndex::TRI_YES)
+      return false;
+   if (TargetIsAllowedToBe(Target, InsecureType::WEAK))
+   {
+      /* If weak hashes are allowed, we check that we have at least some (weak)
+         hashes and then declare hashes not required. That still tips us in the
+        right direction: if hashes exist, they will be used even if not required */
+      auto const hsl = GetExpectedHashes();
+      if (hsl.usable())
+        return true;
+      if (hsl.empty() == false)
+        return false;
+   }
+   return true;
 }
 HashStringList pkgAcqTransactionItem::GetExpectedHashes() const
 {
@@ -337,7 +410,7 @@ bool pkgAcqTransactionItem::QueueURI(pkgAcquire::ItemDesc &Item)
       return false;
    }
    // If we got the InRelease file via a mirror, pick all indexes directly from this mirror, too
-   if (TransactionManager->BaseURI.empty() == false &&
+   if (TransactionManager->BaseURI.empty() == false && UsedMirror.empty() &&
         URI::SiteOnly(Item.URI) != URI::SiteOnly(TransactionManager->BaseURI))
    {
       // this ensures we rewrite only once and only the first step
@@ -612,10 +685,15 @@ class APT_HIDDEN CleanupItem : public pkgAcqTransactionItem               /*{{{*/
                                                                        /*}}}*/
 
 // Acquire::Item::Item - Constructor                                   /*{{{*/
+class pkgAcquire::Item::Private
+{
+public:
+   std::vector<std::string> PastRedirections;
+};
 APT_IGNORE_DEPRECATED_PUSH
 pkgAcquire::Item::Item(pkgAcquire * const owner) :
    FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), Local(false),
-    QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(NULL)
+    QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(new Private())
 {
    Owner->Add(this);
    Status = StatIdle;
@@ -626,6 +704,7 @@ APT_IGNORE_DEPRECATED_POP
 pkgAcquire::Item::~Item()
 {
    Owner->Remove(this);
+   delete d;
 }
                                                                        /*}}}*/
 std::string pkgAcquire::Item::Custom600Headers() const                 /*{{{*/
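The constructor and destructor hunks above turn the previously NULL d pointer of pkgAcquire::Item into a real private implementation object that now carries the PastRedirections list, which appears to let new per-item state be added without touching the class's public layout. A tiny sketch of the same d-pointer idiom with invented names:

    #include <string>
    #include <vector>

    // header side: only a forward declaration is exposed (invented example class)
    class Item
    {
    public:
       Item();
       ~Item();
       void RecordRedirect(std::string const &uri);
    private:
       class Private;
       Private * const d;   // opaque state, extendable without changing this layout
    };

    // implementation side: the actual state lives behind the pointer
    class Item::Private
    {
    public:
       std::vector<std::string> PastRedirections;
    };

    Item::Item() : d(new Private()) {}
    Item::~Item() { delete d; }
    void Item::RecordRedirect(std::string const &uri) { d->PastRedirections.push_back(uri); }

    int main()
    {
       Item i;
       i.RecordRedirect("http://example.org/InRelease");
    }
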
@@ -693,33 +772,51 @@ void pkgAcquire::Item::Failed(string const &Message,pkgAcquire::MethodConfig con
    }
 
    string const FailReason = LookupTag(Message, "FailReason");
-   enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, OTHER } failreason = OTHER;
+   enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, WEAK_HASHSUMS, REDIRECTION_LOOP, OTHER } failreason = OTHER;
    if ( FailReason == "MaximumSizeExceeded")
       failreason = MAXIMUM_SIZE_EXCEEDED;
+   else if ( FailReason == "WeakHashSums")
+      failreason = WEAK_HASHSUMS;
+   else if (FailReason == "RedirectionLoop")
+      failreason = REDIRECTION_LOOP;
    else if (Status == StatAuthError)
       failreason = HASHSUM_MISMATCH;
 
    if(ErrorText.empty())
    {
+      std::ostringstream out;
+      switch (failreason)
+      {
+        case HASHSUM_MISMATCH:
+           out << _("Hash Sum mismatch") << std::endl;
+           break;
+        case WEAK_HASHSUMS:
+           out << _("Insufficient information available to perform this download securely") << std::endl;
+           break;
+        case REDIRECTION_LOOP:
+           out << "Redirection loop encountered" << std::endl;
+           break;
+        case MAXIMUM_SIZE_EXCEEDED:
+           out << LookupTag(Message, "Message") << std::endl;
+           break;
+        case OTHER:
+           out << LookupTag(Message, "Message");
+           break;
+      }
+
       if (Status == StatAuthError)
       {
-        std::ostringstream out;
-        switch (failreason)
-        {
-           case HASHSUM_MISMATCH:
-              out << _("Hash Sum mismatch") << std::endl;
-              break;
-           case MAXIMUM_SIZE_EXCEEDED:
-           case OTHER:
-              out << LookupTag(Message, "Message") << std::endl;
-              break;
-        }
         auto const ExpectedHashes = GetExpectedHashes();
         if (ExpectedHashes.empty() == false)
         {
            out << "Hashes of expected file:" << std::endl;
            for (auto const &hs: ExpectedHashes)
-              out << " - " << hs.toStr() << std::endl;
+           {
+              out << " - " << hs.toStr();
+              if (hs.usable() == false)
+                 out << " [weak]";
+              out << std::endl;
+           }
         }
         if (failreason == HASHSUM_MISMATCH)
         {
@@ -729,20 +826,26 @@ void pkgAcquire::Item::Failed(string const &Message,pkgAcquire::MethodConfig con
               std::string const tagname = std::string(*type) + "-Hash";
               std::string const hashsum = LookupTag(Message, tagname.c_str());
               if (hashsum.empty() == false)
-                 out << " - " << HashString(*type, hashsum).toStr() << std::endl;
+              {
+                 auto const hs = HashString(*type, hashsum);
+                 out << " - " << hs.toStr();
+                 if (hs.usable() == false)
+                    out << " [weak]";
+                 out << std::endl;
+              }
            }
            out << "Last modification reported: " << LookupTag(Message, "Last-Modified", "<none>") << std::endl;
         }
-        ErrorText = out.str();
       }
-      else
-        ErrorText = LookupTag(Message,"Message");
+      ErrorText = out.str();
    }
 
    switch (failreason)
    {
       case MAXIMUM_SIZE_EXCEEDED: RenameOnError(MaximumSizeExceeded); break;
       case HASHSUM_MISMATCH: RenameOnError(HashSumMismatch); break;
+      case WEAK_HASHSUMS: break;
+      case REDIRECTION_LOOP: break;
       case OTHER: break;
    }
 
@@ -886,6 +989,27 @@ std::string pkgAcquire::Item::HashSum() const                              /*{{{*/
    return hs != NULL ? hs->toStr() : "";
 }
                                                                        /*}}}*/
+bool pkgAcquire::Item::IsRedirectionLoop(std::string const &NewURI)    /*{{{*/
+{
+   // the store method can fail due to permission errors and the item would then appear to "loop"
+   if (APT::String::Startswith(NewURI, "store:"))
+      return false;
+   if (d->PastRedirections.empty())
+   {
+      d->PastRedirections.push_back(NewURI);
+      return false;
+   }
+   auto const LastURI = std::prev(d->PastRedirections.end());
+   // redirections to the same file are a way of restarting/rescheduling;
+   // the individual methods have to make sure that they aren't looping this way
+   if (*LastURI == NewURI)
+      return false;
+   if (std::find(d->PastRedirections.begin(), LastURI, NewURI) != LastURI)
+      return true;
+   d->PastRedirections.push_back(NewURI);
+   return false;
+}
+                                                                       /*}}}*/
 
 pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/
       pkgAcqMetaClearSig * const transactionManager, IndexTarget const &target) :
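A self-contained sketch of the loop check that IsRedirectionLoop implements above: repeating the immediately preceding URI is tolerated as a restart, while any URI already seen earlier in the chain counts as a loop (the store: special case is left out, and the history vector is passed in explicitly instead of living in the item):

    #include <algorithm>
    #include <cassert>
    #include <string>
    #include <vector>

    // Returns true if following NewURI would revisit an earlier hop of the chain.
    bool SeenRedirectionLoop(std::vector<std::string> &past, std::string const &NewURI)
    {
       if (past.empty())
       {
          past.push_back(NewURI);
          return false;
       }
       auto const last = std::prev(past.end());
       if (*last == NewURI)                      // same URI again: restart, not a loop
          return false;
       if (std::find(past.begin(), last, NewURI) != last)
          return true;                           // already visited earlier: loop
       past.push_back(NewURI);
       return false;
    }

    int main()
    {
       std::vector<std::string> past;
       assert(SeenRedirectionLoop(past, "http://a.example/f") == false);
       assert(SeenRedirectionLoop(past, "http://b.example/f") == false);
       assert(SeenRedirectionLoop(past, "http://b.example/f") == false); // restart
       assert(SeenRedirectionLoop(past, "http://a.example/f") == true);  // loop
    }
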
@@ -934,10 +1058,8 @@ static void LoadLastMetaIndexParser(pkgAcqMetaClearSig * const TransactionManage
 // AcqMetaBase - Constructor                                           /*{{{*/
 pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire * const Owner,
       pkgAcqMetaClearSig * const TransactionManager,
-      std::vector<IndexTarget> const &IndexTargets,
       IndexTarget const &DataTarget)
 : pkgAcqTransactionItem(Owner, TransactionManager, DataTarget), d(NULL),
-   IndexTargets(IndexTargets),
    AuthPass(false), IMSHit(false), State(TransactionStarted)
 {
 }
@@ -958,7 +1080,7 @@ void pkgAcqMetaBase::AbortTransaction()
    {
       case TransactionStarted: break;
       case TransactionAbort: _error->Fatal("Transaction %s was already aborted and is aborted again", TransactionManager->Target.URI.c_str()); return;
-      case TransactionCommit: _error->Fatal("Transaction %s was already aborted and is now commited", TransactionManager->Target.URI.c_str()); return;
+      case TransactionCommit: _error->Fatal("Transaction %s was already aborted and is now committed", TransactionManager->Target.URI.c_str()); return;
    }
    TransactionManager->State = TransactionAbort;
 
@@ -1000,8 +1122,8 @@ void pkgAcqMetaBase::CommitTransaction()
    switch (TransactionManager->State)
    {
       case TransactionStarted: break;
-      case TransactionAbort: _error->Fatal("Transaction %s was already commited and is now aborted", TransactionManager->Target.URI.c_str()); return;
-      case TransactionCommit: _error->Fatal("Transaction %s was already commited and is again commited", TransactionManager->Target.URI.c_str()); return;
+      case TransactionAbort: _error->Fatal("Transaction %s was already committed and is now aborted", TransactionManager->Target.URI.c_str()); return;
+      case TransactionCommit: _error->Fatal("Transaction %s was already committed and is again committed", TransactionManager->Target.URI.c_str()); return;
    }
    TransactionManager->State = TransactionCommit;
 
@@ -1079,7 +1201,7 @@ string pkgAcqMetaBase::Custom600Headers() const
    string const FinalFile = GetFinalFilename();
    struct stat Buf;
    if (stat(FinalFile.c_str(),&Buf) == 0)
-      Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+      Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
 
    return Header;
 }
@@ -1104,9 +1226,15 @@ bool pkgAcqMetaBase::CheckDownloadDone(pkgAcqTransactionItem * const I, const st
    if (I->UsedMirror.empty() == false && _config->FindB("Acquire::SameMirrorForAllIndexes", true))
    {
       if (APT::String::Endswith(I->Desc.URI, "InRelease"))
+      {
         TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("InRelease"));
+        TransactionManager->UsedMirror = I->UsedMirror;
+      }
       else if (APT::String::Endswith(I->Desc.URI, "Release"))
+      {
         TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("Release"));
+        TransactionManager->UsedMirror = I->UsedMirror;
+      }
    }
 
    std::string const FileName = LookupTag(Message,"Filename");
@@ -1151,6 +1279,8 @@ bool pkgAcqMetaBase::CheckAuthDone(string const &Message)         /*{{{*/
    // valid signature from a key in the trusted keyring.  We
    // perform additional verification of its contents, and use them
    // to verify the indexes we are about to download
+   if (_config->FindB("Debug::pkgAcquire::Auth", false))
+      std::cerr << "Signature verification succeeded: " << DestFile << std::endl;
 
    if (TransactionManager->IMSHit == false)
    {
@@ -1171,7 +1301,8 @@ bool pkgAcqMetaBase::CheckAuthDone(string const &Message)         /*{{{*/
       LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
    }
 
-   if (TransactionManager->MetaIndexParser->Load(DestFile, &ErrorText) == false)
+   bool const GoodAuth = TransactionManager->MetaIndexParser->Load(DestFile, &ErrorText);
+   if (GoodAuth == false && AllowInsecureRepositories(InsecureType::WEAK, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == false)
    {
       Status = StatAuthError;
       return false;
@@ -1183,112 +1314,129 @@ bool pkgAcqMetaBase::CheckAuthDone(string const &Message)             /*{{{*/
       return false;
    }
 
-   if (_config->FindB("Debug::pkgAcquire::Auth", false))
-      std::cerr << "Signature verification succeeded: "
-                << DestFile << std::endl;
-
    // Download further indexes with verification
-   QueueIndexes(true);
+   TransactionManager->QueueIndexes(GoodAuth);
 
-   return true;
+   return GoodAuth;
 }
                                                                        /*}}}*/
-void pkgAcqMetaBase::QueueIndexes(bool const verify)                   /*{{{*/
+void pkgAcqMetaClearSig::QueueIndexes(bool const verify)                       /*{{{*/
 {
    // at this point the real Items are loaded in the fetcher
    ExpectedAdditionalItems = 0;
 
-   bool const metaBaseSupportsByHash = TransactionManager->MetaIndexParser->GetSupportsAcquireByHash();
-   for (std::vector <IndexTarget>::iterator Target = IndexTargets.begin();
-        Target != IndexTargets.end();
-        ++Target)
-   {
+   std::set<std::string> targetsSeen;
+   bool const hasReleaseFile = TransactionManager->MetaIndexParser != NULL;
+   bool const metaBaseSupportsByHash = hasReleaseFile && TransactionManager->MetaIndexParser->GetSupportsAcquireByHash();
+   bool hasHashes = true;
+   auto IndexTargets = TransactionManager->MetaIndexParser->GetIndexTargets();
+   if (hasReleaseFile && verify == false)
+      hasHashes = std::any_of(IndexTargets.begin(), IndexTargets.end(),
+           [&](IndexTarget const &Target) { return TransactionManager->MetaIndexParser->Exists(Target.MetaKey); });
+   for (auto&& Target: IndexTargets)
+   {
+      // if we have already seen a target created by the target this one declares itself
+      // a fallback of, we skip acquiring the fallback (but we still make sure to clean up)
+      if (targetsSeen.find(Target.Option(IndexTarget::FALLBACK_OF)) != targetsSeen.end())
+      {
+        targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
+        new CleanupItem(Owner, TransactionManager, Target);
+        continue;
+      }
       // all is an implementation detail. Users shouldn't use this as arch
       // We need this support trickery here as e.g. Debian has binary-all files already,
       // but arch:all packages are still in the arch:any files, so we would waste precious
       // download time, bandwidth and diskspace for nothing, BUT Debian doesn't feature all
       // in the set of supported architectures, so we can filter based on this property rather
       // than invent an entirely new flag we would need to carry for all of eternity.
-      if (Target->Option(IndexTarget::ARCHITECTURE) == "all")
+      if (hasReleaseFile && Target.Option(IndexTarget::ARCHITECTURE) == "all")
       {
-        if (TransactionManager->MetaIndexParser->IsArchitectureSupported("all") == false ||
-              TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(*Target) == false)
+        if (TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(Target) == false)
         {
-           new CleanupItem(Owner, TransactionManager, *Target);
+           new CleanupItem(Owner, TransactionManager, Target);
            continue;
         }
       }
 
-      bool trypdiff = Target->OptionBool(IndexTarget::PDIFFS);
-      if (verify == true)
+      bool trypdiff = Target.OptionBool(IndexTarget::PDIFFS);
+      if (hasReleaseFile == true)
       {
-        if (TransactionManager->MetaIndexParser->Exists(Target->MetaKey) == false)
+        if (TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false)
         {
            // optional targets that we do not have in the Release file are skipped
-           if (Target->IsOptional)
+           if (hasHashes == true && Target.IsOptional)
            {
-              new CleanupItem(Owner, TransactionManager, *Target);
+              new CleanupItem(Owner, TransactionManager, Target);
               continue;
            }
 
-           std::string const &arch = Target->Option(IndexTarget::ARCHITECTURE);
+           std::string const &arch = Target.Option(IndexTarget::ARCHITECTURE);
            if (arch.empty() == false)
            {
               if (TransactionManager->MetaIndexParser->IsArchitectureSupported(arch) == false)
               {
-                 new CleanupItem(Owner, TransactionManager, *Target);
+                 new CleanupItem(Owner, TransactionManager, Target);
                  _error->Notice(_("Skipping acquire of configured file '%s' as repository '%s' doesn't support architecture '%s'"),
-                       Target->MetaKey.c_str(), TransactionManager->Target.Description.c_str(), arch.c_str());
+                       Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str(), arch.c_str());
                  continue;
               }
               // if the architecture is officially supported but currently no packages for it available,
               // ignore silently as this is pretty much the same as just shipping an empty file.
               // if we don't know which architectures are supported, we do NOT ignore it to notify user about this
-              if (TransactionManager->MetaIndexParser->IsArchitectureSupported("*undefined*") == false)
+              if (hasHashes == true && TransactionManager->MetaIndexParser->IsArchitectureSupported("*undefined*") == false)
               {
-                 new CleanupItem(Owner, TransactionManager, *Target);
+                 new CleanupItem(Owner, TransactionManager, Target);
                  continue;
               }
            }
 
-           Status = StatAuthError;
-           strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), Target->MetaKey.c_str());
-           return;
+           if (hasHashes == true)
+           {
+              Status = StatAuthError;
+              strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), Target.MetaKey.c_str());
+              return;
+           }
+           else
+           {
+              new pkgAcqIndex(Owner, TransactionManager, Target);
+              continue;
+           }
         }
-        else
+        else if (verify)
         {
-           auto const hashes = GetExpectedHashesFor(Target->MetaKey);
+           auto const hashes = GetExpectedHashesFor(Target.MetaKey);
            if (hashes.empty() == false)
            {
-              if (hashes.usable() == false)
+              if (hashes.usable() == false && TargetIsAllowedToBe(TransactionManager->Target, InsecureType::WEAK) == false)
               {
-                 new CleanupItem(Owner, TransactionManager, *Target);
+                 new CleanupItem(Owner, TransactionManager, Target);
                  _error->Warning(_("Skipping acquire of configured file '%s' as repository '%s' provides only weak security information for it"),
-                       Target->MetaKey.c_str(), TransactionManager->Target.Description.c_str());
+                       Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str());
                  continue;
               }
               // empty files are skipped as acquiring the very small compressed files is a waste of time
               else if (hashes.FileSize() == 0)
               {
-                 new CleanupItem(Owner, TransactionManager, *Target);
+                 new CleanupItem(Owner, TransactionManager, Target);
+                 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
                  continue;
               }
            }
         }
 
         // autoselect the compression method
-        std::vector<std::string> types = VectorizeString(Target->Option(IndexTarget::COMPRESSIONTYPES), ' ');
+        std::vector<std::string> types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
         types.erase(std::remove_if(types.begin(), types.end(), [&](std::string const &t) {
            if (t == "uncompressed")
-              return TransactionManager->MetaIndexParser->Exists(Target->MetaKey) == false;
-           std::string const MetaKey = Target->MetaKey + "." + t;
+              return TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false;
+           std::string const MetaKey = Target.MetaKey + "." + t;
            return TransactionManager->MetaIndexParser->Exists(MetaKey) == false;
         }), types.end());
         if (types.empty() == false)
         {
            std::ostringstream os;
            // add the special compressiontype byhash first if supported
-           std::string const useByHashConf = Target->Option(IndexTarget::BY_HASH);
+           std::string const useByHashConf = Target.Option(IndexTarget::BY_HASH);
            bool useByHash = false;
            if(useByHashConf == "force")
               useByHash = true;
@@ -1298,12 +1446,12 @@ void pkgAcqMetaBase::QueueIndexes(bool const verify)                    /*{{{*/
               os << "by-hash ";
            std::copy(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " "));
            os << *types.rbegin();
-           Target->Options["COMPRESSIONTYPES"] = os.str();
+           Target.Options["COMPRESSIONTYPES"] = os.str();
         }
         else
-           Target->Options["COMPRESSIONTYPES"].clear();
+           Target.Options["COMPRESSIONTYPES"].clear();
 
-        std::string filename = GetExistingFilename(GetFinalFileNameFromURI(Target->URI));
+        std::string filename = GetExistingFilename(GetFinalFileNameFromURI(Target.URI));
         if (filename.empty() == false)
         {
            // if the Release file is a hit and we have an index it must be the current one
@@ -1314,8 +1462,8 @@ void pkgAcqMetaBase::QueueIndexes(bool const verify)                      /*{{{*/
               // see if the file changed since the last Release file
               // we use the uncompressed files as we might compress differently compared to the server,
               // so the hashes might not match, even if they contain the same data.
-              HashStringList const newFile = GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, Target->MetaKey);
-              HashStringList const oldFile = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target->MetaKey);
+              HashStringList const newFile = GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, Target.MetaKey);
+              HashStringList const oldFile = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey);
               if (newFile != oldFile)
                  filename.clear();
            }
@@ -1327,35 +1475,37 @@ void pkgAcqMetaBase::QueueIndexes(bool const verify)                    /*{{{*/
 
         if (filename.empty() == false)
         {
-           new NoActionItem(Owner, *Target, filename);
-           std::string const idxfilename = GetFinalFileNameFromURI(GetDiffIndexURI(*Target));
+           new NoActionItem(Owner, Target, filename);
+           std::string const idxfilename = GetFinalFileNameFromURI(GetDiffIndexURI(Target));
            if (FileExists(idxfilename))
-              new NoActionItem(Owner, *Target, idxfilename);
+              new NoActionItem(Owner, Target, idxfilename);
+           targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
            continue;
         }
 
         // check if we have patches available
-        trypdiff &= TransactionManager->MetaIndexParser->Exists(GetDiffIndexFileName(Target->MetaKey));
+        trypdiff &= TransactionManager->MetaIndexParser->Exists(GetDiffIndexFileName(Target.MetaKey));
       }
       else
       {
         // if we have no file to patch, no point in trying
-        trypdiff &= (GetExistingFilename(GetFinalFileNameFromURI(Target->URI)).empty() == false);
+        trypdiff &= (GetExistingFilename(GetFinalFileNameFromURI(Target.URI)).empty() == false);
       }
 
       // no point in patching from local sources
       if (trypdiff)
       {
-        std::string const proto = Target->URI.substr(0, strlen("file:/"));
+        std::string const proto = Target.URI.substr(0, strlen("file:/"));
         if (proto == "file:/" || proto == "copy:/" || proto == "cdrom:")
            trypdiff = false;
       }
 
       // Queue the Index file (Packages, Sources, Translation-$foo, …)
+      targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
       if (trypdiff)
-         new pkgAcqDiffIndex(Owner, TransactionManager, *Target);
+         new pkgAcqDiffIndex(Owner, TransactionManager, Target);
       else
-         new pkgAcqIndex(Owner, TransactionManager, *Target);
+         new pkgAcqIndex(Owner, TransactionManager, Target);
    }
 }
                                                                        /*}}}*/
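The targetsSeen bookkeeping threaded through QueueIndexes above lets one index target declare itself a fallback of another: once the preferred target (or anything created by it) has been handled, the fallback is merely cleaned up instead of acquired. A reduced sketch of that selection with invented target names, not apt's IndexTarget API:

    #include <iostream>
    #include <set>
    #include <string>
    #include <vector>

    struct Target
    {
       std::string Name;
       std::string CreatedBy;   // logical group this target produces
       std::string FallbackOf;  // group whose presence makes this target redundant
    };

    int main()
    {
       // hypothetical pair: a preferred index and its legacy fallback variant
       std::vector<Target> const targets = {
          {"Contents-amd64",        "Contents-amd64", ""},
          {"legacy-Contents-amd64", "Contents-amd64", "Contents-amd64"},
       };

       std::set<std::string> targetsSeen;
       for (auto const &t : targets)
       {
          if (t.FallbackOf.empty() == false &&
                targetsSeen.find(t.FallbackOf) != targetsSeen.end())
          {
             targetsSeen.emplace(t.CreatedBy);
             std::cout << "cleanup only: " << t.Name << std::endl;   // fallback skipped
             continue;
          }
          targetsSeen.emplace(t.CreatedBy);
          std::cout << "acquire: " << t.Name << std::endl;
       }
    }
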
@@ -1447,15 +1597,13 @@ pkgAcqMetaBase::~pkgAcqMetaBase()
 pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire * const Owner,       /*{{{*/
       IndexTarget const &ClearsignedTarget,
       IndexTarget const &DetachedDataTarget, IndexTarget const &DetachedSigTarget,
-      std::vector<IndexTarget> const &IndexTargets,
       metaIndex * const MetaIndexParser) :
-   pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget, IndexTargets),
-   d(NULL), ClearsignedTarget(ClearsignedTarget),
-   DetachedDataTarget(DetachedDataTarget),
+   pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget),
+   d(NULL), DetachedDataTarget(DetachedDataTarget),
    MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL)
 {
    // index targets + (worst case:) Release/Release.gpg
-   ExpectedAdditionalItems = IndexTargets.size() + 2;
+   ExpectedAdditionalItems = std::numeric_limits<decltype(ExpectedAdditionalItems)>::max();
    TransactionManager->Add(this);
 }
                                                                        /*}}}*/
@@ -1523,15 +1671,23 @@ void pkgAcqMetaClearSig::Done(std::string const &Message,
         new NoActionItem(Owner, DetachedSigTarget);
       }
    }
+   else if (Status != StatAuthError)
+   {
+      string const FinalFile = GetFinalFileNameFromURI(DetachedDataTarget.URI);
+      string const OldFile = GetFinalFilename();
+      if (TransactionManager->IMSHit == false)
+        TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
+      else if (RealFileExists(OldFile) == false)
+        new NoActionItem(Owner, DetachedDataTarget);
+      else
+        TransactionManager->TransactionStageCopy(this, OldFile, FinalFile);
+   }
 }
                                                                        /*}}}*/
 void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) /*{{{*/
 {
    Item::Failed(Message, Cnf);
 
-   // we failed, we will not get additional items from this method
-   ExpectedAdditionalItems = 0;
-
    if (AuthPass == false)
    {
       if (Status == StatAuthError || Status == StatTransientNetworkError)
@@ -1550,17 +1706,14 @@ void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig c
       TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
       Status = StatDone;
 
-      new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget, IndexTargets);
+      new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget);
    }
    else
    {
       if(CheckStopAuthentication(this, Message))
          return;
 
-      // No Release file was present, or verification failed, so fall
-      // back to queueing Packages files without verification
-      // only allow going further if the user explicitly wants it
-      if(AllowInsecureRepositories(_("The repository '%s' is not signed."), ClearsignedTarget.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
+      if(AllowInsecureRepositories(InsecureType::UNSIGNED, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
       {
         Status = StatDone;
 
@@ -1579,7 +1732,7 @@ void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig c
         if (TransactionManager->MetaIndexParser->Load(PartialRelease, &ErrorText) == false || VerifyVendor(Message) == false)
            /* expired Release files are still a problem you need extra force for */;
         else
-           QueueIndexes(true);
+           TransactionManager->QueueIndexes(true);
       }
    }
 }
@@ -1588,9 +1741,8 @@ void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig c
 pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner,             /*{{{*/
                                  pkgAcqMetaClearSig * const TransactionManager,
                                 IndexTarget const &DataTarget,
-                                IndexTarget const &DetachedSigTarget,
-                                vector<IndexTarget> const &IndexTargets) :
-   pkgAcqMetaBase(Owner, TransactionManager, IndexTargets, DataTarget), d(NULL),
+                                IndexTarget const &DetachedSigTarget) :
+   pkgAcqMetaBase(Owner, TransactionManager, DataTarget), d(NULL),
    DetachedSigTarget(DetachedSigTarget)
 {
    if(_config->FindB("Debug::Acquire::Transaction", false) == true)
@@ -1604,9 +1756,6 @@ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner,                /*{{{*/
    Desc.Owner = this;
    Desc.ShortDesc = DataTarget.ShortDesc;
    Desc.URI = DataTarget.URI;
-
-   // we expect more item
-   ExpectedAdditionalItems = IndexTargets.size();
    QueueURI(Desc);
 }
                                                                        /*}}}*/
@@ -1635,13 +1784,13 @@ void pkgAcqMetaIndex::Failed(string const &Message,
    // No Release file was present so fall
    // back to queueing Packages files without verification
    // only allow going further if the user explicitly wants it
-   if(AllowInsecureRepositories(_("The repository '%s' does not have a Release file."), Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
+   if(AllowInsecureRepositories(InsecureType::NORELEASE, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
    {
       // ensure old Release files are removed
       TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
 
       // queue without any kind of hashsum support
-      QueueIndexes(false);
+      TransactionManager->QueueIndexes(false);
    }
 }
                                                                        /*}}}*/
@@ -1735,6 +1884,14 @@ void pkgAcqMetaSig::Done(string const &Message, HashStringList const &Hashes,
         TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename());
       }
    }
+   else if (MetaIndex->Status != StatAuthError)
+   {
+      std::string const FinalFile = MetaIndex->GetFinalFilename();
+      if (TransactionManager->IMSHit == false)
+        TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalFile);
+      else
+        TransactionManager->TransactionStageCopy(MetaIndex, FinalFile, FinalFile);
+   }
 }
                                                                        /*}}}*/
 void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
@@ -1745,40 +1902,14 @@ void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const
    if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message))
          return;
 
-   string const FinalRelease = MetaIndex->GetFinalFilename();
-   string const FinalReleasegpg = GetFinalFilename();
-   string const FinalInRelease = TransactionManager->GetFinalFilename();
-
-   if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease))
-   {
-      std::string downgrade_msg;
-      strprintf(downgrade_msg, _("The repository '%s' is no longer signed."),
-                MetaIndex->Target.Description.c_str());
-      if(_config->FindB("Acquire::AllowDowngradeToInsecureRepositories"))
-      {
-         // meh, the users wants to take risks (we still mark the packages
-         // from this repository as unauthenticated)
-         _error->Warning("%s", downgrade_msg.c_str());
-         _error->Warning(_("This is normally not allowed, but the option "
-                           "Acquire::AllowDowngradeToInsecureRepositories was "
-                           "given to override it."));
-         Status = StatDone;
-      } else {
-        MessageInsecureRepository(true, downgrade_msg);
-        if (TransactionManager->IMSHit == false)
-           Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED");
-        Item::Failed("Message: " + downgrade_msg, Cnf);
-         TransactionManager->AbortTransaction();
-         return;
-      }
-   }
-
    // ensures that a Release.gpg file in the lists/ is removed by the transaction
    TransactionManager->TransactionStageRemoval(this, DestFile);
 
    // only allow going further if the user explicitly wants it
-   if (AllowInsecureRepositories(_("The repository '%s' is not signed."), MetaIndex->Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
+   if (AllowInsecureRepositories(InsecureType::UNSIGNED, MetaIndex->Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
    {
+      string const FinalRelease = MetaIndex->GetFinalFilename();
+      string const FinalInRelease = TransactionManager->GetFinalFilename();
       LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
 
       // we parse the indexes here because at this point the user wanted
@@ -1787,10 +1918,12 @@ void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const
       if (MetaIndex->VerifyVendor(Message) == false)
         /* expired Release files are still a problem you need extra force for */;
       else
-        MetaIndex->QueueIndexes(GoodLoad);
+        TransactionManager->QueueIndexes(GoodLoad);
 
-      TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename());
+      TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalRelease);
    }
+   else if (TransactionManager->IMSHit == false)
+      Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED");
 
    // FIXME: this is used often (e.g. in pkgAcqIndexTrans) so refactor
    if (Cnf->LocalOnly == true ||
@@ -1822,7 +1955,7 @@ void pkgAcqBaseIndex::Failed(std::string const &Message,pkgAcquire::MethodConfig
    if (timespec == 0)
       ErrorText.append("<unknown>");
    else
-      ErrorText.append(TimeRFC1123(timespec));
+      ErrorText.append(TimeRFC1123(timespec, true));
    ErrorText.append("\n");
 }
                                                                        /*}}}*/
@@ -1840,6 +1973,9 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner,
                                  IndexTarget const &Target)
    : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL), diffs(NULL)
 {
+   // FIXME: Magic number as an upper bound on pdiffs we will reasonably acquire
+   ExpectedAdditionalItems = 40;
+
    Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
 
    Desc.Owner = this;
@@ -1872,7 +2008,7 @@ string pkgAcqDiffIndex::Custom600Headers() const
    if (stat(Final.c_str(),&Buf) != 0)
       return "\nIndex-File: true";
    
-   return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+   return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
 }
                                                                        /*}}}*/
 void pkgAcqDiffIndex::QueueOnIMSHit() const                            /*{{{*/
@@ -1882,8 +2018,21 @@ void pkgAcqDiffIndex::QueueOnIMSHit() const                              /*{{{*/
    new pkgAcqIndexDiffs(Owner, TransactionManager, Target);
 }
                                                                        /*}}}*/
+static bool RemoveFileForBootstrapLinking(bool const Debug, std::string const &For, std::string const &Boot)/*{{{*/
+{
+   if (FileExists(Boot) && RemoveFile("Bootstrap-linking", Boot) == false)
+   {
+      if (Debug)
+        std::clog << "Bootstrap-linking for patching " << For
+           << " by removing stale " << Boot << " failed!" << std::endl;
+      return false;
+   }
+   return true;
+}
+                                                                       /*}}}*/
 bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile)      /*{{{*/
 {
+   ExpectedAdditionalItems = 0;
    // failing here is fine: our caller will take care of trying to
    // get the complete file if patching fails
    if(Debug)
@@ -1902,6 +2051,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
    HashStringList ServerHashes;
    unsigned long long ServerSize = 0;
 
+   auto const &posix = std::locale("C.UTF-8");
    for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
    {
       std::string tagname = *type;
@@ -1913,6 +2063,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
       string hash;
       unsigned long long size;
       std::stringstream ss(tmp);
+      ss.imbue(posix);
       ss >> hash >> size;
       if (unlikely(hash.empty() == true))
         continue;
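The imbue() calls added to ParseDiffIndex pin the stringstreams that parse the "<hash> <size> <filename>" records to a fixed locale rather than the user's global one (the diff picks "C.UTF-8"; the sketch below uses the always-available classic "C" locale and an illustrative record):

    #include <iostream>
    #include <locale>
    #include <sstream>
    #include <string>

    int main()
    {
       // illustrative pdiff Index record: "<hash> <size> <patch name>"
       std::string const line =
          "9f4148cf6d96e3cbdcbb918f27bf287d47a384e4 1261772 2016-08-29-1353.32";

       std::stringstream ss(line);
       ss.imbue(std::locale::classic());   // number parsing independent of the user's locale

       std::string hash, filename;
       unsigned long long size = 0;
       if (ss >> hash >> size >> filename)
          std::cout << hash << " | " << size << " bytes | " << filename << std::endl;
       else
          std::cout << "malformed record" << std::endl;
    }
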
@@ -1991,6 +2142,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
       string hash, filename;
       unsigned long long size;
       std::stringstream ss(tmp);
+      ss.imbue(posix);
 
       while (ss >> hash >> size >> filename)
       {
@@ -2049,6 +2201,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
       string hash, filename;
       unsigned long long size;
       std::stringstream ss(tmp);
+      ss.imbue(posix);
 
       while (ss >> hash >> size >> filename)
       {
@@ -2086,6 +2239,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
       string hash, filename;
       unsigned long long size;
       std::stringstream ss(tmp);
+      ss.imbue(posix);
 
       // FIXME: all of pdiff supports only .gz compressed patches
       while (ss >> hash >> size >> filename)
@@ -2216,23 +2370,15 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile)       /*{{{*/
       if (unlikely(Final.empty())) // because we wouldn't be called in such a case
         return false;
       std::string const PartialFile = GetPartialFileNameFromURI(Target.URI);
-      if (FileExists(PartialFile) && RemoveFile("Bootstrap-linking", PartialFile) == false)
-      {
-        if (Debug)
-           std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
-              << " by removing stale " << PartialFile << " failed!" << std::endl;
+      std::string const PatchedFile = GetKeepCompressedFileName(PartialFile + "-patched", Target);
+      if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile) == false ||
+           RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile) == false)
         return false;
-      }
       for (auto const &ext : APT::Configuration::getCompressorExtensions())
       {
-        std::string const Partial = PartialFile + ext;
-        if (FileExists(Partial) && RemoveFile("Bootstrap-linking", Partial) == false)
-        {
-           if (Debug)
-              std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
-                 << " by removing stale " << Partial << " failed!" << std::endl;
+        if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile + ext) == false ||
+              RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile + ext) == false)
            return false;
-        }
       }
       std::string const Ext = Final.substr(CurrentPackagesFile.length());
       std::string const Partial = PartialFile + Ext;
@@ -2267,6 +2413,7 @@ void pkgAcqDiffIndex::Failed(string const &Message,pkgAcquire::MethodConfig cons
 {
    pkgAcqBaseIndex::Failed(Message,Cnf);
    Status = StatDone;
+   ExpectedAdditionalItems = 0;
 
    if(Debug)
       std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
@@ -2513,9 +2660,10 @@ std::string pkgAcqIndexDiffs::Custom600Headers() const                   /*{{{*/
    if(State != StateApplyDiff)
       return pkgAcqBaseIndex::Custom600Headers();
    std::ostringstream patchhashes;
-   HashStringList const ExpectedHashes = available_patches[0].patch_hashes;
-   for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
-      patchhashes <<  "\nPatch-0-" << hs->HashType() << "-Hash: " << hs->HashValue();
+   for (auto && hs : available_patches[0].result_hashes)
+      patchhashes <<  "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
+   for (auto && hs : available_patches[0].patch_hashes)
+      patchhashes <<  "\nPatch-0-" << hs.HashType() << "-Hash: " << hs.HashValue();
    patchhashes << pkgAcqBaseIndex::Custom600Headers();
    return patchhashes.str();
 }
@@ -2662,12 +2810,14 @@ std::string pkgAcqIndexMergeDiffs::Custom600Headers() const             /*{{{*/
       return pkgAcqBaseIndex::Custom600Headers();
    std::ostringstream patchhashes;
    unsigned int seen_patches = 0;
+   for (auto && hs : (*allPatches)[0]->patch.result_hashes)
+      patchhashes <<  "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
    for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
         I != allPatches->end(); ++I)
    {
       HashStringList const ExpectedHashes = (*I)->patch.patch_hashes;
       for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
-        patchhashes <<  "\nPatch-" << seen_patches << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
+        patchhashes <<  "\nPatch-" << std::to_string(seen_patches) << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
       ++seen_patches;
    }
    patchhashes << pkgAcqBaseIndex::Custom600Headers();
@@ -2715,10 +2865,6 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
    DestFile = GetPartialFileNameFromURI(URI);
    NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false);
 
-   // store file size of the download to ensure the fetcher gives
-   // accurate progress reporting
-   FileSize = GetExpectedHashes().FileSize();
-
    if (CurrentCompressionExtension == "uncompressed")
    {
       Desc.URI = URI;
@@ -2755,6 +2901,9 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
       DestFile = DestFile + '.' + CurrentCompressionExtension;
    }
 
+   // store file size of the download to ensure the fetcher gives
+   // accurate progress reporting
+   FileSize = GetExpectedHashes().FileSize();
 
    Desc.Description = URIDesc;
    Desc.Owner = this;
@@ -2777,7 +2926,7 @@ string pkgAcqIndex::Custom600Headers() const
 
       struct stat Buf;
       if (stat(Final.c_str(),&Buf) == 0)
-        msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+        msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
    }
 
    if(Target.IsOptional)
@@ -2791,6 +2940,20 @@ void pkgAcqIndex::Failed(string const &Message,pkgAcquire::MethodConfig const *
 {
    pkgAcqBaseIndex::Failed(Message,Cnf);
 
+   if (UsedMirror.empty() == false && UsedMirror != "DIRECT" &&
+        LookupTag(Message, "FailReason") == "HttpError404")
+   {
+      UsedMirror = "DIRECT";
+      if (Desc.URI.find("/by-hash/") != std::string::npos)
+        CompressionExtensions = "by-hash " + CompressionExtensions;
+      else
+        CompressionExtensions = CurrentCompressionExtension + ' ' + CompressionExtensions;
+      Desc.Description = Target.Description;
+      Init(Target.URI, Desc.Description, Desc.ShortDesc);
+      Status = StatIdle;
+      return;
+   }
+
    // authorisation matches will not be fixed by other compression types
    if (Status != StatAuthError)
    {