fix arguments for MarkInstall so packages are really marked as automatic
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index ffbe66d7d151118f47621b06b32bbe5d356c9c32..cf88ded7b243da48ac69de813331172712a8908b 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -15,8 +15,8 @@
 // Include Files                                                       /*{{{*/
 #include <apt-pkg/acquire-item.h>
 #include <apt-pkg/configuration.h>
+#include <apt-pkg/aptconfiguration.h>
 #include <apt-pkg/sourcelist.h>
-#include <apt-pkg/vendorlist.h>
 #include <apt-pkg/error.h>
 #include <apt-pkg/strutl.h>
 #include <apt-pkg/fileutl.h>
@@ -32,6 +32,7 @@
 #include <string>
 #include <sstream>
 #include <stdio.h>
+#include <ctime>
                                                                        /*}}}*/
 
 using namespace std;
@@ -63,6 +64,7 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
    Status = StatIdle;
    ErrorText = LookupTag(Message,"Message");
+   UsedMirror =  LookupTag(Message,"UsedMirror");
    if (QueueCounter <= 1)
    {
       /* This indicates that the file is not available right now but might
@@ -75,10 +77,17 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
         Dequeue();
         return;
       }
-      
+
       Status = StatError;
       Dequeue();
    }   
+
+   // report mirror failure back to LP if we actually use a mirror
+   string FailReason = LookupTag(Message, "FailReason");
+   if(FailReason.size() != 0)
+      ReportMirrorFailure(FailReason);
+   else
+      ReportMirrorFailure(ErrorText);
 }
                                                                        /*}}}*/
 // Acquire::Item::Start - Item has begun to download                   /*{{{*/
@@ -100,7 +109,7 @@ void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,
 {
    // We just downloaded something..
    string FileName = LookupTag(Message,"Filename");
-   // we only inform the Log class if it was actually not a local thing
+   UsedMirror =  LookupTag(Message,"UsedMirror");
    if (Complete == false && !Local && FileName == DestFile)
    {
       if (Owner->Log != 0)
@@ -109,7 +118,6 @@ void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,
 
    if (FileSize == 0)
       FileSize= Size;
-   
    Status = StatDone;
    ErrorText = string();
    Owner->Dequeue(this);
@@ -131,6 +139,195 @@ void pkgAcquire::Item::Rename(string From,string To)
    }   
 }
                                                                        /*}}}*/
+// Acquire::Item::ReportMirrorFailure                                  /*{{{*/
+// ---------------------------------------------------------------------
+void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
+{
+   // we only act if a mirror was used at all
+   if(UsedMirror.empty())
+      return;
+#if 0
+   std::cerr << "\nReportMirrorFailure: " 
+            << UsedMirror
+            << " Uri: " << DescURI()
+            << " FailCode: " 
+            << FailCode << std::endl;
+#endif
+   const char *Args[40];
+   unsigned int i = 0;
+   string report = _config->Find("Methods::Mirror::ProblemReporting", 
+                                "/usr/lib/apt/apt-report-mirror-failure");
+   if(!FileExists(report))
+      return;
+   Args[i++] = report.c_str();
+   Args[i++] = UsedMirror.c_str();
+   Args[i++] = DescURI().c_str();
+   Args[i++] = FailCode.c_str();
+   Args[i++] = NULL;
+   pid_t pid = ExecFork();
+   if(pid < 0) 
+   {
+      _error->Error("ReportMirrorFailure Fork failed");
+      return;
+   }
+   else if(pid == 0) 
+   {
+      execvp(Args[0], (char**)Args);
+      std::cerr << "Could not exec " << Args[0] << std::endl;
+      _exit(100);
+   }
+   if(!ExecWait(pid, "report-mirror-failure")) 
+   {
+      _error->Warning("Couldn't report problem to '%s'",
+                     _config->Find("Methods::Mirror::ProblemReporting").c_str());
+   }
+}
+                                                                       /*}}}*/
+// AcqSubIndex::AcqSubIndex - Constructor                              /*{{{*/
+// ---------------------------------------------------------------------
+/* Get the Index file first and see if there are languages available
+ * If so, create a pkgAcqIndexTrans for the found language(s).
+ */
+pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,
+                                string const &URIDesc, string const &ShortDesc,
+                                HashString const &ExpectedHash)
+   : Item(Owner), ExpectedHash(ExpectedHash)
+{
+   Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false);
+
+   DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+   DestFile += URItoFileName(URI);
+
+   Desc.URI = URI;
+   Desc.Description = URIDesc;
+   Desc.Owner = this;
+   Desc.ShortDesc = ShortDesc;
+
+   QueueURI(Desc);
+
+   if(Debug)
+      std::clog << "pkgAcqSubIndex: " << Desc.URI << std::endl;
+}
+                                                                       /*}}}*/
+// AcqSubIndex::Custom600Headers - Insert custom request headers       /*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+string pkgAcqSubIndex::Custom600Headers()
+{
+   string Final = _config->FindDir("Dir::State::lists");
+   Final += URItoFileName(Desc.URI);
+
+   struct stat Buf;
+   if (stat(Final.c_str(),&Buf) != 0)
+      return "\nIndex-File: true\nFail-Ignore: true\n";
+   return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+}
+                                                                       /*}}}*/
+void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)      /*{{{*/
+{
+   if(Debug)
+      std::clog << "pkgAcqSubIndex failed: " << Desc.URI << std::endl;
+
+   Complete = false;
+   Status = StatDone;
+   Dequeue();
+
+   // No good Index is provided, so try guessing
+   std::vector<std::string> langs = APT::Configuration::getLanguages(true);
+   for (std::vector<std::string>::const_iterator l = langs.begin();
+       l != langs.end(); ++l)
+   {
+      if (*l == "none") continue;
+      string const file = "Translation-" + *l;
+      new pkgAcqIndexTrans(Owner, Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file),
+               Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file),
+               file);
+   }
+}
+                                                                       /*}}}*/
+void pkgAcqSubIndex::Done(string Message,unsigned long Size,string Md5Hash,    /*{{{*/
+                          pkgAcquire::MethodConfig *Cnf)
+{
+   if(Debug)
+      std::clog << "pkgAcqSubIndex::Done(): " << Desc.URI << std::endl;
+
+   string FileName = LookupTag(Message,"Filename");
+   if (FileName.empty() == true)
+   {
+      Status = StatError;
+      ErrorText = "Method gave a blank filename";
+      return;
+   }
+
+   if (FileName != DestFile)
+   {
+      Local = true;
+      Desc.URI = "copy:" + FileName;
+      QueueURI(Desc);
+      return;
+   }
+
+   Item::Done(Message,Size,Md5Hash,Cnf);
+
+   string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI);
+
+   // success in downloading the index
+   // rename the index
+   if(Debug)
+      std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl;
+   Rename(DestFile,FinalFile);
+   chmod(FinalFile.c_str(),0644);
+   DestFile = FinalFile;
+
+   if(ParseIndex(DestFile) == false)
+      return Failed("", NULL);
+
+   Complete = true;
+   Status = StatDone;
+   Dequeue();
+   return;
+}
+                                                                       /*}}}*/
+bool pkgAcqSubIndex::ParseIndex(string const &IndexFile)               /*{{{*/
+{
+   indexRecords SubIndexParser;
+   if (FileExists(IndexFile) == false || SubIndexParser.Load(IndexFile) == false)
+      return false;
+
+   std::vector<std::string> lang = APT::Configuration::getLanguages(true);
+   for (std::vector<std::string>::const_iterator l = lang.begin();
+       l != lang.end(); ++l)
+   {
+      if (*l == "none")
+        continue;
+
+      string file = "Translation-" + *l;
+      indexRecords::checkSum const *Record = SubIndexParser.Lookup(file);
+      HashString expected;
+      if (Record == NULL)
+      {
+        // FIXME: the Index file provided by debian currently only includes bz2 records
+        Record = SubIndexParser.Lookup(file + ".bz2");
+        if (Record == NULL)
+           continue;
+      }
+      else
+      {
+        expected = Record->Hash;
+        if (expected.empty() == true)
+           continue;
+      }
+
+      IndexTarget target;
+      target.Description = Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file);
+      target.MetaKey = file;
+      target.ShortDesc = file;
+      target.URI = Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file);
+      new pkgAcqIndexTrans(Owner, &target, expected, &SubIndexParser);
+   }
+   return true;
+}
+                                                                       /*}}}*/
 // AcqDiffIndex::AcqDiffIndex - Constructor                            /*{{{*/
 // ---------------------------------------------------------------------
 /* Get the DiffIndex file first and see if there are patches available
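
The ReportMirrorFailure() hook added above shells out to a reporting script through apt's ExecFork()/ExecWait() helpers. As a rough standalone sketch of the same fork/exec pattern with plain POSIX calls (RunHook and its parameters are illustrative placeholders, not apt API):

#include <sys/wait.h>
#include <unistd.h>
#include <iostream>

static bool RunHook(const char *script, const char *mirror,
                    const char *uri, const char *failcode)
{
   pid_t pid = fork();
   if (pid < 0)
      return false;                       // fork failed
   if (pid == 0)
   {
      // child: replace ourselves with the reporting script
      const char *argv[] = { script, mirror, uri, failcode, NULL };
      execvp(argv[0], (char* const*)argv);
      std::cerr << "Could not exec " << argv[0] << std::endl;
      _exit(100);
   }
   int status = 0;
   if (waitpid(pid, &status, 0) < 0)      // parent: wait for the child to finish
      return false;
   return WIFEXITED(status) && WEXITSTATUS(status) == 0;
}

The real code additionally checks FileExists(report) first, so a missing Methods::Mirror::ProblemReporting script silently disables the reporting.
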
@@ -218,19 +415,19 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)                /*{{{*/
 
    if(TF.Step(Tags) == true)
    {
-      string local_sha1;
       bool found = false;
       DiffInfo d;
       string size;
 
-      string tmp = Tags.FindS("SHA1-Current");
+      string const tmp = Tags.FindS("SHA1-Current");
       std::stringstream ss(tmp);
-      ss >> ServerSha1;
+      ss >> ServerSha1 >> size;
+      unsigned long const ServerSize = atol(size.c_str());
 
       FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
       SHA1Summation SHA1;
       SHA1.AddFD(fd.Fd(), fd.Size());
-      local_sha1 = string(SHA1.Result());
+      string const local_sha1 = SHA1.Result();
 
       if(local_sha1 == ServerSha1) 
       {
@@ -244,23 +441,59 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)                /*{{{*/
       else 
       {
         if(Debug)
-           std::clog << "SHA1-Current: " << ServerSha1 << std::endl;
+           std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl;
 
         // check the history and see what patches we need
-        string history = Tags.FindS("SHA1-History");     
+        string const history = Tags.FindS("SHA1-History");
         std::stringstream hist(history);
-        while(hist >> d.sha1 >> size >> d.file) 
+        while(hist >> d.sha1 >> size >> d.file)
         {
-           d.size = atoi(size.c_str());
            // read until the first match is found
+           // from that point on, we probably need all diffs
            if(d.sha1 == local_sha1) 
               found=true;
-           // from that point on, we probably need all diffs
-           if(found) 
+           else if (found == false)
+              continue;
+
+           if(Debug)
+              std::clog << "Need to get diff: " << d.file << std::endl;
+           available_patches.push_back(d);
+        }
+
+        if (available_patches.empty() == false)
+        {
+           // patching with too many files is rather slow compared to a fast download
+           unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
+           if (fileLimit != 0 && fileLimit < available_patches.size())
+           {
+              if (Debug)
+                 std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
+                       << ") so fallback to complete download" << std::endl;
+              return false;
+           }
+
+           // see if the patches are too big
+           found = false; // it was true and it will be true again at the end
+           d = *available_patches.begin();
+           string const firstPatch = d.file;
+           unsigned long patchesSize = 0;
+           std::stringstream patches(Tags.FindS("SHA1-Patches"));
+           while(patches >> d.sha1 >> size >> d.file)
+           {
+              if (firstPatch == d.file)
+                 found = true;
+              else if (found == false)
+                 continue;
+
+              patchesSize += atol(size.c_str());
+           }
+           unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
+           if (sizeLimit > 0 && (sizeLimit/100) < patchesSize)
            {
-              if(Debug)
-                 std::clog << "Need to get diff: " << d.file << std::endl;
-              available_patches.push_back(d);
+              if (Debug)
+                 std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
+                       << ") so fallback to complete download" << std::endl;
+              return false;
            }
         }
       }
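
The two fallback checks above reduce to a single decision: fetch the patches only if there are not too many of them (Acquire::PDiffs::FileLimit) and their combined size stays below a percentage of the full index (Acquire::PDiffs::SizeLimit, interpreted as N%). A minimal standalone sketch of that decision, with the config values passed in as plain parameters:

// Is fetching `patchCount` patches totalling `patchesSize` bytes still
// worthwhile compared to downloading the full index of `serverSize` bytes?
static bool UsePatches(unsigned long patchCount, unsigned long patchesSize,
                       unsigned long serverSize,
                       unsigned long fileLimit,         // 0 = no limit
                       unsigned long sizeLimitPercent)  // e.g. 100
{
   if (fileLimit != 0 && fileLimit < patchCount)
      return false;                      // too many patches, a full download is faster
   unsigned long const sizeLimit = serverSize * sizeLimitPercent;
   if (sizeLimit > 0 && (sizeLimit / 100) < patchesSize)
      return false;                      // patches exceed N% of the full file
   return true;
}
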
@@ -269,11 +502,11 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)                /*{{{*/
       if(found) 
       {
         // queue the diffs
-        string::size_type last_space = Description.rfind(" ");
+        string::size_type const last_space = Description.rfind(" ");
         if(last_space != string::npos)
            Description.erase(last_space, Description.size()-last_space);
         new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
-                             ExpectedHash, available_patches);
+                             ExpectedHash, ServerSha1, available_patches);
         Complete = false;
         Status = StatDone;
         Dequeue();
@@ -341,9 +574,10 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash,       /*{
 pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
                                   string URI,string URIDesc,string ShortDesc,
                                   HashString ExpectedHash, 
+                                  string ServerSha1,
                                   vector<DiffInfo> diffs)
    : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), 
-     available_patches(diffs)
+     available_patches(diffs), ServerSha1(ServerSha1)
 {
    
    DestFile = _config->FindDir("Dir::State::lists") + "partial/";
@@ -429,6 +663,13 @@ bool pkgAcqIndexDiffs::QueueNextDiff()                                     /*{{{*/
       std::clog << "QueueNextDiff: " 
                << FinalFile << " (" << local_sha1 << ")"<<std::endl;
 
+   // final file reached before all patches are applied
+   if(local_sha1 == ServerSha1)
+   {
+      Finish(true);
+      return true;
+   }
+
    // remove all patches until the next matching patch is found
    // this requires the Index file to be ordered
    for(vector<DiffInfo>::iterator I=available_patches.begin();
@@ -473,24 +714,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,      /*
    FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
 
    // success in downloading a diff, enter ApplyDiff state
-   if(State == StateFetchDiff) 
-   {
-
-      if(Debug)
-        std::clog << "Sending to gzip method: " << FinalFile << std::endl;
-
-      string FileName = LookupTag(Message,"Filename");
-      State = StateUnzipDiff;
-      Local = true;
-      Desc.URI = "gzip:" + FileName;
-      DestFile += ".decomp";
-      QueueURI(Desc);
-      Mode = "gzip";
-      return;
-   } 
-
-   // sucess in downloading a diff, enter ApplyDiff state
-   if(State == StateUnzipDiff) 
+   if(State == StateFetchDiff)
    {
 
       // rred expects the patch as $FinalFile.ed
@@ -526,7 +750,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,       /*
       // see if there is more to download
       if(available_patches.size() > 0) {
         new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
-                             ExpectedHash, available_patches);
+                             ExpectedHash, ServerSha1, available_patches);
         return Finish();
       } else 
         return Finish(true);
@@ -542,39 +766,61 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
                         HashString ExpectedHash, string comprExt)
    : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
 {
+   if(comprExt.empty() == true)
+   {
+      // autoselect the compression method
+      std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+      for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+        comprExt.append(*t).append(" ");
+      if (comprExt.empty() == false)
+        comprExt.erase(comprExt.end()-1);
+   }
+   CompressionExtension = comprExt;
+
+   Init(URI, URIDesc, ShortDesc);
+}
+pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target,
+                        HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
+   : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash)
+{
+   // autoselect the compression method
+   std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+   CompressionExtension = "";
+   if (ExpectedHash.empty() == false)
+   {
+      for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+        if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true)
+           CompressionExtension.append(*t).append(" ");
+   }
+   else
+   {
+      for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+        CompressionExtension.append(*t).append(" ");
+   }
+   if (CompressionExtension.empty() == false)
+      CompressionExtension.erase(CompressionExtension.end()-1);
+
+   Init(Target->URI, Target->Description, Target->ShortDesc);
+}
+                                                                       /*}}}*/
+// AcqIndex::Init - deferred Constructor                                       /*{{{*/
+void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) {
    Decompression = false;
    Erase = false;
 
    DestFile = _config->FindDir("Dir::State::lists") + "partial/";
    DestFile += URItoFileName(URI);
 
-   if(comprExt.empty()) 
-   {
-      // autoselect the compression method
-      Configuration::Item const *Opts = _config->Tree("Acquire::CompressionTypes");
-      if (Opts != 0)
-        Opts = Opts->Child;
-
-      const char dirBin[] = "Dir::Bin::";
-      for (; Opts != 0; Opts = Opts->Next)
-      {
-        if (Opts->Tag.empty() == true || Opts->Value.empty() == true)
-           continue;
-        const string bin = _config->FindFile(string(dirBin).append(Opts->Value).c_str(),"");
-        if (bin != "" && !FileExists(bin))
-           continue;
-        comprExt = '.' + Opts->Tag;
-        break;
-      }
-   }
-   CompressionExtension = ((comprExt == "plain" || comprExt == ".") ? "" : comprExt);
-
-   Desc.URI = URI + CompressionExtension;
+   std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+   if (comprExt == "uncompressed")
+      Desc.URI = URI;
+   else
+      Desc.URI = URI + '.' + comprExt;
 
    Desc.Description = URIDesc;
    Desc.Owner = this;
    Desc.ShortDesc = ShortDesc;
-      
+
    QueueURI(Desc);
 }
                                                                        /*}}}*/
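
CompressionExtension now holds a space-separated list of candidate extensions in preference order; Init() queues the first entry and Failed() drops it and retries with the rest. The list handling is plain std::string work; a sketch with hypothetical helper names:

#include <string>

// first entry of the list, e.g. "bz2" for "bz2 gz uncompressed"
static std::string FirstExtension(std::string const &list)
{
   return list.substr(0, list.find(' '));
}

// drop the first entry; returns false once nothing is left to fall back to
static bool PopExtension(std::string &list)
{
   std::string::size_type const next = list.find(' ');
   if (next == std::string::npos)
      return false;
   list = list.substr(next + 1);
   return true;
}
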
@@ -585,52 +831,35 @@ string pkgAcqIndex::Custom600Headers()
 {
    string Final = _config->FindDir("Dir::State::lists");
    Final += URItoFileName(RealURI);
+   if (_config->FindB("Acquire::GzipIndexes",false))
+      Final += ".gz";
    
+   string msg = "\nIndex-File: true";
+   // FIXME: this really should use "IndexTarget::IsOptional()" but that
+   //        seems to be difficult without breaking ABI
+   if (ShortDesc().find("Translation") != 0)
+      msg += "\nFail-Ignore: true";
    struct stat Buf;
    if (stat(Final.c_str(),&Buf) != 0)
-      return "\nIndex-File: true";
-   
-   return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+      msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+
+   return msg;
 }
                                                                        /*}}}*/
 void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
 {
-   Configuration::Item const *Opts = _config->Tree("Acquire::CompressionTypes");
-   if (Opts != 0)
-      Opts = Opts->Child;
-
-   const char dirBin[] = "Dir::Bin::";
-   for (; Opts != 0; Opts = Opts->Next)
+   size_t const nextExt = CompressionExtension.find(' ');
+   if (nextExt != std::string::npos)
    {
-      if (Opts->Tag.empty() == true || Opts->Value.empty() == true)
-        continue;
-
-      // jump over all already checked compression types
-      const unsigned int nameLen = Desc.URI.size() - Opts->Tag.size();
-      if(Desc.URI.substr(nameLen) != Opts->Tag || Opts->Next == 0)
-        continue;
-
-      // check if we need an external binary for this compression type
-      const string bin = _config->FindFile(string(dirBin).append(Opts->Next->Value).c_str(),"");
-      if (bin != "" && !FileExists(bin))
-        continue;
-
-      // retry with the next extension
-      Desc.URI = Desc.URI.substr(0, nameLen) + Opts->Next->Tag;
-
-      new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc,
-                       ExpectedHash, string(".").append(Opts->Next->Tag));
-
-      Status = StatDone;
-      Complete = false;
-      Dequeue();
+      CompressionExtension = CompressionExtension.substr(nextExt+1);
+      Init(RealURI, Desc.Description, Desc.ShortDesc);
       return;
    }
 
    // on decompression failure, remove bad versions in partial/
-   if(Decompression && Erase) {
+   if (Decompression && Erase) {
       string s = _config->FindDir("Dir::State::lists") + "partial/";
-      s += URItoFileName(RealURI);
+      s.append(URItoFileName(RealURI));
       unlink(s.c_str());
    }
 
@@ -662,6 +891,7 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
          Status = StatAuthError;
          ErrorText = _("Hash Sum mismatch");
          Rename(DestFile,DestFile + ".FAILED");
+        ReportMirrorFailure("HashChecksumFailure");
          return;
       }
       // Done, move it into position
@@ -706,28 +936,41 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
       Status = StatError;
       ErrorText = "Method gave a blank filename";
    }
-   
+
+   std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+
    // The files timestamp matches
-   if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
+   if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) {
+       if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz")
+         // Update DestFile for .gz suffix so that the clean operation keeps it
+         DestFile += ".gz";
       return;
+    }
 
    if (FileName == DestFile)
       Erase = true;
    else
       Local = true;
    
-   string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
    string decompProg;
 
+   // If we enable compressed indexes and already have gzip, keep it
+   if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz" && !Local) {
+      string FinalFile = _config->FindDir("Dir::State::lists");
+      FinalFile += URItoFileName(RealURI) + ".gz";
+      Rename(DestFile,FinalFile);
+      chmod(FinalFile.c_str(),0644);
+      
+      // Update DestFile for .gz suffix so that the clean operation keeps it
+      DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+      DestFile += URItoFileName(RealURI) + ".gz";
+      return;
+    }
+
    // get the binary name for your used compression type
    decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
    if(decompProg.empty() == false);
-   // flExtensions returns the full name if no extension is found
-   // this is why we have this complicated compare operation here
-   // FIMXE: add a new flJustExtension() that return "" if no
-   //        extension is found and use that above so that it can
-   //        be tested against ""
-   else if(compExt == flNotDir(URI(Desc.URI).Path))
+   else if(compExt == "uncompressed")
       decompProg = "copy";
    else {
       _error->Error("Unsupported extension: %s", compExt.c_str());
@@ -748,6 +991,24 @@ pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
                            string URI,string URIDesc,string ShortDesc) 
   : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
 {
+}
+pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const *Target,
+                        HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
+  : pkgAcqIndex(Owner, Target, ExpectedHash, MetaIndexParser)
+{
+}
+                                                                       /*}}}*/
+// AcqIndexTrans::Custom600Headers - Insert custom request headers     /*{{{*/
+// ---------------------------------------------------------------------
+string pkgAcqIndexTrans::Custom600Headers()
+{
+   string Final = _config->FindDir("Dir::State::lists");
+   Final += URItoFileName(RealURI);
+
+   struct stat Buf;
+   if (stat(Final.c_str(),&Buf) != 0)
+      return "\nFail-Ignore: true\nIndex-File: true";
+   return "\nFail-Ignore: true\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
 }
                                                                        /*}}}*/
 // AcqIndexTrans::Failed - Silence failure messages for missing files  /*{{{*/
@@ -755,6 +1016,15 @@ pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
 /* */
 void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
+   size_t const nextExt = CompressionExtension.find(' ');
+   if (nextExt != std::string::npos)
+   {
+      CompressionExtension = CompressionExtension.substr(nextExt+1);
+      Init(RealURI, Desc.Description, Desc.ShortDesc);
+      Status = StatIdle;
+      return;
+   }
+
    if (Cnf->LocalOnly == true || 
        StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
    {      
@@ -764,7 +1034,7 @@ void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
       Dequeue();
       return;
    }
-   
+
    Item::Failed(Message,Cnf);
 }
                                                                        /*}}}*/
@@ -852,8 +1122,9 @@ void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5,
       Rename(LastGoodSig, DestFile);
 
    // queue a pkgAcqMetaIndex to be verified against the sig we just retrieved
-   new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
-                      DestFile, IndexTargets, MetaIndexParser);
+   new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, 
+                      MetaIndexShortDesc,  DestFile, IndexTargets, 
+                      MetaIndexParser);
 
 }
                                                                        /*}}}*/
@@ -866,7 +1137,7 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
    {
       Item::Failed(Message,Cnf);
       // move the sigfile back on transient network failures 
-      if(FileExists(DestFile))
+      if(FileExists(LastGoodSig))
         Rename(LastGoodSig,Final);
 
       // set the status back to , Item::Failed likes to reset it
@@ -941,6 +1212,9 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash,  /*{{{*
    if (AuthPass == true)
    {
       AuthDone(Message);
+
+      // all cool, move Release file into place
+      Complete = true;
    }
    else
    {
@@ -967,8 +1241,20 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*
          Desc.URI = "gpgv:" + SigFile;
          QueueURI(Desc);
          Mode = "gpgv";
+        return;
       }
    }
+
+   if (Complete == true)
+   {
+      string FinalFile = _config->FindDir("Dir::State::lists");
+      FinalFile += URItoFileName(RealURI);
+      if (SigFile == DestFile)
+        SigFile = FinalFile;
+      Rename(DestFile,FinalFile);
+      chmod(FinalFile.c_str(),0644);
+      DestFile = FinalFile;
+   }
 }
                                                                        /*}}}*/
 void pkgAcqMetaIndex::RetrievalDone(string Message)                    /*{{{*/
@@ -992,22 +1278,17 @@ void pkgAcqMetaIndex::RetrievalDone(string Message)                      /*{{{*/
       return;
    }
 
-   // see if the download was a IMSHit
+   // make sure to verify against the right file on I-M-S hit
    IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false);
+   if(IMSHit)
+   {
+      string FinalFile = _config->FindDir("Dir::State::lists");
+      FinalFile += URItoFileName(RealURI);
+      if (SigFile == DestFile)
+        SigFile = FinalFile;
+      DestFile = FinalFile;
+   }
    Complete = true;
-
-   string FinalFile = _config->FindDir("Dir::State::lists");
-   FinalFile += URItoFileName(RealURI);
-
-   // If we get a IMS hit we can remove the empty file in partial
-   // othersie we move the file in place
-   if (IMSHit)
-      unlink(DestFile.c_str());
-   else
-      Rename(DestFile,FinalFile);
-
-   chmod(FinalFile.c_str(),0644);
-   DestFile = FinalFile;
 }
                                                                        /*}}}*/
 void pkgAcqMetaIndex::AuthDone(string Message)                         /*{{{*/
@@ -1036,8 +1317,11 @@ void pkgAcqMetaIndex::AuthDone(string Message)                           /*{{{*/
    // Download further indexes with verification
    QueueIndexes(true);
 
-   // Done, move signature file into position
+   // is it a clearsigned MetaIndex file?
+   if (DestFile == SigFile)
+      return;
 
+   // Done, move signature file into position
    string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
       URItoFileName(RealURI) + ".gpg";
    Rename(SigFile,VerifiedSigFile);
@@ -1053,65 +1337,58 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify)                         /*{{{*/
       HashString ExpectedIndexHash;
       if (verify)
       {
-         const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
-         if (!Record)
-         {
-            Status = StatAuthError;
-            ErrorText = "Unable to find expected entry  "
-               + (*Target)->MetaKey + " in Meta-index file (malformed Release file?)";
-            return;
-         }
-         ExpectedIndexHash = Record->Hash;
-         if (_config->FindB("Debug::pkgAcquire::Auth", false))
-         {
-            std::cerr << "Queueing: " << (*Target)->URI << std::endl;
-            std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
-         }
-         if (ExpectedIndexHash.empty())
-         {
-            Status = StatAuthError;
-            ErrorText = "Unable to find hash sum for "
-               + (*Target)->MetaKey + " in Meta-index file";
-            return;
-         }
+        const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
+        if (Record == NULL)
+        {
+           if ((*Target)->IsOptional() == false)
+           {
+              Status = StatAuthError;
+              strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str());
+              return;
+           }
+        }
+        else
+        {
+           ExpectedIndexHash = Record->Hash;
+           if (_config->FindB("Debug::pkgAcquire::Auth", false))
+           {
+              std::cerr << "Queueing: " << (*Target)->URI << std::endl;
+              std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
+           }
+           if (ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false)
+           {
+              Status = StatAuthError;
+              strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
+              return;
+           }
+        }
       }
-      
-      // Queue Packages file (either diff or full packages files, depending
-      // on the users option)
-      if(_config->FindB("Acquire::PDiffs",true) == true) 
+
+      if ((*Target)->IsOptional() == true)
+      {
+        if ((*Target)->IsSubIndex() == true)
+           new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description,
+                               (*Target)->ShortDesc, ExpectedIndexHash);
+        else
+           new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+        continue;
+      }
+
+      /* Queue Packages file (either diff or full packages files, depending
+         on the users option) - we also check if the PDiff Index file is listed
+         in the Meta-Index file. Ideal would be if pkgAcqDiffIndex would test this
+         instead, but passing the required info to it is too much hassle */
+      if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
+         MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true))
         new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
                             (*Target)->ShortDesc, ExpectedIndexHash);
-      else 
-        new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
-                           (*Target)->ShortDesc, ExpectedIndexHash);
+      else
+        new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
    }
 }
                                                                        /*}}}*/
 bool pkgAcqMetaIndex::VerifyVendor(string Message)                     /*{{{*/
 {
-//    // Maybe this should be made available from above so we don't have
-//    // to read and parse it every time?
-//    pkgVendorList List;
-//    List.ReadMainList();
-
-//    const Vendor* Vndr = NULL;
-//    for (std::vector<string>::const_iterator I = GPGVOutput.begin(); I != GPGVOutput.end(); I++)
-//    {
-//       string::size_type pos = (*I).find("VALIDSIG ");
-//       if (_config->FindB("Debug::Vendor", false))
-//          std::cerr << "Looking for VALIDSIG in \"" << (*I) << "\": pos " << pos 
-//                    << std::endl;
-//       if (pos != std::string::npos)
-//       {
-//          string Fingerprint = (*I).substr(pos+sizeof("VALIDSIG"));
-//          if (_config->FindB("Debug::Vendor", false))
-//             std::cerr << "Looking for \"" << Fingerprint << "\" in vendor..." <<
-//                std::endl;
-//          Vndr = List.FindVendor(Fingerprint) != "";
-//          if (Vndr != NULL);
-//          break;
-//       }
-//    }
    string::size_type pos;
 
    // check for missing sigs (that were not fatal because otherwise we had
@@ -1147,6 +1424,17 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message)                       /*{{{*/
       Transformed = "";
    }
 
+   if (_config->FindB("Acquire::Check-Valid-Until", true) == true &&
+       MetaIndexParser->GetValidUntil() > 0) {
+      time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil();
+      if (invalid_since > 0)
+        // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
+        // the time since when the file is invalid - formatted in the same way as in
+        // the download progress display (e.g. 7d 3h 42min 1s)
+        return _error->Error(_("Release file expired, ignoring %s (invalid since %s)"),
+                             RealURI.c_str(), TimeToStr(invalid_since).c_str());
+   }
+
    if (_config->FindB("Debug::pkgAcquire::Auth", false)) 
    {
       std::cerr << "Got Codename: " << MetaIndexParser->GetDist() << std::endl;
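
The Acquire::Check-Valid-Until handling added above rejects a Release file whose Valid-Until date lies in the past. Stripped of the error reporting, the test is just a comparison against the current time; a standalone sketch (validUntil being the parsed field, 0 when it is absent):

#include <ctime>

static bool ReleaseStillValid(time_t validUntil, bool checkValidUntil)
{
   if (checkValidUntil == false || validUntil <= 0)
      return true;                    // check disabled or no Valid-Until field
   time_t const invalid_since = time(NULL) - validUntil;
   return invalid_since <= 0;         // positive means it expired that many seconds ago
}
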
@@ -1164,7 +1452,7 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message)                        /*{{{*/
 //       return false;
       if (!Transformed.empty())
       {
-         _error->Warning("Conflicting distribution: %s (expected %s but got %s)",
+         _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
                          Desc.Description.c_str(),
                          Transformed.c_str(),
                          MetaIndexParser->GetDist().c_str());
@@ -1181,30 +1469,37 @@ void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
    if (AuthPass == true)
    {
-      // if we fail the authentication but got the file via a IMS-Hit 
-      // this means that the file wasn't downloaded and that it might be
-      // just stale (server problem, proxy etc). we delete what we have
-      // queue it again without i-m-s 
-      // alternatively we could just unlink the file and let the user try again
-      if (IMSHit)
-      {
-        Complete = false;
-        Local = false;
-        AuthPass = false;
-        unlink(DestFile.c_str());
+      // gpgv method failed, if we have a good signature 
+      string LastGoodSigFile = _config->FindDir("Dir::State::lists");
+      if (DestFile == SigFile)
+        LastGoodSigFile.append(URItoFileName(RealURI));
+      else
+        LastGoodSigFile.append("partial/").append(URItoFileName(RealURI)).append(".gpg.reverify");
 
-        DestFile = _config->FindDir("Dir::State::lists") + "partial/";
-        DestFile += URItoFileName(RealURI);
-        Desc.URI = RealURI;
-        QueueURI(Desc);
+      if(FileExists(LastGoodSigFile))
+      {
+        if (DestFile != SigFile)
+        {
+           string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
+                                       URItoFileName(RealURI) + ".gpg";
+           Rename(LastGoodSigFile,VerifiedSigFile);
+        }
+        Status = StatTransientNetworkError;
+        _error->Warning(_("A error occurred during the signature "
+                          "verification. The repository is not updated "
+                          "and the previous index files will be used. "
+                          "GPG error: %s: %s\n"),
+                        Desc.Description.c_str(),
+                        LookupTag(Message,"Message").c_str());
+        RunScripts("APT::Update::Auth-Failure");
         return;
+      } else {
+        _error->Warning(_("GPG error: %s: %s"),
+                        Desc.Description.c_str(),
+                        LookupTag(Message,"Message").c_str());
       }
-
       // gpgv method failed 
-      _error->Warning("GPG error: %s: %s",
-                      Desc.Description.c_str(),
-                      LookupTag(Message,"Message").c_str());
-
+      ReportMirrorFailure("GPGFailure");
    }
 
    // No Release file was present, or verification failed, so fall
@@ -1212,6 +1507,50 @@ void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
    QueueIndexes(false);
 }
                                                                        /*}}}*/
+pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner,              /*{{{*/
+               string const &URI, string const &URIDesc, string const &ShortDesc,
+               string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc,
+               string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc,
+               const vector<struct IndexTarget*>* IndexTargets,
+               indexRecords* MetaIndexParser) :
+       pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser),
+       MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
+       MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc)
+{
+   SigFile = DestFile;
+}
+                                                                       /*}}}*/
+// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers        /*{{{*/
+// ---------------------------------------------------------------------
+// FIXME: this can go away once the InRelease file is used widely
+string pkgAcqMetaClearSig::Custom600Headers()
+{
+   string Final = _config->FindDir("Dir::State::lists");
+   Final += URItoFileName(RealURI);
+
+   struct stat Buf;
+   if (stat(Final.c_str(),&Buf) != 0)
+      return "\nIndex-File: true\nFail-Ignore: true\n";
+
+   return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+}
+                                                                       /*}}}*/
+void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+{
+   if (AuthPass == false)
+   {
+      new pkgAcqMetaSig(Owner,
+                       MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
+                       MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
+                       IndexTargets, MetaIndexParser);
+      if (Cnf->LocalOnly == true ||
+         StringToBool(LookupTag(Message, "Transient-Failure"), false) == false)
+        Dequeue();
+   }
+   else
+      pkgAcqMetaIndex::Failed(Message, Cnf);
+}
+                                                                       /*}}}*/
 // AcqArchive::AcqArchive - Constructor                                        /*{{{*/
 // ---------------------------------------------------------------------
 /* This just sets up the initial fetch environment and queues the first
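
pkgAcqSubIndex, pkgAcqIndexTrans and pkgAcqMetaClearSig all build their Custom600Headers() the same way: always announce Index-File and Fail-Ignore, and add Last-Modified only when a previously fetched copy exists on disk. A self-contained sketch of that pattern (HttpDate() is a stand-in for apt's TimeRFC1123()):

#include <string>
#include <ctime>
#include <sys/stat.h>

// stand-in for TimeRFC1123() from apt-pkg/contrib/strutl.h
static std::string HttpDate(time_t t)
{
   char buf[64];
   struct tm g;
   gmtime_r(&t, &g);
   strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S GMT", &g);
   return buf;
}

static std::string IndexHeaders(std::string const &cachedFile)
{
   std::string msg = "\nIndex-File: true\nFail-Ignore: true";
   struct stat Buf;
   if (stat(cachedFile.c_str(), &Buf) == 0)   // only if a cached copy already exists
      msg += "\nLast-Modified: " + HttpDate(Buf.st_mtime);
   return msg;
}
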
@@ -1297,7 +1636,8 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
    the archive is already available in the cache and stashes the MD5 for
    checking later. */
 bool pkgAcqArchive::QueueNext()
-{   
+{
+   string const ForceHash = _config->Find("Acquire::ForceHash");
    for (; Vf.end() == false; Vf++)
    {
       // Ignore not source sources
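
QueueNext() now reads Acquire::ForceHash first; the hunk below uses it to pin the hash type used for verification and otherwise prefers the strongest hash the index record offers (SHA256, then SHA1, then MD5). A standalone sketch of that selection order over plain strings (PickHash and its struct are illustrative; empty strings stand for hashes missing from the record):

#include <string>
#include <strings.h>   // strcasecmp

struct PickedHash { std::string type, value; };

static PickedHash MakeHash(const char *type, std::string const &value)
{
   PickedHash h; h.type = type; h.value = value; return h;
}

static PickedHash PickHash(std::string const &force, std::string const &sha256,
                           std::string const &sha1, std::string const &md5)
{
   if (force.empty() == false)                    // pinned type wins
   {
      if (strcasecmp(force.c_str(), "sha256") == 0)
         return MakeHash("SHA256", sha256);
      if (strcasecmp(force.c_str(), "sha1") == 0)
         return MakeHash("SHA1", sha1);
      return MakeHash("MD5Sum", md5);
   }
   if (sha256.empty() == false)                   // otherwise strongest available
      return MakeHash("SHA256", sha256);
   if (sha1.empty() == false)
      return MakeHash("SHA1", sha1);
   return MakeHash("MD5Sum", md5);
}
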
@@ -1320,12 +1660,25 @@ bool pkgAcqArchive::QueueNext()
         return false;
       
       string PkgFile = Parse.FileName();
-      if(Parse.SHA256Hash() != "")
-        ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
-      else if (Parse.SHA1Hash() != "")
-        ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
-      else 
-        ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+      if (ForceHash.empty() == false)
+      {
+        if(stringcasecmp(ForceHash, "sha256") == 0)
+           ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
+        else if (stringcasecmp(ForceHash, "sha1") == 0)
+           ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
+        else
+           ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+      }
+      else
+      {
+        string Hash;
+        if ((Hash = Parse.SHA256Hash()).empty() == false)
+           ExpectedHash = HashString("SHA256", Hash);
+        else if ((Hash = Parse.SHA1Hash()).empty() == false)
+           ExpectedHash = HashString("SHA1", Hash);
+        else
+           ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+      }
       if (PkgFile.empty() == true)
         return _error->Error(_("The package index files are corrupted. No Filename: "
                              "field for package %s."),
@@ -1517,8 +1870,9 @@ void pkgAcqArchive::Finished()
 /* The file is added to the queue */
 pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
                       unsigned long Size,string Dsc,string ShortDesc,
-                      const string &DestDir, const string &DestFilename) :
-                       Item(Owner), ExpectedHash(Hash)
+                      const string &DestDir, const string &DestFilename,
+                       bool IsIndexFile) :
+                       Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
 {
    Retries = _config->FindI("Acquire::Retries",0);
    
@@ -1564,7 +1918,7 @@ void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash,
    if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
    {
       Status = StatError;
-      ErrorText = "Hash Sum mismatch";
+      ErrorText = _("Hash Sum mismatch");
       Rename(DestFile,DestFile + ".FAILED");
       return;
    }
@@ -1633,3 +1987,23 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
    Item::Failed(Message,Cnf);
 }
                                                                        /*}}}*/
+// AcqFile::Custom600Headers - Insert custom request headers           /*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+string pkgAcqFile::Custom600Headers()
+{
+   if (IsIndexFile)
+      return "\nIndex-File: true";
+   return "";
+}
+                                                                       /*}}}*/
+bool IndexTarget::IsOptional() const {
+   if (strncmp(ShortDesc.c_str(), "Translation", 11) != 0)
+      return false;
+   return true;
+}
+bool IndexTarget::IsSubIndex() const {
+   if (ShortDesc != "TranslationIndex")
+      return false;
+   return true;
+}
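
The two IndexTarget helpers added at the end classify targets purely by ShortDesc: anything starting with "Translation" is optional, and only the "TranslationIndex" target goes through pkgAcqSubIndex. The same checks as free functions over plain strings (names are illustrative):

#include <cstring>
#include <string>

static bool IsOptionalTarget(std::string const &shortDesc)
{
   // Translation-* files may be missing without failing the update
   return strncmp(shortDesc.c_str(), "Translation", 11) == 0;
}

static bool IsSubIndexTarget(std::string const &shortDesc)
{
   // only the translation index itself is fetched as a sub-index
   return shortDesc == "TranslationIndex";
}
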