git.saurik.com Git - apt.git/blobdiff - apt-pkg/acquire-item.cc
merge from trunk
[apt.git] / apt-pkg / acquire-item.cc
index a65630afd08ec43dd94ce4533abc0cf8501e0c45..2c4ce91a01496bde3cd24e66982995ffd8d71d3a 100644
@@ -15,6 +15,7 @@
 // Include Files                                                       /*{{{*/
 #include <apt-pkg/acquire-item.h>
 #include <apt-pkg/configuration.h>
+#include <apt-pkg/aptconfiguration.h>
 #include <apt-pkg/sourcelist.h>
 #include <apt-pkg/vendorlist.h>
 #include <apt-pkg/error.h>
@@ -32,6 +33,7 @@
 #include <string>
 #include <sstream>
 #include <stdio.h>
+#include <ctime>
                                                                        /*}}}*/
 
 using namespace std;
@@ -80,7 +82,7 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
       Status = StatError;
       Dequeue();
    }   
-   
+
    // report mirror failure back to LP if we actually use a mirror
    string FailReason = LookupTag(Message, "FailReason");
    if(FailReason.size() != 0)
@@ -138,7 +140,8 @@ void pkgAcquire::Item::Rename(string From,string To)
    }   
 }
                                                                        /*}}}*/
-
+// Acquire::Item::ReportMirrorFailure                                  /*{{{*/
+// ---------------------------------------------------------------------
 void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
 {
    // we only act if a mirror was used at all
@@ -180,7 +183,7 @@ void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
                      _config->Find("Methods::Mirror::ProblemReporting").c_str());
    }
 }
-
+                                                                       /*}}}*/
 // AcqDiffIndex::AcqDiffIndex - Constructor                            /*{{{*/
 // ---------------------------------------------------------------------
 /* Get the DiffIndex file first and see if there are patches available 
@@ -268,19 +271,19 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)                /*{{{*/
 
    if(TF.Step(Tags) == true)
    {
-      string local_sha1;
       bool found = false;
       DiffInfo d;
       string size;
 
-      string tmp = Tags.FindS("SHA1-Current");
+      string const tmp = Tags.FindS("SHA1-Current");
       std::stringstream ss(tmp);
-      ss >> ServerSha1;
+      ss >> ServerSha1 >> size;
+      unsigned long const ServerSize = atol(size.c_str());
 
       FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
       SHA1Summation SHA1;
       SHA1.AddFD(fd.Fd(), fd.Size());
-      local_sha1 = string(SHA1.Result());
+      string const local_sha1 = SHA1.Result();
 
       if(local_sha1 == ServerSha1) 
       {
@@ -294,23 +297,59 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)                /*{{{*/
       else 
       {
         if(Debug)
-           std::clog << "SHA1-Current: " << ServerSha1 << std::endl;
+           std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl;
 
         // check the history and see what patches we need
-        string history = Tags.FindS("SHA1-History");     
+        string const history = Tags.FindS("SHA1-History");
         std::stringstream hist(history);
-        while(hist >> d.sha1 >> size >> d.file) 
+        while(hist >> d.sha1 >> size >> d.file)
         {
-           d.size = atoi(size.c_str());
            // read until the first match is found
+           // from that point on, we probably need all diffs
            if(d.sha1 == local_sha1) 
               found=true;
-           // from that point on, we probably need all diffs
-           if(found) 
+           else if (found == false)
+              continue;
+
+           if(Debug)
+              std::clog << "Need to get diff: " << d.file << std::endl;
+           available_patches.push_back(d);
+        }
+
+        if (available_patches.empty() == false)
+        {
+           // patching with too many files is rather slow compared to a fast download
+           unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
+           if (fileLimit != 0 && fileLimit < available_patches.size())
            {
-              if(Debug)
-                 std::clog << "Need to get diff: " << d.file << std::endl;
-              available_patches.push_back(d);
+              if (Debug)
+                 std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
+                       << ") so fall back to complete download" << std::endl;
+              return false;
+           }
+
+           // see if the patches are too big
+           found = false; // it was true and it will be true again at the end
+           d = *available_patches.begin();
+           string const firstPatch = d.file;
+           unsigned long patchesSize = 0;
+           std::stringstream patches(Tags.FindS("SHA1-Patches"));
+           while(patches >> d.sha1 >> size >> d.file)
+           {
+              if (firstPatch == d.file)
+                 found = true;
+              else if (found == false)
+                 continue;
+
+              patchesSize += atol(size.c_str());
+           }
+           unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
+           if (sizeLimit > 0 && (sizeLimit/100) < patchesSize)
+           {
+              if (Debug)
+                 std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
+                       << ") so fall back to complete download" << std::endl;
+              return false;
            }
         }
       }
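Note on the two guards added above: pdiffs are now skipped when the number of queued patches exceeds Acquire::PDiffs::FileLimit (0 means no limit), or when their combined size exceeds Acquire::PDiffs::SizeLimit percent (default 100) of the full index size taken from the SHA1-Current line. A minimal standalone model of that decision, not apt's API; the helper name is hypothetical:

#include <cstddef>

// Returns true when patching is still worthwhile under the limits above.
bool usePatches(std::size_t patchCount, unsigned long patchesSize,
                unsigned long serverSize, unsigned long fileLimit,
                unsigned long sizeLimitPercent)
{
   if (fileLimit != 0 && fileLimit < patchCount)
      return false;                     // too many patches: a full download is faster
   unsigned long const sizeLimit = serverSize * sizeLimitPercent;
   if (sizeLimit > 0 && (sizeLimit / 100) < patchesSize)
      return false;                     // patches exceed the allowed share of the index size
   return true;
}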
@@ -319,11 +358,11 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)                /*{{{*/
       if(found) 
       {
         // queue the diffs
-        string::size_type last_space = Description.rfind(" ");
+        string::size_type const last_space = Description.rfind(" ");
         if(last_space != string::npos)
            Description.erase(last_space, Description.size()-last_space);
         new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
-                             ExpectedHash, available_patches);
+                             ExpectedHash, ServerSha1, available_patches);
         Complete = false;
         Status = StatDone;
         Dequeue();
@@ -391,9 +430,10 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash,       /*{
 pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
                                   string URI,string URIDesc,string ShortDesc,
                                   HashString ExpectedHash, 
+                                  string ServerSha1,
                                   vector<DiffInfo> diffs)
    : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), 
-     available_patches(diffs)
+     available_patches(diffs), ServerSha1(ServerSha1)
 {
    
    DestFile = _config->FindDir("Dir::State::lists") + "partial/";
@@ -479,6 +519,13 @@ bool pkgAcqIndexDiffs::QueueNextDiff()                                     /*{{{*/
       std::clog << "QueueNextDiff: " 
                << FinalFile << " (" << local_sha1 << ")"<<std::endl;
 
+   // final file reached before all patches are applied
+   if(local_sha1 == ServerSha1)
+   {
+      Finish(true);
+      return true;
+   }
+
    // remove all patches until the next matching patch is found
    // this requires the Index file to be ordered
    for(vector<DiffInfo>::iterator I=available_patches.begin();
@@ -576,7 +623,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,       /*
       // see if there is more to download
       if(available_patches.size() > 0) {
         new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
-                             ExpectedHash, available_patches);
+                             ExpectedHash, ServerSha1, available_patches);
         return Finish();
       } else 
         return Finish(true);
@@ -601,13 +648,14 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
    if(comprExt.empty()) 
    {
       // autoselect the compression method
-      if(FileExists("/bin/bzip2")) 
-        CompressionExtension = ".bz2";
-      else 
-        CompressionExtension = ".gz";
-   } else {
-      CompressionExtension = (comprExt == "plain" ? "" : comprExt);
+      std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+      if (types.empty() == true)
+        comprExt = "plain";
+      else
+        comprExt = "." + types[0];
    }
+   CompressionExtension = ((comprExt == "plain" || comprExt == ".") ? "" : comprExt);
+
    Desc.URI = URI + CompressionExtension;
 
    Desc.Description = URIDesc;
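Note: the extension is no longer chosen by probing for /bin/bzip2 but from the configured compression type list. A standalone sketch of the selection rule above, with the vector standing in for APT::Configuration::getCompressionTypes() and the helper name being hypothetical:

#include <string>
#include <vector>

// An empty comprExt means "pick for me": the first configured type wins;
// "plain" or "." collapse to no extension at all, as in the constructor above.
std::string pickExtension(std::string comprExt,
                          std::vector<std::string> const &types)
{
   if (comprExt.empty())
      comprExt = types.empty() ? "plain" : "." + types[0];
   return (comprExt == "plain" || comprExt == ".") ? "" : comprExt;
}
// e.g. pickExtension("", {"bz2", "gz"}) yields ".bz2"; pickExtension("", {}) yields ""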
@@ -624,6 +672,8 @@ string pkgAcqIndex::Custom600Headers()
 {
    string Final = _config->FindDir("Dir::State::lists");
    Final += URItoFileName(RealURI);
+   if (_config->FindB("Acquire::GzipIndexes",false))
+      Final += ".gz";
    
    struct stat Buf;
    if (stat(Final.c_str(),&Buf) != 0)
@@ -633,24 +683,27 @@ string pkgAcqIndex::Custom600Headers()
                                                                        /*}}}*/
 void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
 {
-   bool descChanged = false;
-   // no .bz2 found, retry with .gz
-   if(Desc.URI.substr(Desc.URI.size()-3) == "bz2") {
-      Desc.URI = Desc.URI.substr(0,Desc.URI.size()-3) + "gz";
-
-      new pkgAcqIndex(Owner, RealURI, Desc.Description,Desc.ShortDesc,
-                     ExpectedHash, string(".gz"));
-         descChanged = true;
-   }
-   // no .gz found, retry with uncompressed
-   else if(Desc.URI.substr(Desc.URI.size()-2) == "gz") {
-      Desc.URI = Desc.URI.substr(0,Desc.URI.size()-2);
+   std::vector<std::string> types = APT::Configuration::getCompressionTypes();
 
-      new pkgAcqIndex(Owner, RealURI, Desc.Description,Desc.ShortDesc,
-                     ExpectedHash, string("plain"));
-         descChanged = true;
-   }
-   if (descChanged) {
+   for (std::vector<std::string>::const_iterator t = types.begin();
+       t != types.end(); t++)
+   {
+      // jump over all already tried compression types
+      const unsigned int nameLen = Desc.URI.size() - (*t).size();
+      if(Desc.URI.substr(nameLen) != *t)
+        continue;
+
+      // we want to try it with the next extension (and make sure to 
+      // not skip over the end)
+      t++;
+      if (t == types.end())
+        break;
+
+      // queue new download
+      Desc.URI = Desc.URI.substr(0, nameLen) + *t;
+      new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc,
+                      ExpectedHash, string(".").append(*t));
+      
       Status = StatDone;
       Complete = false;
       Dequeue();
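Note: Failed() now walks the same configured type list, finds the extension that just failed and requeues the index with the next entry, replacing the old hard-coded bz2 -> gz -> plain chain. A standalone sketch of that walk (helper name is hypothetical):

#include <string>
#include <vector>

// Returns the next compression type to try after failedURI, or "" when the
// list is exhausted, which ends the fallback chain just like the break above.
std::string nextCompressionType(std::string const &failedURI,
                                std::vector<std::string> const &types)
{
   for (std::vector<std::string>::const_iterator t = types.begin();
        t != types.end(); ++t)
   {
      if (failedURI.size() < t->size() ||
          failedURI.compare(failedURI.size() - t->size(), t->size(), *t) != 0)
         continue;                      // not the type we just tried
      if (t + 1 == types.end())
         break;                         // nothing left to fall back to
      return *(t + 1);
   }
   return "";
}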
@@ -738,21 +791,39 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
       ErrorText = "Method gave a blank filename";
    }
    
+   string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
+
    // The files timestamp matches
-   if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
+   if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) {
+       if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz")
+         // Update DestFile for .gz suffix so that the clean operation keeps it
+         DestFile += ".gz";
       return;
+    }
 
    if (FileName == DestFile)
       Erase = true;
    else
       Local = true;
    
-   string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
-   const char *decompProg;
-   if(compExt == "bz2") 
-      decompProg = "bzip2";
-   else if(compExt == "gz") 
-      decompProg = "gzip";
+   string decompProg;
+
+   // If we enable compressed indexes and already have gzip, keep it
+   if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz" && !Local) {
+      string FinalFile = _config->FindDir("Dir::State::lists");
+      FinalFile += URItoFileName(RealURI) + ".gz";
+      Rename(DestFile,FinalFile);
+      chmod(FinalFile.c_str(),0644);
+      
+      // Update DestFile for .gz suffix so that the clean operation keeps it
+      DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+      DestFile += URItoFileName(RealURI) + ".gz";
+      return;
+    }
+
+   // get the binary name for the compression type in use
+   decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
+   if(decompProg.empty() == false);
    // flExtensions returns the full name if no extension is found
    // this is why we have this complicated compare operation here
    // FIXME: add a new flJustExtension() that returns "" if no
@@ -767,9 +838,9 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
 
    Decompression = true;
    DestFile += ".decomp";
-   Desc.URI = string(decompProg) + ":" + FileName;
+   Desc.URI = decompProg + ":" + FileName;
    QueueURI(Desc);
-   Mode = decompProg;
+   Mode = decompProg.c_str();
 }
                                                                        /*}}}*/
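Note on the two pkgAcqIndex::Done() hunks above: with Acquire::GzipIndexes set, a fetched .gz index is kept compressed and simply moved into Dir::State::lists; everything else is handed to a decompressor that is no longer hard-coded but looked up from Acquire::CompressionTypes::<ext> and turned into a method URI such as "gzip:<file>". A standalone sketch of that lookup, with the map standing in for the configuration tree (for instance "gz" -> "gzip", "bz2" -> "bzip2"); how unknown extensions are handled lies outside the lines shown here:

#include <map>
#include <string>

// Builds the decompression request that is queued after the download.
std::string decompressURI(std::string const &ext, std::string const &file,
                          std::map<std::string, std::string> const &types)
{
   std::map<std::string, std::string>::const_iterator const prog = types.find(ext);
   if (prog == types.end())
      return "";                        // no decompressor configured for this extension
   return prog->second + ":" + file;    // e.g. "gzip:/var/lib/apt/lists/partial/..._Packages.gz"
}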
 // AcqIndexTrans::pkgAcqIndexTrans - Constructor                       /*{{{*/
@@ -779,6 +850,19 @@ pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
                            string URI,string URIDesc,string ShortDesc) 
   : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
 {
+}
+                                                                       /*}}}*/
+// AcqIndexTrans::Custom600Headers - Insert custom request headers     /*{{{*/
+// ---------------------------------------------------------------------
+string pkgAcqIndexTrans::Custom600Headers()
+{
+   string Final = _config->FindDir("Dir::State::lists");
+   Final += URItoFileName(RealURI);
+
+   struct stat Buf;
+   if (stat(Final.c_str(),&Buf) != 0)
+      return "\nFail-Ignore: true";
+   return "\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
 }
                                                                        /*}}}*/
 // AcqIndexTrans::Failed - Silence failure messages for missing files  /*{{{*/
@@ -976,12 +1060,6 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*
 
       // all cool, move Release file into place
       Complete = true;
-
-      string FinalFile = _config->FindDir("Dir::State::lists");
-      FinalFile += URItoFileName(RealURI);
-      Rename(DestFile,FinalFile);
-      chmod(FinalFile.c_str(),0644);
-      DestFile = FinalFile;
    }
    else
    {
@@ -1008,8 +1086,18 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash,        /*{{{*
          Desc.URI = "gpgv:" + SigFile;
          QueueURI(Desc);
          Mode = "gpgv";
+        return;
       }
    }
+
+   if (Complete == true)
+   {
+      string FinalFile = _config->FindDir("Dir::State::lists");
+      FinalFile += URItoFileName(RealURI);
+      Rename(DestFile,FinalFile);
+      chmod(FinalFile.c_str(),0644);
+      DestFile = FinalFile;
+   }
 }
                                                                        /*}}}*/
 void pkgAcqMetaIndex::RetrievalDone(string Message)                    /*{{{*/
@@ -1108,13 +1196,16 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify)                         /*{{{*/
             return;
          }
       }
-      
-      // Queue Packages file (either diff or full packages files, depending
-      // on the users option)
-      if(_config->FindB("Acquire::PDiffs",false) == true) 
+
+      /* Queue the Packages file (either the diff or the full packages file,
+         depending on the user's option) - we also check if the PDiff Index file
+         is listed in the Meta-Index file. Ideally pkgAcqDiffIndex would test
+         this instead, but passing the required info to it is too much hassle */
+      if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
+         MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true))
         new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
                             (*Target)->ShortDesc, ExpectedIndexHash);
-      else 
+      else
         new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
                            (*Target)->ShortDesc, ExpectedIndexHash);
    }
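Note: pdiffs are now on by default (Acquire::PDiffs defaults to true), and for a verified repository a diff index is only queued when the Release file actually lists <MetaKey>.diff/Index; otherwise the full index is fetched directly. A standalone model of that decision, with a plain set standing in for MetaIndexParser->Exists():

#include <set>
#include <string>

bool queueDiffIndex(bool pdiffsEnabled, bool verify,
                    std::string const &metaKey,
                    std::set<std::string> const &releaseEntries)
{
   if (pdiffsEnabled == false)
      return false;                     // the user disabled pdiffs entirely
   if (verify == false)
      return true;                      // nothing trustworthy to consult, just try the diff index
   return releaseEntries.count(metaKey + ".diff/Index") != 0;
}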
@@ -1180,6 +1271,17 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message)                       /*{{{*/
       Transformed = "";
    }
 
+   if (_config->FindB("Acquire::Check-Valid-Until", true) == true &&
+       MetaIndexParser->GetValidUntil() > 0) {
+      time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil();
+      if (invalid_since > 0)
+        // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
+        // the time since which the file has been invalid - formatted in the same way as in
+        // the download progress display (e.g. 7d 3h 42min 1s)
+        return _error->Error(_("Release file expired, ignoring %s (invalid since %s)"),
+                             RealURI.c_str(), TimeToStr(invalid_since).c_str());
+   }
+
    if (_config->FindB("Debug::pkgAcquire::Auth", false)) 
    {
       std::cerr << "Got Codename: " << MetaIndexParser->GetDist() << std::endl;
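Note on the Valid-Until handling added above: a Release file whose Valid-Until timestamp lies in the past is now rejected, unless the check is disabled via Acquire::Check-Valid-Until. A minimal sketch of the same test (helper name is hypothetical):

#include <ctime>

// validUntil == 0 means the Release file carries no Valid-Until field;
// checkEnabled mirrors Acquire::Check-Valid-Until (default true).
bool releaseStillValid(time_t validUntil, bool checkEnabled)
{
   if (checkEnabled == false || validUntil <= 0)
      return true;                       // nothing to check against
   return std::time(NULL) <= validUntil; // expired once 'now' is past Valid-Until
}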
@@ -1197,7 +1299,7 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message)                        /*{{{*/
 //       return false;
       if (!Transformed.empty())
       {
-         _error->Warning("Conflicting distribution: %s (expected %s but got %s)",
+         _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
                          Desc.Description.c_str(),
                          Transformed.c_str(),
                          MetaIndexParser->GetDist().c_str());
@@ -1225,7 +1327,7 @@ void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
         Status = StatTransientNetworkError;
         _error->Warning(_("An error occurred during the signature "
                           "verification. The repository is not updated "
-                          "and the previous index files will be used."
+                          "and the previous index files will be used. "
                           "GPG error: %s: %s\n"),
                         Desc.Description.c_str(),
                         LookupTag(Message,"Message").c_str());
@@ -1330,7 +1432,8 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
    the archive is already available in the cache and stashes the MD5 for
    checking later. */
 bool pkgAcqArchive::QueueNext()
-{   
+{
+   string const ForceHash = _config->Find("Acquire::ForceHash");
    for (; Vf.end() == false; Vf++)
    {
       // Ignore not source sources
@@ -1353,12 +1456,25 @@ bool pkgAcqArchive::QueueNext()
         return false;
       
       string PkgFile = Parse.FileName();
-      if(Parse.SHA256Hash() != "")
-        ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
-      else if (Parse.SHA1Hash() != "")
-        ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
-      else 
-        ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+      if (ForceHash.empty() == false)
+      {
+        if(stringcasecmp(ForceHash, "sha256") == 0)
+           ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
+        else if (stringcasecmp(ForceHash, "sha1") == 0)
+           ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
+        else
+           ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+      }
+      else
+      {
+        string Hash;
+        if ((Hash = Parse.SHA256Hash()).empty() == false)
+           ExpectedHash = HashString("SHA256", Hash);
+        else if ((Hash = Parse.SHA1Hash()).empty() == false)
+           ExpectedHash = HashString("SHA1", Hash);
+        else
+           ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+      }
       if (PkgFile.empty() == true)
         return _error->Error(_("The package index files are corrupted. No Filename: "
                              "field for package %s."),
@@ -1550,8 +1666,9 @@ void pkgAcqArchive::Finished()
 /* The file is added to the queue */
 pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
                       unsigned long Size,string Dsc,string ShortDesc,
-                      const string &DestDir, const string &DestFilename) :
-                       Item(Owner), ExpectedHash(Hash)
+                      const string &DestDir, const string &DestFilename,
+                       bool IsIndexFile) :
+                       Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
 {
    Retries = _config->FindI("Acquire::Retries",0);
    
@@ -1597,7 +1714,7 @@ void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash,
    if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
    {
       Status = StatError;
-      ErrorText = "Hash Sum mismatch";
+      ErrorText = _("Hash Sum mismatch");
       Rename(DestFile,DestFile + ".FAILED");
       return;
    }
@@ -1666,3 +1783,13 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
    Item::Failed(Message,Cnf);
 }
                                                                        /*}}}*/
+// AcqIndex::Custom600Headers - Insert custom request headers          /*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+string pkgAcqFile::Custom600Headers()
+{
+   if (IsIndexFile)
+      return "\nIndex-File: true";
+   return "";
+}
+                                                                       /*}}}*/