X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/e878aedb8b53b311295a2df55ce5e865b1ad92b9..8d3489abd90a64715b92976a7c60b42a386a4c5c:/apt-pkg/acquire-item.cc?ds=inline

diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 94341c81a..a289fb7ba 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -33,6 +33,7 @@
 #include <string>
 #include <sstream>
 #include <stdio.h>
+#include <ctime>
 									/*}}}*/

 using namespace std;
@@ -64,6 +65,7 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
    Status = StatIdle;
    ErrorText = LookupTag(Message,"Message");
+   UsedMirror = LookupTag(Message,"UsedMirror");
    if (QueueCounter <= 1)
    {
       /* This indicates that the file is not available right now but might
@@ -76,10 +78,17 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 	 Dequeue();
 	 return;
       }
-      
+
       Status = StatError;
       Dequeue();
    }
+
+   // report mirror failure back to LP if we actually use a mirror
+   string FailReason = LookupTag(Message, "FailReason");
+   if(FailReason.size() != 0)
+      ReportMirrorFailure(FailReason);
+   else
+      ReportMirrorFailure(ErrorText);
 }
 									/*}}}*/
 // Acquire::Item::Start - Item has begun to download			/*{{{*/
@@ -101,7 +110,7 @@ void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,
 {
    // We just downloaded something..
    string FileName = LookupTag(Message,"Filename");
-   // we only inform the Log class if it was actually not a local thing
+   UsedMirror = LookupTag(Message,"UsedMirror");
    if (Complete == false && !Local && FileName == DestFile)
    {
       if (Owner->Log != 0)
@@ -110,7 +119,6 @@ void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,

    if (FileSize == 0)
       FileSize= Size;
-
    Status = StatDone;
    ErrorText = string();
    Owner->Dequeue(this);
@@ -132,6 +140,50 @@ void pkgAcquire::Item::Rename(string From,string To)
    }
 }
 									/*}}}*/
+// Acquire::Item::ReportMirrorFailure					/*{{{*/
+// ---------------------------------------------------------------------
+void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
+{
+   // we only act if a mirror was used at all
+   if(UsedMirror.empty())
+      return;
+#if 0
+   std::cerr << "\nReportMirrorFailure: "
+	     << UsedMirror
+	     << " Uri: " << DescURI()
+	     << " FailCode: "
+	     << FailCode << std::endl;
+#endif
+   const char *Args[40];
+   unsigned int i = 0;
+   string report = _config->Find("Methods::Mirror::ProblemReporting",
+				  "/usr/lib/apt/apt-report-mirror-failure");
+   if(!FileExists(report))
+      return;
+   Args[i++] = report.c_str();
+   Args[i++] = UsedMirror.c_str();
+   Args[i++] = DescURI().c_str();
+   Args[i++] = FailCode.c_str();
+   Args[i++] = NULL;
+   pid_t pid = ExecFork();
+   if(pid < 0)
+   {
+      _error->Error("ReportMirrorFailure Fork failed");
+      return;
+   }
+   else if(pid == 0)
+   {
+      execvp(Args[0], (char**)Args);
+      std::cerr << "Could not exec " << Args[0] << std::endl;
+      _exit(100);
+   }
+   if(!ExecWait(pid, "report-mirror-failure"))
+   {
+      _error->Warning("Couldn't report problem to '%s'",
+		      _config->Find("Methods::Mirror::ProblemReporting").c_str());
+   }
+}
+									/*}}}*/
 // AcqDiffIndex::AcqDiffIndex - Constructor				/*{{{*/
 // ---------------------------------------------------------------------
 /* Get the DiffIndex file first and see if there are patches availabe
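
The ReportMirrorFailure() hunk above hands the failure code to an external reporting script via fork/exec and waits for it to finish. A minimal standalone sketch of that pattern, using plain POSIX calls instead of apt's ExecFork()/ExecWait() helpers (the script path is only the default the hunk itself names, and all function names here are illustrative, not apt API):

// Sketch only: mirrors the shape of pkgAcquire::Item::ReportMirrorFailure(),
// but without any apt internals. Returns true if the helper ran and exited 0.
#include <iostream>
#include <string>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

static bool ReportFailure(const std::string &Script, const std::string &Mirror,
                          const std::string &Uri, const std::string &FailCode)
{
   if (Mirror.empty())               // only report if a mirror was actually used
      return true;

   const char *Args[5];
   Args[0] = Script.c_str();         // e.g. "/usr/lib/apt/apt-report-mirror-failure"
   Args[1] = Mirror.c_str();
   Args[2] = Uri.c_str();
   Args[3] = FailCode.c_str();
   Args[4] = NULL;

   pid_t pid = fork();
   if (pid < 0)
      return false;                  // fork failed
   if (pid == 0)
   {
      execvp(Args[0], (char **)Args);
      std::cerr << "Could not exec " << Args[0] << std::endl;
      _exit(100);                    // child: exec failed
   }
   int status = 0;
   return waitpid(pid, &status, 0) == pid &&
          WIFEXITED(status) && WEXITSTATUS(status) == 0;
}
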
@@ -219,19 +271,19 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)	/*{{{*/

    if(TF.Step(Tags) == true)
    {
-      string local_sha1;
       bool found = false;
       DiffInfo d;
       string size;
-      string tmp = Tags.FindS("SHA1-Current");
+      string const tmp = Tags.FindS("SHA1-Current");
       std::stringstream ss(tmp);
-      ss >> ServerSha1;
+      ss >> ServerSha1 >> size;
+      unsigned long const ServerSize = atol(size.c_str());

-      FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
+      FileFd fd(CurrentPackagesFile, FileFd::ReadOnlyGzip);
       SHA1Summation SHA1;
       SHA1.AddFD(fd.Fd(), fd.Size());
-      local_sha1 = string(SHA1.Result());
+      string const local_sha1 = SHA1.Result();

       if(local_sha1 == ServerSha1)
       {
@@ -248,20 +300,56 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)	/*{{{*/
 	 std::clog << "SHA1-Current: " << ServerSha1 << std::endl;

 	 // check the historie and see what patches we need
-	 string history = Tags.FindS("SHA1-History");
+	 string const history = Tags.FindS("SHA1-History");
 	 std::stringstream hist(history);
-	 while(hist >> d.sha1 >> size >> d.file)
+	 while(hist >> d.sha1 >> size >> d.file)
 	 {
-	    d.size = atoi(size.c_str());
 	    // read until the first match is found
+	    // from that point on, we probably need all diffs
 	    if(d.sha1 == local_sha1)
 	       found=true;
-	    // from that point on, we probably need all diffs
-	    if(found)
+	    else if (found == false)
+	       continue;
+
+	    if(Debug)
+	       std::clog << "Need to get diff: " << d.file << std::endl;
+	    available_patches.push_back(d);
+	 }
+
+	 if (available_patches.empty() == false)
+	 {
+	    // patching with too many files is rather slow compared to a fast download
+	    unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
+	    if (fileLimit != 0 && fileLimit < available_patches.size())
 	    {
-	       if(Debug)
-		  std::clog << "Need to get diff: " << d.file << std::endl;
-	       available_patches.push_back(d);
+	       if (Debug)
+		  std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
+			<< ") so fallback to complete download" << std::endl;
+	       return false;
+	    }
+
+	    // see if the patches are too big
+	    found = false; // it was true and it will be true again at the end
+	    d = *available_patches.begin();
+	    string const firstPatch = d.file;
+	    unsigned long patchesSize = 0;
+	    std::stringstream patches(Tags.FindS("SHA1-Patches"));
+	    while(patches >> d.sha1 >> size >> d.file)
+	    {
+	       if (firstPatch == d.file)
+		  found = true;
+	       else if (found == false)
+		  continue;
+
+	       patchesSize += atol(size.c_str());
+	    }
+	    unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
+	    if (sizeLimit > 0 && (sizeLimit/100) < patchesSize)
+	    {
+	       if (Debug)
+		  std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
+			<< ") so fallback to complete download" << std::endl;
+	       return false;
 	    }
 	 }
       }
@@ -270,11 +358,11 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)	/*{{{*/
    if(found)
    {
       // queue the diffs
-      string::size_type last_space = Description.rfind(" ");
+      string::size_type const last_space = Description.rfind(" ");
      if(last_space != string::npos)
	 Description.erase(last_space, Description.size()-last_space);
      new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
-		   ExpectedHash, available_patches);
+		   ExpectedHash, ServerSha1, available_patches);
      Complete = false;
      Status = StatDone;
      Dequeue();
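
The ParseDiffIndex() changes above add two guards before pdiffs are queued: Acquire::PDiffs::FileLimit caps the number of patches, and Acquire::PDiffs::SizeLimit caps their combined size as a percentage of the full index. A small self-contained sketch of that decision, with illustrative names and the same integer arithmetic (multiplying the index size by the percentage and dividing by 100 afterwards avoids floating point):

// Sketch only: the fallback decision added to pkgAcqDiffIndex::ParseDiffIndex(),
// reduced to plain numbers. Not apt API; names are illustrative.
#include <iostream>
#include <vector>

// Returns true if applying the patches looks worthwhile,
// false if a complete download should be used instead.
static bool UsePatches(const std::vector<unsigned long> &PatchSizes,
                       unsigned long FullIndexSize,   // size taken from SHA1-Current
                       unsigned long FileLimit,       // Acquire::PDiffs::FileLimit, 0 = off
                       unsigned long SizeLimitPct)    // Acquire::PDiffs::SizeLimit, default 100
{
   if (FileLimit != 0 && FileLimit < PatchSizes.size())
      return false;                                   // too many patches: patching is slow

   unsigned long PatchesSize = 0;
   for (size_t i = 0; i < PatchSizes.size(); ++i)
      PatchesSize += PatchSizes[i];

   // sizeLimit = index size * percentage; compare sizeLimit/100 against the patch total
   unsigned long const SizeLimit = FullIndexSize * SizeLimitPct;
   if (SizeLimit > 0 && (SizeLimit / 100) < PatchesSize)
      return false;                                   // patches bigger than the allowed share

   return true;
}

int main()
{
   std::vector<unsigned long> Sizes;
   Sizes.push_back(40000);
   Sizes.push_back(70000);
   // 110 kB of patches against a 100 kB index with the default 100% limit -> full download
   std::cout << (UsePatches(Sizes, 100000, 0, 100) ? "patch" : "full") << std::endl;
   return 0;
}
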
_config->FindDir("Dir::State::lists"); FinalFile += URItoFileName(RealURI); - FileFd fd(FinalFile, FileFd::ReadOnly); + FileFd fd(FinalFile, FileFd::ReadOnlyGzip); SHA1Summation SHA1; SHA1.AddFD(fd.Fd(), fd.Size()); string local_sha1 = string(SHA1.Result()); @@ -430,6 +519,13 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ std::clog << "QueueNextDiff: " << FinalFile << " (" << local_sha1 << ")"<::iterator I=available_patches.begin(); @@ -527,7 +623,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /* // see if there is more to download if(available_patches.size() > 0) { new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash, available_patches); + ExpectedHash, ServerSha1, available_patches); return Finish(); } else return Finish(true); @@ -576,11 +672,12 @@ string pkgAcqIndex::Custom600Headers() { string Final = _config->FindDir("Dir::State::lists"); Final += URItoFileName(RealURI); + if (_config->FindB("Acquire::GzipIndexes",false)) + Final += ".gz"; struct stat Buf; if (stat(Final.c_str(),&Buf) != 0) return "\nIndex-File: true"; - return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); } /*}}}*/ @@ -596,21 +693,21 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ if(Desc.URI.substr(nameLen) != *t) continue; - // we want to try it with the next extension + // we want to try it with the next extension (and make sure to + // not skip over the end) t++; + if (t == types.end()) + break; - if (t != types.end()) - { - Desc.URI = Desc.URI.substr(0, nameLen) + *t; - - new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc, - ExpectedHash, string(".").append(*t)); - - Status = StatDone; - Complete = false; - Dequeue(); - return; - } + // queue new download + Desc.URI = Desc.URI.substr(0, nameLen) + *t; + new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc, + ExpectedHash, string(".").append(*t)); + + Status = StatDone; + Complete = false; + Dequeue(); + return; } // on decompression failure, remove bad versions in partial/ @@ -648,6 +745,7 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash, Status = StatAuthError; ErrorText = _("Hash Sum mismatch"); Rename(DestFile,DestFile + ".FAILED"); + ReportMirrorFailure("HashChecksumFailure"); return; } // Done, move it into position @@ -693,18 +791,36 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash, ErrorText = "Method gave a blank filename"; } + string compExt = flExtension(flNotDir(URI(Desc.URI).Path)); + // The files timestamp matches - if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) + if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) { + if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz") + // Update DestFile for .gz suffix so that the clean operation keeps it + DestFile += ".gz"; return; + } if (FileName == DestFile) Erase = true; else Local = true; - string compExt = flExtension(flNotDir(URI(Desc.URI).Path)); string decompProg; + // If we enable compressed indexes and already have gzip, keep it + if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz" && !Local) { + string FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile += URItoFileName(RealURI) + ".gz"; + Rename(DestFile,FinalFile); + chmod(FinalFile.c_str(),0644); + + // Update DestFile for .gz suffix so that the clean operation keeps it + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(RealURI) + ".gz"; + 
@@ -734,6 +850,19 @@ pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
 				   string URI,string URIDesc,string ShortDesc)
   : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
 {
+}
+									/*}}}*/
+// AcqIndexTrans::Custom600Headers - Insert custom request headers	/*{{{*/
+// ---------------------------------------------------------------------
+string pkgAcqIndexTrans::Custom600Headers()
+{
+   string Final = _config->FindDir("Dir::State::lists");
+   Final += URItoFileName(RealURI);
+
+   struct stat Buf;
+   if (stat(Final.c_str(),&Buf) != 0)
+      return "\nFail-Ignore: true";
+   return "\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
 }
 									/*}}}*/
 // AcqIndexTrans::Failed - Silence failure messages for missing files	/*{{{*/
@@ -838,8 +967,9 @@ void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5,
      Rename(LastGoodSig, DestFile);

   // queue a pkgAcqMetaIndex to be verified against the sig we just retrieved
-   new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
-		       DestFile, IndexTargets, MetaIndexParser);
+   new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc,
+		       MetaIndexShortDesc, DestFile, IndexTargets,
+		       MetaIndexParser);

 }
 									/*}}}*/
@@ -852,7 +982,7 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
 {
   Item::Failed(Message,Cnf);
   // move the sigfile back on transient network failures
-   if(FileExists(DestFile))
+   if(FileExists(LastGoodSig))
      Rename(LastGoodSig,Final);

   // set the status back to , Item::Failed likes to reset it
@@ -927,6 +1057,15 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash,	/*{{{*
   if (AuthPass == true)
   {
      AuthDone(Message);
+
+      // all cool, move Release file into place
+      Complete = true;
+
+      string FinalFile = _config->FindDir("Dir::State::lists");
+      FinalFile += URItoFileName(RealURI);
+      Rename(DestFile,FinalFile);
+      chmod(FinalFile.c_str(),0644);
+      DestFile = FinalFile;
   }
   else
   {
@@ -978,22 +1117,15 @@ void pkgAcqMetaIndex::RetrievalDone(string Message)			/*{{{*/
      return;
   }

-   // see if the download was a IMSHit
+   // make sure to verify against the right file on I-M-S hit
   IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false);
+   if(IMSHit)
+   {
+      string FinalFile = _config->FindDir("Dir::State::lists");
+      FinalFile += URItoFileName(RealURI);
+      DestFile = FinalFile;
+   }
   Complete = true;
-
-   string FinalFile = _config->FindDir("Dir::State::lists");
-   FinalFile += URItoFileName(RealURI);
-
-   // If we get a IMS hit we can remove the empty file in partial
-   // othersie we move the file in place
-   if (IMSHit)
-      unlink(DestFile.c_str());
-   else
-      Rename(DestFile,FinalFile);
-
-   chmod(FinalFile.c_str(),0644);
-   DestFile = FinalFile;
 }
 									/*}}}*/
 void pkgAcqMetaIndex::AuthDone(string Message)				/*{{{*/
@@ -1023,7 +1155,6 @@ void pkgAcqMetaIndex::AuthDone(string Message)			/*{{{*/
   QueueIndexes(true);

   // Done, move signature file into position
-
   string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
      URItoFileName(RealURI) + ".gpg";
   Rename(SigFile,VerifiedSigFile);
@@ -1061,13 +1192,16 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify)			/*{{{*/
            return;
         }
      }
-
-      // Queue Packages file (either diff or full packages files, depending
-      // on the users option)
-      if(_config->FindB("Acquire::PDiffs",true) == true)
+
+      /* Queue Packages file (either diff or full packages files, depending
+         on the users option) - we also check if the PDiff Index file is listed
+         in the Meta-Index file. Ideal would be if pkgAcqDiffIndex would test this
+         instead, but passing the required info to it is to much hassle */
+      if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
+          MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true))
         new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
                             (*Target)->ShortDesc, ExpectedIndexHash);
-      else
+      else
         new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
                         (*Target)->ShortDesc, ExpectedIndexHash);
   }
@@ -1133,6 +1267,17 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message)			/*{{{*/
      Transformed = "";
   }

+   if (_config->FindB("Acquire::Check-Valid-Until", true) == true &&
+       MetaIndexParser->GetValidUntil() > 0) {
+      time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil();
+      if (invalid_since > 0)
+	 // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
+	 // the time since then the file is invalid - formated in the same way as in
+	 // the download progress display (e.g. 7d 3h 42min 1s)
+	 return _error->Error(_("Release file expired, ignoring %s (invalid since %s)"),
+			      RealURI.c_str(), TimeToStr(invalid_since).c_str());
+   }
+
   if (_config->FindB("Debug::pkgAcquire::Auth", false))
   {
      std::cerr << "Got Codename: " << MetaIndexParser->GetDist() << std::endl;
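
The VerifyVendor() hunk above introduces the Acquire::Check-Valid-Until test: if the Release file carries a Valid-Until date that lies in the past, the update is rejected and the error reports how long the file has been invalid. The check boils down to simple time arithmetic, sketched here without any apt classes (names are illustrative):

// Sketch only: the Valid-Until test added to pkgAcqMetaIndex::VerifyVendor(),
// reduced to plain time arithmetic.
#include <ctime>
#include <iostream>

// Returns true if the Release file is still acceptable, false if it expired.
// ValidUntil == 0 means the field is absent and no check is done.
static bool ReleaseStillValid(time_t ValidUntil, bool CheckValidUntil = true)
{
   if (CheckValidUntil == false || ValidUntil <= 0)
      return true;
   time_t const invalid_since = time(NULL) - ValidUntil;
   return invalid_since <= 0;   // positive means the file expired that many seconds ago
}

int main()
{
   time_t const now = time(NULL);
   std::cout << ReleaseStillValid(now + 3600) << " "       // valid for another hour -> 1
             << ReleaseStillValid(now - 3600) << std::endl; // expired an hour ago -> 0
   return 0;
}
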
@@ -1150,7 +1295,7 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message)			/*{{{*/
 //       return false;
   if (!Transformed.empty())
   {
-      _error->Warning("Conflicting distribution: %s (expected %s but got %s)",
+      _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
                      Desc.Description.c_str(),
                      Transformed.c_str(),
                      MetaIndexParser->GetDist().c_str());
@@ -1167,30 +1312,30 @@ void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
   if (AuthPass == true)
   {
-      // if we fail the authentication but got the file via a IMS-Hit
-      // this means that the file wasn't downloaded and that it might be
-      // just stale (server problem, proxy etc). we delete what we have
-      // queue it again without i-m-s
-      // alternatively we could just unlink the file and let the user try again
-      if (IMSHit)
+      // gpgv method failed, if we have a good signature
+      string LastGoodSigFile = _config->FindDir("Dir::State::lists") +
+                               "partial/" + URItoFileName(RealURI) + ".gpg.reverify";
+      if(FileExists(LastGoodSigFile))
      {
-	 Complete = false;
-	 Local = false;
-	 AuthPass = false;
-	 unlink(DestFile.c_str());
-
-	 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
-	 DestFile += URItoFileName(RealURI);
-	 Desc.URI = RealURI;
-	 QueueURI(Desc);
+	 string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
+	                          URItoFileName(RealURI) + ".gpg";
+	 Rename(LastGoodSigFile,VerifiedSigFile);
+	 Status = StatTransientNetworkError;
+	 _error->Warning(_("A error occurred during the signature "
+			   "verification. The repository is not updated "
+			   "and the previous index files will be used."
+			   "GPG error: %s: %s\n"),
+			 Desc.Description.c_str(),
+			 LookupTag(Message,"Message").c_str());
+	 RunScripts("APT::Update::Auth-Failure");
 	 return;
+      } else {
+	 _error->Warning(_("GPG error: %s: %s"),
+			 Desc.Description.c_str(),
+			 LookupTag(Message,"Message").c_str());
      }
-
-      // gpgv method failed
-      _error->Warning("GPG error: %s: %s",
-                      Desc.Description.c_str(),
-                      LookupTag(Message,"Message").c_str());
-
+      ReportMirrorFailure("GPGFailure");
   }

   // No Release file was present, or verification failed, so fall
@@ -1283,7 +1428,8 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
   the archive is already available in the cache and stashs the MD5 for
   checking later. */
 bool pkgAcqArchive::QueueNext()
-{
+{
+   string const ForceHash = _config->Find("Acquire::ForceHash");
   for (; Vf.end() == false; Vf++)
   {
      // Ignore not source sources
@@ -1306,12 +1452,25 @@ bool pkgAcqArchive::QueueNext()
	 return false;

      string PkgFile = Parse.FileName();
-      if(Parse.SHA256Hash() != "")
-	 ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
-      else if (Parse.SHA1Hash() != "")
-	 ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
-      else
-	 ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+      if (ForceHash.empty() == false)
+      {
+	 if(stringcasecmp(ForceHash, "sha256") == 0)
+	    ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
+	 else if (stringcasecmp(ForceHash, "sha1") == 0)
+	    ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
+	 else
+	    ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+      }
+      else
+      {
+	 string Hash;
+	 if ((Hash = Parse.SHA256Hash()).empty() == false)
+	    ExpectedHash = HashString("SHA256", Hash);
+	 else if ((Hash = Parse.SHA1Hash()).empty() == false)
+	    ExpectedHash = HashString("SHA1", Hash);
+	 else
+	    ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+      }
      if (PkgFile.empty() == true)
	 return _error->Error(_("The package index files are corrupted. No Filename: "
			        "field for package %s."),
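
The QueueNext() hunk above reworks how the expected hash is chosen: an explicit Acquire::ForceHash wins, otherwise the strongest hash the index record offers is used (SHA256, then SHA1, then MD5Sum). A standalone sketch of that precedence; apt compares ForceHash case-insensitively via stringcasecmp(), which is simplified to == here, and the struct and return encoding are illustrative:

// Sketch only: hash-selection precedence as rewritten in pkgAcqArchive::QueueNext().
#include <iostream>
#include <string>

struct RecordHashes { std::string SHA256, SHA1, MD5; };

static std::string PickExpectedHash(const RecordHashes &R, const std::string &ForceHash)
{
   if (ForceHash.empty() == false)
   {
      if (ForceHash == "sha256") return "SHA256:" + R.SHA256;
      if (ForceHash == "sha1")   return "SHA1:"   + R.SHA1;
      return "MD5Sum:" + R.MD5;                    // anything else falls back to MD5
   }
   if (R.SHA256.empty() == false) return "SHA256:" + R.SHA256;
   if (R.SHA1.empty() == false)   return "SHA1:"   + R.SHA1;
   return "MD5Sum:" + R.MD5;
}

int main()
{
   RecordHashes R;
   R.SHA1 = "da39a3ee5e6b4b0d3255bfef95601890afd80709";
   R.MD5  = "d41d8cd98f00b204e9800998ecf8427e";
   std::cout << PickExpectedHash(R, "") << std::endl;   // SHA1 wins: no SHA256 present
   return 0;
}
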
+ "GPG error: %s: %s\n"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + RunScripts("APT::Update::Auth-Failure"); return; + } else { + _error->Warning(_("GPG error: %s: %s"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); } - // gpgv method failed - _error->Warning("GPG error: %s: %s", - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); - + ReportMirrorFailure("GPGFailure"); } // No Release file was present, or verification failed, so fall @@ -1283,7 +1428,8 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, the archive is already available in the cache and stashs the MD5 for checking later. */ bool pkgAcqArchive::QueueNext() -{ +{ + string const ForceHash = _config->Find("Acquire::ForceHash"); for (; Vf.end() == false; Vf++) { // Ignore not source sources @@ -1306,12 +1452,25 @@ bool pkgAcqArchive::QueueNext() return false; string PkgFile = Parse.FileName(); - if(Parse.SHA256Hash() != "") - ExpectedHash = HashString("SHA256", Parse.SHA256Hash()); - else if (Parse.SHA1Hash() != "") - ExpectedHash = HashString("SHA1", Parse.SHA1Hash()); - else - ExpectedHash = HashString("MD5Sum", Parse.MD5Hash()); + if (ForceHash.empty() == false) + { + if(stringcasecmp(ForceHash, "sha256") == 0) + ExpectedHash = HashString("SHA256", Parse.SHA256Hash()); + else if (stringcasecmp(ForceHash, "sha1") == 0) + ExpectedHash = HashString("SHA1", Parse.SHA1Hash()); + else + ExpectedHash = HashString("MD5Sum", Parse.MD5Hash()); + } + else + { + string Hash; + if ((Hash = Parse.SHA256Hash()).empty() == false) + ExpectedHash = HashString("SHA256", Hash); + else if ((Hash = Parse.SHA1Hash()).empty() == false) + ExpectedHash = HashString("SHA1", Hash); + else + ExpectedHash = HashString("MD5Sum", Parse.MD5Hash()); + } if (PkgFile.empty() == true) return _error->Error(_("The package index files are corrupted. No Filename: " "field for package %s."), @@ -1503,8 +1662,9 @@ void pkgAcqArchive::Finished() /* The file is added to the queue */ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash, unsigned long Size,string Dsc,string ShortDesc, - const string &DestDir, const string &DestFilename) : - Item(Owner), ExpectedHash(Hash) + const string &DestDir, const string &DestFilename, + bool IsIndexFile) : + Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile) { Retries = _config->FindI("Acquire::Retries",0); @@ -1550,7 +1710,7 @@ void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash, if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash) { Status = StatError; - ErrorText = "Hash Sum mismatch"; + ErrorText = _("Hash Sum mismatch"); Rename(DestFile,DestFile + ".FAILED"); return; } @@ -1619,3 +1779,13 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf) Item::Failed(Message,Cnf); } /*}}}*/ +// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +/* The only header we use is the last-modified header. */ +string pkgAcqFile::Custom600Headers() +{ + if (IsIndexFile) + return "\nIndex-File: true"; + return ""; +} + /*}}}*/