X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/09af1a917c739e50e4e8590cc8402525b3f1f64d..d4a6c3bbb1a57494256ad09c08cc21c9d4f2182a:/apt-pkg/acquire-item.cc

diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index c4c0b73a9..b48972c22 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -103,7 +103,8 @@ void pkgAcquire::Item::Done(string Message,unsigned long Size,string,
 {
    // We just downloaded something..
    string FileName = LookupTag(Message,"Filename");
-   if (Complete == false && FileName == DestFile)
+   // we only inform the Log class if it was actually not a local thing
+   if (Complete == false && !Local && FileName == DestFile)
    {
       if (Owner->Log != 0)
          Owner->Log->Fetched(Size,atoi(LookupTag(Message,"Resume-Point","0").c_str()));
@@ -165,20 +166,23 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
    CurrentPackagesFile = _config->FindDir("Dir::State::lists");
    CurrentPackagesFile += URItoFileName(RealURI);
 
+   // FIXME: this file:/ check is a hack to prevent fetching
+   //        from local sources. this is really silly, and
+   //        should be fixed cleanly as soon as possible
    if(!FileExists(CurrentPackagesFile) ||
-      !_config->FindB("Acquire::Diffs",true)) {
+      Desc.URI.substr(0,strlen("file:/")) == "file:/")
+   {
       // we don't have a pkg file or we don't want to queue
       if(Debug)
-         std::clog << "No index file or canceld by user" << std::endl;
+         std::clog << "No index file, local or canceld by user" << std::endl;
       Failed("", NULL);
       return;
    }
 
-   if(Debug) {
+   if(Debug)
       std::clog << "pkgAcqIndexDiffs::pkgAcqIndexDiffs(): "
                 << CurrentPackagesFile << std::endl;
-   }
-   
+   
    QueueURI(Desc);
 
 }
@@ -233,26 +237,32 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)
       SHA1.AddFD(fd.Fd(), fd.Size());
       local_sha1 = string(SHA1.Result());
 
-      if(local_sha1 == ServerSha1) {
+      if(local_sha1 == ServerSha1)
+      {
+         // we have the same sha1 as the server
         if(Debug)
            std::clog << "Package file is up-to-date" << std::endl;
         // set found to true, this will queue a pkgAcqIndexDiffs with
         // a empty availabe_patches
         found = true;
-      } else {
+      }
+      else
+      {
         if(Debug)
            std::clog << "SHA1-Current: " << ServerSha1 << std::endl;
 
         // check the historie and see what patches we need
         string history = Tags.FindS("SHA1-History");
         std::stringstream hist(history);
-        while(hist >> d.sha1 >> size >> d.file) {
+        while(hist >> d.sha1 >> size >> d.file)
+        {
            d.size = atoi(size.c_str());
            // read until the first match is found
            if(d.sha1 == local_sha1)
               found=true;
 
            // from that point on, we probably need all diffs
-           if(found) {
+           if(found)
+           {
              if(Debug)
                 std::clog << "Need to get diff: " << d.file << std::endl;
              available_patches.push_back(d);
@@ -261,12 +271,15 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)
         }
 
      // no information how to get the patches, bail out
-     if(!found) {
+     if(!found)
+     {
        if(Debug)
           std::clog << "Can't find a patch in the index file" << std::endl;
        // Failed will queue a big package file
        Failed("", NULL);
-     } else {
+     }
+     else
+     {
        // queue the diffs
        new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
                             ExpectedMD5, available_patches);
@@ -347,10 +360,13 @@ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
    Desc.Owner = this;
    Desc.ShortDesc = ShortDesc;
 
-   if(available_patches.size() == 0) {
+   if(available_patches.size() == 0)
+   {
       // we are done (yeah!)
       Finish(true);
-   } else {
+   }
+   else
+   {
       // get the next diff
       State = StateFetchDiff;
       QueueNextDiff();
@@ -374,10 +390,28 @@ void pkgAcqIndexDiffs::Finish(bool allDone)
 {
    // we restore the original name, this is required, otherwise
    // the file will be cleaned
-   if(allDone) {
-      // this is for the "real" finish
+   if(allDone)
+   {
       DestFile = _config->FindDir("Dir::State::lists");
       DestFile += URItoFileName(RealURI);
+
+      // do the final md5sum checking
+      MD5Summation sum;
+      FileFd Fd(DestFile, FileFd::ReadOnly);
+      sum.AddFD(Fd.Fd(), Fd.Size());
+      Fd.Close();
+      string MD5 = (string)sum.Result();
+
+      if (!ExpectedMD5.empty() && MD5 != ExpectedMD5)
+      {
+         Status = StatAuthError;
+         ErrorText = _("MD5Sum mismatch");
+         Rename(DestFile,DestFile + ".FAILED");
+         Dequeue();
+         return;
+      }
+
+      // this is for the "real" finish
       Complete = true;
       Status = StatDone;
       Dequeue();
@@ -414,14 +448,17 @@ bool pkgAcqIndexDiffs::QueueNextDiff()
    // remove all patches until the next matching patch is found
    // this requires the Index file to be ordered
    for(vector<DiffInfo>::iterator I=available_patches.begin();
-       available_patches.size() > 0 && I != available_patches.end()
-       && (*I).sha1 != local_sha1;
-       I++) {
+       available_patches.size() > 0 &&
+       I != available_patches.end() &&
+       (*I).sha1 != local_sha1;
+       I++)
+   {
       available_patches.erase(I);
    }
 
    // error checking and falling back if no patch was found
-   if(available_patches.size() == 0) {
+   if(available_patches.size() == 0)
+   {
       Failed("", NULL);
       return false;
    }
@@ -463,6 +500,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,
    string FileName = LookupTag(Message,"Filename");
 
    State = StateUnzipDiff;
+   Local = true;
    Desc.URI = "gzip:" + FileName;
    DestFile += ".decomp";
    QueueURI(Desc);
@@ -481,6 +519,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,
         std::clog << "Sending to rred method: " << FinalFile << std::endl;
 
      State = StateApplyDiff;
+     Local = true;
      Desc.URI = "rred:" + FinalFile;
      QueueURI(Desc);
      Mode = "rred";
@@ -501,6 +540,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,
                  << DestFile << " -> " << FinalFile << std::endl;
      }
      Rename(DestFile,FinalFile);
+     chmod(FinalFile.c_str(),0644);
 
      // see if there is more to download
      if(available_patches.size() > 0) {
@@ -531,7 +571,7 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
    if(comprExt.empty())
    {
       // autoselect the compression method
-      if(FileExists("/usr/bin/bzip2"))
+      if(FileExists("/bin/bzip2"))
         CompressionExtension = ".bz2";
      else
         CompressionExtension = ".gz";
@@ -565,8 +605,9 @@ string pkgAcqIndex::Custom600Headers()
 
 void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
+
    // no .bz2 found, retry with .gz
-   if(Desc.URI.substr(Desc.URI.size()-3,Desc.URI.size()-1) == "bz2") {
+   if(Desc.URI.substr(Desc.URI.size()-3) == "bz2") {
      Desc.URI = Desc.URI.substr(0,Desc.URI.size()-3) + "gz";
 
      // retry with a gzip one
@@ -576,9 +617,15 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
      Complete = false;
      Dequeue();
      return;
+   }
+
+   // on decompression failure, remove bad versions in partial/
+   if(Decompression && Erase) {
+      string s = _config->FindDir("Dir::State::lists") + "partial/";
+      s += URItoFileName(RealURI);
+      unlink(s.c_str());
    }
-   
    Item::Failed(Message,Cnf);
 }
@@ -672,7 +719,7 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
    else
       Local = true;
 
-   string compExt = Desc.URI.substr(Desc.URI.size()-3,Desc.URI.size()-1);
+   string compExt = Desc.URI.substr(Desc.URI.size()-3);
    char *decompProg;
    if(compExt == "bz2")
       decompProg = "bzip2";
@@ -697,10 +744,9 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,
                             const vector<struct IndexTarget*>* IndexTargets,
                             indexRecords* MetaIndexParser) :
    Item(Owner), RealURI(URI), MetaIndexURI(MetaIndexURI),
-   MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc)
+   MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
+   MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets)
 {
-   this->MetaIndexParser = MetaIndexParser;
-   this->IndexTargets = IndexTargets;
    DestFile = _config->FindDir("Dir::State::lists") + "partial/";
    DestFile += URItoFileName(URI);
 
@@ -723,12 +769,6 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,
       // File was already in place. It needs to be re-verified
       // because Release might have changed, so Move it into partial
       Rename(Final,DestFile);
-      // unlink the file and do not try to use I-M-S and Last-Modified
-      // if the users proxy is broken
-      if(_config->FindB("Acquire::BrokenProxy", false) == true) {
-         std::cerr << "forcing re-get of the signature file as requested" << std::endl;
-         unlink(DestFile.c_str());
-      }
    }
 
    QueueURI(Desc);
@@ -778,18 +818,19 @@ void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5,
                                                                        /*}}}*/
 void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
-   // Delete any existing sigfile, so that this source isn't
-   // mistakenly trusted
-   string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
-   unlink(Final.c_str());
 
-   // if we get a timeout if fail
+   // if we get a network error we fail gracefully
    if(LookupTag(Message,"FailReason") == "Timeout" ||
-      LookupTag(Message,"FailReason") == "TmpResolveFailure") {
+      LookupTag(Message,"FailReason") == "TmpResolveFailure" ||
+      LookupTag(Message,"FailReason") == "ConnectionRefused") {
      Item::Failed(Message,Cnf);
      return;
    }
 
+   // Delete any existing sigfile when the acquire failed
+   string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+   unlink(Final.c_str());
+
    // queue a pkgAcqMetaIndex with no sigfile
    new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
                        "", IndexTargets, MetaIndexParser);
@@ -812,11 +853,9 @@ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner,
                                 string SigFile,
                                 const vector<struct IndexTarget*>* IndexTargets,
                                 indexRecords* MetaIndexParser) :
-   Item(Owner), RealURI(URI), SigFile(SigFile)
+   Item(Owner), RealURI(URI), SigFile(SigFile), AuthPass(false),
+   MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets), IMSHit(false)
 {
-   this->AuthPass = false;
-   this->MetaIndexParser = MetaIndexParser;
-   this->IndexTargets = IndexTargets;
    DestFile = _config->FindDir("Dir::State::lists") + "partial/";
    DestFile += URItoFileName(URI);
 
@@ -908,6 +947,9 @@ void pkgAcqMetaIndex::RetrievalDone(string Message)
       return;
    }
 
+   // see if the download was a IMSHit
+   IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false);
+
    Complete = true;
 
    string FinalFile = _config->FindDir("Dir::State::lists");
@@ -919,6 +961,7 @@
       // Move it into position
       Rename(DestFile,FinalFile);
    }
+   chmod(FinalFile.c_str(),0644);
 
    DestFile = FinalFile;
 }
@@ -936,7 +979,7 @@ void pkgAcqMetaIndex::AuthDone(string Message)
      return;
    }
 
-   if (!VerifyVendor())
+   if (!VerifyVendor(Message))
    {
      return;
    }
@@ -988,13 +1031,18 @@
         }
      }
 
-      // Queue Packages file
-      new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
-                          (*Target)->ShortDesc, ExpectedIndexMD5);
+      // Queue Packages file (either diff or full packages files, depending
+      // on the users option)
+      if(_config->FindB("Acquire::PDiffs",true) == true)
+         new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
+                             (*Target)->ShortDesc, ExpectedIndexMD5);
+      else
+         new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
+                         (*Target)->ShortDesc, ExpectedIndexMD5);
    }
 }
 
-bool pkgAcqMetaIndex::VerifyVendor()
+bool pkgAcqMetaIndex::VerifyVendor(string Message)
 {
 //   // Maybe this should be made available from above so we don't have
 //   // to read and parse it every time?
@@ -1019,6 +1067,22 @@ bool pkgAcqMetaIndex::VerifyVendor()
 //       break;
 //     }
 //   }
+   string::size_type pos;
+
+   // check for missing sigs (that where not fatal because otherwise we had
+   // bombed earlier)
+   string missingkeys;
+   string msg = _("There is no public key available for the "
+                  "following key IDs:\n");
+   pos = Message.find("NO_PUBKEY ");
+   if (pos != std::string::npos)
+   {
+      string::size_type start = pos+strlen("NO_PUBKEY ");
+      string Fingerprint = Message.substr(start, Message.find("\n")-start);
+      missingkeys += (Fingerprint);
+   }
+   if(!missingkeys.empty())
+      _error->Warning("%s", string(msg+missingkeys).c_str());
 
    string Transformed = MetaIndexParser->GetExpectedDist();
 
@@ -1027,7 +1091,7 @@
      Transformed = "experimental";
    }
 
-   string::size_type pos = Transformed.rfind('/');
+   pos = Transformed.rfind('/');
    if (pos != string::npos)
    {
      Transformed = Transformed.substr(0, pos);
@@ -1073,10 +1137,30 @@ void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
    if (AuthPass == true)
    {
-      // gpgv method failed
+      // if we fail the authentication but got the file via a IMS-Hit
+      // this means that the file wasn't downloaded and that it might be
+      // just stale (server problem, proxy etc). we delete what we have
+      // queue it again without i-m-s
+      // alternatively we could just unlink the file and let the user try again
+      if (IMSHit)
+      {
+         Complete = false;
+         Local = false;
+         AuthPass = false;
+         unlink(DestFile.c_str());
+
+         DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+         DestFile += URItoFileName(RealURI);
+         Desc.URI = RealURI;
+         QueueURI(Desc);
+         return;
+      }
+
+      // gpgv method failed
       _error->Warning("GPG error: %s: %s",
                       Desc.Description.c_str(),
                       LookupTag(Message,"Message").c_str());
+
    }
 
    // No Release file was present, or verification failed, so fall
@@ -1152,6 +1236,12 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
      }
   }
 
+   // "allow-unauthenticated" restores apts old fetching behaviour
+   // that means that e.g. unauthenticated file:// uris are higher
+   // priority than authenticated http:// uris
+   if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
+      Trusted = false;
+
   // Select a source
   if (QueueNext() == false && _error->PendingError() == false)
      _error->Error(_("I wasn't able to locate file for the %s package. "
@@ -1384,13 +1474,19 @@ void pkgAcqArchive::Finished()
 // ---------------------------------------------------------------------
 /* The file is added to the queue */
 pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string MD5,
-                      unsigned long Size,string Dsc,string ShortDesc) :
+                      unsigned long Size,string Dsc,string ShortDesc,
+                      const string &DestDir, const string &DestFilename) :
    Item(Owner), Md5Hash(MD5)
 {
    Retries = _config->FindI("Acquire::Retries",0);
 
-   DestFile = flNotDir(URI);
-   
+   if(!DestFilename.empty())
+      DestFile = DestFilename;
+   else if(!DestDir.empty())
+      DestFile = DestDir + "/" + flNotDir(URI);
+   else
+      DestFile = flNotDir(URI);
+
    // Create the item
    Desc.URI = URI;
    Desc.Description = Dsc;
@@ -1410,7 +1506,7 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string MD5,
      else
        PartialSize = Buf.st_size;
   }
-   
+
   QueueURI(Desc);
 }
                                                                        /*}}}*/