X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/7adf8bb88faba98793998bed1e6c60f4b5483531..ca15786a9ae637048e329c159831dd486c54dea7:/apt-pkg/acquire-item.cc diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index 39ce90dda..7ad6a794e 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -13,6 +13,8 @@ ##################################################################### */ /*}}}*/ // Include Files /*{{{*/ +#include + #include #include #include @@ -20,12 +22,21 @@ #include #include #include -#include #include #include - -#include - +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include #include #include #include @@ -33,16 +44,30 @@ #include #include #include + +#include /*}}}*/ using namespace std; +static void printHashSumComparision(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/ +{ + if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false) + return; + std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl; + for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs) + std::cerr << "\t- " << hs->toStr() << std::endl; + std::cerr << " Actual Hash: " << std::endl; + for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs) + std::cerr << "\t- " << hs->toStr() << std::endl; +} + /*}}}*/ + // Acquire::Item::Item - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* */ -pkgAcquire::Item::Item(pkgAcquire *Owner) : Owner(Owner), FileSize(0), - PartialSize(0), Mode(0), ID(0), Complete(false), - Local(false), QueueCounter(0) +pkgAcquire::Item::Item(pkgAcquire *Owner, HashStringList const &ExpectedHashes) : + Owner(Owner), FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), + Local(false), QueueCounter(0), ExpectedAdditionalItems(0), + ExpectedHashes(ExpectedHashes) { Owner->Add(this); Status = StatIdle; @@ -94,7 +119,7 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) // --------------------------------------------------------------------- /* Stash status and the file size. Note that setting Complete means sub-phases of the acquire process such as decompresion are operating */ -void pkgAcquire::Item::Start(string /*Message*/,unsigned long Size) +void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size) { Status = StatFetching; if (FileSize == 0 && Complete == false) @@ -104,8 +129,8 @@ void pkgAcquire::Item::Start(string /*Message*/,unsigned long Size) // Acquire::Item::Done - Item downloaded OK /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash, - pkgAcquire::MethodConfig *Cnf) +void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringList const &/*Hash*/, + pkgAcquire::MethodConfig * /*Cnf*/) { // We just downloaded something.. 
string FileName = LookupTag(Message,"Filename"); @@ -125,7 +150,7 @@ void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash, /*}}}*/ // Acquire::Item::Rename - Rename a file /*{{{*/ // --------------------------------------------------------------------- -/* This helper function is used by alot of item methods as thier final +/* This helper function is used by a lot of item methods as their final step */ void pkgAcquire::Item::Rename(string From,string To) { @@ -139,6 +164,32 @@ void pkgAcquire::Item::Rename(string From,string To) } } /*}}}*/ +bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/ +{ + if(FileExists(DestFile)) + Rename(DestFile, DestFile + ".FAILED"); + + switch (error) + { + case HashSumMismatch: + ErrorText = _("Hash Sum mismatch"); + Status = StatAuthError; + ReportMirrorFailure("HashChecksumFailure"); + break; + case SizeMismatch: + ErrorText = _("Size mismatch"); + Status = StatAuthError; + ReportMirrorFailure("SizeFailure"); + break; + case InvalidFormat: + ErrorText = _("Invalid file format"); + Status = StatError; + // do not report as usually its not the mirrors fault, but Portal/Proxy + break; + } + return false; +} + /*}}}*/ // Acquire::Item::ReportMirrorFailure /*{{{*/ // --------------------------------------------------------------------- void pkgAcquire::Item::ReportMirrorFailure(string FailCode) @@ -185,14 +236,14 @@ void pkgAcquire::Item::ReportMirrorFailure(string FailCode) /*}}}*/ // AcqSubIndex::AcqSubIndex - Constructor /*{{{*/ // --------------------------------------------------------------------- -/* Get the Index file first and see if there are languages available - * If so, create a pkgAcqIndexTrans for the found language(s). - */ +/* Get a sub-index file based on checksums from a 'master' file and + possibly query additional files */ pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI, string const &URIDesc, string const &ShortDesc, - HashString const &ExpectedHash) - : Item(Owner), ExpectedHash(ExpectedHash) + HashStringList const &ExpectedHashes) + : Item(Owner, ExpectedHashes) { + /* XXX: Beware: Currently this class does nothing (of value) anymore ! */ Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false); DestFile = _config->FindDir("Dir::State::lists") + "partial/"; @@ -212,7 +263,7 @@ pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI, // AcqSubIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. 
*/ -string pkgAcqSubIndex::Custom600Headers() +string pkgAcqSubIndex::Custom600Headers() const { string Final = _config->FindDir("Dir::State::lists"); Final += URItoFileName(Desc.URI); @@ -223,29 +274,19 @@ string pkgAcqSubIndex::Custom600Headers() return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); } /*}}}*/ -void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ { if(Debug) - std::clog << "pkgAcqSubIndex failed: " << Desc.URI << std::endl; + std::clog << "pkgAcqSubIndex failed: " << Desc.URI << " with " << Message << std::endl; Complete = false; Status = StatDone; Dequeue(); - // No good Index is provided, so try guessing - std::vector langs = APT::Configuration::getLanguages(true); - for (std::vector::const_iterator l = langs.begin(); - l != langs.end(); ++l) - { - if (*l == "none") continue; - string const file = "Translation-" + *l; - new pkgAcqIndexTrans(Owner, Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file), - Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file), - file); - } + // No good Index is provided } /*}}}*/ -void pkgAcqSubIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/ +void pkgAcqSubIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ pkgAcquire::MethodConfig *Cnf) { if(Debug) @@ -267,7 +308,7 @@ void pkgAcqSubIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{ return; } - Item::Done(Message,Size,Md5Hash,Cnf); + Item::Done(Message, Size, Hashes, Cnf); string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI); @@ -279,7 +320,7 @@ void pkgAcqSubIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{ return; } - // sucess in downloading the index + // success in downloading the index // rename the index if(Debug) std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl; @@ -301,64 +342,34 @@ bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/ indexRecords SubIndexParser; if (FileExists(IndexFile) == false || SubIndexParser.Load(IndexFile) == false) return false; - - std::vector lang = APT::Configuration::getLanguages(true); - for (std::vector::const_iterator l = lang.begin(); - l != lang.end(); ++l) - { - if (*l == "none") - continue; - - string file = "Translation-" + *l; - indexRecords::checkSum const *Record = SubIndexParser.Lookup(file); - HashString expected; - if (Record == NULL) - { - // FIXME: the Index file provided by debian currently only includes bz2 records - Record = SubIndexParser.Lookup(file + ".bz2"); - if (Record == NULL) - continue; - } - else - { - expected = Record->Hash; - if (expected.empty() == true) - continue; - } - - IndexTarget target; - target.Description = Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file); - target.MetaKey = file; - target.ShortDesc = file; - target.URI = Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file); - new pkgAcqIndexTrans(Owner, &target, expected, &SubIndexParser); - } + // so something with the downloaded index return true; } /*}}}*/ // AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/ // --------------------------------------------------------------------- -/* Get the DiffIndex file first and see if there are patches availabe +/* Get the DiffIndex file first and see if there are patches available * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the * patches. 
If anything goes wrong in that process, it will fall back to * the original packages file */ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc, - HashString ExpectedHash) - : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), - Description(URIDesc) + IndexTarget const * const Target, + HashStringList const &ExpectedHashes, + indexRecords *MetaIndexParser) + : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser) { Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - Desc.Description = URIDesc + "/DiffIndex"; + RealURI = Target->URI; Desc.Owner = this; - Desc.ShortDesc = ShortDesc; - Desc.URI = URI + ".diff/Index"; + Desc.Description = Target->Description + "/DiffIndex"; + Desc.ShortDesc = Target->ShortDesc; + Desc.URI = Target->URI + ".diff/Index"; DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI) + string(".DiffIndex"); + DestFile += URItoFileName(Desc.URI); if(Debug) std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl; @@ -380,10 +391,10 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, return; } - if(Debug) - std::clog << "pkgAcqIndexDiffs::pkgAcqIndexDiffs(): " - << CurrentPackagesFile << std::endl; - + if(Debug) + std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): " + << CurrentPackagesFile << std::endl; + QueueURI(Desc); } @@ -391,10 +402,10 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. */ -string pkgAcqDiffIndex::Custom600Headers() +string pkgAcqDiffIndex::Custom600Headers() const { string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI) + string(".IndexDiff"); + Final += URItoFileName(Desc.URI); if(Debug) std::clog << "Custom600Header-IMS: " << Final << std::endl; @@ -409,8 +420,8 @@ string pkgAcqDiffIndex::Custom600Headers() bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ { if(Debug) - std::clog << "pkgAcqIndexDiffs::ParseIndexDiff() " << IndexDiffFile - << std::endl; + std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile + << std::endl; pkgTagSection Tags; string ServerSha1; @@ -434,19 +445,21 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); SHA1Summation SHA1; - SHA1.AddFD(fd.Fd(), fd.Size()); + SHA1.AddFD(fd); string const local_sha1 = SHA1.Result(); - if(local_sha1 == ServerSha1) + if(local_sha1 == ServerSha1) { - // we have the same sha1 as the server + // we have the same sha1 as the server so we are done here if(Debug) std::clog << "Package file is up-to-date" << std::endl; - // set found to true, this will queue a pkgAcqIndexDiffs with - // a empty availabe_patches - found = true; - } - else + // list cleanup needs to know that this file as well as the already + // present index is ours, so we create an empty diff to save it for us + new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser, + ServerSha1, available_patches); + return true; + } + else { if(Debug) std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl; @@ -507,14 +520,42 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ } // we have something, queue the next diff - if(found) + if(found) { // queue the diffs string::size_type const last_space = 
Description.rfind(" "); if(last_space != string::npos) Description.erase(last_space, Description.size()-last_space); - new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash, ServerSha1, available_patches); + + /* decide if we should download patches one by one or in one go: + The first is good if the server merges patches, but many don't so client + based merging can be attempt in which case the second is better. + "bad things" will happen if patches are merged on the server, + but client side merging is attempt as well */ + bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); + if (pdiff_merge == true) + { + // reprepro adds this flag if it has merged patches on the server + std::string const precedence = Tags.FindS("X-Patch-Precedence"); + pdiff_merge = (precedence != "merged"); + } + + if (pdiff_merge == false) + { + new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser, + ServerSha1, available_patches); + } + else + { + std::vector *diffs = new std::vector(available_patches.size()); + for(size_t i = 0; i < available_patches.size(); ++i) + (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, Target, + ExpectedHashes, + MetaIndexParser, + available_patches[i], + diffs); + } + Complete = false; Status = StatDone; Dequeue(); @@ -530,32 +571,31 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ return false; } /*}}}*/ -void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ { if(Debug) - std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << std::endl - << "Falling back to normal index file aquire" << std::endl; + std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl + << "Falling back to normal index file acquire" << std::endl; - new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash); + new pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser); Complete = false; Status = StatDone; Dequeue(); } /*}}}*/ -void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/ +void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ pkgAcquire::MethodConfig *Cnf) { if(Debug) std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl; - Item::Done(Message,Size,Md5Hash,Cnf); + Item::Done(Message, Size, Hashes, Cnf); string FinalFile; FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI); - // sucess in downloading the index + // success in downloading the index // rename the index FinalFile += string(".IndexDiff"); if(Debug) @@ -580,24 +620,26 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{ * for each diff and the index */ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc, - HashString ExpectedHash, + struct IndexTarget const * const Target, + HashStringList const &ExpectedHashes, + indexRecords *MetaIndexParser, string ServerSha1, vector diffs) - : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), + : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser), available_patches(diffs), ServerSha1(ServerSha1) { DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI); + DestFile += URItoFileName(Target->URI); Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - Description = URIDesc; + RealURI = Target->URI; Desc.Owner = this; - 
Desc.ShortDesc = ShortDesc; + Description = Target->Description; + Desc.ShortDesc = Target->ShortDesc; - if(available_patches.size() == 0) + if(available_patches.empty() == true) { // we are done (yeah!) Finish(true); @@ -610,13 +652,12 @@ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner, } } /*}}}*/ -void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ { if(Debug) - std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << std::endl - << "Falling back to normal index file aquire" << std::endl; - new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc, - ExpectedHash); + std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl + << "Falling back to normal index file acquire" << std::endl; + new pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser); Finish(); } /*}}}*/ @@ -630,11 +671,9 @@ void pkgAcqIndexDiffs::Finish(bool allDone) DestFile = _config->FindDir("Dir::State::lists"); DestFile += URItoFileName(RealURI); - if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile)) + if(HashSums().usable() && !HashSums().VerifyFile(DestFile)) { - Status = StatAuthError; - ErrorText = _("MD5Sum mismatch"); - Rename(DestFile,DestFile + ".FAILED"); + RenameOnError(HashSumMismatch); Dequeue(); return; } @@ -665,7 +704,7 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ FileFd fd(FinalFile, FileFd::ReadOnly); SHA1Summation SHA1; - SHA1.AddFD(fd.Fd(), fd.Size()); + SHA1.AddFD(fd); string local_sha1 = string(SHA1.Result()); if(Debug) std::clog << "QueueNextDiff: " @@ -697,7 +736,7 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ } // queue the right diff - Desc.URI = string(RealURI) + ".diff/" + available_patches[0].file + ".gz"; + Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz"; Desc.Description = Description + " " + available_patches[0].file + string(".pdiff"); DestFile = _config->FindDir("Dir::State::lists") + "partial/"; DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file); @@ -710,18 +749,18 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ return true; } /*}}}*/ -void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/ +void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringList const &Hashes, /*{{{*/ pkgAcquire::MethodConfig *Cnf) { if(Debug) std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl; - Item::Done(Message,Size,Md5Hash,Cnf); + Item::Done(Message, Size, Hashes, Cnf); string FinalFile; FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI); - // sucess in downloading a diff, enter ApplyDiff state + // success in downloading a diff, enter ApplyDiff state if(State == StateFetchDiff) { @@ -745,6 +784,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /* { // remove the just applied patch available_patches.erase(available_patches.begin()); + unlink((FinalFile + ".ed").c_str()); // move into place if(Debug) @@ -757,22 +797,149 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /* // see if there is more to download if(available_patches.empty() == false) { - new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash, ServerSha1, available_patches); + new pkgAcqIndexDiffs(Owner, Target, + ExpectedHashes, MetaIndexParser, + ServerSha1, available_patches); return Finish(); } else return Finish(true); } } /*}}}*/ +// 
AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/ +pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner, + struct IndexTarget const * const Target, + HashStringList const &ExpectedHashes, + indexRecords *MetaIndexParser, + DiffInfo const &patch, + std::vector const * const allPatches) + : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser), + patch(patch), allPatches(allPatches), State(StateFetchDiff) +{ + + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(Target->URI); + + Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); + + RealURI = Target->URI; + Desc.Owner = this; + Description = Target->Description; + Desc.ShortDesc = Target->ShortDesc; + + Desc.URI = RealURI + ".diff/" + patch.file + ".gz"; + Desc.Description = Description + " " + patch.file + string(".pdiff"); + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(RealURI + ".diff/" + patch.file); + + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl; + + QueueURI(Desc); +} + /*}}}*/ +void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ +{ + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl; + Complete = false; + Status = StatDone; + Dequeue(); + + // check if we are the first to fail, otherwise we are done here + State = StateDoneDiff; + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + if ((*I)->State == StateErrorDiff) + return; + + // first failure means we should fallback + State = StateErrorDiff; + std::clog << "Falling back to normal index file acquire" << std::endl; + new pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser); +} + /*}}}*/ +void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ + pkgAcquire::MethodConfig *Cnf) +{ + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl; + + Item::Done(Message,Size,Hashes,Cnf); + + string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + + if (State == StateFetchDiff) + { + // rred expects the patch as $FinalFile.ed.$patchname.gz + Rename(DestFile, FinalFile + ".ed." 
+ patch.file + ".gz"); + + // check if this is the last completed diff + State = StateDoneDiff; + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + if ((*I)->State != StateDoneDiff) + { + if(Debug) + std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl; + return; + } + + // this is the last completed diff, so we are ready to apply now + State = StateApplyDiff; + + if(Debug) + std::clog << "Sending to rred method: " << FinalFile << std::endl; + + Local = true; + Desc.URI = "rred:" + FinalFile; + QueueURI(Desc); + Mode = "rred"; + return; + } + // success in download/apply all diffs, clean up + else if (State == StateApplyDiff) + { + // see if we really got the expected file + if(ExpectedHashes.usable() && !ExpectedHashes.VerifyFile(DestFile)) + { + RenameOnError(HashSumMismatch); + return; + } + + // move the result into place + if(Debug) + std::clog << "Moving patched file in place: " << std::endl + << DestFile << " -> " << FinalFile << std::endl; + Rename(DestFile, FinalFile); + chmod(FinalFile.c_str(), 0644); + + // otherwise lists cleanup will eat the file + DestFile = FinalFile; + + // ensure the ed's are gone regardless of list-cleanup + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + { + std::string patch = FinalFile + ".ed." + (*I)->patch.file + ".gz"; + unlink(patch.c_str()); + } + + // all set and done + Complete = true; + if(Debug) + std::clog << "allDone: " << DestFile << "\n" << std::endl; + } +} + /*}}}*/ // AcqIndex::AcqIndex - Constructor /*{{{*/ // --------------------------------------------------------------------- /* The package file is added to the queue and a second class is instantiated to fetch the revision file */ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, string URI,string URIDesc,string ShortDesc, - HashString ExpectedHash, string comprExt) - : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash) + HashStringList const &ExpectedHash, string comprExt) + : pkgAcqBaseIndex(Owner, NULL, ExpectedHash, NULL), RealURI(URI) { if(comprExt.empty() == true) { @@ -788,13 +955,15 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, Init(URI, URIDesc, ShortDesc); } pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target, - HashString const &ExpectedHash, indexRecords const *MetaIndexParser) - : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash) + HashStringList const &ExpectedHash, + indexRecords *MetaIndexParser) + : pkgAcqBaseIndex(Owner, Target, ExpectedHash, MetaIndexParser), + RealURI(Target->URI) { // autoselect the compression method std::vector types = APT::Configuration::getCompressionTypes(); CompressionExtension = ""; - if (ExpectedHash.empty() == false) + if (ExpectedHashes.usable()) { for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t) if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true) @@ -820,10 +989,29 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &S DestFile += URItoFileName(URI); std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' ')); + std::string MetaKey; if (comprExt == "uncompressed") + { Desc.URI = URI; + if(Target) + MetaKey = string(Target->MetaKey); + } else + { Desc.URI = URI + '.' + comprExt; + if(Target) + MetaKey = string(Target->MetaKey) + '.' 
+ comprExt; + } + + // load the filesize + if(MetaIndexParser) + { + indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); + if(Record) + FileSize = Record->Size; + + InitByHashIfNeeded(MetaKey); + } Desc.Description = URIDesc; Desc.Owner = this; @@ -832,10 +1020,42 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &S QueueURI(Desc); } /*}}}*/ +// AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/ +// --------------------------------------------------------------------- +/* */ +void pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey) +{ + // TODO: + // - (maybe?) add support for by-hash into the sources.list as flag + // - make apt-ftparchive generate the hashes (and expire?) + std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash"; + if(_config->FindB("APT::Acquire::By-Hash", false) == true || + _config->FindB(HostKnob, false) == true || + MetaIndexParser->GetSupportsAcquireByHash()) + { + indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); + if(Record) + { + // FIXME: should we really use the best hash here? or a fixed one? + const HashString *TargetHash = Record->Hashes.find(""); + std::string ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue(); + size_t trailing_slash = Desc.URI.find_last_of("/"); + Desc.URI = Desc.URI.replace( + trailing_slash, + Desc.URI.substr(trailing_slash+1).size()+1, + ByHash); + } else { + _error->Warning( + "Fetching ByHash requested but can not find record for %s", + MetaKey.c_str()); + } + } +} + /*}}}*/ // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. */ -string pkgAcqIndex::Custom600Headers() +string pkgAcqIndex::Custom600Headers() const { string Final = _config->FindDir("Dir::State::lists"); Final += URItoFileName(RealURI); @@ -874,6 +1094,31 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ Item::Failed(Message,Cnf); } /*}}}*/ +// pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/ +std::string pkgAcqIndex::GetFinalFilename(std::string const &URI, + std::string const &compExt) +{ + std::string FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile += URItoFileName(URI); + if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz") + FinalFile += ".gz"; + return FinalFile; +} + /*}}}*/ +// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/ +void pkgAcqIndex::ReverifyAfterIMS(std::string const &FileName) +{ + std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' ')); + if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz") + DestFile += ".gz"; + + string FinalFile = GetFinalFilename(RealURI, compExt); + Rename(FinalFile, FileName); + Decompression = true; + Desc.URI = "copy:" + FileName; + QueueURI(Desc); +} + /*}}}*/ // AcqIndex::Done - Finished a fetch /*{{{*/ // --------------------------------------------------------------------- /* This goes through a number of states.. On the initial fetch the @@ -881,65 +1126,59 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ to the uncompressed version of the file. If this is so the file is copied into the partial directory. In all other cases the file is decompressed with a gzip uri. 
*/ -void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash, +void pkgAcqIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,Hash,Cfg); + Item::Done(Message,Size,Hashes,Cfg); + std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' ')); if (Decompression == true) { - if (_config->FindB("Debug::pkgAcquire::Auth", false)) + if (ExpectedHashes.usable() && ExpectedHashes != Hashes) { - std::cerr << std::endl << RealURI << ": Computed Hash: " << Hash; - std::cerr << " Expected Hash: " << ExpectedHash.toStr() << std::endl; - } - - if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash) - { - Status = StatAuthError; - ErrorText = _("Hash Sum mismatch"); - Rename(DestFile,DestFile + ".FAILED"); - ReportMirrorFailure("HashChecksumFailure"); + Desc.URI = RealURI; + RenameOnError(HashSumMismatch); + printHashSumComparision(RealURI, ExpectedHashes, Hashes); return; } - /* Verify the index file for correctness (all indexes must - * have a Package field) (LP: #346386) (Closes: #627642) */ + // FIXME: this can go away once we only ever download stuff that + // has a valid hash and we never do GET based probing + // + /* Always verify the index file for correctness (all indexes must + * have a Package field) (LP: #346386) (Closes: #627642) + */ + FileFd fd(DestFile, FileFd::ReadOnlyGzip); + // Only test for correctness if the file is not empty (empty is ok) + if (fd.Size() > 0) { - FileFd fd(DestFile, FileFd::ReadOnly); - pkgTagSection sec; - pkgTagFile tag(&fd); - - // Only test for correctness if the file is not empty (empty is ok) - if (fd.Size() > 0) { - if (_error->PendingError() || !tag.Step(sec)) { - Status = StatError; - _error->DumpErrors(); - Rename(DestFile,DestFile + ".FAILED"); - return; - } else if (!sec.Exists("Package")) { - Status = StatError; - ErrorText = ("Encountered a section with no Package: header"); - Rename(DestFile,DestFile + ".FAILED"); - return; - } + pkgTagSection sec; + pkgTagFile tag(&fd); + + // all our current indexes have a field 'Package' in each section + if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false) + { + RenameOnError(InvalidFormat); + return; } } // Done, move it into position - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI); + string FinalFile = GetFinalFilename(RealURI, compExt); Rename(DestFile,FinalFile); chmod(FinalFile.c_str(),0644); - + /* We restore the original name to DestFile so that the clean operation will work OK */ DestFile = _config->FindDir("Dir::State::lists") + "partial/"; DestFile += URItoFileName(RealURI); - + if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz") + DestFile += ".gz"; + // Remove the compressed version. 
if (Erase == true) unlink(DestFile.c_str()); + return; } @@ -950,9 +1189,6 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash, string FileName = LookupTag(Message,"Alt-Filename"); if (FileName.empty() == false) { - // The files timestamp matches - if (StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false) == true) - return; Decompression = true; Local = true; DestFile += ".decomp"; @@ -969,33 +1205,37 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash, ErrorText = "Method gave a blank filename"; } - std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' ')); - - // The files timestamp matches - if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) { - if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz") - // Update DestFile for .gz suffix so that the clean operation keeps it - DestFile += ".gz"; - return; - } - if (FileName == DestFile) Erase = true; else Local = true; - + + // do not reverify cdrom sources as apt-cdrom may rewrite the Packages + // file when its doing the indexcopy + if (RealURI.substr(0,6) == "cdrom:" && + StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) + return; + + // The files timestamp matches, for non-local URLs reverify the local + // file, for local file, uncompress again to ensure the hashsum is still + // matching the Release file + if (!Local && StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) + { + ReverifyAfterIMS(FileName); + return; + } string decompProg; - // If we enable compressed indexes and already have gzip, keep it - if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz" && !Local) { - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI) + ".gz"; - Rename(DestFile,FinalFile); - chmod(FinalFile.c_str(),0644); - - // Update DestFile for .gz suffix so that the clean operation keeps it - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + // If we enable compressed indexes, queue for hash verification + if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz" && !Local) + { + DestFile = _config->FindDir("Dir::State::lists"); DestFile += URItoFileName(RealURI) + ".gz"; + + Decompression = true; + Desc.URI = "copy:" + FileName; + QueueURI(Desc); + return; } @@ -1013,6 +1253,8 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash, DestFile += ".decomp"; Desc.URI = decompProg + ":" + FileName; QueueURI(Desc); + + // FIXME: this points to a c++ string that goes out of scope Mode = decompProg.c_str(); } /*}}}*/ @@ -1021,22 +1263,29 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash, /* The Translation file is added to the queue */ pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, string URI,string URIDesc,string ShortDesc) - : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "") + : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashStringList(), "") { } -pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const *Target, - HashString const &ExpectedHash, indexRecords const *MetaIndexParser) - : pkgAcqIndex(Owner, Target, ExpectedHash, MetaIndexParser) +pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const * const Target, + HashStringList const &ExpectedHashes, indexRecords *MetaIndexParser) + : pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser) { + // load the filesize + indexRecords::checkSum *Record = MetaIndexParser->Lookup(string(Target->MetaKey)); + if(Record) + FileSize = 
Record->Size; } /*}}}*/ // AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- -string pkgAcqIndexTrans::Custom600Headers() +string pkgAcqIndexTrans::Custom600Headers() const { string Final = _config->FindDir("Dir::State::lists"); Final += URItoFileName(RealURI); + if (_config->FindB("Acquire::GzipIndexes",false)) + Final += ".gz"; + struct stat Buf; if (stat(Final.c_str(),&Buf) != 0) return "\nFail-Ignore: true\nIndex-File: true"; @@ -1076,7 +1325,7 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/ string MetaIndexShortDesc, const vector* IndexTargets, indexRecords* MetaIndexParser) : - Item(Owner), RealURI(URI), MetaIndexURI(MetaIndexURI), + Item(Owner, HashStringList()), RealURI(URI), MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc), MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets) { @@ -1096,11 +1345,10 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/ string Final = _config->FindDir("Dir::State::lists"); Final += URItoFileName(RealURI); - struct stat Buf; - if (stat(Final.c_str(),&Buf) == 0) + if (RealFileExists(Final) == true) { // File was already in place. It needs to be re-downloaded/verified - // because Release might have changed, we do give it a differnt + // because Release might have changed, we do give it a different // name than DestFile because otherwise the http method will // send If-Range requests and there are too many broken servers // out there that do not understand them @@ -1108,13 +1356,29 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/ Rename(Final,LastGoodSig); } + // we expect the indextargets + one additional Release file + ExpectedAdditionalItems = IndexTargets->size() + 1; + QueueURI(Desc); +} + /*}}}*/ +pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/ +{ + // if the file was never queued undo file-changes done in the constructor + if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false && + LastGoodSig.empty() == false) + { + string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true) + Rename(LastGoodSig, Final); + } + } /*}}}*/ // pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. 
*/ -string pkgAcqMetaSig::Custom600Headers() +string pkgAcqMetaSig::Custom600Headers() const { struct stat Buf; if (stat(LastGoodSig.c_str(),&Buf) != 0) @@ -1123,10 +1387,10 @@ string pkgAcqMetaSig::Custom600Headers() return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); } -void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5, +void pkgAcqMetaSig::Done(string Message,unsigned long long Size, HashStringList const &Hashes, pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,MD5,Cfg); + Item::Done(Message, Size, Hashes, Cfg); string FileName = LookupTag(Message,"Filename"); if (FileName.empty() == true) @@ -1147,6 +1411,9 @@ void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5, Complete = true; + // at this point pkgAcqMetaIndex takes over + ExpectedAdditionalItems = 0; + // put the last known good file back on i-m-s hit (it will // be re-verified again) // Else do nothing, we have the new file in DestFile then @@ -1164,6 +1431,9 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ { string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + // at this point pkgAcqMetaIndex takes over + ExpectedAdditionalItems = 0; + // if we get a network error we fail gracefully if(Status == StatTransientNetworkError) { @@ -1200,9 +1470,9 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/ string URI,string URIDesc,string ShortDesc, string SigFile, - const vector* IndexTargets, + const vector* IndexTargets, indexRecords* MetaIndexParser) : - Item(Owner), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets), + Item(Owner, HashStringList()), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets), MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false) { DestFile = _config->FindDir("Dir::State::lists") + "partial/"; @@ -1214,13 +1484,16 @@ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/ Desc.ShortDesc = ShortDesc; Desc.URI = URI; + // we expect more item + ExpectedAdditionalItems = IndexTargets->size(); + QueueURI(Desc); } /*}}}*/ // pkgAcqMetaIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. 
*/ -string pkgAcqMetaIndex::Custom600Headers() +string pkgAcqMetaIndex::Custom600Headers() const { string Final = _config->FindDir("Dir::State::lists"); Final += URItoFileName(RealURI); @@ -1232,10 +1505,10 @@ string pkgAcqMetaIndex::Custom600Headers() return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); } /*}}}*/ -void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*/ +void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,Hash,Cfg); + Item::Done(Message,Size,Hashes,Cfg); // MetaIndexes are done in two passes: one to download the // metaindex with an appropriate method, and a second to verify it @@ -1264,9 +1537,20 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{* } else { + // FIXME: move this into pkgAcqMetaClearSig::Done on the next + // ABI break + + // if we expect a ClearTextSignature (InRelase), ensure that + // this is what we get and if not fail to queue a + // Release/Release.gpg, see #346386 + if (SigFile == DestFile && !StartsWithGPGClearTextSignature(DestFile)) + { + Failed(Message, Cfg); + return; + } + // There was a signature file, so pass it to gpgv for // verification - if (_config->FindB("Debug::pkgAcquire::Auth", false)) std::cerr << "Metaindex acquired, queueing gpg verification (" << SigFile << "," << DestFile << ")\n"; @@ -1318,7 +1602,14 @@ void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/ string FinalFile = _config->FindDir("Dir::State::lists"); FinalFile += URItoFileName(RealURI); if (SigFile == DestFile) + { SigFile = FinalFile; + // constructor of pkgAcqMetaClearSig moved it out of the way, + // now move it back in on IMS hit for the 'old' file + string const OldClearSig = DestFile + ".reverify"; + if (RealFileExists(OldClearSig) == true) + Rename(OldClearSig, FinalFile); + } DestFile = FinalFile; } Complete = true; @@ -1347,6 +1638,28 @@ void pkgAcqMetaIndex::AuthDone(string Message) /*{{{*/ std::cerr << "Signature verification succeeded: " << DestFile << std::endl; + // do not trust any previously unverified content that we may have + string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI)); + if (DestFile != SigFile) + LastGoodSigFile.append(".gpg"); + LastGoodSigFile.append(".reverify"); + if(IMSHit == false && RealFileExists(LastGoodSigFile) == false) + { + for (vector ::const_iterator Target = IndexTargets->begin(); + Target != IndexTargets->end(); + ++Target) + { + // remove old indexes + std::string index = _config->FindDir("Dir::State::lists") + + URItoFileName((*Target)->URI); + unlink(index.c_str()); + // and also old gzipindexes + index += ".gz"; + unlink(index.c_str()); + } + } + + // Download further indexes with verification QueueIndexes(true); @@ -1373,15 +1686,40 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/ return; } #endif - for (vector ::const_iterator Target = IndexTargets->begin(); + bool transInRelease = false; + { + std::vector const keys = MetaIndexParser->MetaKeys(); + for (std::vector::const_iterator k = keys.begin(); k != keys.end(); ++k) + // FIXME: Feels wrong to check for hardcoded string here, but what should we do else… + if (k->find("Translation-") != std::string::npos) + { + transInRelease = true; + break; + } + } + + // at this point the real Items are loaded in the fetcher + ExpectedAdditionalItems = 0; + for (vector ::const_iterator Target = 
IndexTargets->begin(); Target != IndexTargets->end(); ++Target) { - HashString ExpectedIndexHash; + HashStringList ExpectedIndexHashes; const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey); + bool compressedAvailable = false; if (Record == NULL) { - if (verify == true && (*Target)->IsOptional() == false) + if ((*Target)->IsOptional() == true) + { + std::vector types = APT::Configuration::getCompressionTypes(); + for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t) + if (MetaIndexParser->Exists((*Target)->MetaKey + "." + *t) == true) + { + compressedAvailable = true; + break; + } + } + else if (verify == true) { Status = StatAuthError; strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str()); @@ -1390,14 +1728,16 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/ } else { - ExpectedIndexHash = Record->Hash; + ExpectedIndexHashes = Record->Hashes; if (_config->FindB("Debug::pkgAcquire::Auth", false)) { - std::cerr << "Queueing: " << (*Target)->URI << std::endl; - std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl; + std::cerr << "Queueing: " << (*Target)->URI << std::endl + << "Expected Hash:" << std::endl; + for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs) + std::cerr << "\t- " << hs->toStr() << std::endl; std::cerr << "For: " << Record->MetaKeyFilename << std::endl; } - if (verify == true && ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false) + if (verify == true && ExpectedIndexHashes.empty() == true && (*Target)->IsOptional() == false) { Status = StatAuthError; strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str()); @@ -1409,9 +1749,15 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/ { if ((*Target)->IsSubIndex() == true) new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description, - (*Target)->ShortDesc, ExpectedIndexHash); - else - new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser); + (*Target)->ShortDesc, ExpectedIndexHashes); + else if (transInRelease == false || Record != NULL || compressedAvailable == true) + { + if (_config->FindB("Acquire::PDiffs",true) == true && transInRelease == true && + MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true) + new pkgAcqDiffIndex(Owner, *Target, ExpectedIndexHashes, MetaIndexParser); + else + new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHashes, MetaIndexParser); + } continue; } @@ -1420,11 +1766,10 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/ in the Meta-Index file. 
Ideal would be if pkgAcqDiffIndex would test this instead, but passing the required info to it is to much hassle */ if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false || - MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true)) - new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description, - (*Target)->ShortDesc, ExpectedIndexHash); + MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)) + new pkgAcqDiffIndex(Owner, *Target, ExpectedIndexHashes, MetaIndexParser); else - new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser); + new pkgAcqIndex(Owner, *Target, ExpectedIndexHashes, MetaIndexParser); } } /*}}}*/ @@ -1445,7 +1790,7 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/ missingkeys += (Fingerprint); } if(!missingkeys.empty()) - _error->Warning("%s", string(msg+missingkeys).c_str()); + _error->Warning("%s", (msg + missingkeys).c_str()); string Transformed = MetaIndexParser->GetExpectedDist(); @@ -1508,27 +1853,24 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/ // pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/) { if (AuthPass == true) { // gpgv method failed, if we have a good signature - string LastGoodSigFile = _config->FindDir("Dir::State::lists"); - if (DestFile == SigFile) - LastGoodSigFile.append(URItoFileName(RealURI)); - else - LastGoodSigFile.append("partial/").append(URItoFileName(RealURI)).append(".gpg.reverify"); + string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI)); + if (DestFile != SigFile) + LastGoodSigFile.append(".gpg"); + LastGoodSigFile.append(".reverify"); if(FileExists(LastGoodSigFile)) { + string VerifiedSigFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); if (DestFile != SigFile) - { - string VerifiedSigFile = _config->FindDir("Dir::State::lists") + - URItoFileName(RealURI) + ".gpg"; - Rename(LastGoodSigFile,VerifiedSigFile); - } + VerifiedSigFile.append(".gpg"); + Rename(LastGoodSigFile, VerifiedSigFile); Status = StatTransientNetworkError; - _error->Warning(_("A error occurred during the signature " + _error->Warning(_("An error occurred during the signature " "verification. The repository is not updated " "and the previous index files will be used. 
" "GPG error: %s: %s\n"), @@ -1580,34 +1922,72 @@ pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/ string const &URI, string const &URIDesc, string const &ShortDesc, string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc, string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc, - const vector* IndexTargets, + const vector* IndexTargets, indexRecords* MetaIndexParser) : pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser), MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc), MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc) { SigFile = DestFile; + + // index targets + (worst case:) Release/Release.gpg + ExpectedAdditionalItems = IndexTargets->size() + 2; + + + // keep the old InRelease around in case of transistent network errors + string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + if (RealFileExists(Final) == true) + { + string const LastGoodSig = DestFile + ".reverify"; + Rename(Final,LastGoodSig); + } +} + /*}}}*/ +pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/ +{ + // if the file was never queued undo file-changes done in the constructor + if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false) + { + string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + string const LastGoodSig = DestFile + ".reverify"; + if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true) + Rename(LastGoodSig, Final); + } } /*}}}*/ // pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- // FIXME: this can go away once the InRelease file is used widely -string pkgAcqMetaClearSig::Custom600Headers() +string pkgAcqMetaClearSig::Custom600Headers() const { string Final = _config->FindDir("Dir::State::lists"); Final += URItoFileName(RealURI); struct stat Buf; if (stat(Final.c_str(),&Buf) != 0) - return "\nIndex-File: true\nFail-Ignore: true\n"; + { + Final = DestFile + ".reverify"; + if (stat(Final.c_str(),&Buf) != 0) + return "\nIndex-File: true\nFail-Ignore: true\n"; + } return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); } /*}}}*/ void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ { + // we failed, we will not get additional items from this method + ExpectedAdditionalItems = 0; + if (AuthPass == false) { + // Remove the 'old' InRelease file if we try Release.gpg now as otherwise + // the file will stay around and gives a false-auth impression (CVE-2012-0214) + string FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile.append(URItoFileName(RealURI)); + if (FileExists(FinalFile)) + unlink(FinalFile.c_str()); + new pkgAcqMetaSig(Owner, MetaSigURI, MetaSigURIDesc, MetaSigShortDesc, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc, @@ -1627,7 +2007,7 @@ void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /* pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, pkgRecords *Recs,pkgCache::VerIterator const &Version, string &StoreFilename) : - Item(Owner), Version(Version), Sources(Sources), Recs(Recs), + Item(Owner, HashStringList()), Version(Version), Sources(Sources), Recs(Recs), StoreFilename(StoreFilename), Vf(Version.FileList()), Trusted(false) { @@ -1638,7 +2018,7 @@ 
pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, _error->Error(_("I wasn't able to locate a file for the %s package. " "This might mean you need to manually fix this package. " "(due to missing arch)"), - Version.ParentPkg().Name()); + Version.ParentPkg().FullName().c_str()); return; } @@ -1646,7 +2026,7 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, assumption here that all the available sources for this version share the same extension.. */ // Skip not source sources, they do not have file fields. - for (; Vf.end() == false; Vf++) + for (; Vf.end() == false; ++Vf) { if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0) continue; @@ -1669,34 +2049,40 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, } // check if we have one trusted source for the package. if so, switch - // to "TrustedOnly" mode + // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode + bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false); + bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false); + bool seenUntrusted = false; for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i) { pkgIndexFile *Index; if (Sources->FindIndex(i.File(),Index) == false) continue; - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - { + + if (debugAuth == true) std::cerr << "Checking index: " << Index->Describe() - << "(Trusted=" << Index->IsTrusted() << ")\n"; - } - if (Index->IsTrusted()) { + << "(Trusted=" << Index->IsTrusted() << ")" << std::endl; + + if (Index->IsTrusted() == true) + { Trusted = true; - break; + if (allowUnauth == false) + break; } + else + seenUntrusted = true; } // "allow-unauthenticated" restores apts old fetching behaviour // that means that e.g. unauthenticated file:// uris are higher // priority than authenticated http:// uris - if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true) + if (allowUnauth == true && seenUntrusted == true) Trusted = false; // Select a source if (QueueNext() == false && _error->PendingError() == false) - _error->Error(_("I wasn't able to locate a file for the %s package. " - "This might mean you need to manually fix this package."), - Version.ParentPkg().Name()); + _error->Error(_("Can't find a source to download version '%s' of '%s'"), + Version.VerStr(), Version.ParentPkg().FullName(false).c_str()); } /*}}}*/ // AcqArchive::QueueNext - Queue the next file source /*{{{*/ @@ -1706,7 +2092,6 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, checking later. 
*/ bool pkgAcqArchive::QueueNext() { - string const ForceHash = _config->Find("Acquire::ForceHash"); for (; Vf.end() == false; ++Vf) { // Ignore not source sources @@ -1727,27 +2112,10 @@ bool pkgAcqArchive::QueueNext() pkgRecords::Parser &Parse = Recs->Lookup(Vf); if (_error->PendingError() == true) return false; - + string PkgFile = Parse.FileName(); - if (ForceHash.empty() == false) - { - if(stringcasecmp(ForceHash, "sha256") == 0) - ExpectedHash = HashString("SHA256", Parse.SHA256Hash()); - else if (stringcasecmp(ForceHash, "sha1") == 0) - ExpectedHash = HashString("SHA1", Parse.SHA1Hash()); - else - ExpectedHash = HashString("MD5Sum", Parse.MD5Hash()); - } - else - { - string Hash; - if ((Hash = Parse.SHA256Hash()).empty() == false) - ExpectedHash = HashString("SHA256", Hash); - else if ((Hash = Parse.SHA1Hash()).empty() == false) - ExpectedHash = HashString("SHA1", Hash); - else - ExpectedHash = HashString("MD5Sum", Parse.MD5Hash()); - } + ExpectedHashes = Parse.Hashes(); + if (PkgFile.empty() == true) return _error->Error(_("The package index files are corrupted. No Filename: " "field for package %s."), @@ -1756,7 +2124,7 @@ bool pkgAcqArchive::QueueNext() Desc.URI = Index->ArchiveURI(PkgFile); Desc.Description = Index->ArchiveInfo(Version); Desc.Owner = this; - Desc.ShortDesc = Version.ParentPkg().Name(); + Desc.ShortDesc = Version.ParentPkg().FullName(true); // See if we already have the file. (Legacy filenames) FileSize = Version->Size; @@ -1765,7 +2133,7 @@ bool pkgAcqArchive::QueueNext() if (stat(FinalFile.c_str(),&Buf) == 0) { // Make sure the size matches - if ((unsigned)Buf.st_size == Version->Size) + if ((unsigned long long)Buf.st_size == Version->Size) { Complete = true; Local = true; @@ -1784,7 +2152,7 @@ bool pkgAcqArchive::QueueNext() if (stat(FinalFile.c_str(),&Buf) == 0) { // Make sure the size matches - if ((unsigned)Buf.st_size == Version->Size) + if ((unsigned long long)Buf.st_size == Version->Size) { Complete = true; Local = true; @@ -1793,7 +2161,7 @@ bool pkgAcqArchive::QueueNext() return true; } - /* Hmm, we have a file and its size does not match, this shouldnt + /* Hmm, we have a file and its size does not match, this shouldn't happen.. */ unlink(FinalFile.c_str()); } @@ -1804,18 +2172,25 @@ bool pkgAcqArchive::QueueNext() if (stat(DestFile.c_str(),&Buf) == 0) { // Hmm, the partial file is too big, erase it - if ((unsigned)Buf.st_size > Version->Size) + if ((unsigned long long)Buf.st_size > Version->Size) unlink(DestFile.c_str()); else PartialSize = Buf.st_size; } - + + // Disables download of archives - useful if no real installation follows, + // e.g. 
if we are just interested in proposed installation order + if (_config->FindB("Debug::pkgAcqArchive::NoQueue", false) == true) + { + Complete = true; + Local = true; + Status = StatDone; + StoreFilename = DestFile = FinalFile; + return true; + } + // Create the item Local = false; - Desc.URI = Index->ArchiveURI(PkgFile); - Desc.Description = Index->ArchiveInfo(Version); - Desc.Owner = this; - Desc.ShortDesc = Version.ParentPkg().Name(); QueueURI(Desc); ++Vf; @@ -1827,26 +2202,23 @@ bool pkgAcqArchive::QueueNext() // AcqArchive::Done - Finished fetching /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcqArchive::Done(string Message,unsigned long Size,string CalcHash, +void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList const &CalcHashes, pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,CalcHash,Cfg); + Item::Done(Message, Size, CalcHashes, Cfg); // Check the size if (Size != Version->Size) { - Status = StatError; - ErrorText = _("Size mismatch"); + RenameOnError(SizeMismatch); return; } - - // Check the hash - if(ExpectedHash.toStr() != CalcHash) + + // FIXME: could this empty() check impose *any* sort of security issue? + if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes) { - Status = StatError; - ErrorText = _("Hash Sum mismatch"); - if(FileExists(DestFile)) - Rename(DestFile,DestFile + ".FAILED"); + RenameOnError(HashSumMismatch); + printHashSumComparision(DestFile, ExpectedHashes, CalcHashes); return; } @@ -1918,7 +2290,7 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*}}}*/ // AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/ // --------------------------------------------------------------------- -bool pkgAcqArchive::IsTrusted() +APT_PURE bool pkgAcqArchive::IsTrusted() const { return Trusted; } @@ -1937,11 +2309,11 @@ void pkgAcqArchive::Finished() // AcqFile::pkgAcqFile - Constructor /*{{{*/ // --------------------------------------------------------------------- /* The file is added to the queue */ -pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash, - unsigned long Size,string Dsc,string ShortDesc, +pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashes, + unsigned long long Size,string Dsc,string ShortDesc, const string &DestDir, const string &DestFilename, bool IsIndexFile) : - Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile) + Item(Owner, Hashes), IsIndexFile(IsIndexFile) { Retries = _config->FindI("Acquire::Retries",0); @@ -1966,7 +2338,7 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash, if (stat(DestFile.c_str(),&Buf) == 0) { // Hmm, the partial file is too big, erase it - if ((unsigned)Buf.st_size > Size) + if ((Size > 0) && (unsigned long long)Buf.st_size > Size) unlink(DestFile.c_str()); else PartialSize = Buf.st_size; @@ -1978,17 +2350,16 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash, // AcqFile::Done - Item downloaded OK /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash, +void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList const &CalcHashes, pkgAcquire::MethodConfig *Cnf) { - Item::Done(Message,Size,CalcHash,Cnf); + Item::Done(Message,Size,CalcHashes,Cnf); // Check the hash - if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash) + if(ExpectedHashes.usable() && ExpectedHashes != 
CalcHashes) { - Status = StatError; - ErrorText = _("Hash Sum mismatch"); - Rename(DestFile,DestFile + ".FAILED"); + RenameOnError(HashSumMismatch); + printHashSumComparision(DestFile, ExpectedHashes, CalcHashes); return; } @@ -2059,20 +2430,10 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf) // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. */ -string pkgAcqFile::Custom600Headers() +string pkgAcqFile::Custom600Headers() const { if (IsIndexFile) return "\nIndex-File: true"; return ""; } /*}}}*/ -bool IndexTarget::IsOptional() const { - if (strncmp(ShortDesc.c_str(), "Translation", 11) != 0) - return false; - return true; -} -bool IndexTarget::IsSubIndex() const { - if (ShortDesc != "TranslationIndex") - return false; - return true; -}
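
The central change in this patch is the move from a single HashString per item to a HashStringList (ExpectedHashes), plus a printHashSumComparision() debug helper that dumps expected versus actual values when verification fails. Below is a minimal standalone sketch of that idea, not the apt API: HashList, usable() and matches() are invented stand-ins, and the rule that every hash type present on both sides must agree is my assumption about how the list comparison is meant to behave.

// Standalone model of multi-hash verification with a debug dump on mismatch.
#include <iostream>
#include <map>
#include <string>

using HashList = std::map<std::string, std::string>;   // hash type -> hex value

// the list can only be used for verification if it is non-empty
static bool usable(HashList const &hashes) { return !hashes.empty(); }

// every hash type present in both lists must agree; an empty expected list
// verifies nothing (mirrors "ExpectedHashes.usable() && ExpectedHashes != Hashes")
static bool matches(HashList const &expected, HashList const &actual)
{
   for (auto const &e : expected)
   {
      auto const a = actual.find(e.first);
      if (a != actual.end() && a->second != e.second)
         return false;
   }
   return true;
}

static void printHashSumComparison(std::string const &uri,
                                   HashList const &expected, HashList const &actual)
{
   std::cerr << uri << ":\n Expected Hash:\n";
   for (auto const &h : expected)
      std::cerr << "\t- " << h.first << ":" << h.second << "\n";
   std::cerr << " Actual Hash:\n";
   for (auto const &h : actual)
      std::cerr << "\t- " << h.first << ":" << h.second << "\n";
}

int main()
{
   HashList expected = {{"SHA256", "aaaa"}, {"SHA1", "bbbb"}};
   HashList actual   = {{"SHA256", "aaaa"}, {"SHA1", "cccc"}};
   if (usable(expected) && !matches(expected, actual))
      printHashSumComparison("http://example.org/dists/test/InRelease", expected, actual);
   return 0;
}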
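
RenameOnError() is new in this patch and centralises the "keep the bad download as <file>.FAILED, set an error text and status, possibly report a mirror failure" pattern that used to be repeated in every Done() method. A compilable approximation, using std::filesystem instead of apt's FileExists()/Rename() helpers and leaving out the status and mirror-report side effects:

#include <filesystem>
#include <iostream>
#include <string>

enum RenameOnErrorState { HashSumMismatch, SizeMismatch, InvalidFormat };

static bool renameOnError(std::string const &destFile, RenameOnErrorState error,
                          std::string &errorText)
{
   std::error_code ec;
   if (std::filesystem::exists(destFile, ec))
      std::filesystem::rename(destFile, destFile + ".FAILED", ec);

   switch (error)
   {
      case HashSumMismatch: errorText = "Hash Sum mismatch"; break;
      case SizeMismatch:    errorText = "Size mismatch"; break;
      case InvalidFormat:   errorText = "Invalid file format"; break;
   }
   // returns false so a bool-returning caller can simply
   // 'return renameOnError(...);' as the real helper does
   return false;
}

int main()
{
   std::string err;
   renameOnError("/tmp/partial/Packages", HashSumMismatch, err);
   std::cout << err << "\n";
   return 0;
}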
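
pkgAcqDiffIndex::ParseDiffIndex() now short-circuits when the local index already carries the SHA1-Current value from the .diff/Index file, creating an "empty" pkgAcqIndexDiffs only so that list cleanup keeps the file, and it still falls back to a full pkgAcqIndex when patching is not possible. The following is a simplified decision model; the SHA1 computation and the SHA1-History parsing are deliberately left out, and the three-way outcome is my reading of the control flow:

#include <iostream>
#include <string>

enum class PDiffAction { UpToDate, ApplyPatches, FallBackToFullIndex };

static PDiffAction decidePDiff(std::string const &localSha1, std::string const &serverSha1,
                               bool historyContainsLocalState)
{
   if (localSha1 == serverSha1)
      return PDiffAction::UpToDate;              // nothing to fetch, keep the file
   if (historyContainsLocalState)
      return PDiffAction::ApplyPatches;          // queue the pdiff chain
   return PDiffAction::FallBackToFullIndex;      // local file too old: full download
}

int main()
{
   std::cout << int(decidePDiff("abc", "abc", false)) << "\n";  // 0
   std::cout << int(decidePDiff("abc", "def", true))  << "\n";  // 1
   std::cout << int(decidePDiff("abc", "def", false)) << "\n";  // 2
   return 0;
}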
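
The new one-download-per-patch versus fetch-everything-and-merge-client-side choice hinges on the Acquire::PDiffs::Merge option and on the X-Patch-Precedence field that reprepro sets to "merged" when it already merges patches on the server. A small sketch of just that decision, with the configuration lookup and the tag-section access replaced by plain parameters:

#include <iostream>
#include <string>

// 'mergeOption' stands in for _config->FindB("Acquire::PDiffs::Merge", true),
// 'patchPrecedence' for the X-Patch-Precedence field of the .diff/Index file.
static bool useClientSideMerge(bool mergeOption, std::string const &patchPrecedence)
{
   if (mergeOption == false)
      return false;
   // mixing server-side merged patches with client-side merging would
   // effectively apply changes twice, so back off in that case
   return patchPrecedence != "merged";
}

int main()
{
   std::cout << useClientSideMerge(true, "") << "\n";        // 1: fetch all, merge locally
   std::cout << useClientSideMerge(true, "merged") << "\n";  // 0: server merges, go one by one
   std::cout << useClientSideMerge(false, "") << "\n";       // 0: merging disabled
   return 0;
}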
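
pkgAcqIndexMergeDiffs items share a vector of all patches in the batch; each finished download flips its own state, and only the item that observes every sibling in StateDoneDiff hands the target file to the rred method for patching. A standalone model of that "last one to finish does the work" check, with the acquire queue, the rred invocation and the error path omitted:

#include <iostream>
#include <string>
#include <vector>

enum State { StateFetchDiff, StateDoneDiff, StateApplyDiff, StateErrorDiff };

struct MergePatchItem { std::string name; State state; };

// returns true if 'self' was the last item of the batch to complete
static bool onePatchFetched(MergePatchItem &self, std::vector<MergePatchItem> &allPatches)
{
   self.state = StateDoneDiff;
   for (auto const &p : allPatches)
      if (p.state != StateDoneDiff)
         return false;                  // someone is still downloading
   self.state = StateApplyDiff;         // last one queues the rred run
   return true;
}

int main()
{
   std::vector<MergePatchItem> batch = {
      {"2014-01-01", StateFetchDiff}, {"2014-01-02", StateFetchDiff}, {"2014-01-03", StateFetchDiff}};
   for (auto &p : batch)
      if (onePatchFetched(p, batch))
         std::cout << "all patches fetched, sending to rred after " << p.name << "\n";
   return 0;
}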
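
InitByHashIfNeeded() rewrites the index URI so that its last path component becomes by-hash/<HashType>/<HashValue> when APT::Acquire::By-Hash, the per-host knob or the Release file asks for it. The string surgery is easy to get wrong, so here it is in isolation; the option handling and the checksum record lookup are left out:

#include <iostream>
#include <string>

static std::string toByHashURI(std::string uri, std::string const &hashType,
                               std::string const &hashValue)
{
   std::string const byHash = "/by-hash/" + hashType + "/" + hashValue;
   std::string::size_type const trailingSlash = uri.find_last_of('/');
   if (trailingSlash == std::string::npos)
      return uri;                               // nothing sensible to replace
   // replace from the last slash to the end, as the original does with
   // Desc.URI.replace(trailing_slash, ..., ByHash)
   return uri.replace(trailingSlash, uri.size() - trailingSlash, byHash);
}

int main()
{
   std::cout << toByHashURI("http://deb.example.org/dists/test/main/binary-amd64/Packages.gz",
                            "SHA256", "0123abcd")
             << "\n";
   // -> http://deb.example.org/dists/test/main/binary-amd64/by-hash/SHA256/0123abcd
   return 0;
}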
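
The Done() path for indexes still refuses files that do not look like a Packages index (LP: #346386, Closes: #627642), now routed through RenameOnError(InvalidFormat): an empty file passes, anything else must have a Package field in its first stanza. apt uses pkgTagFile/pkgTagSection for this; the sketch below only scans the first stanza of a stream and is merely an illustration of the rule:

#include <iostream>
#include <sstream>
#include <string>

static bool looksLikePackagesIndex(std::istream &in)
{
   std::string line;
   bool sawAnything = false;
   while (std::getline(in, line))
   {
      if (line.empty())
         break;                                  // end of the first stanza
      sawAnything = true;
      if (line.rfind("Package:", 0) == 0)        // starts-with check
         return true;
   }
   return sawAnything == false;                  // empty file is acceptable
}

int main()
{
   std::istringstream good("Package: apt\nVersion: 1.0\n\n");
   std::istringstream bad("<html><body>proxy portal</body></html>\n");
   std::cout << looksLikePackagesIndex(good) << " " << looksLikePackagesIndex(bad) << "\n";
   return 0;
}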
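
The pkgAcqArchive constructor now tracks both "saw a trusted index" and "saw an untrusted index", so APT::Get::AllowUnauthenticated only drops the Trusted flag when untrusted sources are actually in play for the version. Reduced to booleans, with the index iteration and debug output stripped:

#include <iostream>
#include <vector>

static bool isTrustedDownload(std::vector<bool> const &indexIsTrusted, bool allowUnauth)
{
   bool trusted = false, seenUntrusted = false;
   for (bool t : indexIsTrusted)
   {
      if (t)
      {
         trusted = true;
         if (allowUnauth == false)
            break;                    // one trusted source is enough in strict mode
      }
      else
         seenUntrusted = true;
   }
   // allow-unauthenticated restores the old behaviour: e.g. an unauthenticated
   // file:// source may take precedence over an authenticated http:// one
   if (allowUnauth && seenUntrusted)
      trusted = false;
   return trusted;
}

int main()
{
   std::cout << isTrustedDownload({true, false}, false) << "\n";  // 1: strict mode, trusted wins
   std::cout << isTrustedDownload({true, false}, true)  << "\n";  // 0: unauth allowed, mixed sources
   return 0;
}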