X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/4f51fd8636592a96aecf17c8bf4cfdb3ea2207cc..1ba0302352b320108b3ca23130ceca1d46f0a999:/apt-pkg/acquire-item.cc
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index a3f47242f..7f31d1449 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -22,16 +22,17 @@
 #include
 #include
 #include
-#include
 #include
-#include
+#include
 #include
 #include
 #include
 #include
 #include
 #include
+#include
+#include
 #include
 #include
 #include
@@ -41,9 +42,9 @@
 #include
 #include
 #include
-#include
 #include
 #include
+#include
 #include
 /*}}}*/
@@ -79,18 +80,33 @@ static std::string GetFinalFileNameFromURI(std::string const &uri) /*{{{*/
    return _config->FindDir("Dir::State::lists") + URItoFileName(uri);
 }
 /*}}}*/
-static std::string GetCompressedFileName(std::string const &URI, std::string const &Name, std::string const &Ext) /*{{{*/
+static std::string GetKeepCompressedFileName(std::string file, IndexTarget const &Target)/*{{{*/
+{
+   if (Target.KeepCompressed == false)
+      return file;
+
+   std::string const CompressionTypes = Target.Option(IndexTarget::COMPRESSIONTYPES);
+   if (CompressionTypes.empty() == false)
+   {
+      std::string const ext = CompressionTypes.substr(0, CompressionTypes.find(' '));
+      if (ext != "uncompressed")
+         file.append(".").append(ext);
+   }
+   return file;
+}
+ /*}}}*/
+static std::string GetCompressedFileName(IndexTarget const &Target, std::string const &Name, std::string const &Ext) /*{{{*/
 {
    if (Ext.empty() || Ext == "uncompressed")
       return Name;
    // do not reverify cdrom sources as apt-cdrom may rewrite the Packages
    // file when its doing the indexcopy
-   if (URI.substr(0,6) == "cdrom:")
+   if (Target.URI.substr(0,6) == "cdrom:")
       return Name;
    // adjust DestFile if its compressed on disk
-   if (_config->FindB("Acquire::GzipIndexes",false) == true)
+   if (Target.KeepCompressed == true)
       return Name + '.' + Ext;
    return Name;
 }
@@ -107,18 +123,81 @@ static std::string GetDiffsPatchFileName(std::string const &Final) /*{{{*/
    return Final + ".ed";
 }
 /*}}}*/
+static bool BootstrapPDiffWith(std::string const &PartialFile, std::string const &FinalFile, IndexTarget const &Target)/*{{{*/
+{
+   // patching needs to be bootstrapped with the 'old' version
+   std::vector types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
+   auto typeItr = types.cbegin();
+   for (; typeItr != types.cend(); ++typeItr)
+   {
+      std::string Final = FinalFile;
+      if (*typeItr != "uncompressed")
+         Final.append(".").append(*typeItr);
+      if (RealFileExists(Final) == false)
+         continue;
+      std::string Partial = PartialFile;
+      if (*typeItr != "uncompressed")
+         Partial.append(".").append(*typeItr);
+      if (FileExists(Partial.c_str()) == true)
+         return true;
+      if (symlink(Final.c_str(), Partial.c_str()) != 0)
+         return false;
+      break;
+   }
+   return typeItr != types.cend();
+}
+ /*}}}*/
-static bool AllowInsecureRepositories(indexRecords const * const MetaIndexParser, pkgAcqMetaBase * const TransactionManager, pkgAcquire::Item * const I) /*{{{*/
+static bool MessageInsecureRepository(bool const isError, std::string const &msg)/*{{{*/
 {
-   if(MetaIndexParser->IsAlwaysTrusted() || _config->FindB("Acquire::AllowInsecureRepositories") == true)
+   if (isError)
+   {
+      _error->Error("%s", msg.c_str());
+      _error->Notice("%s", _("Updating from such a repository can't be done securely, and is therefore disabled by default."));
+   }
+   else
+   {
+      _error->Warning("%s", msg.c_str());
+      _error->Notice("%s", _("Data from such a repository can't be authenticated and is therefore potentially dangerous to use."));
+   }
+   _error->Notice("%s", _("See apt-secure(8) manpage for repository creation and user configuration details."));
+   return false;
+}
+static bool MessageInsecureRepository(bool const isError, char const * const msg, std::string const &repo)
+{
+   std::string m;
+   strprintf(m, msg, repo.c_str());
+   return MessageInsecureRepository(isError, m);
+}
+ /*}}}*/
+static bool AllowInsecureRepositories(char const * const msg, std::string const &repo,/*{{{*/
+   metaIndex const * const MetaIndexParser, pkgAcqMetaClearSig * const TransactionManager, pkgAcquire::Item * const I)
+{
+   if(MetaIndexParser->GetTrusted() == metaIndex::TRI_YES)
       return true;
-   _error->Error(_("Use --allow-insecure-repositories to force the update"));
+   if (_config->FindB("Acquire::AllowInsecureRepositories") == true)
+   {
+      MessageInsecureRepository(false, msg, repo);
+      return true;
+   }
+
+   MessageInsecureRepository(true, msg, repo);
    TransactionManager->AbortTransaction();
    I->Status = pkgAcquire::Item::StatError;
    return false;
 }
 /*}}}*/
+static HashStringList GetExpectedHashesFromFor(metaIndex * const Parser, std::string const &MetaKey)/*{{{*/
+{
+   if (Parser == NULL)
+      return HashStringList();
+   metaIndex::checkSum * const R = Parser->Lookup(MetaKey);
+   if (R == NULL)
+      return HashStringList();
+   return R->Hashes;
+}
+ /*}}}*/
 // all ::HashesRequired and ::GetExpectedHashes implementations /*{{{*/
 /* ::GetExpectedHashes is abstract and has to be implemented by all subclasses.
@@ -133,7 +212,8 @@ APT_CONST bool pkgAcqTransactionItem::HashesRequired() const
    we can at least trust them for integrity of the download itself.
Only repositories without a Release file can (obviously) not have hashes – and they are very uncommon and strongly discouraged */ - return TransactionManager->MetaIndexParser != NULL; + return TransactionManager->MetaIndexParser != NULL && + TransactionManager->MetaIndexParser->GetLoadedSuccessfully() == metaIndex::TRI_YES; } HashStringList pkgAcqTransactionItem::GetExpectedHashes() const { @@ -181,7 +261,7 @@ HashStringList pkgAcqIndexMergeDiffs::GetExpectedHashes() const if (State == StateFetchDiff) return patch.download_hashes; else if (State == StateApplyDiff) - return GetExpectedHashesFor(Target->MetaKey); + return GetExpectedHashesFor(Target.MetaKey); return HashStringList(); } @@ -256,20 +336,20 @@ std::string pkgAcqDiffIndex::GetFinalFilename() const } std::string pkgAcqIndex::GetFinalFilename() const { - std::string const FinalFile = GetFinalFileNameFromURI(Target->URI); - return GetCompressedFileName(Target->URI, FinalFile, CurrentCompressionExtension); + std::string const FinalFile = GetFinalFileNameFromURI(Target.URI); + return GetCompressedFileName(Target, FinalFile, CurrentCompressionExtension); } std::string pkgAcqMetaSig::GetFinalFilename() const { - return GetFinalFileNameFromURI(Target->URI); + return GetFinalFileNameFromURI(Target.URI); } std::string pkgAcqBaseIndex::GetFinalFilename() const { - return GetFinalFileNameFromURI(Target->URI); + return GetFinalFileNameFromURI(Target.URI); } std::string pkgAcqMetaBase::GetFinalFilename() const { - return GetFinalFileNameFromURI(DataTarget.URI); + return GetFinalFileNameFromURI(Target.URI); } std::string pkgAcqArchive::GetFinalFilename() const { @@ -279,17 +359,17 @@ std::string pkgAcqArchive::GetFinalFilename() const // pkgAcqTransactionItem::GetMetaKey and specialisations for child classes /*{{{*/ std::string pkgAcqTransactionItem::GetMetaKey() const { - return Target->MetaKey; + return Target.MetaKey; } std::string pkgAcqIndex::GetMetaKey() const { if (Stage == STAGE_DECOMPRESS_AND_VERIFY || CurrentCompressionExtension == "uncompressed") - return Target->MetaKey; - return Target->MetaKey + "." + CurrentCompressionExtension; + return Target.MetaKey; + return Target.MetaKey + "." 
+ CurrentCompressionExtension; } std::string pkgAcqDiffIndex::GetMetaKey() const { - return Target->MetaKey + ".diff/Index"; + return Target.MetaKey + ".diff/Index"; } /*}}}*/ //pkgAcqTransactionItem::TransactionState and specialisations for child classes /*{{{*/ @@ -317,7 +397,7 @@ bool pkgAcqTransactionItem::TransactionState(TransactionStates const state) } else { if(Debug == true) std::clog << "rm " << DestFile << " # " << DescURI() << std::endl; - unlink(DestFile.c_str()); + RemoveFile("TransactionCommit", DestFile); } break; } @@ -343,12 +423,12 @@ bool pkgAcqIndex::TransactionState(TransactionStates const state) // keep the compressed file, but drop the decompressed EraseFileName.clear(); if (PartialFile.empty() == false && flExtension(PartialFile) == "decomp") - unlink(PartialFile.c_str()); + RemoveFile("TransactionAbort", PartialFile); } break; case TransactionCommit: if (EraseFileName.empty() == false) - unlink(EraseFileName.c_str()); + RemoveFile("TransactionCommit", EraseFileName); break; } return true; @@ -363,8 +443,8 @@ bool pkgAcqDiffIndex::TransactionState(TransactionStates const state) case TransactionCommit: break; case TransactionAbort: - std::string const Partial = GetPartialFileNameFromURI(Target->URI); - unlink(Partial.c_str()); + std::string const Partial = GetPartialFileNameFromURI(Target.URI); + RemoveFile("TransactionAbort", Partial); break; } @@ -372,11 +452,36 @@ bool pkgAcqDiffIndex::TransactionState(TransactionStates const state) } /*}}}*/ +class APT_HIDDEN NoActionItem : public pkgAcquire::Item /*{{{*/ +/* The sole purpose of this class is having an item which does nothing to + reach its done state to prevent cleanup deleting the mentioned file. + Handy in cases in which we know we have the file already, like IMS-Hits. 
*/ +{ + IndexTarget const Target; + public: + virtual std::string DescURI() const APT_OVERRIDE {return Target.URI;}; + virtual HashStringList GetExpectedHashes() const APT_OVERRIDE {return HashStringList();}; + + NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target) : + pkgAcquire::Item(Owner), Target(Target) + { + Status = StatDone; + DestFile = GetFinalFileNameFromURI(Target.URI); + } + NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target, std::string const &FinalFile) : + pkgAcquire::Item(Owner), Target(Target) + { + Status = StatDone; + DestFile = FinalFile; + } +}; + /*}}}*/ + // Acquire::Item::Item - Constructor /*{{{*/ APT_IGNORE_DEPRECATED_PUSH -pkgAcquire::Item::Item(pkgAcquire * const Owner) : - FileSize(0), PartialSize(0), Mode(0), Complete(false), Local(false), - QueueCounter(0), ExpectedAdditionalItems(0), Owner(Owner) +pkgAcquire::Item::Item(pkgAcquire * const owner) : + FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), Local(false), + QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(NULL) { Owner->Add(this); Status = StatIdle; @@ -408,6 +513,11 @@ APT_PURE pkgAcquire * pkgAcquire::Item::GetOwner() const /*{{{*/ return Owner; } /*}}}*/ +APT_CONST pkgAcquire::ItemDesc &pkgAcquire::Item::GetItemDesc() /*{{{*/ +{ + return Desc; +} + /*}}}*/ APT_CONST bool pkgAcquire::Item::IsTrusted() const /*{{{*/ { return false; @@ -421,7 +531,6 @@ void pkgAcquire::Item::Failed(string const &Message,pkgAcquire::MethodConfig con { if(ErrorText.empty()) ErrorText = LookupTag(Message,"Message"); - UsedMirror = LookupTag(Message,"UsedMirror"); if (QueueCounter <= 1) { /* This indicates that the file is not available right now but might @@ -479,24 +588,35 @@ void pkgAcquire::Item::Start(string const &/*Message*/, unsigned long long const FileSize = Size; } /*}}}*/ +// Acquire::Item::VerifyDone - check if Item was downloaded OK /*{{{*/ +/* Note that hash-verification is 'hardcoded' in acquire-worker and has + * already passed if this method is called. */ +bool pkgAcquire::Item::VerifyDone(std::string const &Message, + pkgAcquire::MethodConfig const * const /*Cnf*/) +{ + std::string const FileName = LookupTag(Message,"Filename"); + if (FileName.empty() == true) + { + Status = StatError; + ErrorText = "Method gave a blank filename"; + return false; + } + + return true; +} + /*}}}*/ // Acquire::Item::Done - Item downloaded OK /*{{{*/ -void pkgAcquire::Item::Done(string const &Message, HashStringList const &Hashes, +void pkgAcquire::Item::Done(string const &/*Message*/, HashStringList const &Hashes, pkgAcquire::MethodConfig const * const /*Cnf*/) { // We just downloaded something.. 
- string FileName = LookupTag(Message,"Filename"); - UsedMirror = LookupTag(Message,"UsedMirror"); - unsigned long long const downloadedSize = Hashes.FileSize(); - if (downloadedSize != 0) + if (FileSize == 0) { - if (Complete == false && !Local && FileName == DestFile) + unsigned long long const downloadedSize = Hashes.FileSize(); + if (downloadedSize != 0) { - if (Owner->Log != 0) - Owner->Log->Fetched(Hashes.FileSize(),atoi(LookupTag(Message,"Resume-Point","0").c_str())); + FileSize = downloadedSize; } - - if (FileSize == 0) - FileSize= downloadedSize; } Status = StatDone; ErrorText = string(); @@ -556,8 +676,8 @@ bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const Status = StatError; break; case NotClearsigned: - errtext = _("Does not start with a cleartext signature"); - Status = StatError; + strprintf(errtext, _("Clearsigned file isn't valid, got '%s' (does the network require authentication?)"), "NOSPLIT"); + Status = StatAuthError; break; case MaximumSizeExceeded: // the method is expected to report a good error for this @@ -631,8 +751,8 @@ std::string pkgAcquire::Item::HashSum() const /*{{{*/ /*}}}*/ pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/ - pkgAcqMetaBase * const TransactionManager, IndexTarget const * const Target) : - pkgAcquire::Item(Owner), Target(Target), TransactionManager(TransactionManager) + pkgAcqMetaClearSig * const transactionManager, IndexTarget const &target) : + pkgAcquire::Item(Owner), d(NULL), Target(target), TransactionManager(transactionManager) { if (TransactionManager != this) TransactionManager->Add(this); @@ -642,25 +762,19 @@ pkgAcqTransactionItem::~pkgAcqTransactionItem() /*{{{*/ { } /*}}}*/ -HashStringList pkgAcqTransactionItem::GetExpectedHashesFor(std::string const MetaKey) const /*{{{*/ +HashStringList pkgAcqTransactionItem::GetExpectedHashesFor(std::string const &MetaKey) const /*{{{*/ { - if (TransactionManager->MetaIndexParser == NULL) - return HashStringList(); - indexRecords::checkSum * const R = TransactionManager->MetaIndexParser->Lookup(MetaKey); - if (R == NULL) - return HashStringList(); - return R->Hashes; + return GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, MetaKey); } /*}}}*/ // AcqMetaBase - Constructor /*{{{*/ pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire * const Owner, - pkgAcqMetaBase * const TransactionManager, - std::vector const * const IndexTargets, - IndexTarget const &DataTarget, - indexRecords * const MetaIndexParser) -: pkgAcqTransactionItem(Owner, TransactionManager, NULL), DataTarget(DataTarget), - MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL), IndexTargets(IndexTargets), + pkgAcqMetaClearSig * const TransactionManager, + std::vector const &IndexTargets, + IndexTarget const &DataTarget) +: pkgAcqTransactionItem(Owner, TransactionManager, DataTarget), d(NULL), + IndexTargets(IndexTargets), AuthPass(false), IMSHit(false) { } @@ -750,7 +864,7 @@ bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const s _error->Warning(_("An error occurred during the signature " "verification. The repository is not updated " "and the previous index files will be used. 
" - "GPG error: %s: %s\n"), + "GPG error: %s: %s"), Desc.Description.c_str(), LookupTag(Message,"Message").c_str()); RunScripts("APT::Update::Auth-Failure"); @@ -760,7 +874,7 @@ bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const s _error->Error(_("GPG error: %s: %s"), Desc.Description.c_str(), LookupTag(Message,"Message").c_str()); - I->Status = StatError; + I->Status = StatAuthError; return true; } else { _error->Warning(_("GPG error: %s: %s"), @@ -783,7 +897,6 @@ string pkgAcqMetaBase::Custom600Headers() const Header += MaximumSize; string const FinalFile = GetFinalFilename(); - struct stat Buf; if (stat(FinalFile.c_str(),&Buf) == 0) Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); @@ -807,15 +920,8 @@ bool pkgAcqMetaBase::CheckDownloadDone(pkgAcqTransactionItem * const I, const st // We have just finished downloading a Release file (it is not // verified yet) - string const FileName = LookupTag(Message,"Filename"); - if (FileName.empty() == true) - { - I->Status = StatError; - I->ErrorText = "Method gave a blank filename"; - return false; - } - - if (FileName != I->DestFile) + std::string const FileName = LookupTag(Message,"Filename"); + if (FileName != I->DestFile && RealFileExists(I->DestFile) == false) { I->Local = true; I->Desc.URI = "copy:" + FileName; @@ -832,7 +938,7 @@ bool pkgAcqMetaBase::CheckDownloadDone(pkgAcqTransactionItem * const I, const st if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true) { IMSHit = true; - unlink(I->DestFile.c_str()); + RemoveFile("CheckDownloadDone", I->DestFile); } } @@ -876,26 +982,28 @@ bool pkgAcqMetaBase::CheckAuthDone(string const &Message) /*{{{*/ } if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease)) { - TransactionManager->LastMetaIndexParser = new indexRecords; - _error->PushToStack(); - if (RealFileExists(FinalInRelease)) - TransactionManager->LastMetaIndexParser->Load(FinalInRelease); - else - TransactionManager->LastMetaIndexParser->Load(FinalRelease); - // its unlikely to happen, but if what we have is bad ignore it - if (_error->PendingError()) + TransactionManager->LastMetaIndexParser = TransactionManager->MetaIndexParser->UnloadedClone(); + if (TransactionManager->LastMetaIndexParser != NULL) { - delete TransactionManager->LastMetaIndexParser; - TransactionManager->LastMetaIndexParser = NULL; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + TransactionManager->LastMetaIndexParser->Load(FinalInRelease, NULL); + else + TransactionManager->LastMetaIndexParser->Load(FinalRelease, NULL); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) + { + delete TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; + } + _error->RevertToStack(); } - _error->RevertToStack(); } } - if (TransactionManager->MetaIndexParser->Load(DestFile) == false) + if (TransactionManager->MetaIndexParser->Load(DestFile, &ErrorText) == false) { Status = StatAuthError; - ErrorText = TransactionManager->MetaIndexParser->ErrorText; return false; } @@ -920,29 +1028,165 @@ void pkgAcqMetaBase::QueueIndexes(bool const verify) /*{{{*/ // at this point the real Items are loaded in the fetcher ExpectedAdditionalItems = 0; - vector ::const_iterator Target; - for (Target = IndexTargets->begin(); - Target != IndexTargets->end(); + bool metaBaseSupportsByHash = false; + if (TransactionManager != NULL && TransactionManager->MetaIndexParser != NULL) + metaBaseSupportsByHash = 
TransactionManager->MetaIndexParser->GetSupportsAcquireByHash(); + + for (std::vector ::iterator Target = IndexTargets.begin(); + Target != IndexTargets.end(); ++Target) { - if (verify == true && TransactionManager->MetaIndexParser->Exists((*Target)->MetaKey) == false) + // all is an implementation detail. Users shouldn't use this as arch + // We need this support trickery here as e.g. Debian has binary-all files already, + // but arch:all packages are still in the arch:any files, so we would waste precious + // download time, bandwidth and diskspace for nothing, BUT Debian doesn't feature all + // in the set of supported architectures, so we can filter based on this property rather + // than invent an entirely new flag we would need to carry for all of eternity. + if (Target->Option(IndexTarget::ARCHITECTURE) == "all" && + TransactionManager->MetaIndexParser->IsArchitectureSupported("all") == false) + continue; + + bool trypdiff = Target->OptionBool(IndexTarget::PDIFFS); + if (verify == true) { - // optional target that we do not have in the Release file are skipped - if ((*Target)->IsOptional()) + if (TransactionManager->MetaIndexParser->Exists(Target->MetaKey) == false) + { + // optional targets that we do not have in the Release file are skipped + if (Target->IsOptional) + continue; + + std::string const &arch = Target->Option(IndexTarget::ARCHITECTURE); + if (arch.empty() == false) + { + if (TransactionManager->MetaIndexParser->IsArchitectureSupported(arch) == false) + { + _error->Notice(_("Skipping acquire of configured file '%s' as repository '%s' doesn't support architecture '%s'"), + Target->MetaKey.c_str(), TransactionManager->Target.Description.c_str(), arch.c_str()); + continue; + } + // if the architecture is officially supported but currently no packages for it available, + // ignore silently as this is pretty much the same as just shipping an empty file. + // if we don't know which architectures are supported, we do NOT ignore it to notify user about this + if (TransactionManager->MetaIndexParser->IsArchitectureSupported("*undefined*") == false) + continue; + } + + Status = StatAuthError; + strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), Target->MetaKey.c_str()); + return; + } + else + { + auto const hashes = GetExpectedHashesFor(Target->MetaKey); + if (hashes.usable() == false && hashes.empty() == false) + { + _error->Warning(_("Skipping acquire of configured file '%s' as repository '%s' provides only weak security information for it"), + Target->MetaKey.c_str(), TransactionManager->Target.Description.c_str()); + continue; + } + } + + // autoselect the compression method + std::vector types = VectorizeString(Target->Option(IndexTarget::COMPRESSIONTYPES), ' '); + types.erase(std::remove_if(types.begin(), types.end(), [&](std::string const &t) { + if (t == "uncompressed") + return TransactionManager->MetaIndexParser->Exists(Target->MetaKey) == false; + std::string const MetaKey = Target->MetaKey + "." 
+ t; + return TransactionManager->MetaIndexParser->Exists(MetaKey) == false; + }), types.end()); + if (types.empty() == false) + { + std::ostringstream os; + // add the special compressiontype byhash first if supported + std::string const useByHashConf = Target->Option(IndexTarget::BY_HASH); + bool useByHash = false; + if(useByHashConf == "force") + useByHash = true; + else + useByHash = StringToBool(useByHashConf) == true && metaBaseSupportsByHash; + if (useByHash == true) + os << "by-hash "; + std::copy(types.begin(), types.end()-1, std::ostream_iterator(os, " ")); + os << *types.rbegin(); + Target->Options["COMPRESSIONTYPES"] = os.str(); + } + else + Target->Options["COMPRESSIONTYPES"].clear(); + + std::string filename = GetFinalFileNameFromURI(Target->URI); + if (RealFileExists(filename) == false) + { + if (Target->KeepCompressed) + { + filename = GetKeepCompressedFileName(filename, *Target); + if (RealFileExists(filename) == false) + filename.clear(); + } + else + filename.clear(); + } + + if (filename.empty() == false) + { + // if the Release file is a hit and we have an index it must be the current one + if (TransactionManager->IMSHit == true) + ; + else if (TransactionManager->LastMetaIndexParser != NULL) + { + // see if the file changed since the last Release file + // we use the uncompressed files as we might compress differently compared to the server, + // so the hashes might not match, even if they contain the same data. + HashStringList const newFile = GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, Target->MetaKey); + HashStringList const oldFile = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target->MetaKey); + if (newFile != oldFile) + filename.clear(); + } + else + filename.clear(); + } + else + trypdiff = false; // no file to patch + + if (filename.empty() == false) + { + new NoActionItem(Owner, *Target, filename); + std::string const idxfilename = GetFinalFileNameFromURI(Target->URI + ".diff/Index"); + if (FileExists(idxfilename)) + new NoActionItem(Owner, *Target, idxfilename); continue; + } - Status = StatAuthError; - strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str()); - return; + // check if we have patches available + trypdiff &= TransactionManager->MetaIndexParser->Exists(Target->MetaKey + ".diff/Index"); + } + else + { + // if we have no file to patch, no point in trying + std::string filename = GetFinalFileNameFromURI(Target->URI); + if (RealFileExists(filename) == false) + { + if (Target->KeepCompressed) + { + filename = GetKeepCompressedFileName(filename, *Target); + if (RealFileExists(filename) == false) + filename.clear(); + } + else + filename.clear(); + } + trypdiff &= (filename.empty() == false); + } + + // no point in patching from local sources + if (trypdiff) + { + std::string const proto = Target->URI.substr(0, strlen("file:/")); + if (proto == "file:/" || proto == "copy:/" || proto == "cdrom:") + trypdiff = false; } - /* Queue the Index file (Packages, Sources, Translation-$foo - (either diff or full packages files, depending - on the users option) - we also check if the PDiff Index file is listed - in the Meta-Index file. 
Ideal would be if pkgAcqDiffIndex would test this - instead, but passing the required info to it is to much hassle */ - if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false || - TransactionManager->MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)) + // Queue the Index file (Packages, Sources, Translation-$foo, …) + if (trypdiff) new pkgAcqDiffIndex(Owner, TransactionManager, *Target); else new pkgAcqIndex(Owner, TransactionManager, *Target); @@ -986,19 +1230,19 @@ bool pkgAcqMetaBase::VerifyVendor(string const &Message) /*{{{*/ Transformed = ""; } - if (_config->FindB("Acquire::Check-Valid-Until", true) == true && - TransactionManager->MetaIndexParser->GetValidUntil() > 0) { + if (TransactionManager->MetaIndexParser->GetValidUntil() > 0) + { time_t const invalid_since = time(NULL) - TransactionManager->MetaIndexParser->GetValidUntil(); if (invalid_since > 0) { std::string errmsg; strprintf(errmsg, // TRANSLATOR: The first %s is the URL of the bad Release file, the second is - // the time since then the file is invalid - formated in the same way as in + // the time since then the file is invalid - formatted in the same way as in // the download progress display (e.g. 7d 3h 42min 1s) _("Release file for %s is expired (invalid since %s). " "Updates for this repository will not be applied."), - DataTarget.URI.c_str(), TimeToStr(invalid_since).c_str()); + Target.URI.c_str(), TimeToStr(invalid_since).c_str()); if (ErrorText.empty()) ErrorText = errmsg; return _error->Error("%s", errmsg.c_str()); @@ -1011,16 +1255,18 @@ bool pkgAcqMetaBase::VerifyVendor(string const &Message) /*{{{*/ TransactionManager->LastMetaIndexParser->GetDate() > TransactionManager->MetaIndexParser->GetDate()) { TransactionManager->IMSHit = true; - unlink(DestFile.c_str()); + RemoveFile("VerifyVendor", DestFile); PartialFile = DestFile = GetFinalFilename(); - delete TransactionManager->MetaIndexParser; - TransactionManager->MetaIndexParser = TransactionManager->LastMetaIndexParser; + // load the 'old' file in the 'new' one instead of flipping pointers as + // the new one isn't owned by us, while the old one is so cleanup would be confused. 
+ TransactionManager->MetaIndexParser->swapLoad(TransactionManager->LastMetaIndexParser); + delete TransactionManager->LastMetaIndexParser; TransactionManager->LastMetaIndexParser = NULL; } if (_config->FindB("Debug::pkgAcquire::Auth", false)) { - std::cerr << "Got Codename: " << TransactionManager->MetaIndexParser->GetDist() << std::endl; + std::cerr << "Got Codename: " << TransactionManager->MetaIndexParser->GetCodename() << std::endl; std::cerr << "Expecting Dist: " << TransactionManager->MetaIndexParser->GetExpectedDist() << std::endl; std::cerr << "Transformed Dist: " << Transformed << std::endl; } @@ -1031,37 +1277,43 @@ bool pkgAcqMetaBase::VerifyVendor(string const &Message) /*{{{*/ // Status = StatAuthError; // ErrorText = "Conflicting distribution; expected " // + MetaIndexParser->GetExpectedDist() + " but got " -// + MetaIndexParser->GetDist(); +// + MetaIndexParser->GetCodename(); // return false; if (!Transformed.empty()) { _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"), Desc.Description.c_str(), Transformed.c_str(), - TransactionManager->MetaIndexParser->GetDist().c_str()); + TransactionManager->MetaIndexParser->GetCodename().c_str()); } } return true; } /*}}}*/ +pkgAcqMetaBase::~pkgAcqMetaBase() +{ +} pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire * const Owner, /*{{{*/ IndexTarget const &ClearsignedTarget, IndexTarget const &DetachedDataTarget, IndexTarget const &DetachedSigTarget, - const vector* const IndexTargets, - indexRecords * const MetaIndexParser) : - pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget, IndexTargets, MetaIndexParser), - ClearsignedTarget(ClearsignedTarget), - DetachedDataTarget(DetachedDataTarget), DetachedSigTarget(DetachedSigTarget) + std::vector const &IndexTargets, + metaIndex * const MetaIndexParser) : + pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget, IndexTargets), + d(NULL), ClearsignedTarget(ClearsignedTarget), + DetachedDataTarget(DetachedDataTarget), + MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL) { // index targets + (worst case:) Release/Release.gpg - ExpectedAdditionalItems = IndexTargets->size() + 2; + ExpectedAdditionalItems = IndexTargets.size() + 2; TransactionManager->Add(this); } /*}}}*/ pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/ { + if (LastMetaIndexParser != NULL) + delete LastMetaIndexParser; } /*}}}*/ // pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/ @@ -1069,41 +1321,31 @@ string pkgAcqMetaClearSig::Custom600Headers() const { string Header = pkgAcqMetaBase::Custom600Headers(); Header += "\nFail-Ignore: true"; + std::string const key = TransactionManager->MetaIndexParser->GetSignedBy(); + if (key.empty() == false) + Header += "\nSigned-By: " + key; + return Header; } /*}}}*/ -// pkgAcqMetaClearSig::Done - We got a file /*{{{*/ -class APT_HIDDEN DummyItem : public pkgAcquire::Item +bool pkgAcqMetaClearSig::VerifyDone(std::string const &Message, /*{{{*/ + pkgAcquire::MethodConfig const * const Cnf) { - IndexTarget const * const Target; - public: - virtual std::string DescURI() const {return Target->URI;}; - virtual HashStringList GetExpectedHashes() const {return HashStringList();}; + Item::VerifyDone(Message, Cnf); - DummyItem(pkgAcquire * const Owner, IndexTarget const * const Target) : - pkgAcquire::Item(Owner), Target(Target) - { - Status = StatDone; - DestFile = GetFinalFileNameFromURI(Target->URI); - } -}; + if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile)) + return 
RenameOnError(NotClearsigned); + + return true; +} + /*}}}*/ +// pkgAcqMetaClearSig::Done - We got a file /*{{{*/ void pkgAcqMetaClearSig::Done(std::string const &Message, HashStringList const &Hashes, pkgAcquire::MethodConfig const * const Cnf) { Item::Done(Message, Hashes, Cnf); - // if we expect a ClearTextSignature (InRelease), ensure that - // this is what we get and if not fail to queue a - // Release/Release.gpg, see #346386 - if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile)) - { - pkgAcquire::Item::Failed(Message, Cnf); - RenameOnError(NotClearsigned); - TransactionManager->AbortTransaction(); - return; - } - if(AuthPass == false) { if(CheckDownloadDone(this, Message, Hashes) == true) @@ -1119,8 +1361,8 @@ void pkgAcqMetaClearSig::Done(std::string const &Message, // We got an InRelease file IMSHit, but we haven't one, which means // we had a valid Release/Release.gpg combo stepping in, which we have // to 'acquire' now to ensure list cleanup isn't removing them - new DummyItem(Owner, &DetachedDataTarget); - new DummyItem(Owner, &DetachedSigTarget); + new NoActionItem(Owner, DetachedDataTarget); + new NoActionItem(Owner, DetachedSigTarget); } } } @@ -1134,27 +1376,33 @@ void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig c if (AuthPass == false) { + if (Status == StatAuthError || Status == StatTransientNetworkError) + { + // if we expected a ClearTextSignature (InRelease) but got a network + // error or got a file, but it wasn't valid, we end up here (see VerifyDone). + // As these is usually called by web-portals we do not try Release/Release.gpg + // as this is gonna fail anyway and instead abort our try (LP#346386) + TransactionManager->AbortTransaction(); + return; + } + // Queue the 'old' InRelease file for removal if we try Release.gpg // as otherwise the file will stay around and gives a false-auth // impression (CVE-2012-0214) TransactionManager->TransactionStageRemoval(this, GetFinalFilename()); Status = StatDone; - new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget, IndexTargets, TransactionManager->MetaIndexParser); + new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget, IndexTargets); } else { if(CheckStopAuthentication(this, Message)) return; - _error->Warning(_("The data from '%s' is not signed. 
Packages " - "from that repository can not be authenticated."), - ClearsignedTarget.Description.c_str()); - // No Release file was present, or verification failed, so fall // back to queueing Packages files without verification - // only allow going further if the users explicitely wants it - if(AllowInsecureRepositories(TransactionManager->MetaIndexParser, TransactionManager, this) == true) + // only allow going further if the user explicitly wants it + if(AllowInsecureRepositories(_("The repository '%s' is not signed."), ClearsignedTarget.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true) { Status = StatDone; @@ -1172,25 +1420,28 @@ void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig c // open the last Release if we have it if (TransactionManager->IMSHit == false) { - TransactionManager->LastMetaIndexParser = new indexRecords; - _error->PushToStack(); - if (RealFileExists(FinalInRelease)) - TransactionManager->LastMetaIndexParser->Load(FinalInRelease); - else - TransactionManager->LastMetaIndexParser->Load(FinalRelease); - // its unlikely to happen, but if what we have is bad ignore it - if (_error->PendingError()) + TransactionManager->LastMetaIndexParser = TransactionManager->MetaIndexParser->UnloadedClone(); + if (TransactionManager->LastMetaIndexParser != NULL) { - delete TransactionManager->LastMetaIndexParser; - TransactionManager->LastMetaIndexParser = NULL; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + TransactionManager->LastMetaIndexParser->Load(FinalInRelease, NULL); + else + TransactionManager->LastMetaIndexParser->Load(FinalRelease, NULL); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) + { + delete TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; + } + _error->RevertToStack(); } - _error->RevertToStack(); } } // we parse the indexes here because at this point the user wanted // a repository that may potentially harm him - if (TransactionManager->MetaIndexParser->Load(PartialRelease) == false || VerifyVendor(Message) == false) + if (TransactionManager->MetaIndexParser->Load(PartialRelease, &ErrorText) == false || VerifyVendor(Message) == false) /* expired Release files are still a problem you need extra force for */; else QueueIndexes(true); @@ -1200,12 +1451,11 @@ void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig c /*}}}*/ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner, /*{{{*/ - pkgAcqMetaBase * const TransactionManager, + pkgAcqMetaClearSig * const TransactionManager, IndexTarget const &DataTarget, IndexTarget const &DetachedSigTarget, - vector const * const IndexTargets, - indexRecords * const MetaIndexParser) : - pkgAcqMetaBase(Owner, TransactionManager, IndexTargets, DataTarget, MetaIndexParser), + vector const &IndexTargets) : + pkgAcqMetaBase(Owner, TransactionManager, IndexTargets, DataTarget), d(NULL), DetachedSigTarget(DetachedSigTarget) { if(_config->FindB("Debug::Acquire::Transaction", false) == true) @@ -1221,7 +1471,7 @@ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner, /*{{{*/ Desc.URI = DataTarget.URI; // we expect more item - ExpectedAdditionalItems = IndexTargets->size(); + ExpectedAdditionalItems = IndexTargets.size(); QueueURI(Desc); } /*}}}*/ @@ -1236,7 +1486,7 @@ void pkgAcqMetaIndex::Done(string const &Message, /*{{{*/ // we have a Release file, now download the Signature, all further // verify/queue for additional downloads will be 
done in the // pkgAcqMetaSig::Done() code - new pkgAcqMetaSig(Owner, TransactionManager, &DetachedSigTarget, this); + new pkgAcqMetaSig(Owner, TransactionManager, DetachedSigTarget, this); } } /*}}}*/ @@ -1247,19 +1497,13 @@ void pkgAcqMetaIndex::Failed(string const &Message, pkgAcquire::Item::Failed(Message, Cnf); Status = StatDone; - _error->Warning(_("The repository '%s' does not have a Release file. " - "This is deprecated, please contact the owner of the " - "repository."), DataTarget.Description.c_str()); - // No Release file was present so fall // back to queueing Packages files without verification - // only allow going further if the users explicitely wants it - if(AllowInsecureRepositories(TransactionManager->MetaIndexParser, TransactionManager, this) == true) + // only allow going further if the user explicitly wants it + if(AllowInsecureRepositories(_("The repository '%s' does not have a Release file."), Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true) { // ensure old Release files are removed TransactionManager->TransactionStageRemoval(this, GetFinalFilename()); - delete TransactionManager->MetaIndexParser; - TransactionManager->MetaIndexParser = NULL; // queue without any kind of hashsum support QueueIndexes(false); @@ -1277,23 +1521,24 @@ void pkgAcqMetaIndex::Finished() /*{{{*/ /*}}}*/ std::string pkgAcqMetaIndex::DescURI() const /*{{{*/ { - return DataTarget.URI; + return Target.URI; } /*}}}*/ +pkgAcqMetaIndex::~pkgAcqMetaIndex() {} // AcqMetaSig::AcqMetaSig - Constructor /*{{{*/ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire * const Owner, - pkgAcqMetaBase * const TransactionManager, - IndexTarget const * const Target, + pkgAcqMetaClearSig * const TransactionManager, + IndexTarget const &Target, pkgAcqMetaIndex * const MetaIndex) : - pkgAcqTransactionItem(Owner, TransactionManager, Target), MetaIndex(MetaIndex) + pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL), MetaIndex(MetaIndex) { - DestFile = GetPartialFileNameFromURI(Target->URI); + DestFile = GetPartialFileNameFromURI(Target.URI); // remove any partial downloaded sig-file in partial/. 
// it may confuse proxies and is too small to warrant a // partial download anyway - unlink(DestFile.c_str()); + RemoveFile("pkgAcqMetaSig", DestFile); // set the TransactionManager if(_config->FindB("Debug::Acquire::Transaction", false) == true) @@ -1301,10 +1546,10 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire * const Owner, << TransactionManager << std::endl; // Create the item - Desc.Description = Target->Description; + Desc.Description = Target.Description; Desc.Owner = this; - Desc.ShortDesc = Target->ShortDesc; - Desc.URI = Target->URI; + Desc.ShortDesc = Target.ShortDesc; + Desc.URI = Target.URI; // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors), // so we skip the download step and go instantly to verification @@ -1322,6 +1567,16 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire * const Owner, /*}}}*/ pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/ { +} + /*}}}*/ +// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/ +std::string pkgAcqMetaSig::Custom600Headers() const +{ + std::string Header = pkgAcqTransactionItem::Custom600Headers(); + std::string const key = TransactionManager->MetaIndexParser->GetSignedBy(); + if (key.empty() == false) + Header += "\nSigned-By: " + key; + return Header; } /*}}}*/ // AcqMetaSig::Done - The signature was downloaded/verified /*{{{*/ @@ -1372,7 +1627,7 @@ void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const { std::string downgrade_msg; strprintf(downgrade_msg, _("The repository '%s' is no longer signed."), - MetaIndex->DataTarget.Description.c_str()); + MetaIndex->Target.Description.c_str()); if(_config->FindB("Acquire::AllowDowngradeToInsecureRepositories")) { // meh, the users wants to take risks (we still mark the packages @@ -1383,7 +1638,7 @@ void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const "given to override it.")); Status = StatDone; } else { - _error->Error("%s", downgrade_msg.c_str()); + MessageInsecureRepository(true, downgrade_msg); if (TransactionManager->IMSHit == false) Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED"); Item::Failed("Message: " + downgrade_msg, Cnf); @@ -1391,44 +1646,44 @@ void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const return; } } - else - _error->Warning(_("The data from '%s' is not signed. 
Packages " - "from that repository can not be authenticated."), - MetaIndex->DataTarget.Description.c_str()); // ensures that a Release.gpg file in the lists/ is removed by the transaction TransactionManager->TransactionStageRemoval(this, DestFile); - // only allow going further if the users explicitely wants it - if(AllowInsecureRepositories(TransactionManager->MetaIndexParser, TransactionManager, this) == true) + // only allow going further if the user explicitly wants it + if (AllowInsecureRepositories(_("The repository '%s' is not signed."), MetaIndex->Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true) { if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) { // open the last Release if we have it if (TransactionManager->IMSHit == false) { - TransactionManager->LastMetaIndexParser = new indexRecords; - _error->PushToStack(); - if (RealFileExists(FinalInRelease)) - TransactionManager->LastMetaIndexParser->Load(FinalInRelease); - else - TransactionManager->LastMetaIndexParser->Load(FinalRelease); - // its unlikely to happen, but if what we have is bad ignore it - if (_error->PendingError()) + TransactionManager->LastMetaIndexParser = TransactionManager->MetaIndexParser->UnloadedClone(); + if (TransactionManager->LastMetaIndexParser != NULL) { - delete TransactionManager->LastMetaIndexParser; - TransactionManager->LastMetaIndexParser = NULL; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + TransactionManager->LastMetaIndexParser->Load(FinalInRelease, NULL); + else + TransactionManager->LastMetaIndexParser->Load(FinalRelease, NULL); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) + { + delete TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; + } + _error->RevertToStack(); } - _error->RevertToStack(); } } // we parse the indexes here because at this point the user wanted // a repository that may potentially harm him - if (TransactionManager->MetaIndexParser->Load(MetaIndex->DestFile) == false || MetaIndex->VerifyVendor(Message) == false) + bool const GoodLoad = TransactionManager->MetaIndexParser->Load(MetaIndex->DestFile, &ErrorText); + if (MetaIndex->VerifyVendor(Message) == false) /* expired Release files are still a problem you need extra force for */; else - MetaIndex->QueueIndexes(true); + MetaIndex->QueueIndexes(GoodLoad); TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); } @@ -1446,12 +1701,13 @@ void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const // AcqBaseIndex - Constructor /*{{{*/ pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire * const Owner, - pkgAcqMetaBase * const TransactionManager, - IndexTarget const * const Target) -: pkgAcqTransactionItem(Owner, TransactionManager, Target) + pkgAcqMetaClearSig * const TransactionManager, + IndexTarget const &Target) +: pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL) { } /*}}}*/ +pkgAcqBaseIndex::~pkgAcqBaseIndex() {} // AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/ // --------------------------------------------------------------------- @@ -1461,42 +1717,23 @@ pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire * const Owner, * the original packages file */ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner, - pkgAcqMetaBase * const TransactionManager, - IndexTarget const * const Target) - : pkgAcqBaseIndex(Owner, TransactionManager, Target) + pkgAcqMetaClearSig * const 
TransactionManager, + IndexTarget const &Target) + : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL), diffs(NULL) { Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); Desc.Owner = this; - Desc.Description = Target->Description + ".diff/Index"; - Desc.ShortDesc = Target->ShortDesc; - Desc.URI = Target->URI + ".diff/Index"; + Desc.Description = Target.Description + ".diff/Index"; + Desc.ShortDesc = Target.ShortDesc; + Desc.URI = Target.URI + ".diff/Index"; DestFile = GetPartialFileNameFromURI(Desc.URI); if(Debug) std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl; - // look for the current package file - CurrentPackagesFile = GetFinalFileNameFromURI(Target->URI); - - // FIXME: this file:/ check is a hack to prevent fetching - // from local sources. this is really silly, and - // should be fixed cleanly as soon as possible - if(!FileExists(CurrentPackagesFile) || - Desc.URI.substr(0,strlen("file:/")) == "file:/") - { - // we don't have a pkg file or we don't want to queue - Failed("No index file, local or canceld by user", NULL); - return; - } - - if(Debug) - std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): " - << CurrentPackagesFile << std::endl; - QueueURI(Desc); - } /*}}}*/ // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ @@ -1504,6 +1741,9 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner, /* The only header we use is the last-modified header. */ string pkgAcqDiffIndex::Custom600Headers() const { + if (TransactionManager->LastMetaIndexParser != NULL) + return "\nIndex-File: true"; + string const Final = GetFinalFilename(); if(Debug) @@ -1533,7 +1773,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ FileFd Fd(IndexDiffFile,FileFd::ReadOnly); pkgTagFile TF(&Fd); - if (_error->PendingError() == true) + if (Fd.IsOpen() == false || Fd.Failed()) return false; pkgTagSection Tags; @@ -1570,7 +1810,8 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ return false; } - HashStringList const TargetFileHashes = GetExpectedHashesFor(Target->MetaKey); + std::string const CurrentPackagesFile = GetFinalFileNameFromURI(Target.URI); + HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey); if (TargetFileHashes.usable() == false || ServerHashes != TargetFileHashes) { if (Debug == true) @@ -1581,7 +1822,19 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ return false; } - if (ServerHashes.VerifyFile(CurrentPackagesFile) == true) + HashStringList LocalHashes; + // try avoiding calculating the hash here as this is costly + if (TransactionManager->LastMetaIndexParser != NULL) + LocalHashes = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey); + if (LocalHashes.usable() == false) + { + FileFd fd(CurrentPackagesFile, FileFd::ReadOnly, FileFd::Auto); + Hashes LocalHashesCalc(ServerHashes); + LocalHashesCalc.AddFD(fd); + LocalHashes = LocalHashesCalc.GetHashStringList(); + } + + if (ServerHashes == LocalHashes) { // we have the same sha1 as the server so we are done here if(Debug) @@ -1590,19 +1843,22 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ return true; } - FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); - Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - if(Debug) std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at " - << fd.Name() << " " << fd.FileSize() << " " << 
LocalHashes.find(NULL)->toStr() << std::endl; + << CurrentPackagesFile << " " << LocalHashes.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl; + + // historically, older hashes have more info than newer ones, so start + // collecting with older ones first to avoid implementing complicated + // information merging techniques… a failure is after all always + // recoverable with a complete file and hashes aren't changed that often. + std::vector types; + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + types.push_back(*type); // parse all of (provided) history vector available_patches; bool firstAcceptedHashes = true; - for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + for (auto type = types.crbegin(); type != types.crend(); ++type) { if (LocalHashes.find(*type) == NULL) continue; @@ -1660,7 +1916,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ return false; } - for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + for (auto type = types.crbegin(); type != types.crend(); ++type) { if (LocalHashes.find(*type) == NULL) continue; @@ -1700,7 +1956,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ } } - for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + for (auto type = types.crbegin(); type != types.crend(); ++type) { std::string tagname = *type; tagname.append("-Download"); @@ -1809,7 +2065,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ new pkgAcqIndexDiffs(Owner, TransactionManager, Target, available_patches); else { - std::vector *diffs = new std::vector(available_patches.size()); + diffs = new std::vector(available_patches.size()); for(size_t i = 0; i < available_patches.size(); ++i) (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager, Target, @@ -1865,6 +2121,11 @@ void pkgAcqDiffIndex::Done(string const &Message,HashStringList const &Hashes, / return; } /*}}}*/ +pkgAcqDiffIndex::~pkgAcqDiffIndex() +{ + if (diffs != NULL) + delete diffs; +} // AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/ // --------------------------------------------------------------------- @@ -1872,37 +2133,32 @@ void pkgAcqDiffIndex::Done(string const &Message,HashStringList const &Hashes, / * for each diff and the index */ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire * const Owner, - pkgAcqMetaBase * const TransactionManager, - IndexTarget const * const Target, + pkgAcqMetaClearSig * const TransactionManager, + IndexTarget const &Target, vector const &diffs) - : pkgAcqBaseIndex(Owner, TransactionManager, Target), + : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL), available_patches(diffs) { - DestFile = GetPartialFileNameFromURI(Target->URI); + DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target); Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); Desc.Owner = this; - Description = Target->Description; - Desc.ShortDesc = Target->ShortDesc; + Description = Target.Description; + Desc.ShortDesc = Target.ShortDesc; if(available_patches.empty() == true) { // we are done (yeah!), check hashes against the final file - DestFile = GetFinalFileNameFromURI(Target->URI); + DestFile = GetKeepCompressedFileName(GetFinalFileNameFromURI(Target.URI), Target); Finish(true); } else { - // patching needs to be bootstrapped with the 'old' version - std::string const PartialFile = 
GetPartialFileNameFromURI(Target->URI); - if (RealFileExists(PartialFile) == false) + if (BootstrapPDiffWith(GetPartialFileNameFromURI(Target.URI), GetFinalFilename(), Target) == false) { - if (symlink(GetFinalFilename().c_str(), PartialFile.c_str()) != 0) - { - Failed("Link creation of " + PartialFile + " to " + GetFinalFilename() + " failed", NULL); - return; - } + Failed("Bootstrapping of " + DestFile + " failed", NULL); + return; } // get the next diff @@ -1916,10 +2172,10 @@ void pkgAcqIndexDiffs::Failed(string const &Message,pkgAcquire::MethodConfig con Item::Failed(Message,Cnf); Status = StatDone; + DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target); if(Debug) std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl - << "Falling back to normal index file acquire" << std::endl; - DestFile = GetPartialFileNameFromURI(Target->URI); + << "Falling back to normal index file acquire " << std::endl; RenameOnError(PDiffError); std::string const patchname = GetDiffsPatchFileName(DestFile); if (RealFileExists(patchname)) @@ -1940,7 +2196,14 @@ void pkgAcqIndexDiffs::Finish(bool allDone) // the file will be cleaned if(allDone) { - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + std::string Final = GetFinalFilename(); + if (Target.KeepCompressed) + { + std::string const ext = flExtension(DestFile); + if (ext.empty() == false) + Final.append(".").append(ext); + } + TransactionManager->TransactionStageCopy(this, DestFile, Final); // this is for the "real" finish Complete = true; @@ -1950,6 +2213,8 @@ void pkgAcqIndexDiffs::Finish(bool allDone) std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl; return; } + else + DestFile.clear(); if(Debug) std::clog << "Finishing: " << Desc.URI << std::endl; @@ -1962,15 +2227,14 @@ void pkgAcqIndexDiffs::Finish(bool allDone) bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ { // calc sha1 of the just patched file - std::string const FinalFile = GetPartialFileNameFromURI(Target->URI); - + std::string const FinalFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target); if(!FileExists(FinalFile)) { Failed("Message: No FinalFile " + FinalFile + " available", NULL); return false; } - FileFd fd(FinalFile, FileFd::ReadOnly); + FileFd fd(FinalFile, FileFd::ReadOnly, FileFd::Extension); Hashes LocalHashesCalc; LocalHashesCalc.AddFD(fd); HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); @@ -1978,7 +2242,7 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ if(Debug) std::clog << "QueueNextDiff: " << FinalFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl; - HashStringList const TargetFileHashes = GetExpectedHashesFor(Target->MetaKey); + HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey); if (unlikely(LocalHashes.usable() == false || TargetFileHashes.usable() == false)) { Failed("Local/Expected hashes are not usable", NULL); @@ -1995,14 +2259,10 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ // remove all patches until the next matching patch is found // this requires the Index file to be ordered - for(vector::iterator I = available_patches.begin(); - available_patches.empty() == false && - I != available_patches.end() && - I->result_hashes != LocalHashes; - ++I) - { - available_patches.erase(I); - } + available_patches.erase(available_patches.begin(), + std::find_if(available_patches.begin(), available_patches.end(), [&](DiffInfo const &I) { + return I.result_hashes == LocalHashes; 
+   }));
    // error checking and falling back if no patch was found
    if(available_patches.empty() == true)
@@ -2012,9 +2272,9 @@ bool pkgAcqIndexDiffs::QueueNextDiff()			/*{{{*/
    }
 
    // queue the right diff
-   Desc.URI = Target->URI + ".diff/" + available_patches[0].file + ".gz";
+   Desc.URI = Target.URI + ".diff/" + available_patches[0].file + ".gz";
    Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
-   DestFile = GetPartialFileNameFromURI(Target->URI + ".diff/" + available_patches[0].file);
+   DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI + ".diff/" + available_patches[0].file), Target);
 
    if(Debug)
       std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl;
@@ -2032,7 +2292,7 @@ void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes,
 
    Item::Done(Message, Hashes, Cnf);
 
-   std::string const FinalFile = GetPartialFileNameFromURI(Target->URI);
+   std::string const FinalFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
    std::string const PatchFile = GetDiffsPatchFileName(FinalFile);
 
    // success in downloading a diff, enter ApplyDiff state
@@ -2056,7 +2316,7 @@ void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes,
    {
       // remove the just applied patch
       available_patches.erase(available_patches.begin());
-      unlink(PatchFile.c_str());
+      RemoveFile("pkgAcqIndexDiffs::Done", PatchFile);
 
       // move into place
       if(Debug)
@@ -2091,26 +2351,27 @@ std::string pkgAcqIndexDiffs::Custom600Headers() const		/*{{{*/
    return patchhashes.str();
 }
 								/*}}}*/
+pkgAcqIndexDiffs::~pkgAcqIndexDiffs() {}
 
 // AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor		/*{{{*/
 pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire * const Owner,
-                                             pkgAcqMetaBase * const TransactionManager,
-                                             IndexTarget const * const Target,
+                                             pkgAcqMetaClearSig * const TransactionManager,
+                                             IndexTarget const &Target,
                                              DiffInfo const &patch,
                                              std::vector const * const allPatches)
-   : pkgAcqBaseIndex(Owner, TransactionManager, Target),
+   : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL),
      patch(patch), allPatches(allPatches), State(StateFetchDiff)
 {
    Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
 
    Desc.Owner = this;
-   Description = Target->Description;
-   Desc.ShortDesc = Target->ShortDesc;
+   Description = Target.Description;
+   Desc.ShortDesc = Target.ShortDesc;
 
-   Desc.URI = Target->URI + ".diff/" + patch.file + ".gz";
+   Desc.URI = Target.URI + ".diff/" + patch.file + ".gz";
    Desc.Description = Description + " " + patch.file + string(".pdiff");
-   DestFile = GetPartialFileNameFromURI(Target->URI + ".diff/" + patch.file);
+   DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI + ".diff/" + patch.file), Target);
 
    if(Debug)
      std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
@@ -2137,12 +2398,13 @@ void pkgAcqIndexMergeDiffs::Failed(string const &Message,pkgAcquire::MethodConfi
    State = StateErrorDiff;
    if (Debug)
       std::clog << "Falling back to normal index file acquire" << std::endl;
-   DestFile = GetPartialFileNameFromURI(Target->URI);
+   DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
    RenameOnError(PDiffError);
    std::string const patchname = GetMergeDiffsPatchFileName(DestFile, patch.file);
    if (RealFileExists(patchname))
       rename(patchname.c_str(), std::string(patchname + ".FAILED").c_str());
    new pkgAcqIndex(Owner, TransactionManager, Target);
+   DestFile.clear();
 }
 								/*}}}*/
 void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Hashes,	/*{{{*/
@@ -2153,7 +2415,8 @@ void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Ha
 
    Item::Done(Message, Hashes, Cnf);
 
-   string const FinalFile = GetPartialFileNameFromURI(Target->URI);
+   std::string const UncompressedFinalFile = GetPartialFileNameFromURI(Target.URI);
+   std::string const FinalFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
    if (State == StateFetchDiff)
    {
       Rename(DestFile, GetMergeDiffsPatchFileName(FinalFile, patch.file));
@@ -2172,10 +2435,9 @@ void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Ha
       // this is the last completed diff, so we are ready to apply now
       State = StateApplyDiff;
 
-      // patching needs to be bootstrapped with the 'old' version
-      if (symlink(GetFinalFilename().c_str(), FinalFile.c_str()) != 0)
+      if (BootstrapPDiffWith(UncompressedFinalFile, GetFinalFilename(), Target) == false)
      {
-         Failed("Link creation of " + FinalFile + " to " + GetFinalFilename() + " failed", NULL);
+         Failed("Bootstrapping of " + DestFile + " failed", NULL);
         return;
      }
@@ -2192,7 +2454,7 @@ void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Ha
    else if (State == StateApplyDiff)
    {
       // move the result into place
-      std::string const Final = GetFinalFilename();
+      std::string const Final = GetKeepCompressedFileName(GetFinalFilename(), Target);
      if(Debug)
         std::clog << "Queue patched file in place: " << std::endl
                   << DestFile << " -> " << Final << std::endl;
@@ -2204,11 +2466,11 @@ void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Ha
      for (std::vector::const_iterator I = allPatches->begin();
           I != allPatches->end(); ++I)
      {
-         std::string const PartialFile = GetPartialFileNameFromURI(Target->URI);
+         std::string const PartialFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
         std::string const patch = GetMergeDiffsPatchFileName(PartialFile, (*I)->patch.file);
-         unlink(patch.c_str());
+         RemoveFile("pkgAcqIndexMergeDiffs::Done", patch);
      }
-      unlink(FinalFile.c_str());
+      RemoveFile("pkgAcqIndexMergeDiffs::Done", FinalFile);
 
      // all set and done
      Complete = true;
@@ -2235,70 +2497,74 @@ std::string pkgAcqIndexMergeDiffs::Custom600Headers() const	/*{{{*/
    return patchhashes.str();
 }
 								/*}}}*/
+pkgAcqIndexMergeDiffs::~pkgAcqIndexMergeDiffs() {}
 
 // AcqIndex::AcqIndex - Constructor				/*{{{*/
 pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner,
-                         pkgAcqMetaBase * const TransactionManager,
-                         IndexTarget const * const Target)
-   : pkgAcqBaseIndex(Owner, TransactionManager, Target)
+                         pkgAcqMetaClearSig * const TransactionManager,
+                         IndexTarget const &Target)
+   : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL), Stage(STAGE_DOWNLOAD),
+     CompressionExtensions(Target.Option(IndexTarget::COMPRESSIONTYPES))
 {
-   // autoselect the compression method
-   AutoSelectCompression();
-   Init(Target->URI, Target->Description, Target->ShortDesc);
+   Init(Target.URI, Target.Description, Target.ShortDesc);
 
    if(_config->FindB("Debug::Acquire::Transaction", false) == true)
      std::clog << "New pkgIndex with TransactionManager "
               << TransactionManager << std::endl;
 }
 								/*}}}*/
-// AcqIndex::AutoSelectCompression - Select compression	/*{{{*/
-void pkgAcqIndex::AutoSelectCompression()
+// AcqIndex::Init - defered Constructor			/*{{{*/
+static void NextCompressionExtension(std::string &CurrentCompressionExtension, std::string &CompressionExtensions, bool const preview)
 {
-   std::vector types = APT::Configuration::getCompressionTypes();
-   CompressionExtensions = "";
-   if (TransactionManager->MetaIndexParser != NULL && TransactionManager->MetaIndexParser->Exists(Target->MetaKey))
+   size_t const nextExt = CompressionExtensions.find(' ');
+   if (nextExt == std::string::npos)
    {
-      for (std::vector::const_iterator t = types.begin();
-           t != types.end(); ++t)
-      {
-         std::string CompressedMetaKey = string(Target->MetaKey).append(".").append(*t);
-         if (*t == "uncompressed" ||
-             TransactionManager->MetaIndexParser->Exists(CompressedMetaKey) == true)
-            CompressionExtensions.append(*t).append(" ");
-      }
+      CurrentCompressionExtension = CompressionExtensions;
+      if (preview == false)
+         CompressionExtensions.clear();
   }
   else
   {
-      for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t)
-         CompressionExtensions.append(*t).append(" ");
+      CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt);
+      if (preview == false)
+         CompressionExtensions = CompressionExtensions.substr(nextExt+1);
   }
-   if (CompressionExtensions.empty() == false)
-      CompressionExtensions.erase(CompressionExtensions.end()-1);
 }
-								/*}}}*/
-// AcqIndex::Init - defered Constructor			/*{{{*/
 void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
                        string const &ShortDesc)
 {
    Stage = STAGE_DOWNLOAD;
 
    DestFile = GetPartialFileNameFromURI(URI);
+   NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false);
 
-   size_t const nextExt = CompressionExtensions.find(' ');
-   if (nextExt == std::string::npos)
+   if (CurrentCompressionExtension == "uncompressed")
   {
-      CurrentCompressionExtension = CompressionExtensions;
-      CompressionExtensions.clear();
+      Desc.URI = URI;
   }
-   else
+   else if (CurrentCompressionExtension == "by-hash")
   {
-      CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt);
-      CompressionExtensions = CompressionExtensions.substr(nextExt+1);
-   }
+      NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, true);
+      if(unlikely(TransactionManager->MetaIndexParser == NULL || CurrentCompressionExtension.empty()))
+         return;
+      if (CurrentCompressionExtension != "uncompressed")
+      {
+         Desc.URI = URI + '.' + CurrentCompressionExtension;
+         DestFile = DestFile + '.' + CurrentCompressionExtension;
+      }
 
-   if (CurrentCompressionExtension == "uncompressed")
-   {
-      Desc.URI = URI;
+      HashStringList const Hashes = GetExpectedHashes();
+      HashString const * const TargetHash = Hashes.find(NULL);
+      if (unlikely(TargetHash == nullptr))
+         return;
+      std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
+      size_t const trailing_slash = Desc.URI.find_last_of("/");
+      if (unlikely(trailing_slash == std::string::npos))
+         return;
+      Desc.URI = Desc.URI.replace(
+         trailing_slash,
+         Desc.URI.substr(trailing_slash+1).size()+1,
+         ByHash);
   }
   else if (unlikely(CurrentCompressionExtension.empty()))
      return;
@@ -2308,8 +2574,6 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
      DestFile = DestFile + '.' + CurrentCompressionExtension;
   }
 
-   if(TransactionManager->MetaIndexParser != NULL)
-      InitByHashIfNeeded();
 
   Desc.Description = URIDesc;
   Desc.Owner = this;
@@ -2318,49 +2582,24 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
   QueueURI(Desc);
 }
 								/*}}}*/
-// AcqIndex::AdjustForByHash - modify URI for by-hash support	/*{{{*/
-void pkgAcqIndex::InitByHashIfNeeded()
-{
-   // TODO:
-   //  - (maybe?) add support for by-hash into the sources.list as flag
-   //  - make apt-ftparchive generate the hashes (and expire?)
-   std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash";
-   if(_config->FindB("APT::Acquire::By-Hash", false) == true ||
-      _config->FindB(HostKnob, false) == true ||
-      TransactionManager->MetaIndexParser->GetSupportsAcquireByHash())
-   {
-      HashStringList const Hashes = GetExpectedHashes();
-      if(Hashes.usable())
-      {
-         // FIXME: should we really use the best hash here? or a fixed one?
-         HashString const * const TargetHash = Hashes.find("");
-         std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
-         size_t const trailing_slash = Desc.URI.find_last_of("/");
-         Desc.URI = Desc.URI.replace(
-            trailing_slash,
-            Desc.URI.substr(trailing_slash+1).size()+1,
-            ByHash);
-      } else {
-         _error->Warning(
-            "Fetching ByHash requested but can not find record for %s",
-            GetMetaKey().c_str());
-      }
-   }
-}
-								/*}}}*/
 // AcqIndex::Custom600Headers - Insert custom request headers	/*{{{*/
 // ---------------------------------------------------------------------
 /* The only header we use is the last-modified header. */
 string pkgAcqIndex::Custom600Headers() const
 {
-   string Final = GetFinalFilename();
    string msg = "\nIndex-File: true";
-   struct stat Buf;
-   if (stat(Final.c_str(),&Buf) == 0)
-      msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
 
-   if(Target->IsOptional())
+   if (TransactionManager->LastMetaIndexParser == NULL)
+   {
+      std::string const Final = GetFinalFilename();
+
+      struct stat Buf;
+      if (stat(Final.c_str(),&Buf) == 0)
+         msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+   }
+
+   if(Target.IsOptional)
       msg += "\nFail-Ignore: true";
 
    return msg;
@@ -2376,13 +2615,13 @@ void pkgAcqIndex::Failed(string const &Message,pkgAcquire::MethodConfig const *
 {
    if (CompressionExtensions.empty() == false)
    {
-      Init(Target->URI, Desc.Description, Desc.ShortDesc);
+      Init(Target.URI, Desc.Description, Desc.ShortDesc);
       Status = StatIdle;
       return;
    }
 }
 
-   if(Target->IsOptional() && GetExpectedHashes().empty() && Stage == STAGE_DOWNLOAD)
+   if(Target.IsOptional && GetExpectedHashes().empty() && Stage == STAGE_DOWNLOAD)
      Status = StatDone;
   else
      TransactionManager->AbortTransaction();
@@ -2393,7 +2632,7 @@ void pkgAcqIndex::ReverifyAfterIMS()
 {
    // update destfile to *not* include the compression extension when doing
    // a reverify (as its uncompressed on disk already)
-   DestFile = GetCompressedFileName(Target->URI, GetPartialFileNameFromURI(Target->URI), CurrentCompressionExtension);
+   DestFile = GetCompressedFileName(Target, GetPartialFileNameFromURI(Target.URI), CurrentCompressionExtension);
 
    // copy FinalFile into partial/ so that we check the hash again
    string FinalFile = GetFinalFilename();
@@ -2402,33 +2641,6 @@ void pkgAcqIndex::ReverifyAfterIMS()
    QueueURI(Desc);
 }
 								/*}}}*/
-// AcqIndex::ValidateFile - Validate the content of the downloaded file	/*{{{*/
-bool pkgAcqIndex::ValidateFile(const std::string &FileName)
-{
-   // FIXME: this can go away once we only ever download stuff that
-   //        has a valid hash and we never do GET based probing
-   // FIXME2: this also leaks debian-isms into the code and should go therefore
-
-   /* Always validate the index file for correctness (all indexes must
-    * have a Package field) (LP: #346386) (Closes: #627642)
-    */
-   FileFd fd(FileName, FileFd::ReadOnly, FileFd::Extension);
-   // Only test for correctness if the content of the file is not empty
-   // (empty is ok)
-   if (fd.Size() > 0)
-   {
-      pkgTagSection sec;
-      pkgTagFile tag(&fd);
-
-      // all our current indexes have a field 'Package' in each section
-      if (_error->PendingError() == true ||
-          tag.Step(sec) == false ||
-          sec.Exists("Package") == false)
-         return false;
-   }
-   return true;
-}
-								/*}}}*/
 // AcqIndex::Done - Finished a fetch				/*{{{*/
 // ---------------------------------------------------------------------
 /* This goes through a number of states.. On the initial fetch the
@@ -2460,7 +2672,7 @@ void pkgAcqIndex::StageDownloadDone(string const &Message, HashStringList const
    Complete = true;
 
    // Handle the unzipd case
-   string FileName = LookupTag(Message,"Alt-Filename");
+   std::string FileName = LookupTag(Message,"Alt-Filename");
    if (FileName.empty() == false)
    {
      Stage = STAGE_DECOMPRESS_AND_VERIFY;
@@ -2471,18 +2683,23 @@ void pkgAcqIndex::StageDownloadDone(string const &Message, HashStringList const
      SetActiveSubprocess("copy");
     return;
   }
-   FileName = LookupTag(Message,"Filename");
-   if (FileName.empty() == true)
-   {
-      Status = StatError;
-      ErrorText = "Method gave a blank filename";
-   }
 
   // Methods like e.g. "file:" will give us a (compressed) FileName that is
   // not the "DestFile" we set, in this case we uncompress from the local file
-   if (FileName != DestFile)
+   if (FileName != DestFile && RealFileExists(DestFile) == false)
+   {
      Local = true;
+      if (Target.KeepCompressed == true)
+      {
+         // but if we don't keep the uncompress we copy the compressed file first
+         Stage = STAGE_DOWNLOAD;
+         Desc.URI = "copy:" + FileName;
+         QueueURI(Desc);
+         SetActiveSubprocess("copy");
+         return;
+      }
+   }
   else
      EraseFileName = FileName;
 
@@ -2496,18 +2713,6 @@ void pkgAcqIndex::StageDownloadDone(string const &Message, HashStringList const
      return;
   }
 
-   // If we have compressed indexes enabled, queue for hash verification
-   if (_config->FindB("Acquire::GzipIndexes",false))
-   {
-      DestFile = GetPartialFileNameFromURI(Target->URI + '.' + CurrentCompressionExtension);
-      EraseFileName = "";
-      Stage = STAGE_DECOMPRESS_AND_VERIFY;
-      Desc.URI = "copy:" + FileName;
-      QueueURI(Desc);
-      SetActiveSubprocess("copy");
-      return;
-   }
-
   // get the binary name for your used compression type
   string decompProg;
   if(CurrentCompressionExtension == "uncompressed")
@@ -2520,32 +2725,35 @@ void pkgAcqIndex::StageDownloadDone(string const &Message, HashStringList const
      return;
   }
 
+   if (Target.KeepCompressed == true)
+   {
+      DestFile = "/dev/null";
+      EraseFileName.clear();
+   }
+   else
+      DestFile += ".decomp";
+
   // queue uri for the next stage
   Stage = STAGE_DECOMPRESS_AND_VERIFY;
-   DestFile += ".decomp";
   Desc.URI = decompProg + ":" + FileName;
   QueueURI(Desc);
   SetActiveSubprocess(decompProg);
 }
 								/*}}}*/
 // AcqIndex::StageDecompressDone - Final verification		/*{{{*/
-void pkgAcqIndex::StageDecompressDone(string const &Message,
+void pkgAcqIndex::StageDecompressDone(string const &,
                                       HashStringList const &,
-                                      pkgAcquire::MethodConfig const * const Cfg)
+                                      pkgAcquire::MethodConfig const * const)
 {
-   if(!ValidateFile(DestFile))
-   {
-      RenameOnError(InvalidFormat);
-      Failed(Message, Cfg);
-      return;
-   }
+   if (Target.KeepCompressed == true && DestFile == "/dev/null")
+      DestFile = GetPartialFileNameFromURI(Target.URI + '.' + CurrentCompressionExtension);
 
   // Done, queue for rename on transaction finished
   TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
-   return;
 }
 								/*}}}*/
+pkgAcqIndex::~pkgAcqIndex() {}
 
 // AcqArchive::AcqArchive - Constructor				/*{{{*/
@@ -2555,7 +2763,7 @@ void pkgAcqIndex::StageDecompressDone(string const &Message,
 pkgAcqArchive::pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sources,
                              pkgRecords * const Recs,pkgCache::VerIterator const &Version,
                              string &StoreFilename) :
-               Item(Owner), LocalSource(false), Version(Version), Sources(Sources), Recs(Recs),
+               Item(Owner), d(NULL), LocalSource(false), Version(Version), Sources(Sources), Recs(Recs),
               StoreFilename(StoreFilename), Vf(Version.FileList()),
               Trusted(false)
 {
@@ -2576,7 +2784,7 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sour
    // Skip not source sources, they do not have file fields.
    for (; Vf.end() == false; ++Vf)
    {
-      if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
+      if (Vf.File().Flagged(pkgCache::Flag::NotSource))
        continue;
      break;
   }
@@ -2644,14 +2852,14 @@ bool pkgAcqArchive::QueueNext()
    {
      pkgCache::PkgFileIterator const PkgF = Vf.File();
      // Ignore not source sources
-      if ((PkgF->Flags & pkgCache::Flag::NotSource) != 0)
+      if (PkgF.Flagged(pkgCache::Flag::NotSource))
        continue;
 
      // Try to cross match against the source list
      pkgIndexFile *Index;
     if (Sources->FindIndex(PkgF, Index) == false)
           continue;
-      LocalSource = (PkgF->Flags & pkgCache::Flag::LocalSource) == pkgCache::Flag::LocalSource;
+      LocalSource = PkgF.Flagged(pkgCache::Flag::LocalSource);
 
     // only try to get a trusted package from another source if that source
     // is also trusted
@@ -2694,7 +2902,7 @@ bool pkgAcqArchive::QueueNext()
 
        /* Hmm, we have a file and its size does not match, this means it is
           an old style mismatched arch */
-         unlink(FinalFile.c_str());
+         RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
      }
 
     // Check it again using the new style output filenames
@@ -2713,7 +2921,7 @@ bool pkgAcqArchive::QueueNext()
 
        /* Hmm, we have a file and its size does not match, this shouldn't
          happen.. */
-         unlink(FinalFile.c_str());
+         RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
      }
 
     DestFile = _config->FindDir("Dir::Cache::Archives") + "partial/" + flNotDir(StoreFilename);
@@ -2723,7 +2931,7 @@ bool pkgAcqArchive::QueueNext()
     {
        // Hmm, the partial file is too big, erase it
       if ((unsigned long long)Buf.st_size > Version->Size)
-           unlink(DestFile.c_str());
+           RemoveFile("pkgAcqArchive::QueueNext", DestFile);
       else
          PartialSize = Buf.st_size;
     }
@@ -2758,16 +2966,8 @@ void pkgAcqArchive::Done(string const &Message, HashStringList const &Hashes,
    Item::Done(Message, Hashes, Cfg);
 
    // Grab the output filename
-   string FileName = LookupTag(Message,"Filename");
-   if (FileName.empty() == true)
-   {
-      Status = StatError;
-      ErrorText = "Method gave a blank filename";
-      return;
-   }
-
-   // Reference filename
-   if (FileName != DestFile)
+   std::string const FileName = LookupTag(Message,"Filename");
+   if (DestFile != FileName && RealFileExists(DestFile) == false)
   {
      StoreFilename = DestFile = FileName;
      Local = true;
@@ -2843,13 +3043,216 @@ std::string pkgAcqArchive::ShortDesc() const		/*{{{*/
    return Desc.ShortDesc;
 }
 								/*}}}*/
+pkgAcqArchive::~pkgAcqArchive() {}
+
+// AcqChangelog::pkgAcqChangelog - Constructors			/*{{{*/
+pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::VerIterator const &Ver,
+		std::string const &DestDir, std::string const &DestFilename) :
+   pkgAcquire::Item(Owner), d(NULL), SrcName(Ver.SourcePkgName()), SrcVersion(Ver.SourceVerStr())
+{
+   Desc.URI = URI(Ver);
+   Init(DestDir, DestFilename);
+}
+// some parameters are char* here as they come likely from char* interfaces – which can also return NULL
+pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::RlsFileIterator const &RlsFile,
+		char const * const Component, char const * const SrcName, char const * const SrcVersion,
+		const string &DestDir, const string &DestFilename) :
+   pkgAcquire::Item(Owner), d(NULL), SrcName(SrcName), SrcVersion(SrcVersion)
+{
+   Desc.URI = URI(RlsFile, Component, SrcName, SrcVersion);
+   Init(DestDir, DestFilename);
+}
+pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner,
+		std::string const &URI, char const * const SrcName, char const * const SrcVersion,
+		const string &DestDir, const string &DestFilename) :
+   pkgAcquire::Item(Owner), d(NULL), SrcName(SrcName), SrcVersion(SrcVersion)
+{
+   Desc.URI = URI;
+   Init(DestDir, DestFilename);
+}
+void pkgAcqChangelog::Init(std::string const &DestDir, std::string const &DestFilename)
+{
+   if (Desc.URI.empty())
+   {
+      Status = StatError;
+      // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
+      strprintf(ErrorText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
+      // Let the error message print something sensible rather than "Failed to fetch /"
+      if (DestFilename.empty())
+         DestFile = SrcName + ".changelog";
+      else
+         DestFile = DestFilename;
+      Desc.URI = "changelog:/" + DestFile;
+      return;
+   }
+
+   if (DestDir.empty())
+   {
+      std::string const SandboxUser = _config->Find("APT::Sandbox::User");
+      std::string const systemTemp = GetTempDir(SandboxUser);
+      char tmpname[100];
+      snprintf(tmpname, sizeof(tmpname), "%s/apt-changelog-XXXXXX", systemTemp.c_str());
+      if (NULL == mkdtemp(tmpname))
+      {
+         _error->Errno("mkdtemp", "mkdtemp failed in changelog acquire of %s %s", SrcName.c_str(), SrcVersion.c_str());
+         Status = StatError;
+         return;
+      }
+      DestFile = TemporaryDirectory = tmpname;
+
+      ChangeOwnerAndPermissionOfFile("Item::QueueURI", DestFile.c_str(),
+            SandboxUser.c_str(), "root", 0700);
+   }
+   else
+      DestFile = DestDir;
+
+   if (DestFilename.empty())
+      DestFile = flCombine(DestFile, SrcName + ".changelog");
+   else
+      DestFile = flCombine(DestFile, DestFilename);
+
+   Desc.ShortDesc = "Changelog";
+   strprintf(Desc.Description, "%s %s %s Changelog", URI::SiteOnly(Desc.URI).c_str(), SrcName.c_str(), SrcVersion.c_str());
+   Desc.Owner = this;
+   QueueURI(Desc);
+}
+								/*}}}*/
+std::string pkgAcqChangelog::URI(pkgCache::VerIterator const &Ver)	/*{{{*/
+{
+   char const * const SrcName = Ver.SourcePkgName();
+   char const * const SrcVersion = Ver.SourceVerStr();
+   pkgCache::PkgFileIterator PkgFile;
+   // find the first source for this version which promises a changelog
+   for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
+   {
+      pkgCache::PkgFileIterator const PF = VF.File();
+      if (PF.Flagged(pkgCache::Flag::NotSource) || PF->Release == 0)
+         continue;
+      PkgFile = PF;
+      pkgCache::RlsFileIterator const RF = PF.ReleaseFile();
+      std::string const uri = URI(RF, PF.Component(), SrcName, SrcVersion);
+      if (uri.empty())
+         continue;
+      return uri;
+   }
+   return "";
+}
+std::string pkgAcqChangelog::URITemplate(pkgCache::RlsFileIterator const &Rls)
+{
+   if (Rls.end() == true || (Rls->Label == 0 && Rls->Origin == 0))
+      return "";
+   std::string const serverConfig = "Acquire::Changelogs::URI";
+   std::string server;
+#define APT_EMPTY_SERVER \
+   if (server.empty() == false) \
+   { \
+      if (server != "no") \
+         return server; \
+      return ""; \
+   }
+#define APT_CHECK_SERVER(X, Y) \
+   if (Rls->X != 0) \
+   { \
+      std::string const specialServerConfig = serverConfig + "::" + Y + #X + "::" + Rls.X(); \
+      server = _config->Find(specialServerConfig); \
+      APT_EMPTY_SERVER \
+   }
+   // this way e.g. Debian-Security can fallback to Debian
+   APT_CHECK_SERVER(Label, "Override::")
+   APT_CHECK_SERVER(Origin, "Override::")
+
+   if (RealFileExists(Rls.FileName()))
+   {
+      _error->PushToStack();
+      FileFd rf;
+      /* This can be costly. A caller wanting to get millions of URIs might
+         want to do this on its own once and use Override settings.
+         We don't do this here as Origin/Label are not as unique as they
+         should be so this could produce request order-dependent anomalies */
+      if (OpenMaybeClearSignedFile(Rls.FileName(), rf) == true)
+      {
+         pkgTagFile TagFile(&rf, rf.Size());
+         pkgTagSection Section;
+         if (TagFile.Step(Section) == true)
+            server = Section.FindS("Changelogs");
+      }
+      _error->RevertToStack();
+      APT_EMPTY_SERVER
+   }
+
+   APT_CHECK_SERVER(Label, "")
+   APT_CHECK_SERVER(Origin, "")
+#undef APT_CHECK_SERVER
+#undef APT_EMPTY_SERVER
+   return "";
+}
+std::string pkgAcqChangelog::URI(pkgCache::RlsFileIterator const &Rls,
+         char const * const Component, char const * const SrcName,
+         char const * const SrcVersion)
+{
+   return URI(URITemplate(Rls), Component, SrcName, SrcVersion);
+}
+std::string pkgAcqChangelog::URI(std::string const &Template,
+         char const * const Component, char const * const SrcName,
+         char const * const SrcVersion)
+{
+   if (Template.find("@CHANGEPATH@") == std::string::npos)
+      return "";
+
+   // the path is: COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER, e.g. main/a/apt/1.1 or contrib/liba/libapt/2.0
+   std::string Src = SrcName;
+   std::string path = APT::String::Startswith(SrcName, "lib") ? Src.substr(0, 4) : Src.substr(0,1);
+   path.append("/").append(Src).append("/");
+   path.append(Src).append("_").append(StripEpoch(SrcVersion));
+   // we omit component for releases without one (= flat-style repositories)
+   if (Component != NULL && strlen(Component) != 0)
+      path = std::string(Component) + "/" + path;
+
+   return SubstVar(Template, "@CHANGEPATH@", path);
+}
+								/*}}}*/
+// AcqChangelog::Failed - Failure handler			/*{{{*/
+void pkgAcqChangelog::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf)
+{
+   Item::Failed(Message,Cnf);
+
+   std::string errText;
+   // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
+   strprintf(errText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
+
+   // Error is probably something techy like 404 Not Found
+   if (ErrorText.empty())
+      ErrorText = errText;
+   else
+      ErrorText = errText + " (" + ErrorText + ")";
+   return;
+}
+								/*}}}*/
+// AcqChangelog::Done - Item downloaded OK			/*{{{*/
+void pkgAcqChangelog::Done(string const &Message,HashStringList const &CalcHashes,
+                      pkgAcquire::MethodConfig const * const Cnf)
+{
+   Item::Done(Message,CalcHashes,Cnf);
+
+   Complete = true;
+}
+								/*}}}*/
+pkgAcqChangelog::~pkgAcqChangelog()				/*{{{*/
+{
+   if (TemporaryDirectory.empty() == false)
+   {
+      RemoveFile("~pkgAcqChangelog", DestFile);
+      rmdir(TemporaryDirectory.c_str());
+   }
+}
+								/*}}}*/
 
 // AcqFile::pkgAcqFile - Constructor				/*{{{*/
 pkgAcqFile::pkgAcqFile(pkgAcquire * const Owner,string const &URI, HashStringList const &Hashes,
                        unsigned long long const Size,string const &Dsc,string const &ShortDesc,
                        const string &DestDir, const string &DestFilename,
                        bool const IsIndexFile) :
-                       Item(Owner), IsIndexFile(IsIndexFile), ExpectedHashes(Hashes)
+                       Item(Owner), d(NULL), IsIndexFile(IsIndexFile), ExpectedHashes(Hashes)
 {
    Retries = _config->FindI("Acquire::Retries",0);
 
@@ -2875,7 +3278,7 @@ pkgAcqFile::pkgAcqFile(pkgAcquire * const Owner,string const &URI, HashStringLis
    {
       // Hmm, the partial file is too big, erase it
       if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
-         unlink(DestFile.c_str());
+         RemoveFile("pkgAcqFile", DestFile);
       else
          PartialSize = Buf.st_size;
    }
@@ -2889,14 +3292,7 @@ void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes,
 {
    Item::Done(Message,CalcHashes,Cnf);
 
-   string FileName = LookupTag(Message,"Filename");
-   if (FileName.empty() == true)
-   {
-      Status = StatError;
-      ErrorText = "Method gave a blank filename";
-      return;
-   }
-
+   std::string const FileName = LookupTag(Message,"Filename");
   Complete = true;
 
   // The files timestamp matches
@@ -2904,7 +3300,7 @@ void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes,
     return;
 
  // We have to copy it into place
-   if (FileName != DestFile)
+   if (RealFileExists(DestFile.c_str()) == false)
  {
     Local = true;
     if (_config->FindB("Acquire::Source-Symlinks",true) == false ||
@@ -2920,7 +3316,7 @@ void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes,
      if (lstat(DestFile.c_str(),&St) == 0)
      {
        if (S_ISLNK(St.st_mode) != 0)
-             unlink(DestFile.c_str());
+             RemoveFile("pkgAcqFile::Done", DestFile);
      }
 
     // Symlink the file
@@ -2929,7 +3325,7 @@ void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes,
        _error->PushToStack();
        _error->Errno("pkgAcqFile::Done", "Symlinking file %s failed", DestFile.c_str());
        std::stringstream msg;
-          _error->DumpErrors(msg);
+          _error->DumpErrors(msg, GlobalError::DEBUG, false);
        _error->RevertToStack();
        ErrorText = msg.str();
        Status = StatError;
@@ -2965,3 +3361,4 @@ string pkgAcqFile::Custom600Headers() const			/*{{{*/
    return "";
 }
 								/*}}}*/
+pkgAcqFile::~pkgAcqFile() {}
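
As an illustration of the by-hash rewrite performed in pkgAcqIndex::Init above: the file name at the end of the index URI (and the slash in front of it) is replaced by a by-hash/<HashType>/<HashValue> suffix. The following is a minimal standalone sketch of that string manipulation only; the URI and hash value are hypothetical, and plain std::string stands in for APT's HashStringList/HashString types.

#include <iostream>
#include <string>

int main()
{
   // hypothetical index URI and hash record, for illustration only
   std::string URI = "http://deb.debian.org/debian/dists/unstable/main/binary-amd64/Packages.xz";
   std::string const HashType = "SHA256";
   std::string const HashValue = "0123456789abcdef";
   std::string const ByHash = "/by-hash/" + HashType + "/" + HashValue;
   // replace the trailing file name with the by-hash suffix,
   // mirroring the Desc.URI.replace() call in pkgAcqIndex::Init
   size_t const trailing_slash = URI.find_last_of("/");
   if (trailing_slash != std::string::npos)
      URI.replace(trailing_slash, URI.substr(trailing_slash + 1).size() + 1, ByHash);
   std::cout << URI << std::endl;
   // -> .../binary-amd64/by-hash/SHA256/0123456789abcdef
   return 0;
}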
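
Similarly, the @CHANGEPATH@ expansion in pkgAcqChangelog::URI above builds the component/prefix/source/source_version path described in its comment (e.g. main/a/apt/apt_1.1). Below is a rough standalone sketch assuming an epoch-free version string; the template URL is a hypothetical stand-in for a Release file's Changelogs field, and ChangePath is an illustrative helper, not an APT function.

#include <cstddef>
#include <iostream>
#include <string>

// builds the COMPONENT/PREFIX/SRCNAME/SRCNAME_SRCVER path that replaces
// @CHANGEPATH@; the prefix is the first letter of the source name, or the
// first four letters for "lib*" packages (version assumed to carry no epoch)
static std::string ChangePath(std::string const &Component,
                              std::string const &Src, std::string const &Ver)
{
   std::string path = (Src.compare(0, 3, "lib") == 0) ? Src.substr(0, 4) : Src.substr(0, 1);
   path.append("/").append(Src).append("/").append(Src).append("_").append(Ver);
   if (Component.empty() == false)
      path = Component + "/" + path;
   return path;
}

int main()
{
   // hypothetical template, as a Release file's Changelogs field could provide it
   std::string Template = "http://metadata.ftp-master.debian.org/changelogs/@CHANGEPATH@_changelog";
   std::string const marker = "@CHANGEPATH@";
   std::size_t const pos = Template.find(marker);
   if (pos != std::string::npos)
      Template.replace(pos, marker.size(), ChangePath("main", "apt", "1.1"));
   std::cout << Template << std::endl;
   // -> .../changelogs/main/a/apt/apt_1.1_changelog
   return 0;
}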