X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/0be13f1c8afdc4462bd0061130f943006915fbbc..20801f613690b330c79b4f7a30dc3ff52b722468:/apt-pkg/acquire-item.cc?ds=sidebyside

diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index c5037e5e0..dd85fda79 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -62,12 +62,49 @@ static void printHashSumComparision(std::string const &URI, HashStringList const
       std::cerr << "\t- " << hs->toStr() << std::endl;
 }
 									/*}}}*/
+static std::string GetPartialFileName(std::string const &file)		/*{{{*/
+{
+   std::string DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+   DestFile += file;
+   return DestFile;
+}
+									/*}}}*/
+static std::string GetPartialFileNameFromURI(std::string const &uri)	/*{{{*/
+{
+   return GetPartialFileName(URItoFileName(uri));
+}
+									/*}}}*/
+static std::string GetCompressedFileName(std::string const &URI, std::string const &Name, std::string const &Ext) /*{{{*/
+{
+   if (Ext.empty() || Ext == "uncompressed")
+      return Name;
+
+   // do not reverify cdrom sources as apt-cdrom may rewrite the Packages
+   // file when its doing the indexcopy
+   if (URI.substr(0,6) == "cdrom:")
+      return Name;
+
+   // adjust DestFile if its compressed on disk
+   if (_config->FindB("Acquire::GzipIndexes",false) == true)
+      return Name + '.' + Ext;
+   return Name;
+}
+									/*}}}*/
+static bool AllowInsecureRepositories(indexRecords const * const MetaIndexParser, pkgAcqMetaBase * const TransactionManager, pkgAcquire::Item * const I) /*{{{*/
+{
+   if(MetaIndexParser->IsAlwaysTrusted() || _config->FindB("Acquire::AllowInsecureRepositories") == true)
+      return true;
+
+   _error->Error(_("Use --allow-insecure-repositories to force the update"));
+   TransactionManager->AbortTransaction();
+   I->Status = pkgAcquire::Item::StatError;
+   return false;
+}
+									/*}}}*/
+
 // Acquire::Item::Item - Constructor					/*{{{*/
-#if __GNUC__ >= 4
-	#pragma GCC diagnostic push
-	#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
-#endif
+APT_IGNORE_DEPRECATED_PUSH
 pkgAcquire::Item::Item(pkgAcquire *Owner,
                        HashStringList const &ExpectedHashes,
                        pkgAcqMetaBase *TransactionManager)
@@ -80,9 +117,7 @@ pkgAcquire::Item::Item(pkgAcquire *Owner,
    if(TransactionManager != NULL)
       TransactionManager->Add(this);
 }
-#if __GNUC__ >= 4
-	#pragma GCC diagnostic pop
-#endif
+APT_IGNORE_DEPRECATED_POP
 									/*}}}*/
 // Acquire::Item::~Item - Destructor					/*{{{*/
 // ---------------------------------------------------------------------
@@ -98,8 +133,7 @@ pkgAcquire::Item::~Item()
    fetch this object */
 void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
-   Status = StatIdle;
-   if(ErrorText == "")
+   if(ErrorText.empty())
       ErrorText = LookupTag(Message,"Message");
    UsedMirror = LookupTag(Message,"UsedMirror");
    if (QueueCounter <= 1)
@@ -107,7 +141,7 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
       /* This indicates that the file is not available right now but might
         be sometime later.
If we do a retry cycle then this should be retried [CDROMs] */ - if (Cnf->LocalOnly == true && + if (Cnf != NULL && Cnf->LocalOnly == true && StringToBool(LookupTag(Message,"Transient-Failure"),false) == true) { Status = StatIdle; @@ -116,11 +150,18 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) } Status = StatError; + Complete = false; Dequeue(); - } + } + else + Status = StatIdle; + + // check fail reason + string const FailReason = LookupTag(Message, "FailReason"); + if(FailReason == "MaximumSizeExceeded") + RenameOnError(MaximumSizeExceeded); // report mirror failure back to LP if we actually use a mirror - string FailReason = LookupTag(Message, "FailReason"); if(FailReason.size() != 0) ReportMirrorFailure(FailReason); else @@ -134,6 +175,7 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size) { Status = StatFetching; + ErrorText.clear(); if (FileSize == 0 && Complete == false) FileSize = Size; } @@ -166,21 +208,30 @@ void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringLis step */ bool pkgAcquire::Item::Rename(string From,string To) { - if (rename(From.c_str(),To.c_str()) != 0) - { - char S[300]; - snprintf(S,sizeof(S),_("rename failed, %s (%s -> %s)."),strerror(errno), - From.c_str(),To.c_str()); - Status = StatError; - ErrorText += S; - return false; - } - return true; + if (rename(From.c_str(),To.c_str()) == 0) + return true; + + std::string S; + strprintf(S, _("rename failed, %s (%s -> %s)."), strerror(errno), + From.c_str(),To.c_str()); + Status = StatError; + ErrorText += S; + return false; +} + /*}}}*/ +void pkgAcquire::Item::QueueURI(ItemDesc &Item) /*{{{*/ +{ + Owner->Enqueue(Item); +} + /*}}}*/ +void pkgAcquire::Item::Dequeue() /*{{{*/ +{ + Owner->Dequeue(this); } /*}}}*/ bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/ { - if(FileExists(DestFile)) + if (RealFileExists(DestFile)) Rename(DestFile, DestFile + ".FAILED"); switch (error) @@ -205,26 +256,23 @@ bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const Status = StatError; break; case NotClearsigned: - ErrorText = _("Does not start with a cleartext signature"); + ErrorText = _("Does not start with a cleartext signature"); + Status = StatError; + break; + case MaximumSizeExceeded: + // the method is expected to report a good error for this Status = StatError; break; } return false; } /*}}}*/ -void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess) +void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess)/*{{{*/ { ActiveSubprocess = subprocess; -#if __GNUC__ >= 4 - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif - Mode = ActiveSubprocess.c_str(); -#if __GNUC__ >= 4 - #pragma GCC diagnostic pop -#endif + APT_IGNORE_DEPRECATED(Mode = ActiveSubprocess.c_str();) } - + /*}}}*/ // Acquire::Item::ReportMirrorFailure /*{{{*/ // --------------------------------------------------------------------- void pkgAcquire::Item::ReportMirrorFailure(string FailCode) @@ -239,30 +287,31 @@ void pkgAcquire::Item::ReportMirrorFailure(string FailCode) << " FailCode: " << FailCode << std::endl; #endif - const char *Args[40]; - unsigned int i = 0; string report = _config->Find("Methods::Mirror::ProblemReporting", "/usr/lib/apt/apt-report-mirror-failure"); if(!FileExists(report)) return; - Args[i++] = report.c_str(); - Args[i++] = 
UsedMirror.c_str(); - Args[i++] = DescURI().c_str(); - Args[i++] = FailCode.c_str(); - Args[i++] = NULL; + + std::vector Args; + Args.push_back(report.c_str()); + Args.push_back(UsedMirror.c_str()); + Args.push_back(DescURI().c_str()); + Args.push_back(FailCode.c_str()); + Args.push_back(NULL); + pid_t pid = ExecFork(); - if(pid < 0) + if(pid < 0) { _error->Error("ReportMirrorFailure Fork failed"); return; } - else if(pid == 0) + else if(pid == 0) { - execvp(Args[0], (char**)Args); + execvp(Args[0], (char**)Args.data()); std::cerr << "Could not exec " << Args[0] << std::endl; _exit(100); } - if(!ExecWait(pid, "report-mirror-failure")) + if(!ExecWait(pid, "report-mirror-failure")) { _error->Warning("Couldn't report problem to '%s'", _config->Find("Methods::Mirror::ProblemReporting").c_str()); @@ -289,12 +338,11 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, RealURI = Target->URI; Desc.Owner = this; - Desc.Description = Target->Description + "/DiffIndex"; + Desc.Description = Target->Description + ".diff/Index"; Desc.ShortDesc = Target->ShortDesc; Desc.URI = Target->URI + ".diff/Index"; - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(Desc.URI); + DestFile = GetPartialFileNameFromURI(Desc.URI); if(Debug) std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl; @@ -310,9 +358,7 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, Desc.URI.substr(0,strlen("file:/")) == "file:/") { // we don't have a pkg file or we don't want to queue - if(Debug) - std::clog << "No index file, local or canceld by user" << std::endl; - Failed("", NULL); + Failed("No index file, local or canceld by user", NULL); return; } @@ -327,11 +373,15 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. 
*/ +#if APT_PKG_ABI >= 413 string pkgAcqDiffIndex::Custom600Headers() const +#else +string pkgAcqDiffIndex::Custom600Headers() +#endif { string Final = _config->FindDir("Dir::State::lists"); Final += URItoFileName(Desc.URI); - + if(Debug) std::clog << "Custom600Header-IMS: " << Final << std::endl; @@ -344,192 +394,296 @@ string pkgAcqDiffIndex::Custom600Headers() const /*}}}*/ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ { + // failing here is fine: our caller will take care of trying to + // get the complete file if patching fails if(Debug) std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile << std::endl; - pkgTagSection Tags; - string ServerSha1; - vector available_patches; - FileFd Fd(IndexDiffFile,FileFd::ReadOnly); pkgTagFile TF(&Fd); if (_error->PendingError() == true) return false; - if(TF.Step(Tags) == true) + pkgTagSection Tags; + if(unlikely(TF.Step(Tags) == false)) + return false; + + HashStringList ServerHashes; + unsigned long long ServerSize = 0; + + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) { - bool found = false; - DiffInfo d; - string size; + std::string tagname = *type; + tagname.append("-Current"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; - string const tmp = Tags.FindS("SHA1-Current"); + string hash; + unsigned long long size; std::stringstream ss(tmp); - ss >> ServerSha1 >> size; - unsigned long const ServerSize = atol(size.c_str()); + ss >> hash >> size; + if (unlikely(hash.empty() == true)) + continue; + if (unlikely(ServerSize != 0 && ServerSize != size)) + continue; + ServerHashes.push_back(HashString(*type, hash)); + ServerSize = size; + } - FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); - SHA1Summation SHA1; - SHA1.AddFD(fd); - string const local_sha1 = SHA1.Result(); + if (ServerHashes.usable() == false) + { + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl; + return false; + } - if(local_sha1 == ServerSha1) + if (ServerHashes != HashSums()) + { + if (Debug == true) { - // we have the same sha1 as the server so we are done here - if(Debug) - std::clog << "Package file is up-to-date" << std::endl; - // ensure we have no leftovers from previous runs - std::string Partial = _config->FindDir("Dir::State::lists"); - Partial += "partial/" + URItoFileName(RealURI); - unlink(Partial.c_str()); - // list cleanup needs to know that this file as well as the already - // present index is ours, so we create an empty diff to save it for us - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, - ExpectedHashes, MetaIndexParser, - ServerSha1, available_patches); - return true; + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl; + printHashSumComparision(CurrentPackagesFile, ServerHashes, HashSums()); } - else + return false; + } + + if (ServerHashes.VerifyFile(CurrentPackagesFile) == true) + { + // we have the same sha1 as the server so we are done here + if(Debug) + std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl; + + // list cleanup needs to know that this file as well as the already + // present index is ours, so we create an empty diff to save it for us + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, + ExpectedHashes, MetaIndexParser); + return true; + } + + FileFd fd(CurrentPackagesFile, 
FileFd::ReadOnly); + Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + + if(Debug) + std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at " + << fd.Name() << " " << fd.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl; + + // parse all of (provided) history + vector available_patches; + bool firstAcceptedHashes = true; + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + if (LocalHashes.find(*type) == NULL) + continue; + + std::string tagname = *type; + tagname.append("-History"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; + + string hash, filename; + unsigned long long size; + std::stringstream ss(tmp); + + while (ss >> hash >> size >> filename) { - if(Debug) - std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl; + if (unlikely(hash.empty() == true || filename.empty() == true)) + continue; - // check the historie and see what patches we need - string const history = Tags.FindS("SHA1-History"); - std::stringstream hist(history); - while(hist >> d.sha1 >> size >> d.file) + // see if we have a record for this file already + std::vector::iterator cur = available_patches.begin(); + for (; cur != available_patches.end(); ++cur) { - // read until the first match is found - // from that point on, we probably need all diffs - if(d.sha1 == local_sha1) - found=true; - else if (found == false) + if (cur->file != filename || unlikely(cur->result_size != size)) continue; - - if(Debug) - std::clog << "Need to get diff: " << d.file << std::endl; - available_patches.push_back(d); + cur->result_hashes.push_back(HashString(*type, hash)); + break; } - - if (available_patches.empty() == false) + if (cur != available_patches.end()) + continue; + if (firstAcceptedHashes == true) { - // patching with too many files is rather slow compared to a fast download - unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0); - if (fileLimit != 0 && fileLimit < available_patches.size()) - { - if (Debug) - std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit - << ") so fallback to complete download" << std::endl; - return false; - } - - // see if the patches are too big - found = false; // it was true and it will be true again at the end - d = *available_patches.begin(); - string const firstPatch = d.file; - unsigned long patchesSize = 0; - std::stringstream patches(Tags.FindS("SHA1-Patches")); - while(patches >> d.sha1 >> size >> d.file) - { - if (firstPatch == d.file) - found = true; - else if (found == false) - continue; - - patchesSize += atol(size.c_str()); - } - unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); - if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) - { - if (Debug) - std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100 - << ") so fallback to complete download" << std::endl; - return false; - } + DiffInfo next; + next.file = filename; + next.result_hashes.push_back(HashString(*type, hash)); + next.result_size = size; + next.patch_size = 0; + available_patches.push_back(next); + } + else + { + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename + << " wasn't in the list for the first parsed hash! 
(history)" << std::endl; + break; } } + firstAcceptedHashes = false; + } - // we have something, queue the next diff - if(found) + if (unlikely(available_patches.empty() == true)) + { + if (Debug) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " + << "Couldn't find any patches for the patch series." << std::endl; + return false; + } + + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + if (LocalHashes.find(*type) == NULL) + continue; + + std::string tagname = *type; + tagname.append("-Patches"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; + + string hash, filename; + unsigned long long size; + std::stringstream ss(tmp); + + while (ss >> hash >> size >> filename) { - // FIXME: make this use the method - PackagesFileReadyInPartial = true; - std::string Partial = _config->FindDir("Dir::State::lists"); - Partial += "partial/" + URItoFileName(RealURI); - - FileFd From(CurrentPackagesFile, FileFd::ReadOnly); - FileFd To(Partial, FileFd::WriteEmpty); - if(CopyFile(From, To) == false) - return _error->Errno("CopyFile", "failed to copy"); - - if(Debug) - std::cerr << "Done copying " << CurrentPackagesFile - << " -> " << Partial - << std::endl; + if (unlikely(hash.empty() == true || filename.empty() == true)) + continue; - // queue the diffs - string::size_type const last_space = Description.rfind(" "); - if(last_space != string::npos) - Description.erase(last_space, Description.size()-last_space); - - /* decide if we should download patches one by one or in one go: - The first is good if the server merges patches, but many don't so client - based merging can be attempt in which case the second is better. - "bad things" will happen if patches are merged on the server, - but client side merging is attempt as well */ - bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); - if (pdiff_merge == true) + // see if we have a record for this file already + std::vector::iterator cur = available_patches.begin(); + for (; cur != available_patches.end(); ++cur) { - // reprepro adds this flag if it has merged patches on the server - std::string const precedence = Tags.FindS("X-Patch-Precedence"); - pdiff_merge = (precedence != "merged"); + if (cur->file != filename) + continue; + if (unlikely(cur->patch_size != 0 && cur->patch_size != size)) + continue; + cur->patch_hashes.push_back(HashString(*type, hash)); + cur->patch_size = size; + break; } + if (cur != available_patches.end()) + continue; + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename + << " wasn't in the list for the first parsed hash! 
(patches)" << std::endl; + break; + } + } - if (pdiff_merge == false) - { - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, ExpectedHashes, - MetaIndexParser, - ServerSha1, available_patches); - } - else - { - std::vector *diffs = new std::vector(available_patches.size()); - for(size_t i = 0; i < available_patches.size(); ++i) - (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, - TransactionManager, - Target, - ExpectedHashes, - MetaIndexParser, - available_patches[i], - diffs); - } + bool foundStart = false; + for (std::vector::iterator cur = available_patches.begin(); + cur != available_patches.end(); ++cur) + { + if (LocalHashes != cur->result_hashes) + continue; - Complete = false; - Status = StatDone; - Dequeue(); - return true; - } + available_patches.erase(available_patches.begin(), cur); + foundStart = true; + break; + } + + if (foundStart == false || unlikely(available_patches.empty() == true)) + { + if (Debug) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " + << "Couldn't find the start of the patch series." << std::endl; + return false; } + + // patching with too many files is rather slow compared to a fast download + unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0); + if (fileLimit != 0 && fileLimit < available_patches.size()) + { + if (Debug) + std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit + << ") so fallback to complete download" << std::endl; + return false; + } + + // calculate the size of all patches we have to get + // note that all sizes are uncompressed, while we download compressed files + unsigned long long patchesSize = 0; + for (std::vector::const_iterator cur = available_patches.begin(); + cur != available_patches.end(); ++cur) + patchesSize += cur->patch_size; + unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); + if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) + { + if (Debug) + std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100 + << ") so fallback to complete download" << std::endl; + return false; + } + + // FIXME: make this use the method + PackagesFileReadyInPartial = true; + std::string const Partial = GetPartialFileNameFromURI(RealURI); + + FileFd From(CurrentPackagesFile, FileFd::ReadOnly); + FileFd To(Partial, FileFd::WriteEmpty); + if(CopyFile(From, To) == false) + return _error->Errno("CopyFile", "failed to copy"); - // Nothing found, report and return false - // Failing here is ok, if we return false later, the full - // IndexFile is queued if(Debug) - std::clog << "Can't find a patch in the index file" << std::endl; - return false; + std::cerr << "Done copying " << CurrentPackagesFile + << " -> " << Partial + << std::endl; + + // we have something, queue the diffs + string::size_type const last_space = Description.rfind(" "); + if(last_space != string::npos) + Description.erase(last_space, Description.size()-last_space); + + /* decide if we should download patches one by one or in one go: + The first is good if the server merges patches, but many don't so client + based merging can be attempt in which case the second is better. 
+ "bad things" will happen if patches are merged on the server, + but client side merging is attempt as well */ + bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); + if (pdiff_merge == true) + { + // reprepro adds this flag if it has merged patches on the server + std::string const precedence = Tags.FindS("X-Patch-Precedence"); + pdiff_merge = (precedence != "merged"); + } + + if (pdiff_merge == false) + { + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, ExpectedHashes, + MetaIndexParser, available_patches); + } + else + { + std::vector *diffs = new std::vector(available_patches.size()); + for(size_t i = 0; i < available_patches.size(); ++i) + (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager, + Target, + ExpectedHashes, + MetaIndexParser, + available_patches[i], + diffs); + } + + Complete = false; + Status = StatDone; + Dequeue(); + return true; } /*}}}*/ -void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ +void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ { + Item::Failed(Message,Cnf); + Status = StatDone; + if(Debug) std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl << "Falling back to normal index file acquire" << std::endl; new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); - - Complete = false; - Status = StatDone; - Dequeue(); } /*}}}*/ void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ @@ -555,13 +709,17 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList } + string FinalFile; + FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile += URItoFileName(Desc.URI); + + if(StringToBool(LookupTag(Message,"IMS-Hit"),false)) + DestFile = FinalFile; + if(!ParseDiffIndex(DestFile)) - return Failed("", NULL); + return Failed("Message: Couldn't parse pdiff index", Cnf); // queue for final move - string FinalFile; - FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI); - FinalFile += string(".IndexDiff"); TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); Complete = true; @@ -580,14 +738,11 @@ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner, struct IndexTarget const * const Target, HashStringList const &ExpectedHashes, indexRecords *MetaIndexParser, - string ServerSha1, vector diffs) : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser), - available_patches(diffs), ServerSha1(ServerSha1) + available_patches(diffs) { - - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(Target->URI); + DestFile = GetPartialFileNameFromURI(Target->URI); Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); @@ -611,8 +766,11 @@ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner, } } /*}}}*/ -void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ +void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ { + Item::Failed(Message,Cnf); + Status = StatDone; + if(Debug) std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl << "Falling back to normal index file acquire" << std::endl; @@ -640,16 +798,9 @@ void pkgAcqIndexDiffs::Finish(bool allDone) } // queue for copy - PartialFile = _config->FindDir("Dir::State::lists")+"partial/"+URItoFileName(RealURI); - - DestFile = _config->FindDir("Dir::State::lists"); - DestFile += 
URItoFileName(RealURI); - - // this happens if we have a up-to-date indexfile - if(!FileExists(PartialFile)) - PartialFile = DestFile; - - TransactionManager->TransactionStageCopy(this, PartialFile, DestFile); + std::string FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile += URItoFileName(RealURI); + TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); // this is for the "real" finish Complete = true; @@ -671,26 +822,31 @@ void pkgAcqIndexDiffs::Finish(bool allDone) bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ { // calc sha1 of the just patched file - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += "partial/" + URItoFileName(RealURI); + std::string const FinalFile = GetPartialFileNameFromURI(RealURI); if(!FileExists(FinalFile)) { - Failed("No FinalFile " + FinalFile + " available", NULL); + Failed("Message: No FinalFile " + FinalFile + " available", NULL); return false; } FileFd fd(FinalFile, FileFd::ReadOnly); - SHA1Summation SHA1; - SHA1.AddFD(fd); - string local_sha1 = string(SHA1.Result()); + Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + if(Debug) - std::clog << "QueueNextDiff: " - << FinalFile << " (" << local_sha1 << ")"<toStr() << ")" << std::endl; + + if (unlikely(LocalHashes.usable() == false || ExpectedHashes.usable() == false)) + { + Failed("Local/Expected hashes are not usable", NULL); + return false; + } // final file reached before all patches are applied - if(local_sha1 == ServerSha1) + if(LocalHashes == ExpectedHashes) { Finish(true); return true; @@ -698,10 +854,10 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ // remove all patches until the next matching patch is found // this requires the Index file to be ordered - for(vector::iterator I=available_patches.begin(); + for(vector::iterator I = available_patches.begin(); available_patches.empty() == false && I != available_patches.end() && - I->sha1 != local_sha1; + I->result_hashes != LocalHashes; ++I) { available_patches.erase(I); @@ -710,19 +866,18 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ // error checking and falling back if no patch was found if(available_patches.empty() == true) { - Failed("No patches available", NULL); + Failed("No patches left to reach target", NULL); return false; } // queue the right diff Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz"; Desc.Description = Description + " " + available_patches[0].file + string(".pdiff"); - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file); + DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + available_patches[0].file); if(Debug) std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl; - + QueueURI(Desc); return true; @@ -737,13 +892,22 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringLi Item::Done(Message, Size, Hashes, Cnf); // FIXME: verify this download too before feeding it to rred - - string FinalFile; - FinalFile = _config->FindDir("Dir::State::lists")+"partial/"+URItoFileName(RealURI); + std::string const FinalFile = GetPartialFileNameFromURI(RealURI); // success in downloading a diff, enter ApplyDiff state if(State == StateFetchDiff) { + FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); + class Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + + if (fd.Size() != 
available_patches[0].patch_size || + available_patches[0].patch_hashes != LocalHashes) + { + Failed("Patch has Size/Hashsum mismatch", NULL); + return; + } // rred excepts the patch as $FinalFile.ed Rename(DestFile,FinalFile+".ed"); @@ -780,7 +944,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringLi if(available_patches.empty() == false) { new pkgAcqIndexDiffs(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser, - ServerSha1, available_patches); + available_patches); return Finish(); } else // update @@ -800,10 +964,6 @@ pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner, : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser), patch(patch), allPatches(allPatches), State(StateFetchDiff) { - - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(Target->URI); - Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); RealURI = Target->URI; @@ -813,8 +973,8 @@ pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner, Desc.URI = RealURI + ".diff/" + patch.file + ".gz"; Desc.Description = Description + " " + patch.file + string(".pdiff"); - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI + ".diff/" + patch.file); + + DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + patch.file); if(Debug) std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl; @@ -822,13 +982,13 @@ pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner, QueueURI(Desc); } /*}}}*/ -void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ +void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ { if(Debug) std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl; - Complete = false; + + Item::Failed(Message,Cnf); Status = StatDone; - Dequeue(); // check if we are the first to fail, otherwise we are done here State = StateDoneDiff; @@ -852,11 +1012,21 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStri Item::Done(Message,Size,Hashes,Cnf); // FIXME: verify download before feeding it to rred - - string const FinalFile = _config->FindDir("Dir::State::lists") + "partial/" + URItoFileName(RealURI); + string const FinalFile = GetPartialFileNameFromURI(RealURI); if (State == StateFetchDiff) { + FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); + class Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + + if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes) + { + Failed("Patch has Size/Hashsum mismatch", NULL); + return; + } + // rred expects the patch as $FinalFile.ed.$patchname.gz Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz"); @@ -909,11 +1079,9 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStri for (std::vector::const_iterator I = allPatches->begin(); I != allPatches->end(); ++I) { - std::string PartialFile = _config->FindDir("Dir::State::lists"); - PartialFile += "partial/" + URItoFileName(RealURI); - std::string patch = PartialFile + ".ed." + (*I)->patch.file + ".gz"; - std::cerr << patch << std::endl; - unlink(patch.c_str()); + std::string const PartialFile = GetPartialFileNameFromURI(RealURI); + std::string patch = PartialFile + ".ed." 
+ (*I)->patch.file + ".gz"; + unlink(patch.c_str()); } // all set and done @@ -923,7 +1091,6 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStri } } /*}}}*/ - // AcqBaseIndex::VerifyHashByMetaKey - verify hash for the given metakey /*{{{*/ bool pkgAcqBaseIndex::VerifyHashByMetaKey(HashStringList const &Hashes) { @@ -938,12 +1105,11 @@ bool pkgAcqBaseIndex::VerifyHashByMetaKey(HashStringList const &Hashes) } return true; } - - + /*}}}*/ // AcqIndex::AcqIndex - Constructor /*{{{*/ // --------------------------------------------------------------------- -/* The package file is added to the queue and a second class is - instantiated to fetch the revision file */ +/* The package file is added to the queue and a second class is + instantiated to fetch the revision file */ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, string URI,string URIDesc,string ShortDesc, HashStringList const &ExpectedHash) @@ -960,13 +1126,12 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, } /*}}}*/ // AcqIndex::AcqIndex - Constructor /*{{{*/ -// --------------------------------------------------------------------- pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, pkgAcqMetaBase *TransactionManager, IndexTarget const *Target, - HashStringList const &ExpectedHash, + HashStringList const &ExpectedHash, indexRecords *MetaIndexParser) - : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHash, + : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHash, MetaIndexParser) { RealURI = Target->URI; @@ -981,7 +1146,6 @@ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, } /*}}}*/ // AcqIndex::AutoSelectCompression - Select compression /*{{{*/ -// --------------------------------------------------------------------- void pkgAcqIndex::AutoSelectCompression() { std::vector types = APT::Configuration::getCompressionTypes(); @@ -992,7 +1156,7 @@ void pkgAcqIndex::AutoSelectCompression() t != types.end(); ++t) { std::string CompressedMetaKey = string(Target->MetaKey).append(".").append(*t); - if (*t == "uncompressed" || + if (*t == "uncompressed" || MetaIndexParser->Exists(CompressedMetaKey) == true) CompressionExtensions.append(*t).append(" "); } @@ -1005,15 +1169,14 @@ void pkgAcqIndex::AutoSelectCompression() if (CompressionExtensions.empty() == false) CompressionExtensions.erase(CompressionExtensions.end()-1); } + /*}}}*/ // AcqIndex::Init - defered Constructor /*{{{*/ -// --------------------------------------------------------------------- -void pkgAcqIndex::Init(string const &URI, string const &URIDesc, +void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) { Stage = STAGE_DOWNLOAD; - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI); + DestFile = GetPartialFileNameFromURI(URI); CurrentCompressionExtension = CompressionExtensions.substr(0, CompressionExtensions.find(' ')); if (CurrentCompressionExtension == "uncompressed") @@ -1036,7 +1199,7 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc, indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); if(Record) FileSize = Record->Size; - + InitByHashIfNeeded(MetaKey); } @@ -1048,8 +1211,6 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc, } /*}}}*/ // AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/ -// --------------------------------------------------------------------- -/* */ void pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey) { // TODO: @@ -1082,28 +1243,36 @@ void 
pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey) // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. */ +#if APT_PKG_ABI >= 413 string pkgAcqIndex::Custom600Headers() const +#else +string pkgAcqIndex::Custom600Headers() +#endif { string Final = GetFinalFilename(); - + string msg = "\nIndex-File: true"; struct stat Buf; if (stat(Final.c_str(),&Buf) == 0) msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + if(Target->IsOptional()) + msg += "\nFail-Ignore: true"; + return msg; } /*}}}*/ -// pkgAcqIndex::Failed - getting the indexfile failed /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +// pkgAcqIndex::Failed - getting the indexfile failed /*{{{*/ +void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) { + Item::Failed(Message,Cnf); + size_t const nextExt = CompressionExtensions.find(' '); if (nextExt != std::string::npos) { CompressionExtensions = CompressionExtensions.substr(nextExt+1); Init(RealURI, Desc.Description, Desc.ShortDesc); + Status = StatIdle; return; } @@ -1115,35 +1284,26 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ Item::Failed(Message,Cnf); - /// cancel the entire transaction - TransactionManager->AbortTransaction(); + if(Target->IsOptional() && ExpectedHashes.empty() && Stage == STAGE_DOWNLOAD) + Status = StatDone; + else + TransactionManager->AbortTransaction(); } /*}}}*/ -// pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/ -// --------------------------------------------------------------------- -/* */ +// pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/ std::string pkgAcqIndex::GetFinalFilename() const { std::string FinalFile = _config->FindDir("Dir::State::lists"); FinalFile += URItoFileName(RealURI); - if (_config->FindB("Acquire::GzipIndexes",false) == true) - FinalFile += '.' + CurrentCompressionExtension; - return FinalFile; + return GetCompressedFileName(RealURI, FinalFile, CurrentCompressionExtension); } - /*}}}*/ -// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/ -// --------------------------------------------------------------------- -/* */ + /*}}}*/ +// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/ void pkgAcqIndex::ReverifyAfterIMS() { // update destfile to *not* include the compression extension when doing // a reverify (as its uncompressed on disk already) - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI); - - // adjust DestFile if its compressed on disk - if (_config->FindB("Acquire::GzipIndexes",false) == true) - DestFile += '.' 
+ CurrentCompressionExtension; + DestFile = GetCompressedFileName(RealURI, GetPartialFileNameFromURI(RealURI), CurrentCompressionExtension); // copy FinalFile into partial/ so that we check the hash again string FinalFile = GetFinalFilename(); @@ -1151,10 +1311,8 @@ void pkgAcqIndex::ReverifyAfterIMS() Desc.URI = "copy:" + FinalFile; QueueURI(Desc); } - /*}}}*/ - -// AcqIndex::ValidateFile - Validate the content of the downloaded file /*{{{*/ -// -------------------------------------------------------------------------- + /*}}}*/ +// AcqIndex::ValidateFile - Validate the content of the downloaded file /*{{{*/ bool pkgAcqIndex::ValidateFile(const std::string &FileName) { // FIXME: this can go away once we only ever download stuff that @@ -1180,7 +1338,7 @@ bool pkgAcqIndex::ValidateFile(const std::string &FileName) } return true; } - /*}}}*/ + /*}}}*/ // AcqIndex::Done - Finished a fetch /*{{{*/ // --------------------------------------------------------------------- /* This goes through a number of states.. On the initial fetch the @@ -1205,8 +1363,8 @@ void pkgAcqIndex::Done(string Message, break; } } - -// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/ + /*}}}*/ +// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/ void pkgAcqIndex::StageDownloadDone(string Message, HashStringList const &Hashes, pkgAcquire::MethodConfig *Cfg) @@ -1221,7 +1379,7 @@ void pkgAcqIndex::StageDownloadDone(string Message, } Complete = true; - + // Handle the unzipd case string FileName = LookupTag(Message,"Alt-Filename"); if (FileName.empty() == false) @@ -1253,11 +1411,6 @@ void pkgAcqIndex::StageDownloadDone(string Message, // on if-modfied-since hit to avoid a stale attack against us if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) { - // do not reverify cdrom sources as apt-cdrom may rewrite the Packages - // file when its doing the indexcopy - if (RealURI.substr(0,6) == "cdrom:") - return; - // The files timestamp matches, reverify by copy into partial/ EraseFileName = ""; ReverifyAfterIMS(); @@ -1267,13 +1420,12 @@ void pkgAcqIndex::StageDownloadDone(string Message, // If we have compressed indexes enabled, queue for hash verification if (_config->FindB("Acquire::GzipIndexes",false)) { - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI) + '.' + CurrentCompressionExtension; + DestFile = GetPartialFileNameFromURI(RealURI + '.' 
+ CurrentCompressionExtension); EraseFileName = ""; Stage = STAGE_DECOMPRESS_AND_VERIFY; Desc.URI = "copy:" + FileName; QueueURI(Desc); - + SetActiveSubprocess("copy"); return; } @@ -1294,11 +1446,10 @@ void pkgAcqIndex::StageDownloadDone(string Message, DestFile += ".decomp"; Desc.URI = decompProg + ":" + FileName; QueueURI(Desc); - SetActiveSubprocess(decompProg); } - /*}}}*/ -// pkgAcqIndex::StageDecompressDone - Final verification /*{{{*/ + /*}}}*/ +// pkgAcqIndex::StageDecompressDone - Final verification /*{{{*/ void pkgAcqIndex::StageDecompressDone(string Message, HashStringList const &Hashes, pkgAcquire::MethodConfig *Cfg) @@ -1318,90 +1469,23 @@ void pkgAcqIndex::StageDecompressDone(string Message, Failed(Message, Cfg); return; } - + // remove the compressed version of the file unlink(EraseFileName.c_str()); - + // Done, queue for rename on transaction finished TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); - - return; -} - /*}}}*/ - /*}}}*/ -// AcqIndexTrans::pkgAcqIndexTrans - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* The Translation file is added to the queue */ -pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc) - : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashStringList()) -{ -} - /*}}}*/ -pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser) - : pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser) -{ - // load the filesize - indexRecords::checkSum *Record = MetaIndexParser->Lookup(string(Target->MetaKey)); - if(Record) - FileSize = Record->Size; -} - /*}}}*/ -// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -string pkgAcqIndexTrans::Custom600Headers() const -{ - string Final = GetFinalFilename(); - - struct stat Buf; - if (stat(Final.c_str(),&Buf) != 0) - return "\nFail-Ignore: true\nIndex-File: true"; - return "\nFail-Ignore: true\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); -} - /*}}}*/ -// AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf) -{ - size_t const nextExt = CompressionExtensions.find(' '); - if (nextExt != std::string::npos) - { - CompressionExtensions = CompressionExtensions.substr(nextExt+1); - Init(RealURI, Desc.Description, Desc.ShortDesc); - Status = StatIdle; - return; - } - - // FIXME: this is used often (e.g. 
in pkgAcqIndexTrans) so refactor - if (Cnf->LocalOnly == true || - StringToBool(LookupTag(Message,"Transient-Failure"),false) == false) - { - // Ignore this - Status = StatDone; - Complete = false; - Dequeue(); - return; - } - Item::Failed(Message,Cnf); + return; } /*}}}*/ -// AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/ -// --------------------------------------------------------------------- -/* */ +// AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/ void pkgAcqMetaBase::Add(Item *I) { Transaction.push_back(I); } /*}}}*/ -// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/ -// --------------------------------------------------------------------- -/* */ +// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/ void pkgAcqMetaBase::AbortTransaction() { if(_config->FindB("Debug::Acquire::Transaction", false) == true) @@ -1415,20 +1499,15 @@ void pkgAcqMetaBase::AbortTransaction() std::clog << " Cancel: " << (*I)->DestFile << std::endl; // the transaction will abort, so stop anything that is idle if ((*I)->Status == pkgAcquire::Item::StatIdle) + { (*I)->Status = pkgAcquire::Item::StatDone; - - // kill files in partial - string PartialFile = _config->FindDir("Dir::State::lists"); - PartialFile += "partial/"; - PartialFile += flNotDir((*I)->DestFile); - if(FileExists(PartialFile)) - Rename(PartialFile, PartialFile + ".FAILED"); + (*I)->Dequeue(); + } } + Transaction.clear(); } /*}}}*/ -// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/ -// --------------------------------------------------------------------- -/* */ +// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/ bool pkgAcqMetaBase::TransactionHasError() { for (pkgAcquire::ItemIterator I = Transaction.begin(); @@ -1441,8 +1520,6 @@ bool pkgAcqMetaBase::TransactionHasError() } /*}}}*/ // AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/ -// --------------------------------------------------------------------- -/* */ void pkgAcqMetaBase::CommitTransaction() { if(_config->FindB("Debug::Acquire::Transaction", false) == true) @@ -1455,19 +1532,16 @@ void pkgAcqMetaBase::CommitTransaction() { if((*I)->PartialFile != "") { - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "mv " - << (*I)->PartialFile << " -> " - << (*I)->DestFile << " " - << (*I)->DescURI() - << std::endl; - Rename((*I)->PartialFile, (*I)->DestFile); - chmod((*I)->DestFile.c_str(),0644); + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "mv " << (*I)->PartialFile << " -> "<< (*I)->DestFile << " " + << (*I)->DescURI() << std::endl; + + Rename((*I)->PartialFile, (*I)->DestFile); } else { if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "rm " + std::clog << "rm " << (*I)->DestFile - << " " + << " " << (*I)->DescURI() << std::endl; unlink((*I)->DestFile.c_str()); @@ -1475,11 +1549,10 @@ void pkgAcqMetaBase::CommitTransaction() // mark that this transaction is finished (*I)->TransactionManager = 0; } + Transaction.clear(); } /*}}}*/ // AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/ -// --------------------------------------------------------------------- -/* */ void pkgAcqMetaBase::TransactionStageCopy(Item *I, const std::string &From, const std::string &To) @@ -1489,8 +1562,6 @@ void pkgAcqMetaBase::TransactionStageCopy(Item *I, } /*}}}*/ // AcqMetaBase::TransactionStageRemoval - Sage a file for removal /*{{{*/ -// 
--------------------------------------------------------------------- -/* */ void pkgAcqMetaBase::TransactionStageRemoval(Item *I, const std::string &FinalFile) { @@ -1498,10 +1569,7 @@ void pkgAcqMetaBase::TransactionStageRemoval(Item *I, I->DestFile = FinalFile; } /*}}}*/ - /*{{{*/ // AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/ -// --------------------------------------------------------------------- -/* */ bool pkgAcqMetaBase::CheckStopAuthentication(const std::string &RealURI, const std::string &Message) { @@ -1509,7 +1577,7 @@ bool pkgAcqMetaBase::CheckStopAuthentication(const std::string &RealURI, // a unauthenticated state and can cleanly rollback string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); - + if(FileExists(Final)) { Status = StatTransientNetworkError; @@ -1533,21 +1601,19 @@ bool pkgAcqMetaBase::CheckStopAuthentication(const std::string &RealURI, Desc.Description.c_str(), LookupTag(Message,"Message").c_str()); } - // gpgv method failed + // gpgv method failed ReportMirrorFailure("GPGFailure"); return false; } /*}}}*/ -// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* */ +// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, pkgAcqMetaBase *TransactionManager, string URI,string URIDesc,string ShortDesc, string MetaIndexFile, const vector* IndexTargets, indexRecords* MetaIndexParser) : - pkgAcqMetaBase(Owner, IndexTargets, MetaIndexParser, + pkgAcqMetaBase(Owner, IndexTargets, MetaIndexParser, HashStringList(), TransactionManager), RealURI(URI), MetaIndexFile(MetaIndexFile), URIDesc(URIDesc), ShortDesc(ShortDesc) @@ -1555,8 +1621,8 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, DestFile = _config->FindDir("Dir::State::lists") + "partial/"; DestFile += URItoFileName(RealURI); - // remove any partial downloaded sig-file in partial/. - // it may confuse proxies and is too small to warrant a + // remove any partial downloaded sig-file in partial/. + // it may confuse proxies and is too small to warrant a // partial download anyway unlink(DestFile.c_str()); @@ -1580,19 +1646,17 @@ pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/ /*}}}*/ // pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- +#if APT_PKG_ABI >= 413 string pkgAcqMetaSig::Custom600Headers() const +#else +string pkgAcqMetaSig::Custom600Headers() +#endif { - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI); - - struct stat Buf; - if (stat(FinalFile.c_str(),&Buf) != 0) - return "\nIndex-File: true"; - - return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + std::string Header = GetCustom600Headers(RealURI); + return Header; } /*}}}*/ -// pkgAcqMetaSig::Done - The signature was downloaded/verified /*{{{*/ +// pkgAcqMetaSig::Done - The signature was downloaded/verified /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. 
*/ void pkgAcqMetaSig::Done(string Message,unsigned long long Size, @@ -1612,13 +1676,12 @@ void pkgAcqMetaSig::Done(string Message,unsigned long long Size, } return; } - else + else { if(CheckAuthDone(Message, RealURI) == true) { std::string FinalFile = _config->FindDir("Dir::State::lists"); FinalFile += URItoFileName(RealURI); - TransactionManager->TransactionStageCopy(this, MetaIndexFileSignature, FinalFile); } } @@ -1626,16 +1689,17 @@ void pkgAcqMetaSig::Done(string Message,unsigned long long Size, /*}}}*/ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ { - string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + Item::Failed(Message,Cnf); // check if we need to fail at this point if (AuthPass == true && CheckStopAuthentication(RealURI, Message)) return; // FIXME: meh, this is not really elegant - string InReleaseURI = RealURI.replace(RealURI.rfind("Release.gpg"), 12, + string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + string const InReleaseURI = RealURI.replace(RealURI.rfind("Release.gpg"), 12, "InRelease"); - string FinalInRelease = _config->FindDir("Dir::State::lists") + URItoFileName(InReleaseURI); + string const FinalInRelease = _config->FindDir("Dir::State::lists") + URItoFileName(InReleaseURI); if (RealFileExists(Final) || RealFileExists(FinalInRelease)) { @@ -1650,46 +1714,41 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ _error->Warning(_("This is normally not allowed, but the option " "Acquire::AllowDowngradeToInsecureRepositories was " "given to override it.")); - + Status = StatDone; } else { _error->Error("%s", downgrade_msg.c_str()); Rename(MetaIndexFile, MetaIndexFile+".FAILED"); - Status = pkgAcquire::Item::StatError; + Item::Failed("Message: " + downgrade_msg, Cnf); TransactionManager->AbortTransaction(); return; } } + else + _error->Warning(_("The data from '%s' is not signed. Packages " + "from that repository can not be authenticated."), + URIDesc.c_str()); // this ensures that any file in the lists/ dir is removed by the // transaction - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI); + DestFile = GetPartialFileNameFromURI(RealURI); TransactionManager->TransactionStageRemoval(this, DestFile); // only allow going further if the users explicitely wants it - if(_config->FindB("Acquire::AllowInsecureRepositories") == true) + if(AllowInsecureRepositories(MetaIndexParser, TransactionManager, this) == true) { // we parse the indexes here because at this point the user wanted // a repository that may potentially harm him MetaIndexParser->Load(MetaIndexFile); QueueIndexes(true); - } - else - { - _error->Warning("Use --allow-insecure-repositories to force the update"); } // FIXME: this is used often (e.g. 
in pkgAcqIndexTrans) so refactor
-   if (Cnf->LocalOnly == true || 
+   if (Cnf->LocalOnly == true ||
       StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
-   { 
+   {
      // Ignore this
      Status = StatDone;
-      Complete = false;
-      Dequeue();
-      return;
    }
-
-   Item::Failed(Message,Cnf);
 }
                                        /*}}}*/
 pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner,		/*{{{*/
@@ -1718,11 +1777,10 @@
    Init(URIDesc, ShortDesc);
 }
                                        /*}}}*/
-// pkgAcqMetaIndex::Init - Delayed constructor			/*{{{*/
+// pkgAcqMetaIndex::Init - Delayed constructor /*{{{*/
 void pkgAcqMetaIndex::Init(std::string URIDesc, std::string ShortDesc)
 {
-   DestFile = _config->FindDir("Dir::State::lists") + "partial/";
-   DestFile += URItoFileName(RealURI);
+   DestFile = GetPartialFileNameFromURI(RealURI);
 
    // Create the item
    Desc.Description = URIDesc;
@@ -1734,18 +1792,16 @@ void pkgAcqMetaIndex::Init(std::string URIDesc, std::string ShortDesc)
    ExpectedAdditionalItems = IndexTargets->size();
    QueueURI(Desc);
 }
+                                       /*}}}*/
 // pkgAcqMetaIndex::Custom600Headers - Insert custom request headers	/*{{{*/
 // ---------------------------------------------------------------------
+#if APT_PKG_ABI >= 413
 string pkgAcqMetaIndex::Custom600Headers() const
+#else
+string pkgAcqMetaIndex::Custom600Headers()
+#endif
 {
-   string Final = _config->FindDir("Dir::State::lists");
-   Final += URItoFileName(RealURI);
-
-   struct stat Buf;
-   if (stat(Final.c_str(),&Buf) != 0)
-      return "\nIndex-File: true";
-
-   return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+   return GetCustom600Headers(RealURI);
 }
                                        /*}}}*/
 void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,	/*{{{*/
@@ -1806,7 +1862,27 @@ bool pkgAcqMetaBase::CheckAuthDone(string Message, const string &RealURI)	/*{{{*
    return true;
 }
                                        /*}}}*/
-                                       /*{{{*/
+// pkgAcqMetaBase::GetCustom600Headers - Get header for AcqMetaBase	/*{{{*/
+// ---------------------------------------------------------------------
+string pkgAcqMetaBase::GetCustom600Headers(const string &RealURI) const
+{
+   std::string Header = "\nIndex-File: true";
+   std::string MaximumSize;
+   strprintf(MaximumSize, "\nMaximum-Size: %i",
+             _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000));
+   Header += MaximumSize;
+
+   string FinalFile = _config->FindDir("Dir::State::lists");
+   FinalFile += URItoFileName(RealURI);
+
+   struct stat Buf;
+   if (stat(FinalFile.c_str(),&Buf) == 0)
+      Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+
+   return Header;
+}
+                                       /*}}}*/
+// pkgAcqMetaBase::QueueForSignatureVerify			/*{{{*/
 void pkgAcqMetaBase::QueueForSignatureVerify(const std::string &MetaIndexFile,
                                     const std::string &MetaIndexFileSignature)
 {
@@ -1817,7 +1893,7 @@ void pkgAcqMetaBase::QueueForSignatureVerify(const std::string &MetaIndexFile,
    SetActiveSubprocess("gpgv");
 }
                                        /*}}}*/
-                                       /*{{{*/
+// pkgAcqMetaBase::CheckDownloadDone				/*{{{*/
 bool pkgAcqMetaBase::CheckDownloadDone(const std::string &Message,
                                        const std::string &RealURI)
 {
@@ -1857,87 +1933,59 @@ bool pkgAcqMetaBase::CheckDownloadDone(const std::string &Message,
                                        /*}}}*/
 void pkgAcqMetaBase::QueueIndexes(bool verify)			/*{{{*/
 {
-   bool transInRelease = false;
-   {
-      std::vector<std::string> const keys = MetaIndexParser->MetaKeys();
-      for (std::vector<std::string>::const_iterator k = keys.begin(); k != keys.end(); ++k)
-         // FIXME: Feels wrong to check for hardcoded string here, but what should we do else…
-         if (k->find("Translation-") != std::string::npos)
-         {
-            transInRelease = true;
-            break;
-         }
-   }
-
    // at this point the real Items are loaded in the fetcher
    ExpectedAdditionalItems = 0;
-   for (vector <IndexTarget*>::const_iterator Target = IndexTargets->begin();
+
+   vector <IndexTarget*>::const_iterator Target;
+   for (Target = IndexTargets->begin();
         Target != IndexTargets->end();
         ++Target)
    {
       HashStringList ExpectedIndexHashes;
       const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
-      bool compressedAvailable = false;
-      if (Record == NULL)
+
+      // optional target that we do not have in the Release file are
+      // skipped
+      if (verify == true && Record == NULL && (*Target)->IsOptional())
+         continue;
+
+      // targets without a hash record are a error when verify is required
+      if (verify == true && Record == NULL)
       {
-         if ((*Target)->IsOptional() == true)
-         {
-            std::vector<std::string> types = APT::Configuration::getCompressionTypes();
-            for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
-               if (MetaIndexParser->Exists((*Target)->MetaKey + "." + *t) == true)
-               {
-                  compressedAvailable = true;
-                  break;
-               }
-         }
-         else if (verify == true)
-         {
-            Status = StatAuthError;
-            strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str());
-            return;
-         }
+         Status = StatAuthError;
+         strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str());
+         return;
       }
-      else
+
+      if (Record)
+         ExpectedIndexHashes = Record->Hashes;
+
+      if (_config->FindB("Debug::pkgAcquire::Auth", false))
       {
-         ExpectedIndexHashes = Record->Hashes;
-         if (_config->FindB("Debug::pkgAcquire::Auth", false))
-         {
-            std::cerr << "Queueing: " << (*Target)->URI << std::endl
-               << "Expected Hash:" << std::endl;
-            for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs)
-               std::cerr <<  "\t- " << hs->toStr() << std::endl;
-            std::cerr << "For: " << Record->MetaKeyFilename << std::endl;
-         }
-         if (verify == true && ExpectedIndexHashes.empty() == true && (*Target)->IsOptional() == false)
-         {
-            Status = StatAuthError;
-            strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
-            return;
-         }
-      }
+         std::cerr << "Queueing: " << (*Target)->URI << std::endl
+            << "Expected Hash:" << std::endl;
+         for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs)
+            std::cerr <<  "\t- " << hs->toStr() << std::endl;
+         std::cerr << "For: " << Record->MetaKeyFilename << std::endl;
 
-      if ((*Target)->IsOptional() == true)
+      }
+      if (verify == true && ExpectedIndexHashes.empty() == true)
       {
-         if (transInRelease == false || Record != NULL || compressedAvailable == true)
-         {
-            if (_config->FindB("Acquire::PDiffs",true) == true && transInRelease == true &&
-                MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)
-               new pkgAcqDiffIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
-            else
-               new pkgAcqIndexTrans(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
-         }
-         continue;
+         Status = StatAuthError;
+         strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
+         return;
       }
 
-      /* Queue Packages file (either diff or full packages files, depending
+      /* Queue the Index file (Packages, Sources, Translation-$foo
+         (either diff or full packages files, depending
          on the users option) - we also check if the PDiff Index file is listed
         in the Meta-Index file. Ideal would be if pkgAcqDiffIndex would test this
         instead, but passing the required info to it is to much hassle */
      if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
-         MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true))
-         new pkgAcqDiffIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
+         MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true))
+         new pkgAcqDiffIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
      else
-         new pkgAcqIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
+         new pkgAcqIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser);
    }
 }
                                        /*}}}*/
@@ -2018,12 +2066,13 @@ bool pkgAcqMetaBase::VerifyVendor(string Message, const string &RealURI)/*{{{*/
    return true;
 }
                                        /*}}}*/
-// pkgAcqMetaIndex::Failed - no Release file present		/*{{{*/
-// ---------------------------------------------------------------------
-/* */
-void pkgAcqMetaIndex::Failed(string /*Message*/,
-                             pkgAcquire::MethodConfig * /*Cnf*/)
+// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/
+void pkgAcqMetaIndex::Failed(string Message,
+                             pkgAcquire::MethodConfig * Cnf)
 {
+   pkgAcquire::Item::Failed(Message, Cnf);
+   Status = StatDone;
+
    string FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
 
    _error->Warning(_("The repository '%s' does not have a Release file. "
@@ -2033,7 +2082,7 @@ void pkgAcqMetaIndex::Failed(string /*Message*/,
    // No Release file was present so fall
    // back to queueing Packages files without verification
    // only allow going further if the users explicitely wants it
-   if(_config->FindB("Acquire::AllowInsecureRepositories") == true)
+   if(AllowInsecureRepositories(MetaIndexParser, TransactionManager, this) == true)
    {
       // Done, queue for rename on transaction finished
       if (FileExists(DestFile))
@@ -2041,17 +2090,10 @@ void pkgAcqMetaIndex::Failed(string /*Message*/,
       // queue without any kind of hashsum support
      QueueIndexes(false);
-   } else {
-      // warn if the repository is unsinged
-      _error->Warning("Use --allow-insecure-repositories to force the update");
-      TransactionManager->AbortTransaction();
-      Status = StatError;
-      return;
-   }
+   }
 }
                                        /*}}}*/
-
-void pkgAcqMetaIndex::Finished()
+void pkgAcqMetaIndex::Finished()				/*{{{*/
 {
    if(_config->FindB("Debug::Acquire::Transaction", false) == true)
      std::clog << "Finished: " << DestFile <<std::endl;
    if(TransactionManager != NULL &&
      TransactionManager->TransactionHasError() == false)
      TransactionManager->CommitTransaction();
 }
-
-
+                                       /*}}}*/
 pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner,	/*{{{*/
 		string const &URI, string const &URIDesc, string const &ShortDesc,
 		string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc,
@@ -2082,27 +2123,25 @@ pkgAcqMetaClearSig::~pkgAcqMetaClearSig()			/*{{{*/
                                        /*}}}*/
 // pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
 // ---------------------------------------------------------------------
+#if APT_PKG_ABI >= 413
 string pkgAcqMetaClearSig::Custom600Headers() const
+#else
+string pkgAcqMetaClearSig::Custom600Headers()
+#endif
 {
-   string Final = _config->FindDir("Dir::State::lists");
-   Final += URItoFileName(RealURI);
-
-   struct stat Buf;
-   if (stat(Final.c_str(),&Buf) != 0)
-   {
-      if (stat(Final.c_str(),&Buf) != 0)
-         return "\nIndex-File: true\nFail-Ignore: true\n";
-   }
-
-   return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+   string Header = GetCustom600Headers(RealURI);
+   Header += "\nFail-Ignore: true";
+   return Header;
 }
                                        /*}}}*/
 // pkgAcqMetaClearSig::Done - We got a file			/*{{{*/
 // ---------------------------------------------------------------------
-void pkgAcqMetaClearSig::Done(std::string Message,unsigned long long /*Size*/,
-                              HashStringList const &/*Hashes*/,
+void pkgAcqMetaClearSig::Done(std::string Message,unsigned long long Size,
+                              HashStringList const &Hashes,
                               pkgAcquire::MethodConfig *Cnf)
 {
+   Item::Done(Message, Size, Hashes, Cnf);
+
    // if we expect a ClearTextSignature (InRelase), ensure that
    // this is what we get and if not fail to queue a
    // Release/Release.gpg, see #346386
@@ -2135,6 +2174,8 @@ void pkgAcqMetaClearSig::Done(std::string Message,unsigned long long /*Size*/,
                                        /*}}}*/
 void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
 {
+   Item::Failed(Message, Cnf);
+
    // we failed, we will not get additional items from this method
    ExpectedAdditionalItems = 0;
 
@@ -2146,14 +2187,12 @@ void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*
      string FinalFile = _config->FindDir("Dir::State::lists");
      FinalFile.append(URItoFileName(RealURI));
      TransactionManager->TransactionStageRemoval(this, FinalFile);
+      Status = StatDone;
 
      new pkgAcqMetaIndex(Owner, TransactionManager,
			MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
			MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
			IndexTargets, MetaIndexParser);
-      if (Cnf->LocalOnly == true ||
-          StringToBool(LookupTag(Message, "Transient-Failure"), false) == false)
-         Dequeue();
    }
    else
    {
@@ -2167,11 +2206,13 @@ void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*
      // No Release file was present, or verification failed, so fall
      // back to queueing Packages files without verification
      // only allow going further if the users explicitely wants it
-      if(_config->FindB("Acquire::AllowInsecureRepositories") == true)
+      if(AllowInsecureRepositories(MetaIndexParser, TransactionManager, this) == true)
      {
+         Status = StatDone;
+
         /* Always move the meta index, even if gpgv failed. This ensures
          * that PackageFile objects are correctly filled in */
-         if (FileExists(DestFile))
+         if (FileExists(DestFile)) 
         {
            string FinalFile = _config->FindDir("Dir::State::lists");
           FinalFile += URItoFileName(RealURI);
@@ -2181,19 +2222,12 @@ void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*
                                              "Release");
           FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9,
                                         "Release");
-
-
+
           // Done, queue for rename on transaction finished
           TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
        }
 
        QueueIndexes(false);
-      } else {
-         // warn if the repository is unsinged
-         _error->Warning("Use --allow-insecure-repositories to force the update");
-         TransactionManager->AbortTransaction();
-         Status = StatError;
-         return;
-      }
+      }
    }
 }
                                        /*}}}*/
@@ -2428,21 +2462,19 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList
       return;
    }
 
-   Complete = true;
-
    // Reference filename
    if (FileName != DestFile)
    {
      StoreFilename = DestFile = FileName;
      Local = true;
+      Complete = true;
      return;
    }
-   
+
    // Done, move it into position
    string FinalFile = _config->FindDir("Dir::Cache::Archives");
    FinalFile += flNotDir(StoreFilename);
    Rename(DestFile,FinalFile);
-
    StoreFilename = DestFile = FinalFile;
    Complete = true;
 }
@@ -2452,8 +2484,8 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList
 /* Here we try other sources */
 void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
-   ErrorText = LookupTag(Message,"Message");
-
+   Item::Failed(Message,Cnf);
+
    /* We don't really want to retry on failed media swaps, this prevents
      that. An interesting observation is that permanent failures are not
     recorded. */
@@ -2463,10 +2495,10 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 //      Vf = Version.FileList();
      while (Vf.end() == false) ++Vf;
      StoreFilename = string();
-      Item::Failed(Message,Cnf);
      return;
    }
-
+
+   Status = StatIdle;
    if (QueueNext() == false)
    {
      // This is the retry counter
@@ -2479,15 +2511,19 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
        if (QueueNext() == true)
           return;
      }
-      
+
      StoreFilename = string();
-      Item::Failed(Message,Cnf);
+      Status = StatError;
    }
 }
                                        /*}}}*/
 // AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
 // ---------------------------------------------------------------------
+#if APT_PKG_ABI >= 413
 APT_PURE bool pkgAcqArchive::IsTrusted() const
+#else
+APT_PURE bool pkgAcqArchive::IsTrusted()
+#endif
 {
    return Trusted;
 }
@@ -2597,7 +2633,12 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons
    // Symlink the file
    if (symlink(FileName.c_str(),DestFile.c_str()) != 0)
    {
-      ErrorText = "Link to " + DestFile + " failure ";
+      _error->PushToStack();
+      _error->Errno("pkgAcqFile::Done", "Symlinking file %s failed", DestFile.c_str());
+      std::stringstream msg;
+      _error->DumpErrors(msg);
+      _error->RevertToStack();
+      ErrorText = msg.str();
      Status = StatError;
      Complete = false;
    }
@@ -2609,25 +2650,29 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons
 /* Here we try other sources */
 void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 {
-   ErrorText = LookupTag(Message,"Message");
-
+   Item::Failed(Message,Cnf);
+
    // This is the retry counter
    if (Retries != 0 &&
      Cnf->LocalOnly == false &&
     StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
    {
-      Retries--;
+      --Retries;
      QueueURI(Desc);
+      Status = StatIdle;
      return;
    }
-
-   Item::Failed(Message,Cnf);
+
 }
                                        /*}}}*/
 // AcqIndex::Custom600Headers - Insert custom request headers	/*{{{*/
 // ---------------------------------------------------------------------
 /* The only header we use is the last-modified header. */
+#if APT_PKG_ABI >= 413
 string pkgAcqFile::Custom600Headers() const
+#else
+string pkgAcqFile::Custom600Headers()
+#endif
 {
    if (IsIndexFile)
      return "\nIndex-File: true";
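As an aside for readers tracing the new Maximum-Size/Last-Modified request headers added in the hunks above: the following is a minimal standalone sketch (plain C++, not apt code) of the header block that pkgAcqMetaBase::GetCustom600Headers() assembles for the acquire methods. BuildReleaseHeaders, maxSize and lastModified are made-up illustrative names; in apt the size limit comes from _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000) and Last-Modified from a stat() of the already fetched file, exactly as shown in the diff.

// Standalone sketch only -- mirrors the header string built in the hunk above,
// with invented names (BuildReleaseHeaders, maxSize, lastModified).
#include <cstdio>
#include <iostream>
#include <string>

static std::string BuildReleaseHeaders(int maxSize, std::string const &lastModified)
{
   std::string header = "\nIndex-File: true";
   char buf[64];
   // stands in for _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000)
   std::snprintf(buf, sizeof(buf), "\nMaximum-Size: %i", maxSize);
   header += buf;
   if (lastModified.empty() == false)   // only sent when a previous copy exists on disk
      header += "\nLast-Modified: " + lastModified;
   return header;
}

int main()
{
   std::cout << BuildReleaseHeaders(10*1000*1000, "Thu, 01 Jan 2015 00:00:00 GMT") << std::endl;
   return 0;
}

Compiled and run, this prints the same three header lines an acquire worker would receive for a Release file, which is what lets the method abort oversized downloads and report FailReason=MaximumSizeExceeded back to Item::Failed().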