return false;
}
// If we got the InRelease file via a mirror, pick all indexes directly from this mirror, too
- if (TransactionManager->BaseURI.empty() == false &&
+ if (TransactionManager->BaseURI.empty() == false && UsedMirror.empty() &&
URI::SiteOnly(Item.URI) != URI::SiteOnly(TransactionManager->BaseURI))
{
// this ensures we rewrite only once and only the first step
/*}}}*/
// Acquire::Item::Item - Constructor /*{{{*/
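+// Opaque per-item state (d-pointer): it records the URIs this item was
+// already redirected to so that IsRedirectionLoop() can detect cycles.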
+class pkgAcquire::Item::Private
+{
+public:
+ std::vector<std::string> PastRedirections;
+};
APT_IGNORE_DEPRECATED_PUSH
pkgAcquire::Item::Item(pkgAcquire * const owner) :
FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), Local(false),
- QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(NULL)
+ QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(new Private())
{
Owner->Add(this);
Status = StatIdle;
pkgAcquire::Item::~Item()
{
Owner->Remove(this);
+ delete d;
}
/*}}}*/
std::string pkgAcquire::Item::Custom600Headers() const /*{{{*/
}
string const FailReason = LookupTag(Message, "FailReason");
- enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, WEAK_HASHSUMS, OTHER } failreason = OTHER;
+ enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, WEAK_HASHSUMS, REDIRECTION_LOOP, OTHER } failreason = OTHER;
if ( FailReason == "MaximumSizeExceeded")
failreason = MAXIMUM_SIZE_EXCEEDED;
else if ( FailReason == "WeakHashSums")
failreason = WEAK_HASHSUMS;
+ else if (FailReason == "RedirectionLoop")
+ failreason = REDIRECTION_LOOP;
else if (Status == StatAuthError)
failreason = HASHSUM_MISMATCH;
if(ErrorText.empty())
{
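+ // compose the user-visible error text from the failure reason;
+ // authentication errors get further details appended below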
+ std::ostringstream out;
+ switch (failreason)
+ {
+ case HASHSUM_MISMATCH:
+ out << _("Hash Sum mismatch") << std::endl;
+ break;
+ case WEAK_HASHSUMS:
+ out << _("Insufficient information available to perform this download securely") << std::endl;
+ break;
+ case REDIRECTION_LOOP:
+ out << "Redirection loop encountered" << std::endl;
+ break;
+ case MAXIMUM_SIZE_EXCEEDED:
+ out << LookupTag(Message, "Message") << std::endl;
+ break;
+ case OTHER:
+ out << LookupTag(Message, "Message");
+ break;
+ }
+
if (Status == StatAuthError)
{
- std::ostringstream out;
- switch (failreason)
- {
- case HASHSUM_MISMATCH:
- out << _("Hash Sum mismatch") << std::endl;
- break;
- case WEAK_HASHSUMS:
- out << _("Insufficient information available to perform this download securely") << std::endl;
- break;
- case MAXIMUM_SIZE_EXCEEDED:
- case OTHER:
- out << LookupTag(Message, "Message") << std::endl;
- break;
- }
auto const ExpectedHashes = GetExpectedHashes();
if (ExpectedHashes.empty() == false)
{
}
out << "Last modification reported: " << LookupTag(Message, "Last-Modified", "<none>") << std::endl;
}
- ErrorText = out.str();
}
- else
- ErrorText = LookupTag(Message,"Message");
+ ErrorText = out.str();
}
switch (failreason)
case MAXIMUM_SIZE_EXCEEDED: RenameOnError(MaximumSizeExceeded); break;
case HASHSUM_MISMATCH: RenameOnError(HashSumMismatch); break;
case WEAK_HASHSUMS: break;
+ case REDIRECTION_LOOP: break;
case OTHER: break;
}
return hs != NULL ? hs->toStr() : "";
}
/*}}}*/
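+// Keep track of the URIs an item was redirected to: revisiting one of them
+// (other than an immediate repeat, which methods use for rescheduling) is
+// reported as a redirection loop.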
+bool pkgAcquire::Item::IsRedirectionLoop(std::string const &NewURI) /*{{{*/
+{
+ // store can fail due to permission errors and the item would then appear to "loop"
+ if (APT::String::Startswith(NewURI, "store:"))
+ return false;
+ if (d->PastRedirections.empty())
+ {
+ d->PastRedirections.push_back(NewURI);
+ return false;
+ }
+ auto const LastURI = std::prev(d->PastRedirections.end());
+ // redirections to the same file are a way of restarting/rescheduling,
+ // individual methods will have to make sure that they aren't looping this way
+ if (*LastURI == NewURI)
+ return false;
+ if (std::find(d->PastRedirections.begin(), LastURI, NewURI) != LastURI)
+ return true;
+ d->PastRedirections.push_back(NewURI);
+ return false;
+}
+ /*}}}*/
pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/
pkgAcqMetaClearSig * const transactionManager, IndexTarget const &target) :
string const FinalFile = GetFinalFilename();
struct stat Buf;
if (stat(FinalFile.c_str(),&Buf) == 0)
- Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
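+ // false: spell the timezone as "GMT" as HTTP-style headers expect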
+ Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
return Header;
}
if (I->UsedMirror.empty() == false && _config->FindB("Acquire::SameMirrorForAllIndexes", true))
{
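+ // remember base URI and mirror of the (In)Release file so that the index
+ // files can be requested from the same mirror (Acquire::SameMirrorForAllIndexes)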
if (APT::String::Endswith(I->Desc.URI, "InRelease"))
+ {
TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("InRelease"));
+ TransactionManager->UsedMirror = I->UsedMirror;
+ }
else if (APT::String::Endswith(I->Desc.URI, "Release"))
+ {
TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("Release"));
+ TransactionManager->UsedMirror = I->UsedMirror;
+ }
}
std::string const FileName = LookupTag(Message,"Filename");
// than invent an entirely new flag we would need to carry for all of eternity.
if (hasReleaseFile && Target.Option(IndexTarget::ARCHITECTURE) == "all")
{
- if (TransactionManager->MetaIndexParser->IsArchitectureSupported("all") == false ||
- TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(Target) == false)
+ if (TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(Target) == false)
{
new CleanupItem(Owner, TransactionManager, Target);
continue;
if (timespec == 0)
ErrorText.append("<unknown>");
else
- ErrorText.append(TimeRFC1123(timespec));
+ ErrorText.append(TimeRFC1123(timespec, true));
ErrorText.append("\n");
}
/*}}}*/
if (stat(Final.c_str(),&Buf) != 0)
return "\nIndex-File: true";
- return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
}
/*}}}*/
void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/
new pkgAcqIndexDiffs(Owner, TransactionManager, Target);
}
/*}}}*/
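+// Remove a stale file which would otherwise get in the way of bootstrap-linking
+// a current index for patching; if removal fails the caller gives up on patching.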
+static bool RemoveFileForBootstrapLinking(bool const Debug, std::string const &For, std::string const &Boot)/*{{{*/
+{
+ if (FileExists(Boot) && RemoveFile("Bootstrap-linking", Boot) == false)
+ {
+ if (Debug)
+ std::clog << "Bootstrap-linking for patching " << For
+ << " by removing stale " << Boot << " failed!" << std::endl;
+ return false;
+ }
+ return true;
+}
+ /*}}}*/
bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
{
ExpectedAdditionalItems = 0;
if (unlikely(Final.empty())) // because we wouldn't be called in such a case
return false;
std::string const PartialFile = GetPartialFileNameFromURI(Target.URI);
- if (FileExists(PartialFile) && RemoveFile("Bootstrap-linking", PartialFile) == false)
- {
- if (Debug)
- std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
- << " by removing stale " << PartialFile << " failed!" << std::endl;
+ std::string const PatchedFile = GetKeepCompressedFileName(PartialFile + "-patched", Target);
+ if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile) == false ||
+ RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile) == false)
return false;
- }
for (auto const &ext : APT::Configuration::getCompressorExtensions())
{
- std::string const Partial = PartialFile + ext;
- if (FileExists(Partial) && RemoveFile("Bootstrap-linking", Partial) == false)
- {
- if (Debug)
- std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
- << " by removing stale " << Partial << " failed!" << std::endl;
+ if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile + ext) == false ||
+ RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile + ext) == false)
return false;
- }
}
std::string const Ext = Final.substr(CurrentPackagesFile.length());
std::string const Partial = PartialFile + Ext;
if(State != StateApplyDiff)
return pkgAcqBaseIndex::Custom600Headers();
std::ostringstream patchhashes;
- HashStringList const ExpectedHashes = available_patches[0].patch_hashes;
- for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
- patchhashes << "\nPatch-0-" << hs->HashType() << "-Hash: " << hs->HashValue();
+ for (auto && hs : available_patches[0].result_hashes)
+ patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
+ for (auto && hs : available_patches[0].patch_hashes)
+ patchhashes << "\nPatch-0-" << hs.HashType() << "-Hash: " << hs.HashValue();
patchhashes << pkgAcqBaseIndex::Custom600Headers();
return patchhashes.str();
}
return pkgAcqBaseIndex::Custom600Headers();
std::ostringstream patchhashes;
unsigned int seen_patches = 0;
+ for (auto && hs : (*allPatches)[0]->patch.result_hashes)
+ patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
I != allPatches->end(); ++I)
{
HashStringList const ExpectedHashes = (*I)->patch.patch_hashes;
for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
- patchhashes << "\nPatch-" << seen_patches << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
+ patchhashes << "\nPatch-" << std::to_string(seen_patches) << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
++seen_patches;
}
patchhashes << pkgAcqBaseIndex::Custom600Headers();
DestFile = GetPartialFileNameFromURI(URI);
NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false);
- // store file size of the download to ensure the fetcher gives
- // accurate progress reporting
- FileSize = GetExpectedHashes().FileSize();
-
if (CurrentCompressionExtension == "uncompressed")
{
Desc.URI = URI;
DestFile = DestFile + '.' + CurrentCompressionExtension;
}
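+ // the expected hashes (and with them the expected size) are specific to
+ // the compression extension chosen above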
+ // store file size of the download to ensure the fetcher gives
+ // accurate progress reporting
+ FileSize = GetExpectedHashes().FileSize();
Desc.Description = URIDesc;
Desc.Owner = this;
struct stat Buf;
if (stat(Final.c_str(),&Buf) == 0)
- msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
}
if(Target.IsOptional)
{
pkgAcqBaseIndex::Failed(Message,Cnf);
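+ // a 404 from a mirror may just mean the mirror is incomplete or stale:
+ // retry the file once directly from the original URI before falling
+ // back to other compression types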
+ if (UsedMirror.empty() == false && UsedMirror != "DIRECT" &&
+ LookupTag(Message, "FailReason") == "HttpError404")
+ {
+ UsedMirror = "DIRECT";
+ if (Desc.URI.find("/by-hash/") != std::string::npos)
+ CompressionExtensions = "by-hash " + CompressionExtensions;
+ else
+ CompressionExtensions = CurrentCompressionExtension + ' ' + CompressionExtensions;
+ Desc.Description = Target.Description;
+ Init(Target.URI, Desc.Description, Desc.ShortDesc);
+ Status = StatIdle;
+ return;
+ }
+
// authorisation matches will not be fixed by other compression types
if (Status != StatAuthError)
{