}
/*}}}*/
-static bool MessageInsecureRepository(bool const isError, char const * const msg, std::string const &repo)/*{{{*/
+static APT_NONNULL(2) bool MessageInsecureRepository(bool const isError, char const * const msg, std::string const &repo)/*{{{*/
{
std::string m;
strprintf(m, msg, repo.c_str());
/*}}}*/
// AllowInsecureRepositories /*{{{*/
enum class InsecureType { UNSIGNED, WEAK, NORELEASE };
-static bool APT_NONNULL(3, 4, 5) AllowInsecureRepositories(InsecureType msg, std::string const &repo,
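+// check via global config options and per-target options whether the given
+// target may be acquired in the given insecure state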
+static bool TargetIsAllowedToBe(IndexTarget const &Target, InsecureType const type)
+{
+ if (_config->FindB("Acquire::AllowInsecureRepositories"))
+ return true;
+
+ if (Target.OptionBool(IndexTarget::ALLOW_INSECURE))
+ return true;
+
+ switch (type)
+ {
+ case InsecureType::UNSIGNED: break;
+ case InsecureType::NORELEASE: break;
+ case InsecureType::WEAK:
+ if (_config->FindB("Acquire::AllowWeakRepositories"))
+ return true;
+ if (Target.OptionBool(IndexTarget::ALLOW_WEAK))
+ return true;
+ break;
+ }
+ return false;
+}
+static bool APT_NONNULL(3, 4, 5) AllowInsecureRepositories(InsecureType const msg, std::string const &repo,
metaIndex const * const MetaIndexParser, pkgAcqMetaClearSig * const TransactionManager, pkgAcquire::Item * const I)
{
// we skip weak downgrades as it's unlikely that a repository gets really weaker –
case InsecureType::NORELEASE: msgstr = _("The repository '%s' no longer has a Release file."); break;
case InsecureType::WEAK: /* unreachable */ break;
}
- if (_config->FindB("Acquire::AllowDowngradeToInsecureRepositories"))
+ if (_config->FindB("Acquire::AllowDowngradeToInsecureRepositories") ||
+ TransactionManager->Target.OptionBool(IndexTarget::ALLOW_DOWNGRADE_TO_INSECURE))
{
// meh, the user wants to take risks (we still mark the packages
// from this repository as unauthenticated)
case InsecureType::WEAK: msgstr = _("The repository '%s' provides only weak security information."); break;
}
- if (_config->FindB("Acquire::AllowInsecureRepositories") == true)
+ if (TargetIsAllowedToBe(TransactionManager->Target, msg) == true)
{
MessageInsecureRepository(false, msgstr, repo);
return true;
we can at least trust them for integrity of the download itself.
Only repositories without a Release file can (obviously) not have
hashes – and they are very uncommon and strongly discouraged */
- return TransactionManager->MetaIndexParser->GetLoadedSuccessfully() == metaIndex::TRI_YES;
+ if (TransactionManager->MetaIndexParser->GetLoadedSuccessfully() != metaIndex::TRI_YES)
+ return false;
+ if (TargetIsAllowedToBe(Target, InsecureType::WEAK))
+ {
+ /* If we allow weak hashes, we check that we have at least some (weak)
+ hashes and then declare hashes as not required. That still tips us in
+ the right direction: if hashes exist, they will be used even if they
+ aren't required */
+ auto const hsl = GetExpectedHashes();
+ if (hsl.usable())
+ return true;
+ if (hsl.empty() == false)
+ return false;
+ }
+ return true;
}
HashStringList pkgAcqTransactionItem::GetExpectedHashes() const
{
return false;
}
// If we got the InRelease file via a mirror, pick all indexes directly from this mirror, too
- if (TransactionManager->BaseURI.empty() == false &&
+ if (TransactionManager->BaseURI.empty() == false && UsedMirror.empty() &&
URI::SiteOnly(Item.URI) != URI::SiteOnly(TransactionManager->BaseURI))
{
// this ensures we rewrite only once and only the first step
/*}}}*/
// Acquire::Item::Item - Constructor /*{{{*/
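+// opaque private data for pkgAcquire::Item, so members (like the list of
+// past redirections) can be added later without breaking the ABI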
+class pkgAcquire::Item::Private
+{
+public:
+ std::vector<std::string> PastRedirections;
+};
APT_IGNORE_DEPRECATED_PUSH
pkgAcquire::Item::Item(pkgAcquire * const owner) :
FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), Local(false),
- QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(NULL)
+ QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(new Private())
{
Owner->Add(this);
Status = StatIdle;
pkgAcquire::Item::~Item()
{
Owner->Remove(this);
+ delete d;
}
/*}}}*/
std::string pkgAcquire::Item::Custom600Headers() const /*{{{*/
}
string const FailReason = LookupTag(Message, "FailReason");
- enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, WEAK_HASHSUMS, OTHER } failreason = OTHER;
+ enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, WEAK_HASHSUMS, REDIRECTION_LOOP, OTHER } failreason = OTHER;
if ( FailReason == "MaximumSizeExceeded")
failreason = MAXIMUM_SIZE_EXCEEDED;
else if ( FailReason == "WeakHashSums")
failreason = WEAK_HASHSUMS;
+ else if (FailReason == "RedirectionLoop")
+ failreason = REDIRECTION_LOOP;
else if (Status == StatAuthError)
failreason = HASHSUM_MISMATCH;
if(ErrorText.empty())
{
+ std::ostringstream out;
+ switch (failreason)
+ {
+ case HASHSUM_MISMATCH:
+ out << _("Hash Sum mismatch") << std::endl;
+ break;
+ case WEAK_HASHSUMS:
+ out << _("Insufficient information available to perform this download securely") << std::endl;
+ break;
+ case REDIRECTION_LOOP:
+ out << "Redirection loop encountered" << std::endl;
+ break;
+ case MAXIMUM_SIZE_EXCEEDED:
+ out << LookupTag(Message, "Message") << std::endl;
+ break;
+ case OTHER:
+ out << LookupTag(Message, "Message");
+ break;
+ }
+
if (Status == StatAuthError)
{
- std::ostringstream out;
- switch (failreason)
- {
- case HASHSUM_MISMATCH:
- out << _("Hash Sum mismatch") << std::endl;
- break;
- case WEAK_HASHSUMS:
- out << _("Insufficient information available to perform this download securely") << std::endl;
- break;
- case MAXIMUM_SIZE_EXCEEDED:
- case OTHER:
- out << LookupTag(Message, "Message") << std::endl;
- break;
- }
auto const ExpectedHashes = GetExpectedHashes();
if (ExpectedHashes.empty() == false)
{
out << "Hashes of expected file:" << std::endl;
for (auto const &hs: ExpectedHashes)
- out << " - " << hs.toStr() << std::endl;
+ {
+ out << " - " << hs.toStr();
+ if (hs.usable() == false)
+ out << " [weak]";
+ out << std::endl;
+ }
}
if (failreason == HASHSUM_MISMATCH)
{
std::string const tagname = std::string(*type) + "-Hash";
std::string const hashsum = LookupTag(Message, tagname.c_str());
if (hashsum.empty() == false)
- out << " - " << HashString(*type, hashsum).toStr() << std::endl;
+ {
+ auto const hs = HashString(*type, hashsum);
+ out << " - " << hs.toStr();
+ if (hs.usable() == false)
+ out << " [weak]";
+ out << std::endl;
+ }
}
out << "Last modification reported: " << LookupTag(Message, "Last-Modified", "<none>") << std::endl;
}
- ErrorText = out.str();
}
- else
- ErrorText = LookupTag(Message,"Message");
+ ErrorText = out.str();
}
switch (failreason)
case MAXIMUM_SIZE_EXCEEDED: RenameOnError(MaximumSizeExceeded); break;
case HASHSUM_MISMATCH: RenameOnError(HashSumMismatch); break;
case WEAK_HASHSUMS: break;
+ case REDIRECTION_LOOP: break;
case OTHER: break;
}
return hs != NULL ? hs->toStr() : "";
}
/*}}}*/
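+// remember all past redirection targets of this item and report a loop if
+// a target comes up a second time; an immediate redirect to the same URI
+// counts as a restart, not as a loop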
+bool pkgAcquire::Item::IsRedirectionLoop(std::string const &NewURI) /*{{{*/
+{
+ // the store method can fail due to permission errors and the item will then "loop"
+ if (APT::String::Startswith(NewURI, "store:"))
+ return false;
+ if (d->PastRedirections.empty())
+ {
+ d->PastRedirections.push_back(NewURI);
+ return false;
+ }
+ auto const LastURI = std::prev(d->PastRedirections.end());
+ // redirections to the same file are a way of restarting/rescheduling,
+ // individual methods will have to make sure that they aren't looping this way
+ if (*LastURI == NewURI)
+ return false;
+ if (std::find(d->PastRedirections.begin(), LastURI, NewURI) != LastURI)
+ return true;
+ d->PastRedirections.push_back(NewURI);
+ return false;
+}
+ /*}}}*/
pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/
pkgAcqMetaClearSig * const transactionManager, IndexTarget const &target) :
string const FinalFile = GetFinalFilename();
struct stat Buf;
if (stat(FinalFile.c_str(),&Buf) == 0)
- Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
return Header;
}
if (I->UsedMirror.empty() == false && _config->FindB("Acquire::SameMirrorForAllIndexes", true))
{
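+ // remember which mirror delivered the (In)Release file so the index
+ // files can later be requested from the same mirror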
if (APT::String::Endswith(I->Desc.URI, "InRelease"))
+ {
TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("InRelease"));
+ TransactionManager->UsedMirror = I->UsedMirror;
+ }
else if (APT::String::Endswith(I->Desc.URI, "Release"))
+ {
TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("Release"));
+ TransactionManager->UsedMirror = I->UsedMirror;
+ }
}
std::string const FileName = LookupTag(Message,"Filename");
// than invent an entirely new flag we would need to carry for all of eternity.
if (hasReleaseFile && Target.Option(IndexTarget::ARCHITECTURE) == "all")
{
- if (TransactionManager->MetaIndexParser->IsArchitectureSupported("all") == false ||
- TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(Target) == false)
+ if (TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(Target) == false)
{
new CleanupItem(Owner, TransactionManager, Target);
continue;
auto const hashes = GetExpectedHashesFor(Target.MetaKey);
if (hashes.empty() == false)
{
- if (hashes.usable() == false)
+ if (hashes.usable() == false && TargetIsAllowedToBe(TransactionManager->Target, InsecureType::WEAK) == false)
{
new CleanupItem(Owner, TransactionManager, Target);
_error->Warning(_("Skipping acquire of configured file '%s' as repository '%s' provides only weak security information for it"),
IndexTarget const &DetachedDataTarget, IndexTarget const &DetachedSigTarget,
metaIndex * const MetaIndexParser) :
pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget),
- d(NULL), ClearsignedTarget(ClearsignedTarget),
- DetachedDataTarget(DetachedDataTarget),
+ d(NULL), DetachedDataTarget(DetachedDataTarget),
MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL)
{
// index targets + (worst case:) Release/Release.gpg
if(CheckStopAuthentication(this, Message))
return;
- if(AllowInsecureRepositories(InsecureType::UNSIGNED, ClearsignedTarget.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
+ if(AllowInsecureRepositories(InsecureType::UNSIGNED, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
{
Status = StatDone;
if (timespec == 0)
ErrorText.append("<unknown>");
else
- ErrorText.append(TimeRFC1123(timespec));
+ ErrorText.append(TimeRFC1123(timespec, true));
ErrorText.append("\n");
}
/*}}}*/
if (stat(Final.c_str(),&Buf) != 0)
return "\nIndex-File: true";
- return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
}
/*}}}*/
void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/
new pkgAcqIndexDiffs(Owner, TransactionManager, Target);
}
/*}}}*/
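+// little helper for pdiff bootstrap-linking: remove a stale file which would
+// get in the way of linking; returns false (and complains in debug mode) if
+// the file exists but can't be removed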
+static bool RemoveFileForBootstrapLinking(bool const Debug, std::string const &For, std::string const &Boot)/*{{{*/
+{
+ if (FileExists(Boot) && RemoveFile("Bootstrap-linking", Boot) == false)
+ {
+ if (Debug)
+ std::clog << "Bootstrap-linking for patching " << For
+ << " by removing stale " << Boot << " failed!" << std::endl;
+ return false;
+ }
+ return true;
+}
+ /*}}}*/
bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
{
ExpectedAdditionalItems = 0;
HashStringList ServerHashes;
unsigned long long ServerSize = 0;
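+ // parse sizes and hashes with a fixed locale so the result does not
+ // depend on the user's environment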
+ auto const &posix = std::locale("C.UTF-8");
for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
{
std::string tagname = *type;
string hash;
unsigned long long size;
std::stringstream ss(tmp);
+ ss.imbue(posix);
ss >> hash >> size;
if (unlikely(hash.empty() == true))
continue;
string hash, filename;
unsigned long long size;
std::stringstream ss(tmp);
+ ss.imbue(posix);
while (ss >> hash >> size >> filename)
{
string hash, filename;
unsigned long long size;
std::stringstream ss(tmp);
+ ss.imbue(posix);
while (ss >> hash >> size >> filename)
{
string hash, filename;
unsigned long long size;
std::stringstream ss(tmp);
+ ss.imbue(posix);
// FIXME: all of pdiff supports only .gz compressed patches
while (ss >> hash >> size >> filename)
if (unlikely(Final.empty())) // because we wouldn't be called in such a case
return false;
std::string const PartialFile = GetPartialFileNameFromURI(Target.URI);
- if (FileExists(PartialFile) && RemoveFile("Bootstrap-linking", PartialFile) == false)
- {
- if (Debug)
- std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
- << " by removing stale " << PartialFile << " failed!" << std::endl;
+ std::string const PatchedFile = GetKeepCompressedFileName(PartialFile + "-patched", Target);
+ if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile) == false ||
+ RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile) == false)
return false;
- }
for (auto const &ext : APT::Configuration::getCompressorExtensions())
{
- std::string const Partial = PartialFile + ext;
- if (FileExists(Partial) && RemoveFile("Bootstrap-linking", Partial) == false)
- {
- if (Debug)
- std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
- << " by removing stale " << Partial << " failed!" << std::endl;
+ if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile + ext) == false ||
+ RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile + ext) == false)
return false;
- }
}
std::string const Ext = Final.substr(CurrentPackagesFile.length());
std::string const Partial = PartialFile + Ext;
if(State != StateApplyDiff)
return pkgAcqBaseIndex::Custom600Headers();
std::ostringstream patchhashes;
- HashStringList const ExpectedHashes = available_patches[0].patch_hashes;
- for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
- patchhashes << "\nPatch-0-" << hs->HashType() << "-Hash: " << hs->HashValue();
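+ // additionally pass the hashes of the file we start patching from
+ // along as Start-*-Hash headers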
+ for (auto && hs : available_patches[0].result_hashes)
+ patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
+ for (auto && hs : available_patches[0].patch_hashes)
+ patchhashes << "\nPatch-0-" << hs.HashType() << "-Hash: " << hs.HashValue();
patchhashes << pkgAcqBaseIndex::Custom600Headers();
return patchhashes.str();
}
return pkgAcqBaseIndex::Custom600Headers();
std::ostringstream patchhashes;
unsigned int seen_patches = 0;
+ for (auto && hs : (*allPatches)[0]->patch.result_hashes)
+ patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
I != allPatches->end(); ++I)
{
HashStringList const ExpectedHashes = (*I)->patch.patch_hashes;
for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
- patchhashes << "\nPatch-" << seen_patches << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
+ patchhashes << "\nPatch-" << std::to_string(seen_patches) << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
++seen_patches;
}
patchhashes << pkgAcqBaseIndex::Custom600Headers();
DestFile = GetPartialFileNameFromURI(URI);
NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false);
- // store file size of the download to ensure the fetcher gives
- // accurate progress reporting
- FileSize = GetExpectedHashes().FileSize();
-
if (CurrentCompressionExtension == "uncompressed")
{
Desc.URI = URI;
DestFile = DestFile + '.' + CurrentCompressionExtension;
}
+ // store file size of the download to ensure the fetcher gives
+ // accurate progress reporting
+ FileSize = GetExpectedHashes().FileSize();
Desc.Description = URIDesc;
Desc.Owner = this;
struct stat Buf;
if (stat(Final.c_str(),&Buf) == 0)
- msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
}
if(Target.IsOptional)
{
pkgAcqBaseIndex::Failed(Message,Cnf);
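+ // if the file was fetched from a mirror and is missing there (404), retry
+ // the download once directly from the configured source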
+ if (UsedMirror.empty() == false && UsedMirror != "DIRECT" &&
+ LookupTag(Message, "FailReason") == "HttpError404")
+ {
+ UsedMirror = "DIRECT";
+ if (Desc.URI.find("/by-hash/") != std::string::npos)
+ CompressionExtensions = "by-hash " + CompressionExtensions;
+ else
+ CompressionExtensions = CurrentCompressionExtension + ' ' + CompressionExtensions;
+ Desc.Description = Target.Description;
+ Init(Target.URI, Desc.Description, Desc.ShortDesc);
+ Status = StatIdle;
+ return;
+ }
+
// authorisation mismatches will not be fixed by other compression types
if (Status != StatAuthError)
{