#include <string>
#include <sstream>
#include <stdio.h>
+#include <ctime>
/*}}}*/
using namespace std;
{
Status = StatIdle;
ErrorText = LookupTag(Message,"Message");
+ UsedMirror = LookupTag(Message,"UsedMirror");
if (QueueCounter <= 1)
{
/* This indicates that the file is not available right now but might
Dequeue();
return;
}
-
+
Status = StatError;
Dequeue();
}
+
+ // report mirror failure back to LP if we actually use a mirror
+ string FailReason = LookupTag(Message, "FailReason");
+ if(FailReason.size() != 0)
+ ReportMirrorFailure(FailReason);
+ else
+ ReportMirrorFailure(ErrorText);
}
/*}}}*/
// Acquire::Item::Start - Item has begun to download /*{{{*/
{
// We just downloaded something..
string FileName = LookupTag(Message,"Filename");
+ UsedMirror = LookupTag(Message,"UsedMirror");
// we only inform the Log class if it was actually not a local thing
if (Complete == false && !Local && FileName == DestFile)
{
if (Owner->Log != 0)
if (FileSize == 0)
FileSize = Size;
-
Status = StatDone;
ErrorText = string();
Owner->Dequeue(this);
}
}
/*}}}*/
+// Acquire::Item::ReportMirrorFailure /*{{{*/
+// ---------------------------------------------------------------------
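+/* Report a download failure for the used mirror to an external script
+   (configurable via Methods::Mirror::ProblemReporting, see below) so that
+   mirror problems can be collected and acted upon. */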
+void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
+{
+ // we only act if a mirror was used at all
+ if(UsedMirror.empty())
+ return;
+#if 0
+ std::cerr << "\nReportMirrorFailure: "
+ << UsedMirror
+ << " Uri: " << DescURI()
+ << " FailCode: "
+ << FailCode << std::endl;
+#endif
+ const char *Args[40];
+ unsigned int i = 0;
+ string report = _config->Find("Methods::Mirror::ProblemReporting",
+ "/usr/lib/apt/apt-report-mirror-failure");
+ if(!FileExists(report))
+ return;
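+ // the report script is invoked with the mirror, the URI and the fail code
+ // as arguments, roughly: apt-report-mirror-failure <mirror> <uri> <failcode>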
+ Args[i++] = report.c_str();
+ Args[i++] = UsedMirror.c_str();
+ Args[i++] = DescURI().c_str();
+ Args[i++] = FailCode.c_str();
+ Args[i++] = NULL;
+ pid_t pid = ExecFork();
+ if(pid < 0)
+ {
+ _error->Error("ReportMirrorFailure Fork failed");
+ return;
+ }
+ else if(pid == 0)
+ {
+ execvp(Args[0], (char**)Args);
+ std::cerr << "Could not exec " << Args[0] << std::endl;
+ _exit(100);
+ }
+ if(!ExecWait(pid, "report-mirror-failure"))
+ {
+ _error->Warning("Couldn't report problem to '%s'",
+ _config->Find("Methods::Mirror::ProblemReporting").c_str());
+ }
+}
+ /*}}}*/
// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Get the DiffIndex file first and see if there are patches available
if(TF.Step(Tags) == true)
{
- string local_sha1;
bool found = false;
DiffInfo d;
string size;
- string tmp = Tags.FindS("SHA1-Current");
+ string const tmp = Tags.FindS("SHA1-Current");
std::stringstream ss(tmp);
- ss >> ServerSha1;
+ ss >> ServerSha1 >> size;
+ unsigned long const ServerSize = atol(size.c_str());
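+ // ServerSize is the size of the current full index as announced on the
+ // SHA1-Current line; it is used for the SizeLimit check further down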
- FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
+ FileFd fd(CurrentPackagesFile, FileFd::ReadOnlyGzip);
SHA1Summation SHA1;
SHA1.AddFD(fd.Fd(), fd.Size());
- local_sha1 = string(SHA1.Result());
+ string const local_sha1 = SHA1.Result();
if(local_sha1 == ServerSha1)
{
std::clog << "SHA1-Current: " << ServerSha1 << std::endl;
// check the history and see what patches we need
- string history = Tags.FindS("SHA1-History");
+ string const history = Tags.FindS("SHA1-History");
std::stringstream hist(history);
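+ // each SHA1-History line has the form "<sha1> <size> <patch-name>"
+ // (illustrative example: "9f4d... 102400 2010-01-15-1200.00")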
- while(hist >> d.sha1 >> size >> d.file)
+ while(hist >> d.sha1 >> size >> d.file)
{
- d.size = atoi(size.c_str());
// read until the first match is found
+ // from that point on, we probably need all diffs
if(d.sha1 == local_sha1)
found=true;
- // from that point on, we probably need all diffs
- if(found)
+ else if (found == false)
+ continue;
+
+ if(Debug)
+ std::clog << "Need to get diff: " << d.file << std::endl;
+ available_patches.push_back(d);
+ }
+
+ if (available_patches.empty() == false)
+ {
+ // patching with too many files is rather slow compared to a fast download
+ unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
+ if (fileLimit != 0 && fileLimit < available_patches.size())
{
- if(Debug)
- std::clog << "Need to get diff: " << d.file << std::endl;
- available_patches.push_back(d);
+ if (Debug)
+ std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
+ << ") so fallback to complete download" << std::endl;
+ return false;
+ }
+
+ // see if the patches are too big
+ found = false; // reset; it becomes true again once the first needed patch is seen
+ d = *available_patches.begin();
+ string const firstPatch = d.file;
+ unsigned long patchesSize = 0;
+ std::stringstream patches(Tags.FindS("SHA1-Patches"));
+ while(patches >> d.sha1 >> size >> d.file)
+ {
+ if (firstPatch == d.file)
+ found = true;
+ else if (found == false)
+ continue;
+
+ patchesSize += atol(size.c_str());
+ }
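+ // SizeLimit is a percentage of the size of the full file: with the
+ // default of 100 we fall back as soon as the patches together would be
+ // bigger than the complete download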
+ unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
+ if (sizeLimit > 0 && (sizeLimit/100) < patchesSize)
+ {
+ if (Debug)
+ std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
+ << ") so fallback to complete download" << std::endl;
+ return false;
}
}
}
if(found)
{
// queue the diffs
- string::size_type last_space = Description.rfind(" ");
+ string::size_type const last_space = Description.rfind(" ");
if(last_space != string::npos)
Description.erase(last_space, Description.size()-last_space);
new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);
- FileFd fd(FinalFile, FileFd::ReadOnly);
+ FileFd fd(FinalFile, FileFd::ReadOnlyGzip);
SHA1Summation SHA1;
SHA1.AddFD(fd.Fd(), fd.Size());
string local_sha1 = string(SHA1.Result());
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
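+ // with Acquire::GzipIndexes the index is kept as a .gz file in the lists
+ // directory, so the Last-Modified stat below has to look at that name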
+ if (_config->FindB("Acquire::GzipIndexes",false))
+ Final += ".gz";
struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
return "\nIndex-File: true";
-
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
Status = StatAuthError;
ErrorText = _("Hash Sum mismatch");
Rename(DestFile,DestFile + ".FAILED");
+ ReportMirrorFailure("HashChecksumFailure");
return;
}
// Done, move it into position
ErrorText = "Method gave a blank filename";
}
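+ // remember the compression extension of the fetched file (e.g. "gz")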
+ string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
+
// The files timestamp matches
- if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
+ if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) {
+ if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz")
+ // Update DestFile for .gz suffix so that the clean operation keeps it
+ DestFile += ".gz";
return;
+ }
if (FileName == DestFile)
Erase = true;
else
Local = true;
- string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
string decompProg;
+ // If we enable compressed indexes and already have gzip, keep it
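+ // (this behaviour can be requested e.g. with -o Acquire::GzipIndexes=true)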
+ if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz" && !Local) {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI) + ".gz";
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+
+ // Update DestFile for .gz suffix so that the clean operation keeps it
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(RealURI) + ".gz";
+ return;
+ }
+
// get the binary name for the used compression type
decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
if(decompProg.empty() == false);
string URI,string URIDesc,string ShortDesc)
: pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
{
+}
+ /*}}}*/
+// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/
+// ---------------------------------------------------------------------
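+/* Translation files are optional, so the method is told to ignore
+   failures; the Last-Modified time of an already fetched file is passed
+   along so unchanged translations are not fetched again. */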
+string pkgAcqIndexTrans::Custom600Headers()
+{
+ string Final = _config->FindDir("Dir::State::lists");
+ Final += URItoFileName(RealURI);
+
+ struct stat Buf;
+ if (stat(Final.c_str(),&Buf) != 0)
+ return "\nFail-Ignore: true";
+ return "\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
// AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/
Rename(LastGoodSig, DestFile);
// queue a pkgAcqMetaIndex to be verified against the sig we just retrieved
- new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
- DestFile, IndexTargets, MetaIndexParser);
+ new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc,
+ MetaIndexShortDesc, DestFile, IndexTargets,
+ MetaIndexParser);
}
/*}}}*/
{
Item::Failed(Message,Cnf);
// move the sigfile back on transient network failures
- if(FileExists(DestFile))
+ if(FileExists(LastGoodSig))
Rename(LastGoodSig,Final);
// set the status back again, as Item::Failed likes to reset it
if (AuthPass == true)
{
AuthDone(Message);
+
+ // all cool, move Release file into place
+ Complete = true;
+
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+ DestFile = FinalFile;
}
else
{
return;
}
- // see if the download was a IMSHit
+ // make sure to verify against the right file on I-M-S hit
IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false);
+ if(IMSHit)
+ {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ DestFile = FinalFile;
+ }
Complete = true;
-
- string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(RealURI);
-
- // If we get a IMS hit we can remove the empty file in partial
- // othersie we move the file in place
- if (IMSHit)
- unlink(DestFile.c_str());
- else
- Rename(DestFile,FinalFile);
-
- chmod(FinalFile.c_str(),0644);
- DestFile = FinalFile;
}
/*}}}*/
void pkgAcqMetaIndex::AuthDone(string Message) /*{{{*/
QueueIndexes(true);
// Done, move signature file into position
-
string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
URItoFileName(RealURI) + ".gpg";
Rename(SigFile,VerifiedSigFile);
return;
}
}
-
- // Queue Packages file (either diff or full packages files, depending
- // on the users option)
- if(_config->FindB("Acquire::PDiffs",true) == true)
+
+ /* Queue the Packages file (either the diff or the full file, depending
+ on the user's option) - we also check whether the PDiff Index file is
+ listed in the Meta-Index file. Ideally pkgAcqDiffIndex would test this
+ itself, but passing the required info to it is too much hassle */
+ if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
+ MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true))
new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
(*Target)->ShortDesc, ExpectedIndexHash);
- else
+ else
new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
(*Target)->ShortDesc, ExpectedIndexHash);
}
Transformed = "";
}
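+ // reject Release files whose Valid-Until date has already passed; the
+ // check can be switched off with Acquire::Check-Valid-Until "false"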
+ if (_config->FindB("Acquire::Check-Valid-Until", true) == true &&
+ MetaIndexParser->GetValidUntil() > 0) {
+ time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil();
+ if (invalid_since > 0)
+ // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
+ // how long the file has been invalid - formatted in the same way as in
+ // the download progress display (e.g. 7d 3h 42min 1s)
+ return _error->Error(_("Release file expired, ignoring %s (invalid since %s)"),
+ RealURI.c_str(), TimeToStr(invalid_since).c_str());
+ }
+
if (_config->FindB("Debug::pkgAcquire::Auth", false))
{
std::cerr << "Got Codename: " << MetaIndexParser->GetDist() << std::endl;
// return false;
if (!Transformed.empty())
{
- _error->Warning("Conflicting distribution: %s (expected %s but got %s)",
+ _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
Desc.Description.c_str(),
Transformed.c_str(),
MetaIndexParser->GetDist().c_str());
{
if (AuthPass == true)
{
- // if we fail the authentication but got the file via a IMS-Hit
- // this means that the file wasn't downloaded and that it might be
- // just stale (server problem, proxy etc). we delete what we have
- // queue it again without i-m-s
- // alternatively we could just unlink the file and let the user try again
- if (IMSHit)
+ // gpgv method failed; if we still have a good signature from a previous run, fall back to it
+ string LastGoodSigFile = _config->FindDir("Dir::State::lists") +
+ "partial/" + URItoFileName(RealURI) + ".gpg.reverify";
+ if(FileExists(LastGoodSigFile))
{
- Complete = false;
- Local = false;
- AuthPass = false;
- unlink(DestFile.c_str());
-
- DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(RealURI);
- Desc.URI = RealURI;
- QueueURI(Desc);
+ string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
+ URItoFileName(RealURI) + ".gpg";
+ Rename(LastGoodSigFile,VerifiedSigFile);
+ Status = StatTransientNetworkError;
+ _error->Warning(_("A error occurred during the signature "
+ "verification. The repository is not updated "
+ "and the previous index files will be used."
+ "GPG error: %s: %s\n"),
+ Desc.Description.c_str(),
+ LookupTag(Message,"Message").c_str());
+ RunScripts("APT::Update::Auth-Failure");
return;
+ } else {
+ _error->Warning(_("GPG error: %s: %s"),
+ Desc.Description.c_str(),
+ LookupTag(Message,"Message").c_str());
}
-
// gpgv method failed
- _error->Warning("GPG error: %s: %s",
- Desc.Description.c_str(),
- LookupTag(Message,"Message").c_str());
-
+ ReportMirrorFailure("GPGFailure");
}
// No Release file was present, or verification failed, so fall
the archive is already available in the cache and stashes the MD5 for
checking later. */
bool pkgAcqArchive::QueueNext()
-{
+{
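+ // Acquire::ForceHash (e.g. "sha256") restricts verification to that one
+ // hash type instead of picking the strongest hash offered by the index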
+ string const ForceHash = _config->Find("Acquire::ForceHash");
for (; Vf.end() == false; Vf++)
{
// Ignore not source sources
return false;
string PkgFile = Parse.FileName();
- if(Parse.SHA256Hash() != "")
- ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
- else if (Parse.SHA1Hash() != "")
- ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
- else
- ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+ if (ForceHash.empty() == false)
+ {
+ if(stringcasecmp(ForceHash, "sha256") == 0)
+ ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
+ else if (stringcasecmp(ForceHash, "sha1") == 0)
+ ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
+ else
+ ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+ }
+ else
+ {
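+ // otherwise prefer the strongest hash the index offers:
+ // SHA256 first, then SHA1, then MD5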
+ string Hash;
+ if ((Hash = Parse.SHA256Hash()).empty() == false)
+ ExpectedHash = HashString("SHA256", Hash);
+ else if ((Hash = Parse.SHA1Hash()).empty() == false)
+ ExpectedHash = HashString("SHA1", Hash);
+ else
+ ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+ }
if (PkgFile.empty() == true)
return _error->Error(_("The package index files are corrupted. No Filename: "
"field for package %s."),
/* The file is added to the queue */
pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
unsigned long Size,string Dsc,string ShortDesc,
- const string &DestDir, const string &DestFilename) :
- Item(Owner), ExpectedHash(Hash)
+ const string &DestDir, const string &DestFilename,
+ bool IsIndexFile) :
+ Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
{
Retries = _config->FindI("Acquire::Retries",0);
Item::Failed(Message,Cnf);
}
/*}}}*/
+// AcqFile::Custom600Headers - Insert custom request headers /*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we add is the Index-File flag for index files. */
+string pkgAcqFile::Custom600Headers()
+{
+ if (IsIndexFile)
+ return "\nIndex-File: true";
+ return "";
+}
+ /*}}}*/