// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: acquire-item.cc,v 1.30 1999/05/24 03:39:36 jgg Exp $
+// $Id: acquire-item.cc,v 1.46.2.9 2004/01/16 18:51:11 mdz Exp $
/* ######################################################################
Acquire Item - Item to acquire
##################################################################### */
/*}}}*/
// Include Files /*{{{*/
-#ifdef __GNUG__
-#pragma implementation "apt-pkg/acquire-item.h"
-#endif
#include <apt-pkg/acquire-item.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/aptconfiguration.h>
+#include <apt-pkg/sourcelist.h>
+#include <apt-pkg/vendorlist.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
+#include <apt-pkg/md5.h>
+#include <apt-pkg/sha1.h>
+#include <apt-pkg/tagfile.h>
+#include <apti18n.h>
+
#include <sys/stat.h>
#include <unistd.h>
#include <errno.h>
-#include <string.h>
+#include <string>
+#include <sstream>
#include <stdio.h>
+#include <ctime>
/*}}}*/
+using namespace std;
+
// Acquire::Item::Item - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
{
Status = StatIdle;
ErrorText = LookupTag(Message,"Message");
+ UsedMirror = LookupTag(Message,"UsedMirror");
if (QueueCounter <= 1)
{
/* This indicates that the file is not available right now but might
Dequeue();
return;
}
-
+
Status = StatError;
Dequeue();
}
+
+ // report mirror failure back to LP if we actually use a mirror
+ string FailReason = LookupTag(Message, "FailReason");
+ if(FailReason.size() != 0)
+ ReportMirrorFailure(FailReason);
+ else
+ ReportMirrorFailure(ErrorText);
}
/*}}}*/
// Acquire::Item::Start - Item has begun to download /*{{{*/
// ---------------------------------------------------------------------
/* Stash status and the file size. Note that setting Complete means
   sub-phases of the acquire process such as decompression are operating */
-void pkgAcquire::Item::Start(string Message,unsigned long Size)
+void pkgAcquire::Item::Start(string /*Message*/,unsigned long Size)
{
Status = StatFetching;
if (FileSize == 0 && Complete == false)
// Acquire::Item::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcquire::Item::Done(string Message,unsigned long Size,string)
+void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,
+ pkgAcquire::MethodConfig *Cnf)
{
// We just downloaded something..
string FileName = LookupTag(Message,"Filename");
- if (Complete == false && FileName == DestFile)
+ UsedMirror = LookupTag(Message,"UsedMirror");
+ if (Complete == false && !Local && FileName == DestFile)
{
if (Owner->Log != 0)
Owner->Log->Fetched(Size,atoi(LookupTag(Message,"Resume-Point","0").c_str()));
}
-
+
+ if (FileSize == 0)
+ FileSize= Size;
Status = StatDone;
ErrorText = string();
Owner->Dequeue(this);
if (rename(From.c_str(),To.c_str()) != 0)
{
char S[300];
- sprintf(S,"rename failed, %s (%s -> %s).",strerror(errno),
+ snprintf(S,sizeof(S),_("rename failed, %s (%s -> %s)."),strerror(errno),
From.c_str(),To.c_str());
Status = StatError;
ErrorText = S;
- }
+ }
+}
+ /*}}}*/
+// Acquire::Item::ReportMirrorFailure /*{{{*/
+// ---------------------------------------------------------------------
+void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
+{
+ // we only act if a mirror was used at all
+ if(UsedMirror.empty())
+ return;
+#if 0
+ std::cerr << "\nReportMirrorFailure: "
+ << UsedMirror
+ << " Uri: " << DescURI()
+ << " FailCode: "
+ << FailCode << std::endl;
+#endif
+ const char *Args[40];
+ unsigned int i = 0;
+ string report = _config->Find("Methods::Mirror::ProblemReporting",
+ "/usr/lib/apt/apt-report-mirror-failure");
+ if(!FileExists(report))
+ return;
+ Args[i++] = report.c_str();
+ Args[i++] = UsedMirror.c_str();
+ Args[i++] = DescURI().c_str();
+ Args[i++] = FailCode.c_str();
+ Args[i++] = NULL;
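+   // fork and exec the reporting script with (mirror, uri, failcode)
+   // as its arguments, then wait for it to finish below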
+ pid_t pid = ExecFork();
+ if(pid < 0)
+ {
+ _error->Error("ReportMirrorFailure Fork failed");
+ return;
+ }
+ else if(pid == 0)
+ {
+ execvp(Args[0], (char**)Args);
+ std::cerr << "Could not exec " << Args[0] << std::endl;
+ _exit(100);
+ }
+ if(!ExecWait(pid, "report-mirror-failure"))
+ {
+ _error->Warning("Couldn't report problem to '%s'",
+ _config->Find("Methods::Mirror::ProblemReporting").c_str());
+ }
+}
+ /*}}}*/
+// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Get the DiffIndex file first and see if there are patches available.
+ * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
+ * patches. If anything goes wrong in that process, it will fall back to
+ * the original Packages file.
+ */
+pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
+ string URI,string URIDesc,string ShortDesc,
+ HashString ExpectedHash)
+ : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+ Description(URIDesc)
+{
+
+ Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
+
+ Desc.Description = URIDesc + "/DiffIndex";
+ Desc.Owner = this;
+ Desc.ShortDesc = ShortDesc;
+ Desc.URI = URI + ".diff/Index";
+
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(URI) + string(".DiffIndex");
+
+ if(Debug)
+ std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
+
+ // look for the current package file
+ CurrentPackagesFile = _config->FindDir("Dir::State::lists");
+ CurrentPackagesFile += URItoFileName(RealURI);
+
+ // FIXME: this file:/ check is a hack to prevent fetching
+ // from local sources. this is really silly, and
+ // should be fixed cleanly as soon as possible
+ if(!FileExists(CurrentPackagesFile) ||
+ Desc.URI.substr(0,strlen("file:/")) == "file:/")
+ {
+ // we don't have a pkg file or we don't want to queue
+ if(Debug)
+      std::clog << "No index file, local or cancelled by user" << std::endl;
+ Failed("", NULL);
+ return;
+ }
+
+ if(Debug)
+      std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): "
+ << CurrentPackagesFile << std::endl;
+
+ QueueURI(Desc);
+
+}
+ /*}}}*/
+// AcqDiffIndex::Custom600Headers - Insert custom request headers	/*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+string pkgAcqDiffIndex::Custom600Headers()
+{
+ string Final = _config->FindDir("Dir::State::lists");
+ Final += URItoFileName(RealURI) + string(".IndexDiff");
+
+ if(Debug)
+ std::clog << "Custom600Header-IMS: " << Final << std::endl;
+
+ struct stat Buf;
+ if (stat(Final.c_str(),&Buf) != 0)
+ return "\nIndex-File: true";
+
+ return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+}
+ /*}}}*/
+bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
+{
+ if(Debug)
+      std::clog << "pkgAcqDiffIndex::ParseDiffIndex() " << IndexDiffFile
+ << std::endl;
+
+ pkgTagSection Tags;
+ string ServerSha1;
+ vector<DiffInfo> available_patches;
+
+ FileFd Fd(IndexDiffFile,FileFd::ReadOnly);
+ pkgTagFile TF(&Fd);
+ if (_error->PendingError() == true)
+ return false;
+
+ if(TF.Step(Tags) == true)
+ {
+ bool found = false;
+ DiffInfo d;
+ string size;
+
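+      // The DiffIndex provides a SHA1-Current field (hash and size of the
+      // current Packages file) plus SHA1-History and SHA1-Patches sections
+      // listing, per entry, a hash, a size and a patch file name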
+ string const tmp = Tags.FindS("SHA1-Current");
+ std::stringstream ss(tmp);
+ ss >> ServerSha1 >> size;
+ unsigned long const ServerSize = atol(size.c_str());
+
+ FileFd fd(CurrentPackagesFile, FileFd::ReadOnlyGzip);
+ SHA1Summation SHA1;
+ SHA1.AddFD(fd.Fd(), fd.Size());
+ string const local_sha1 = SHA1.Result();
+
+ if(local_sha1 == ServerSha1)
+ {
+ // we have the same sha1 as the server
+ if(Debug)
+ std::clog << "Package file is up-to-date" << std::endl;
+ // set found to true, this will queue a pkgAcqIndexDiffs with
+         // an empty available_patches
+ found = true;
+ }
+ else
+ {
+ if(Debug)
+ std::clog << "SHA1-Current: " << ServerSha1 << std::endl;
+
+         // check the history and see which patches we need
+ string const history = Tags.FindS("SHA1-History");
+ std::stringstream hist(history);
+ while(hist >> d.sha1 >> size >> d.file)
+ {
+ // read until the first match is found
+ // from that point on, we probably need all diffs
+ if(d.sha1 == local_sha1)
+ found=true;
+ else if (found == false)
+ continue;
+
+ if(Debug)
+ std::clog << "Need to get diff: " << d.file << std::endl;
+ available_patches.push_back(d);
+ }
+
+ if (available_patches.empty() == false)
+ {
+ // patching with too many files is rather slow compared to a fast download
+ unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
+ if (fileLimit != 0 && fileLimit < available_patches.size())
+ {
+ if (Debug)
+ std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
+ << ") so fallback to complete download" << std::endl;
+ return false;
+ }
+
+ // see if the patches are too big
+ found = false; // it was true and it will be true again at the end
+ d = *available_patches.begin();
+ string const firstPatch = d.file;
+ unsigned long patchesSize = 0;
+ std::stringstream patches(Tags.FindS("SHA1-Patches"));
+ while(patches >> d.sha1 >> size >> d.file)
+ {
+ if (firstPatch == d.file)
+ found = true;
+ else if (found == false)
+ continue;
+
+ patchesSize += atol(size.c_str());
+ }
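+            // Acquire::PDiffs::SizeLimit is interpreted as a percentage of
+            // the size of the full index, hence the division by 100 below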
+ unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
+ if (sizeLimit > 0 && (sizeLimit/100) < patchesSize)
+ {
+ if (Debug)
+ std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
+ << ") so fallback to complete download" << std::endl;
+ return false;
+ }
+ }
+ }
+
+ // we have something, queue the next diff
+ if(found)
+ {
+ // queue the diffs
+ string::size_type const last_space = Description.rfind(" ");
+ if(last_space != string::npos)
+ Description.erase(last_space, Description.size()-last_space);
+ new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
+ ExpectedHash, ServerSha1, available_patches);
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+ return true;
+ }
+ }
+
+ // Nothing found, report and return false
+ // Failing here is ok, if we return false later, the full
+ // IndexFile is queued
+ if(Debug)
+ std::clog << "Can't find a patch in the index file" << std::endl;
+ return false;
+}
+ /*}}}*/
+void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+{
+ if(Debug)
+ std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << std::endl
+      << "Falling back to normal index file acquire" << std::endl;
+
+ new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc,
+ ExpectedHash);
+
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+}
+ /*}}}*/
+void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+ pkgAcquire::MethodConfig *Cnf)
+{
+ if(Debug)
+ std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl;
+
+ Item::Done(Message,Size,Md5Hash,Cnf);
+
+ string FinalFile;
+ FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
+
+   // success in downloading the index
+ // rename the index
+ FinalFile += string(".IndexDiff");
+ if(Debug)
+ std::clog << "Renaming: " << DestFile << " -> " << FinalFile
+ << std::endl;
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+ DestFile = FinalFile;
+
+ if(!ParseDiffIndex(DestFile))
+ return Failed("", NULL);
+
+ Complete = true;
+ Status = StatDone;
+ Dequeue();
+ return;
+}
+ /*}}}*/
+// AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* The package diff is added to the queue. One object is constructed
+ * for each diff and the index.
+ */
+pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
+ string URI,string URIDesc,string ShortDesc,
+ HashString ExpectedHash,
+ string ServerSha1,
+ vector<DiffInfo> diffs)
+ : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+ available_patches(diffs), ServerSha1(ServerSha1)
+{
+
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(URI);
+
+ Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
+
+ Description = URIDesc;
+ Desc.Owner = this;
+ Desc.ShortDesc = ShortDesc;
+
+ if(available_patches.size() == 0)
+ {
+ // we are done (yeah!)
+ Finish(true);
+ }
+ else
+ {
+ // get the next diff
+ State = StateFetchDiff;
+ QueueNextDiff();
+ }
+}
+ /*}}}*/
+void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+{
+ if(Debug)
+ std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << std::endl
+      << "Falling back to normal index file acquire" << std::endl;
+ new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
+ ExpectedHash);
+ Finish();
+}
+ /*}}}*/
+// Finish - helper that cleans the item out of the fetcher queue /*{{{*/
+void pkgAcqIndexDiffs::Finish(bool allDone)
+{
+ // we restore the original name, this is required, otherwise
+ // the file will be cleaned
+ if(allDone)
+ {
+ DestFile = _config->FindDir("Dir::State::lists");
+ DestFile += URItoFileName(RealURI);
+
+ if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
+ {
+ Status = StatAuthError;
+         ErrorText = _("Hash Sum mismatch");
+ Rename(DestFile,DestFile + ".FAILED");
+ Dequeue();
+ return;
+ }
+
+ // this is for the "real" finish
+ Complete = true;
+ Status = StatDone;
+ Dequeue();
+ if(Debug)
+ std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl;
+ return;
+ }
+
+ if(Debug)
+ std::clog << "Finishing: " << Desc.URI << std::endl;
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+ return;
}
/*}}}*/
+bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
+{
+
+ // calc sha1 of the just patched file
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+
+ FileFd fd(FinalFile, FileFd::ReadOnlyGzip);
+ SHA1Summation SHA1;
+ SHA1.AddFD(fd.Fd(), fd.Size());
+ string local_sha1 = string(SHA1.Result());
+ if(Debug)
+ std::clog << "QueueNextDiff: "
+ << FinalFile << " (" << local_sha1 << ")"<<std::endl;
+
+ // final file reached before all patches are applied
+ if(local_sha1 == ServerSha1)
+ {
+ Finish(true);
+ return true;
+ }
+
+   // remove all patches until the next matching patch is found
+   // this requires the Index file to be ordered
+   while(available_patches.empty() == false &&
+         available_patches.begin()->sha1 != local_sha1)
+      available_patches.erase(available_patches.begin());
+
+ // error checking and falling back if no patch was found
+ if(available_patches.size() == 0)
+ {
+ Failed("", NULL);
+ return false;
+ }
+
+ // queue the right diff
+ Desc.URI = string(RealURI) + ".diff/" + available_patches[0].file + ".gz";
+ Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file);
+
+ if(Debug)
+ std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl;
+
+ QueueURI(Desc);
+
+ return true;
+}
+ /*}}}*/
+void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+ pkgAcquire::MethodConfig *Cnf)
+{
+ if(Debug)
+ std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl;
+
+ Item::Done(Message,Size,Md5Hash,Cnf);
+
+ string FinalFile;
+ FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
+
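+   // Each patch cycles through three states: fetch the compressed diff,
+   // decompress it via the gzip method, then apply it via the rred method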
+   // success in downloading a diff, enter the unzip state
+ if(State == StateFetchDiff)
+ {
+ if(Debug)
+ std::clog << "Sending to gzip method: " << FinalFile << std::endl;
+
+ string FileName = LookupTag(Message,"Filename");
+ State = StateUnzipDiff;
+ Local = true;
+ Desc.URI = "gzip:" + FileName;
+ DestFile += ".decomp";
+ QueueURI(Desc);
+ Mode = "gzip";
+ return;
+ }
+
+   // success in unzipping a diff, enter the apply state
+ if(State == StateUnzipDiff)
+ {
+
+      // rred expects the patch as $FinalFile.ed
+ Rename(DestFile,FinalFile+".ed");
+
+ if(Debug)
+ std::clog << "Sending to rred method: " << FinalFile << std::endl;
+
+ State = StateApplyDiff;
+ Local = true;
+ Desc.URI = "rred:" + FinalFile;
+ QueueURI(Desc);
+ Mode = "rred";
+ return;
+ }
+
+
+ // success in download/apply a diff, queue next (if needed)
+ if(State == StateApplyDiff)
+ {
+ // remove the just applied patch
+ available_patches.erase(available_patches.begin());
+
+ // move into place
+ if(Debug)
+ {
+ std::clog << "Moving patched file in place: " << std::endl
+ << DestFile << " -> " << FinalFile << std::endl;
+ }
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+
+ // see if there is more to download
+ if(available_patches.size() > 0) {
+ new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
+ ExpectedHash, ServerSha1, available_patches);
+ return Finish();
+ } else
+ return Finish(true);
+ }
+}
+ /*}}}*/
// AcqIndex::AcqIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The package file is added to the queue and a second class is
- instantiated to fetch the revision file */
-pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,const pkgSourceList::Item *Location) :
- Item(Owner), Location(Location)
+ instantiated to fetch the revision file */
+pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
+ string URI,string URIDesc,string ShortDesc,
+ HashString ExpectedHash, string comprExt)
+ : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
{
Decompression = false;
Erase = false;
-
+
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(Location->PackagesURI());
+ DestFile += URItoFileName(URI);
- // Create the item
- Desc.URI = Location->PackagesURI() + ".gz";
- Desc.Description = Location->PackagesInfo();
- Desc.Owner = this;
+ if(comprExt.empty())
+ {
+ // autoselect the compression method
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+ if (types.empty() == true)
+ comprExt = "plain";
+ else
+ comprExt = "." + types[0];
+ }
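+   // "plain" (or a bare ".") means the uncompressed file is requested directly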
+ CompressionExtension = ((comprExt == "plain" || comprExt == ".") ? "" : comprExt);
- // Set the short description to the archive component
- if (Location->Dist[Location->Dist.size() - 1] == '/')
- Desc.ShortDesc = Location->Dist;
- else
- Desc.ShortDesc = Location->Dist + '/' + Location->Section;
+ Desc.URI = URI + CompressionExtension;
+
+ Desc.Description = URIDesc;
+ Desc.Owner = this;
+ Desc.ShortDesc = ShortDesc;
QueueURI(Desc);
-
- // Create the Release fetch class
- new pkgAcqIndexRel(Owner,Location);
}
/*}}}*/
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
string pkgAcqIndex::Custom600Headers()
{
string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(Location->PackagesURI());
+ Final += URItoFileName(RealURI);
+ if (_config->FindB("Acquire::GzipIndexes",false))
+ Final += ".gz";
struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
return "\nIndex-File: true";
-
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
+void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+{
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+
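+   // figure out which compression type we just tried (the current extension
+   // of Desc.URI) and retry the download with the next type in the list;
+   // once no type is left the failure is passed on below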
+ for (std::vector<std::string>::const_iterator t = types.begin();
+ t != types.end(); t++)
+ {
+ // jump over all already tried compression types
+ const unsigned int nameLen = Desc.URI.size() - (*t).size();
+ if(Desc.URI.substr(nameLen) != *t)
+ continue;
+
+ // we want to try it with the next extension (and make sure to
+ // not skip over the end)
+ t++;
+ if (t == types.end())
+ break;
+
+ // queue new download
+ Desc.URI = Desc.URI.substr(0, nameLen) + *t;
+ new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc,
+ ExpectedHash, string(".").append(*t));
+
+ Status = StatDone;
+ Complete = false;
+ Dequeue();
+ return;
+ }
+
+ // on decompression failure, remove bad versions in partial/
+ if(Decompression && Erase) {
+ string s = _config->FindDir("Dir::State::lists") + "partial/";
+ s += URItoFileName(RealURI);
+ unlink(s.c_str());
+ }
+
+ Item::Failed(Message,Cnf);
+}
+ /*}}}*/
// AcqIndex::Done - Finished a fetch /*{{{*/
// ---------------------------------------------------------------------
/* This goes through a number of states.. On the initial fetch the
to the uncompressed version of the file. If this is so the file
is copied into the partial directory. In all other cases the file
is decompressed with a gzip uri. */
-void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5)
+void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
+ pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,MD5);
+ Item::Done(Message,Size,Hash,Cfg);
if (Decompression == true)
{
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ {
+ std::cerr << std::endl << RealURI << ": Computed Hash: " << Hash;
+ std::cerr << " Expected Hash: " << ExpectedHash.toStr() << std::endl;
+ }
+
+ if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
+ {
+ Status = StatAuthError;
+ ErrorText = _("Hash Sum mismatch");
+ Rename(DestFile,DestFile + ".FAILED");
+ ReportMirrorFailure("HashChecksumFailure");
+ return;
+ }
// Done, move it into position
string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(Location->PackagesURI());
+ FinalFile += URItoFileName(RealURI);
Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
/* We restore the original name to DestFile so that the clean operation
will work OK */
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(Location->PackagesURI());
+ DestFile += URItoFileName(RealURI);
// Remove the compressed version.
if (Erase == true)
string FileName = LookupTag(Message,"Alt-Filename");
if (FileName.empty() == false)
{
- // The files timestamp matches
- if (StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false) == true)
- return;
-
- Decompression = true;
+      // The file's timestamp matches
+ if (StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false) == true)
+ return;
+ Decompression = true;
+ Local = true;
+ DestFile += ".decomp";
+ Desc.URI = "copy:" + FileName;
+ QueueURI(Desc);
+ Mode = "copy";
+ return;
+ }
+
+ FileName = LookupTag(Message,"Filename");
+ if (FileName.empty() == true)
+ {
+ Status = StatError;
+      ErrorText = "Method gave a blank filename";
+      return;
+   }
+
+ string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
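+   // the extension of the requested URI determines how (and whether)
+   // the fetched file needs to be decompressed further below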
+
+   // The file's timestamp matches
+ if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) {
+ if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz")
+ // Update DestFile for .gz suffix so that the clean operation keeps it
+ DestFile += ".gz";
+ return;
+ }
+
+ if (FileName == DestFile)
+ Erase = true;
+ else
+ Local = true;
+
+ string decompProg;
+
+ // If we enable compressed indexes and already have gzip, keep it
+ if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz" && !Local) {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI) + ".gz";
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+
+ // Update DestFile for .gz suffix so that the clean operation keeps it
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(RealURI) + ".gz";
+ return;
+ }
+
+   // get the binary name for your used compression type
+   decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
+   if(decompProg.empty() == true)
+   {
+      // flExtension returns the full name if no extension is found,
+      // which is why we need this complicated compare operation here.
+      // FIXME: add a new flJustExtension() that returns "" if no
+      //        extension is found and use that above so that it can
+      //        be tested against ""
+      if(compExt == flNotDir(URI(Desc.URI).Path))
+         decompProg = "copy";
+      else
+      {
+         _error->Error("Unsupported extension: %s", compExt.c_str());
+         return;
+      }
+   }
+
+ Decompression = true;
+ DestFile += ".decomp";
+ Desc.URI = decompProg + ":" + FileName;
+ QueueURI(Desc);
+ Mode = decompProg.c_str();
+}
+ /*}}}*/
+// AcqIndexTrans::pkgAcqIndexTrans - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* The Translation file is added to the queue */
+pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
+ string URI,string URIDesc,string ShortDesc)
+ : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
+{
+}
+ /*}}}*/
+// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/
+// ---------------------------------------------------------------------
+string pkgAcqIndexTrans::Custom600Headers()
+{
+ string Final = _config->FindDir("Dir::State::lists");
+ Final += URItoFileName(RealURI);
+
+ struct stat Buf;
+ if (stat(Final.c_str(),&Buf) != 0)
+ return "\nFail-Ignore: true";
+ return "\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+}
+ /*}}}*/
+// AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+{
+ if (Cnf->LocalOnly == true ||
+ StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
+ {
+ // Ignore this
+ Status = StatDone;
+ Complete = false;
+ Dequeue();
+ return;
+ }
+
+ Item::Failed(Message,Cnf);
+}
+ /*}}}*/
+pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/
+ string URI,string URIDesc,string ShortDesc,
+ string MetaIndexURI, string MetaIndexURIDesc,
+ string MetaIndexShortDesc,
+ const vector<IndexTarget*>* IndexTargets,
+ indexRecords* MetaIndexParser) :
+ Item(Owner), RealURI(URI), MetaIndexURI(MetaIndexURI),
+ MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
+ MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets)
+{
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(URI);
+
+   // remove any partially downloaded sig-file in partial/.
+ // it may confuse proxies and is too small to warrant a
+ // partial download anyway
+ unlink(DestFile.c_str());
+
+ // Create the item
+ Desc.Description = URIDesc;
+ Desc.Owner = this;
+ Desc.ShortDesc = ShortDesc;
+ Desc.URI = URI;
+
+ string Final = _config->FindDir("Dir::State::lists");
+ Final += URItoFileName(RealURI);
+ struct stat Buf;
+ if (stat(Final.c_str(),&Buf) == 0)
+ {
+ // File was already in place. It needs to be re-downloaded/verified
+      // because Release might have changed. We give it a different
+ // name than DestFile because otherwise the http method will
+ // send If-Range requests and there are too many broken servers
+ // out there that do not understand them
+ LastGoodSig = DestFile+".reverify";
+ Rename(Final,LastGoodSig);
+ }
+
+ QueueURI(Desc);
+}
+ /*}}}*/
+// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+string pkgAcqMetaSig::Custom600Headers()
+{
+ struct stat Buf;
+ if (stat(LastGoodSig.c_str(),&Buf) != 0)
+ return "\nIndex-File: true";
+
+ return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+}
+
+void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5,
+ pkgAcquire::MethodConfig *Cfg)
+{
+ Item::Done(Message,Size,MD5,Cfg);
+
+ string FileName = LookupTag(Message,"Filename");
+ if (FileName.empty() == true)
+ {
+ Status = StatError;
+ ErrorText = "Method gave a blank filename";
+ return;
+ }
+
+ if (FileName != DestFile)
+ {
+ // We have to copy it into place
Local = true;
- DestFile += ".decomp";
Desc.URI = "copy:" + FileName;
QueueURI(Desc);
- Mode = "copy";
return;
}
- FileName = LookupTag(Message,"Filename");
- if (FileName.empty() == true)
+ Complete = true;
+
+ // put the last known good file back on i-m-s hit (it will
+ // be re-verified again)
+ // Else do nothing, we have the new file in DestFile then
+ if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
+ Rename(LastGoodSig, DestFile);
+
+ // queue a pkgAcqMetaIndex to be verified against the sig we just retrieved
+ new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc,
+ MetaIndexShortDesc, DestFile, IndexTargets,
+ MetaIndexParser);
+
+}
+ /*}}}*/
+void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
+{
+ string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+
+ // if we get a network error we fail gracefully
+ if(Status == StatTransientNetworkError)
{
- Status = StatError;
- ErrorText = "Method gave a blank filename";
- }
-
- // The files timestamp matches
- if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
+ Item::Failed(Message,Cnf);
+ // move the sigfile back on transient network failures
+ if(FileExists(LastGoodSig))
+ Rename(LastGoodSig,Final);
+
+      // set the status back to StatTransientNetworkError, Item::Failed likes to reset it
+ Status = pkgAcquire::Item::StatTransientNetworkError;
return;
+ }
- if (FileName == DestFile)
- Erase = true;
- else
- Local = true;
+ // Delete any existing sigfile when the acquire failed
+ unlink(Final.c_str());
+
+ // queue a pkgAcqMetaIndex with no sigfile
+ new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
+ "", IndexTargets, MetaIndexParser);
+
+ if (Cnf->LocalOnly == true ||
+ StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
+ {
+ // Ignore this
+ Status = StatDone;
+ Complete = false;
+ Dequeue();
+ return;
+ }
- Decompression = true;
- DestFile += ".decomp";
- Desc.URI = "gzip:" + FileName,Location->PackagesInfo();
- QueueURI(Desc);
- Mode = "gzip";
+ Item::Failed(Message,Cnf);
}
/*}}}*/
-
-// AcqIndexRel::pkgAcqIndexRel - Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* The Release file is added to the queue */
-pkgAcqIndexRel::pkgAcqIndexRel(pkgAcquire *Owner,
- const pkgSourceList::Item *Location) :
- Item(Owner), Location(Location)
+pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/
+ string URI,string URIDesc,string ShortDesc,
+ string SigFile,
+ const vector<struct IndexTarget*>* IndexTargets,
+ indexRecords* MetaIndexParser) :
+ Item(Owner), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets),
+ MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false)
{
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(Location->ReleaseURI());
-
+ DestFile += URItoFileName(URI);
+
// Create the item
- Desc.URI = Location->ReleaseURI();
- Desc.Description = Location->ReleaseInfo();
+ Desc.Description = URIDesc;
Desc.Owner = this;
+ Desc.ShortDesc = ShortDesc;
+ Desc.URI = URI;
- // Set the short description to the archive component
- if (Location->Dist[Location->Dist.size() - 1] == '/')
- Desc.ShortDesc = Location->Dist;
- else
- Desc.ShortDesc = Location->Dist + '/' + Location->Section;
-
QueueURI(Desc);
}
/*}}}*/
-// AcqIndexRel::Custom600Headers - Insert custom request headers /*{{{*/
+// pkgAcqMetaIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqIndexRel::Custom600Headers()
+string pkgAcqMetaIndex::Custom600Headers()
{
string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(Location->ReleaseURI());
+ Final += URItoFileName(RealURI);
struct stat Buf;
   if (stat(Final.c_str(),&Buf) != 0)
      return "\nIndex-File: true";
   return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
-// AcqIndexRel::Done - Item downloaded OK /*{{{*/
-// ---------------------------------------------------------------------
-/* The release file was not placed into the download directory then
- a copy URI is generated and it is copied there otherwise the file
- in the partial directory is moved into .. and the URI is finished. */
-void pkgAcqIndexRel::Done(string Message,unsigned long Size,string MD5)
+void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*/
+ pkgAcquire::MethodConfig *Cfg)
+{
+ Item::Done(Message,Size,Hash,Cfg);
+
+ // MetaIndexes are done in two passes: one to download the
+ // metaindex with an appropriate method, and a second to verify it
+ // with the gpgv method
+
+ if (AuthPass == true)
+ {
+ AuthDone(Message);
+
+ // all cool, move Release file into place
+ Complete = true;
+
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+ DestFile = FinalFile;
+ }
+ else
+ {
+ RetrievalDone(Message);
+ if (!Complete)
+ // Still more retrieving to do
+ return;
+
+ if (SigFile == "")
+ {
+ // There was no signature file, so we are finished. Download
+ // the indexes without verification.
+ QueueIndexes(false);
+ }
+ else
+ {
+ // There was a signature file, so pass it to gpgv for
+ // verification
+
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ std::cerr << "Metaindex acquired, queueing gpg verification ("
+ << SigFile << "," << DestFile << ")\n";
+ AuthPass = true;
+ Desc.URI = "gpgv:" + SigFile;
+ QueueURI(Desc);
+ Mode = "gpgv";
+ }
+ }
+}
+ /*}}}*/
+void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/
{
- Item::Done(Message,Size,MD5);
+ // We have just finished downloading a Release file (it is not
+ // verified yet)
string FileName = LookupTag(Message,"Filename");
if (FileName.empty() == true)
return;
}
- Complete = true;
-
- // The files timestamp matches
- if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
- return;
-
- // We have to copy it into place
if (FileName != DestFile)
{
Local = true;
QueueURI(Desc);
return;
}
-
- // Done, move it into position
- string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(Location->ReleaseURI());
- Rename(DestFile,FinalFile);
+
+ // make sure to verify against the right file on I-M-S hit
+ IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false);
+ if(IMSHit)
+ {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ DestFile = FinalFile;
+ }
+ Complete = true;
}
/*}}}*/
-// AcqIndexRel::Failed - Silence failure messages for missing rel files /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-void pkgAcqIndexRel::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+void pkgAcqMetaIndex::AuthDone(string Message) /*{{{*/
{
- // This is the retry counter
- if (Cnf->LocalOnly == true ||
- StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
- {
- // Ignore this
- Status = StatDone;
- Complete = false;
- Dequeue();
+ // At this point, the gpgv method has succeeded, so there is a
+ // valid signature from a key in the trusted keyring. We
+ // perform additional verification of its contents, and use them
+ // to verify the indexes we are about to download
+
+ if (!MetaIndexParser->Load(DestFile))
+ {
+ Status = StatAuthError;
+ ErrorText = MetaIndexParser->ErrorText;
return;
}
-
- Item::Failed(Message,Cnf);
+
+ if (!VerifyVendor(Message))
+ {
+ return;
+ }
+
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ std::cerr << "Signature verification succeeded: "
+ << DestFile << std::endl;
+
+ // Download further indexes with verification
+ QueueIndexes(true);
+
+ // Done, move signature file into position
+ string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
+ URItoFileName(RealURI) + ".gpg";
+ Rename(SigFile,VerifiedSigFile);
+ chmod(VerifiedSigFile.c_str(),0644);
+}
+ /*}}}*/
+void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
+{
+ for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
+ Target != IndexTargets->end();
+ Target++)
+ {
+ HashString ExpectedIndexHash;
+ if (verify)
+ {
+ const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
+ if (!Record)
+ {
+ Status = StatAuthError;
+ ErrorText = "Unable to find expected entry "
+ + (*Target)->MetaKey + " in Meta-index file (malformed Release file?)";
+ return;
+ }
+ ExpectedIndexHash = Record->Hash;
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ {
+ std::cerr << "Queueing: " << (*Target)->URI << std::endl;
+ std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
+ }
+ if (ExpectedIndexHash.empty())
+ {
+ Status = StatAuthError;
+ ErrorText = "Unable to find hash sum for "
+ + (*Target)->MetaKey + " in Meta-index file";
+ return;
+ }
+ }
+
+      /* Queue the Packages file (either the diff or the full Packages file,
+         depending on the user's option) - we also check if the PDiff Index
+         file is listed in the Meta-Index file. It would be ideal if
+         pkgAcqDiffIndex tested this instead, but passing the required info
+         to it is too much hassle */
+ if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
+ MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true))
+ new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
+ (*Target)->ShortDesc, ExpectedIndexHash);
+ else
+ new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
+ (*Target)->ShortDesc, ExpectedIndexHash);
+ }
+}
+ /*}}}*/
+bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/
+{
+// // Maybe this should be made available from above so we don't have
+// // to read and parse it every time?
+// pkgVendorList List;
+// List.ReadMainList();
+
+// const Vendor* Vndr = NULL;
+// for (std::vector<string>::const_iterator I = GPGVOutput.begin(); I != GPGVOutput.end(); I++)
+// {
+// string::size_type pos = (*I).find("VALIDSIG ");
+// if (_config->FindB("Debug::Vendor", false))
+// std::cerr << "Looking for VALIDSIG in \"" << (*I) << "\": pos " << pos
+// << std::endl;
+// if (pos != std::string::npos)
+// {
+// string Fingerprint = (*I).substr(pos+sizeof("VALIDSIG"));
+// if (_config->FindB("Debug::Vendor", false))
+// std::cerr << "Looking for \"" << Fingerprint << "\" in vendor..." <<
+// std::endl;
+// Vndr = List.FindVendor(Fingerprint) != "";
+// if (Vndr != NULL);
+// break;
+// }
+// }
+ string::size_type pos;
+
+   // check for missing sigs (that were not fatal because otherwise we would
+   // have bombed out earlier)
+ string missingkeys;
+ string msg = _("There is no public key available for the "
+ "following key IDs:\n");
+ pos = Message.find("NO_PUBKEY ");
+ if (pos != std::string::npos)
+ {
+ string::size_type start = pos+strlen("NO_PUBKEY ");
+ string Fingerprint = Message.substr(start, Message.find("\n")-start);
+ missingkeys += (Fingerprint);
+ }
+ if(!missingkeys.empty())
+ _error->Warning("%s", string(msg+missingkeys).c_str());
+
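+   // normalise the expected distribution name so it can be compared
+   // against the codename/suite announced in the Release file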
+ string Transformed = MetaIndexParser->GetExpectedDist();
+
+ if (Transformed == "../project/experimental")
+ {
+ Transformed = "experimental";
+ }
+
+ pos = Transformed.rfind('/');
+ if (pos != string::npos)
+ {
+ Transformed = Transformed.substr(0, pos);
+ }
+
+ if (Transformed == ".")
+ {
+ Transformed = "";
+ }
+
+ if (_config->FindB("Acquire::Check-Valid-Until", true) == true &&
+ MetaIndexParser->GetValidUntil() > 0) {
+ time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil();
+ if (invalid_since > 0)
+      // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
+      // the time since which the file has been invalid - formatted in the same way as in
+      // the download progress display (e.g. 7d 3h 42min 1s)
+ return _error->Error(_("Release file expired, ignoring %s (invalid since %s)"),
+ RealURI.c_str(), TimeToStr(invalid_since).c_str());
+ }
+
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ {
+ std::cerr << "Got Codename: " << MetaIndexParser->GetDist() << std::endl;
+ std::cerr << "Expecting Dist: " << MetaIndexParser->GetExpectedDist() << std::endl;
+ std::cerr << "Transformed Dist: " << Transformed << std::endl;
+ }
+
+ if (MetaIndexParser->CheckDist(Transformed) == false)
+ {
+ // This might become fatal one day
+// Status = StatAuthError;
+// ErrorText = "Conflicting distribution; expected "
+// + MetaIndexParser->GetExpectedDist() + " but got "
+// + MetaIndexParser->GetDist();
+// return false;
+ if (!Transformed.empty())
+ {
+ _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
+ Desc.Description.c_str(),
+ Transformed.c_str(),
+ MetaIndexParser->GetDist().c_str());
+ }
+ }
+
+ return true;
}
/*}}}*/
+// pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+{
+ if (AuthPass == true)
+ {
+      // gpgv method failed: if we still have the last known good signature,
+      // restore it and keep using the previously verified index files
+ string LastGoodSigFile = _config->FindDir("Dir::State::lists") +
+ "partial/" + URItoFileName(RealURI) + ".gpg.reverify";
+ if(FileExists(LastGoodSigFile))
+ {
+ string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
+ URItoFileName(RealURI) + ".gpg";
+ Rename(LastGoodSigFile,VerifiedSigFile);
+ Status = StatTransientNetworkError;
+      _error->Warning(_("An error occurred during the signature "
+                        "verification. The repository is not updated "
+                        "and the previous index files will be used. "
+                        "GPG error: %s: %s\n"),
+ Desc.Description.c_str(),
+ LookupTag(Message,"Message").c_str());
+ RunScripts("APT::Update::Auth-Failure");
+ return;
+ } else {
+ _error->Warning(_("GPG error: %s: %s"),
+ Desc.Description.c_str(),
+ LookupTag(Message,"Message").c_str());
+ }
+ // gpgv method failed
+ ReportMirrorFailure("GPGFailure");
+ }
+ // No Release file was present, or verification failed, so fall
+ // back to queueing Packages files without verification
+ QueueIndexes(false);
+}
+ /*}}}*/
// AcqArchive::AcqArchive - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* This just sets up the initial fetch environment and queues the first
pkgRecords *Recs,pkgCache::VerIterator const &Version,
string &StoreFilename) :
Item(Owner), Version(Version), Sources(Sources), Recs(Recs),
- StoreFilename(StoreFilename), Vf(Version.FileList())
+ StoreFilename(StoreFilename), Vf(Version.FileList()),
+ Trusted(false)
{
Retries = _config->FindI("Acquire::Retries",0);
if (Version.Arch() == 0)
- _error->Error("I wasn't able to locate file for the %s package. "
- "This might mean you need to manually fix this package. (due to missing arch)",
+ {
+ _error->Error(_("I wasn't able to locate a file for the %s package. "
+ "This might mean you need to manually fix this package. "
+ "(due to missing arch)"),
Version.ParentPkg().Name());
+ return;
+ }
- // Generate the final file name as: package_version_arch.deb
- StoreFilename = QuoteString(Version.ParentPkg().Name(),"_:") + '_' +
- QuoteString(Version.VerStr(),"_:") + '_' +
- QuoteString(Version.Arch(),"_:.") + ".deb";
+ /* We need to find a filename to determine the extension. We make the
+ assumption here that all the available sources for this version share
+ the same extension.. */
+   // Skip entries flagged NotSource, they do not have file fields.
+ for (; Vf.end() == false; Vf++)
+ {
+ if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
+ continue;
+ break;
+ }
+   // If we found no entry it does not really matter, we are going to fail out below anyway
+ if (Vf.end() != true)
+ {
+ // If this fails to get a file name we will bomb out below.
+ pkgRecords::Parser &Parse = Recs->Lookup(Vf);
+ if (_error->PendingError() == true)
+ return;
+
+ // Generate the final file name as: package_version_arch.foo
+ StoreFilename = QuoteString(Version.ParentPkg().Name(),"_:") + '_' +
+ QuoteString(Version.VerStr(),"_:") + '_' +
+ QuoteString(Version.Arch(),"_:.") +
+ "." + flExtension(Parse.FileName());
+ }
+
+ // check if we have one trusted source for the package. if so, switch
+ // to "TrustedOnly" mode
+ for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; i++)
+ {
+ pkgIndexFile *Index;
+ if (Sources->FindIndex(i.File(),Index) == false)
+ continue;
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ {
+ std::cerr << "Checking index: " << Index->Describe()
+ << "(Trusted=" << Index->IsTrusted() << ")\n";
+ }
+ if (Index->IsTrusted()) {
+ Trusted = true;
+ break;
+ }
+ }
+
+   // "allow-unauthenticated" restores apt's old fetching behaviour
+ // that means that e.g. unauthenticated file:// uris are higher
+ // priority than authenticated http:// uris
+ if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
+ Trusted = false;
+
// Select a source
if (QueueNext() == false && _error->PendingError() == false)
- _error->Error("I wasn't able to locate file for the %s package. "
- "This might mean you need to manually fix this package.",
+      _error->Error(_("I wasn't able to locate a file for the %s package. "
+ "This might mean you need to manually fix this package."),
Version.ParentPkg().Name());
}
/*}}}*/
checking later. */
bool pkgAcqArchive::QueueNext()
{
+ string const ForceHash = _config->Find("Acquire::ForceHash");
for (; Vf.end() == false; Vf++)
{
// Ignore not source sources
continue;
// Try to cross match against the source list
- string PkgFile = flNotDir(Vf.File().FileName());
- pkgSourceList::const_iterator Location;
- for (Location = Sources->begin(); Location != Sources->end(); Location++)
- if (PkgFile == URItoFileName(Location->PackagesURI()))
- break;
-
- if (Location == Sources->end())
- continue;
+ pkgIndexFile *Index;
+ if (Sources->FindIndex(Vf.File(),Index) == false)
+ continue;
+ // only try to get a trusted package from another source if that source
+ // is also trusted
+ if(Trusted && !Index->IsTrusted())
+ continue;
+
// Grab the text package record
pkgRecords::Parser &Parse = Recs->Lookup(Vf);
if (_error->PendingError() == true)
return false;
- PkgFile = Parse.FileName();
- MD5 = Parse.MD5Hash();
+ string PkgFile = Parse.FileName();
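+      // pick the hash to verify against: honour Acquire::ForceHash if set,
+      // otherwise prefer the strongest hash the index record provides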
+ if (ForceHash.empty() == false)
+ {
+ if(stringcasecmp(ForceHash, "sha256") == 0)
+ ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
+ else if (stringcasecmp(ForceHash, "sha1") == 0)
+ ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
+ else
+ ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+ }
+ else
+ {
+ string Hash;
+ if ((Hash = Parse.SHA256Hash()).empty() == false)
+ ExpectedHash = HashString("SHA256", Hash);
+ else if ((Hash = Parse.SHA1Hash()).empty() == false)
+ ExpectedHash = HashString("SHA1", Hash);
+ else
+ ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+ }
if (PkgFile.empty() == true)
- return _error->Error("The package index files are corrupted. No Filename: "
- "field for package %s."
- ,Version.ParentPkg().Name());
+ return _error->Error(_("The package index files are corrupted. No Filename: "
+ "field for package %s."),
+ Version.ParentPkg().Name());
+
+ Desc.URI = Index->ArchiveURI(PkgFile);
+ Desc.Description = Index->ArchiveInfo(Version);
+ Desc.Owner = this;
+ Desc.ShortDesc = Version.ParentPkg().Name();
// See if we already have the file. (Legacy filenames)
FileSize = Version->Size;
}
// Create the item
- Desc.URI = Location->ArchiveURI(PkgFile);
- Desc.Description = Location->ArchiveInfo(Version);
+ Local = false;
+ Desc.URI = Index->ArchiveURI(PkgFile);
+ Desc.Description = Index->ArchiveInfo(Version);
Desc.Owner = this;
Desc.ShortDesc = Version.ParentPkg().Name();
QueueURI(Desc);
// AcqArchive::Done - Finished fetching /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash)
+void pkgAcqArchive::Done(string Message,unsigned long Size,string CalcHash,
+ pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,Md5Hash);
+ Item::Done(Message,Size,CalcHash,Cfg);
// Check the size
if (Size != Version->Size)
{
- _error->Error("Size mismatch for package %s",Version.ParentPkg().Name());
+ Status = StatError;
+ ErrorText = _("Size mismatch");
return;
}
- // Check the md5
- if (Md5Hash.empty() == false && MD5.empty() == false)
+ // Check the hash
+ if(ExpectedHash.toStr() != CalcHash)
{
- if (Md5Hash != MD5)
- {
- _error->Error("MD5Sum mismatch for package %s",Version.ParentPkg().Name());
- Rename(DestFile + ".FAILED",DestFile);
- return;
- }
+ Status = StatError;
+ ErrorText = _("Hash Sum mismatch");
+ if(FileExists(DestFile))
+ Rename(DestFile,DestFile + ".FAILED");
+ return;
}
// Grab the output filename
void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{
ErrorText = LookupTag(Message,"Message");
+
+ /* We don't really want to retry on failed media swaps, this prevents
+ that. An interesting observation is that permanent failures are not
+ recorded. */
+ if (Cnf->Removable == true &&
+ StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
+ {
+ // Vf = Version.FileList();
+ while (Vf.end() == false) Vf++;
+ StoreFilename = string();
+ Item::Failed(Message,Cnf);
+ return;
+ }
+
if (QueueNext() == false)
{
// This is the retry counter
}
}
/*}}}*/
+// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
+// ---------------------------------------------------------------------
+bool pkgAcqArchive::IsTrusted()
+{
+ return Trusted;
+}
+ /*}}}*/
// AcqArchive::Finished - Fetching has finished, tidy up /*{{{*/
// ---------------------------------------------------------------------
/* */
StoreFilename = string();
}
/*}}}*/
-
// AcqFile::pkgAcqFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The file is added to the queue */
-pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string MD5,
- unsigned long Size,string Dsc,string ShortDesc) :
- Item(Owner), MD5(MD5)
+pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
+ unsigned long Size,string Dsc,string ShortDesc,
+ const string &DestDir, const string &DestFilename,
+ bool IsIndexFile) :
+ Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
{
- DestFile = flNotDir(URI);
+ Retries = _config->FindI("Acquire::Retries",0);
+ if(!DestFilename.empty())
+ DestFile = DestFilename;
+ else if(!DestDir.empty())
+ DestFile = DestDir + "/" + flNotDir(URI);
+ else
+ DestFile = flNotDir(URI);
+
// Create the item
Desc.URI = URI;
Desc.Description = Dsc;
else
PartialSize = Buf.st_size;
}
-
+
QueueURI(Desc);
}
/*}}}*/
// AcqFile::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqFile::Done(string Message,unsigned long Size,string MD5)
+void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash,
+ pkgAcquire::MethodConfig *Cnf)
{
- Item::Done(Message,Size,MD5);
+ Item::Done(Message,Size,CalcHash,Cnf);
+ // Check the hash
+ if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
+ {
+ Status = StatError;
+ ErrorText = _("Hash Sum mismatch");
+ Rename(DestFile,DestFile + ".FAILED");
+ return;
+ }
+
string FileName = LookupTag(Message,"Filename");
if (FileName.empty() == true)
{
if (FileName != DestFile)
{
Local = true;
- Desc.URI = "copy:" + FileName;
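+      // source files are symlinked into place when possible; fall back to
+      // a copy if symlinks are disabled or the source is on removable media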
+ if (_config->FindB("Acquire::Source-Symlinks",true) == false ||
+ Cnf->Removable == true)
+ {
+ Desc.URI = "copy:" + FileName;
+ QueueURI(Desc);
+ return;
+ }
+
+ // Erase the file if it is a symlink so we can overwrite it
+ struct stat St;
+ if (lstat(DestFile.c_str(),&St) == 0)
+ {
+ if (S_ISLNK(St.st_mode) != 0)
+ unlink(DestFile.c_str());
+ }
+
+ // Symlink the file
+ if (symlink(FileName.c_str(),DestFile.c_str()) != 0)
+ {
+ ErrorText = "Link to " + DestFile + " failure ";
+ Status = StatError;
+ Complete = false;
+ }
+ }
+}
+ /*}}}*/
+// AcqFile::Failed - Failure handler /*{{{*/
+// ---------------------------------------------------------------------
+/* Here we try other sources */
+void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+{
+ ErrorText = LookupTag(Message,"Message");
+
+ // This is the retry counter
+ if (Retries != 0 &&
+ Cnf->LocalOnly == false &&
+ StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
+ {
+ Retries--;
QueueURI(Desc);
return;
}
+
+ Item::Failed(Message,Cnf);
+}
+ /*}}}*/
+// AcqFile::Custom600Headers - Insert custom request headers		/*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+string pkgAcqFile::Custom600Headers()
+{
+ if (IsIndexFile)
+ return "\nIndex-File: true";
+ return "";
}
/*}}}*/