##################################################################### */
/*}}}*/
// Include Files /*{{{*/
+#include <config.h>
+
#include <apt-pkg/acquire-item.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
-#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/tagfile.h>
-
-#include <apti18n.h>
-
+#include <apt-pkg/indexrecords.h>
+#include <apt-pkg/acquire.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/indexfile.h>
+#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/cacheiterators.h>
+#include <apt-pkg/pkgrecords.h>
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <string.h>
+#include <iostream>
+#include <vector>
#include <sys/stat.h>
#include <unistd.h>
#include <errno.h>
#include <sstream>
#include <stdio.h>
#include <ctime>
+
+#include <apti18n.h>
/*}}}*/
using namespace std;
// ---------------------------------------------------------------------
/* Stash status and the file size. Note that setting Complete means
   sub-phases of the acquire process such as decompression are operating */
-void pkgAcquire::Item::Start(string /*Message*/,unsigned long Size)
+void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size)
{
Status = StatFetching;
if (FileSize == 0 && Complete == false)
// Acquire::Item::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,
- pkgAcquire::MethodConfig *Cnf)
+void pkgAcquire::Item::Done(string Message,unsigned long long Size,string /*Hash*/,
+ pkgAcquire::MethodConfig * /*Cnf*/)
{
// We just downloaded something..
string FileName = LookupTag(Message,"Filename");
/*}}}*/
// Acquire::Item::Rename - Rename a file /*{{{*/
// ---------------------------------------------------------------------
-/* This helper function is used by alot of item methods as thier final
+/* This helper function is used by a lot of item methods as their final
step */
void pkgAcquire::Item::Rename(string From,string To)
{
}
}
/*}}}*/
+bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
+{
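+   // keep the failed file around as DestFile.FAILED for inspection and set the matching error state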
+ if(FileExists(DestFile))
+ Rename(DestFile, DestFile + ".FAILED");
+
+ switch (error)
+ {
+ case HashSumMismatch:
+ ErrorText = _("Hash Sum mismatch");
+ Status = StatAuthError;
+ ReportMirrorFailure("HashChecksumFailure");
+ break;
+ case SizeMismatch:
+ ErrorText = _("Size mismatch");
+ Status = StatAuthError;
+ ReportMirrorFailure("SizeFailure");
+ break;
+ case InvalidFormat:
+ ErrorText = _("Invalid file format");
+ Status = StatError;
+      // do not report as usually it's not the mirror's fault, but a Portal/Proxy
+ break;
+ }
+ return false;
+}
+ /*}}}*/
// Acquire::Item::ReportMirrorFailure /*{{{*/
// ---------------------------------------------------------------------
void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
/*}}}*/
// AcqSubIndex::AcqSubIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
-/* Get the Index file first and see if there are languages available
- * If so, create a pkgAcqIndexTrans for the found language(s).
- */
+/* Get a sub-index file based on checksums from a 'master' file and
+ possibly query additional files */
pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,
string const &URIDesc, string const &ShortDesc,
HashString const &ExpectedHash)
: Item(Owner), ExpectedHash(ExpectedHash)
{
+   /* XXX: Beware: Currently this class does nothing (of value) anymore! */
Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false);
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
-void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
{
if(Debug)
- std::clog << "pkgAcqSubIndex failed: " << Desc.URI << std::endl;
+ std::clog << "pkgAcqSubIndex failed: " << Desc.URI << " with " << Message << std::endl;
Complete = false;
Status = StatDone;
Dequeue();
- // No good Index is provided, so try guessing
- std::vector<std::string> langs = APT::Configuration::getLanguages(true);
- for (std::vector<std::string>::const_iterator l = langs.begin();
- l != langs.end(); ++l)
- {
- if (*l == "none") continue;
- string const file = "Translation-" + *l;
- new pkgAcqIndexTrans(Owner, Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file),
- Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file),
- file);
- }
+ // No good Index is provided
}
/*}}}*/
-void pkgAcqSubIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+void pkgAcqSubIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
return;
}
- // sucess in downloading the index
+ // success in downloading the index
// rename the index
if(Debug)
std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl;
indexRecords SubIndexParser;
if (FileExists(IndexFile) == false || SubIndexParser.Load(IndexFile) == false)
return false;
-
- std::vector<std::string> lang = APT::Configuration::getLanguages(true);
- for (std::vector<std::string>::const_iterator l = lang.begin();
- l != lang.end(); ++l)
- {
- if (*l == "none")
- continue;
-
- string file = "Translation-" + *l;
- indexRecords::checkSum const *Record = SubIndexParser.Lookup(file);
- HashString expected;
- if (Record == NULL)
- {
- // FIXME: the Index file provided by debian currently only includes bz2 records
- Record = SubIndexParser.Lookup(file + ".bz2");
- if (Record == NULL)
- continue;
- }
- else
- {
- expected = Record->Hash;
- if (expected.empty() == true)
- continue;
- }
-
- IndexTarget target;
- target.Description = Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file);
- target.MetaKey = file;
- target.ShortDesc = file;
- target.URI = Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file);
- new pkgAcqIndexTrans(Owner, &target, expected, &SubIndexParser);
- }
+   // do something with the downloaded index
return true;
}
/*}}}*/
// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
-/* Get the DiffIndex file first and see if there are patches availabe
+/* Get the DiffIndex file first and see if there are patches available
* If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
* patches. If anything goes wrong in that process, it will fall back to
* the original packages file
Desc.URI = URI + ".diff/Index";
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI) + string(".DiffIndex");
+ DestFile += URItoFileName(Desc.URI);
if(Debug)
std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
return;
}
- if(Debug)
- std::clog << "pkgAcqIndexDiffs::pkgAcqIndexDiffs(): "
- << CurrentPackagesFile << std::endl;
-
+ if(Debug)
+ std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): "
+ << CurrentPackagesFile << std::endl;
+
QueueURI(Desc);
}
string pkgAcqDiffIndex::Custom600Headers()
{
string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(RealURI) + string(".IndexDiff");
+ Final += URItoFileName(Desc.URI);
if(Debug)
std::clog << "Custom600Header-IMS: " << Final << std::endl;
bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
{
if(Debug)
- std::clog << "pkgAcqIndexDiffs::ParseIndexDiff() " << IndexDiffFile
- << std::endl;
+      std::clog << "pkgAcqDiffIndex::ParseDiffIndex() " << IndexDiffFile
+ << std::endl;
pkgTagSection Tags;
string ServerSha1;
FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
SHA1Summation SHA1;
- SHA1.AddFD(fd.Fd(), fd.Size());
+ SHA1.AddFD(fd);
string const local_sha1 = SHA1.Result();
- if(local_sha1 == ServerSha1)
+ if(local_sha1 == ServerSha1)
{
- // we have the same sha1 as the server
+ // we have the same sha1 as the server so we are done here
if(Debug)
std::clog << "Package file is up-to-date" << std::endl;
- // set found to true, this will queue a pkgAcqIndexDiffs with
- // a empty availabe_patches
- found = true;
- }
- else
+ // list cleanup needs to know that this file as well as the already
+ // present index is ours, so we create an empty diff to save it for us
+ new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
+ ExpectedHash, ServerSha1, available_patches);
+ return true;
+ }
+ else
{
if(Debug)
std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl;
}
// we have something, queue the next diff
- if(found)
+ if(found)
{
// queue the diffs
string::size_type const last_space = Description.rfind(" ");
if(last_space != string::npos)
Description.erase(last_space, Description.size()-last_space);
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
+
+      /* decide if we should download patches one by one or in one go:
+         The first is good if the server merges patches, but many don't, so client-
+         based merging can be attempted, in which case the second is better.
+         "bad things" will happen if patches are merged on the server,
+         but client-side merging is attempted as well */
+ bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
+ if (pdiff_merge == true)
+ {
+ // reprepro adds this flag if it has merged patches on the server
+ std::string const precedence = Tags.FindS("X-Patch-Precedence");
+ pdiff_merge = (precedence != "merged");
+ }
+
+ if (pdiff_merge == false)
+ new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
+ ExpectedHash, ServerSha1, available_patches);
+ else
+ {
+ std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
+ for(size_t i = 0; i < available_patches.size(); ++i)
+ (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, RealURI, Description, Desc.ShortDesc, ExpectedHash,
+ available_patches[i], diffs);
+ }
+
Complete = false;
Status = StatDone;
Dequeue();
return false;
}
/*}}}*/
-void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
{
if(Debug)
- std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << std::endl
- << "Falling back to normal index file aquire" << std::endl;
+ std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
+ << "Falling back to normal index file acquire" << std::endl;
new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc,
ExpectedHash);
Dequeue();
}
/*}}}*/
-void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
string FinalFile;
FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
- // sucess in downloading the index
+ // success in downloading the index
// rename the index
FinalFile += string(".IndexDiff");
if(Debug)
Desc.Owner = this;
Desc.ShortDesc = ShortDesc;
- if(available_patches.size() == 0)
+ if(available_patches.empty() == true)
{
// we are done (yeah!)
Finish(true);
}
}
/*}}}*/
-void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
{
if(Debug)
- std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << std::endl
- << "Falling back to normal index file aquire" << std::endl;
+ std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
+ << "Falling back to normal index file acquire" << std::endl;
new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
ExpectedHash);
Finish();
if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
{
- Status = StatAuthError;
- ErrorText = _("MD5Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
Dequeue();
return;
}
FileFd fd(FinalFile, FileFd::ReadOnly);
SHA1Summation SHA1;
- SHA1.AddFD(fd.Fd(), fd.Size());
+ SHA1.AddFD(fd);
string local_sha1 = string(SHA1.Result());
if(Debug)
std::clog << "QueueNextDiff: "
// remove all patches until the next matching patch is found
// this requires the Index file to be ordered
for(vector<DiffInfo>::iterator I=available_patches.begin();
- available_patches.size() > 0 &&
+ available_patches.empty() == false &&
I != available_patches.end() &&
- (*I).sha1 != local_sha1;
- I++)
+ I->sha1 != local_sha1;
+ ++I)
{
available_patches.erase(I);
}
// error checking and falling back if no patch was found
- if(available_patches.size() == 0)
- {
+ if(available_patches.empty() == true)
+ {
Failed("", NULL);
return false;
}
// queue the right diff
- Desc.URI = string(RealURI) + ".diff/" + available_patches[0].file + ".gz";
+ Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz";
Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file);
return true;
}
/*}}}*/
-void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
string FinalFile;
FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
- // sucess in downloading a diff, enter ApplyDiff state
+ // success in downloading a diff, enter ApplyDiff state
if(State == StateFetchDiff)
{
{
// remove the just applied patch
available_patches.erase(available_patches.begin());
+ unlink((FinalFile + ".ed").c_str());
// move into place
if(Debug)
chmod(FinalFile.c_str(),0644);
// see if there is more to download
- if(available_patches.size() > 0) {
+ if(available_patches.empty() == false) {
new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
ExpectedHash, ServerSha1, available_patches);
return Finish();
}
}
/*}}}*/
+// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
+pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
+ string const &URI, string const &URIDesc,
+ string const &ShortDesc, HashString const &ExpectedHash,
+ DiffInfo const &patch,
+ std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
+ : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+ patch(patch),allPatches(allPatches), State(StateFetchDiff)
+{
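+   // every instance fetches exactly one patch; the shared allPatches vector lets
+   // the last download to finish hand the whole batch to the rred method in one go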
+
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(URI);
+
+ Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
+
+ Description = URIDesc;
+ Desc.Owner = this;
+ Desc.ShortDesc = ShortDesc;
+
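+   // the patch itself is fetched gzip-compressed from <index>.diff/<patchname>.gz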
+ Desc.URI = RealURI + ".diff/" + patch.file + ".gz";
+ Desc.Description = Description + " " + patch.file + string(".pdiff");
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(RealURI + ".diff/" + patch.file);
+
+ if(Debug)
+ std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
+
+ QueueURI(Desc);
+}
+ /*}}}*/
+void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
+{
+ if(Debug)
+ std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+
+ // check if we are the first to fail, otherwise we are done here
+ State = StateDoneDiff;
+ for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+ I != allPatches->end(); ++I)
+ if ((*I)->State == StateErrorDiff)
+ return;
+
+   // first failure means we should fall back
+ State = StateErrorDiff;
+ std::clog << "Falling back to normal index file acquire" << std::endl;
+ new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
+ ExpectedHash);
+}
+ /*}}}*/
+void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+ pkgAcquire::MethodConfig *Cnf)
+{
+ if(Debug)
+ std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
+
+ Item::Done(Message,Size,Md5Hash,Cnf);
+
+ string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+
+ if (State == StateFetchDiff)
+ {
+ // rred expects the patch as $FinalFile.ed.$patchname.gz
+ Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz");
+
+ // check if this is the last completed diff
+ State = StateDoneDiff;
+ for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+ I != allPatches->end(); ++I)
+ if ((*I)->State != StateDoneDiff)
+ {
+ if(Debug)
+ std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl;
+ return;
+ }
+
+ // this is the last completed diff, so we are ready to apply now
+ State = StateApplyDiff;
+
+ if(Debug)
+ std::clog << "Sending to rred method: " << FinalFile << std::endl;
+
+ Local = true;
+ Desc.URI = "rred:" + FinalFile;
+ QueueURI(Desc);
+ Mode = "rred";
+ return;
+ }
+   // success in downloading/applying all diffs, clean up
+ else if (State == StateApplyDiff)
+ {
+ // see if we really got the expected file
+ if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
+ {
+ RenameOnError(HashSumMismatch);
+ return;
+ }
+
+ // move the result into place
+ if(Debug)
+ std::clog << "Moving patched file in place: " << std::endl
+ << DestFile << " -> " << FinalFile << std::endl;
+ Rename(DestFile, FinalFile);
+ chmod(FinalFile.c_str(), 0644);
+
+ // otherwise lists cleanup will eat the file
+ DestFile = FinalFile;
+
+      // ensure the .ed files are gone regardless of list-cleanup
+ for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+ I != allPatches->end(); ++I)
+ {
+ std::string patch = FinalFile + ".ed." + (*I)->patch.file + ".gz";
+ unlink(patch.c_str());
+ }
+
+ // all set and done
+ Complete = true;
+ if(Debug)
+ std::clog << "allDone: " << DestFile << "\n" << std::endl;
+ }
+}
+ /*}}}*/
// AcqIndex::AcqIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The package file is added to the queue and a second class is
to the uncompressed version of the file. If this is so the file
is copied into the partial directory. In all other cases the file
is decompressed with a gzip uri. */
-void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
+void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,Hash,Cfg);
if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
{
- Status = StatAuthError;
- ErrorText = _("Hash Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
- ReportMirrorFailure("HashChecksumFailure");
+ RenameOnError(HashSumMismatch);
return;
}
- /* Verify the index file for correctness (all indexes must
- * have a Package field) (LP: #346386) (Closes: #627642) */
+ // FIXME: this can go away once we only ever download stuff that
+ // has a valid hash and we never do GET based probing
+ //
+ /* Always verify the index file for correctness (all indexes must
+ * have a Package field) (LP: #346386) (Closes: #627642)
+ */
+ FileFd fd(DestFile, FileFd::ReadOnly);
+ // Only test for correctness if the file is not empty (empty is ok)
+ if (fd.FileSize() > 0)
{
- FileFd fd(DestFile, FileFd::ReadOnly);
- pkgTagSection sec;
- pkgTagFile tag(&fd);
-
- // Only test for correctness if the file is not empty (empty is ok)
- if (fd.Size() > 0) {
- if (_error->PendingError() || !tag.Step(sec)) {
- Status = StatError;
- _error->DumpErrors();
- Rename(DestFile,DestFile + ".FAILED");
- return;
- } else if (!sec.Exists("Package")) {
- Status = StatError;
- ErrorText = ("Encountered a section with no Package: header");
- Rename(DestFile,DestFile + ".FAILED");
- return;
- }
+ pkgTagSection sec;
+ pkgTagFile tag(&fd);
+
+ // all our current indexes have a field 'Package' in each section
+ if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false)
+ {
+ RenameOnError(InvalidFormat);
+ return;
}
}
DestFile += ".decomp";
Desc.URI = decompProg + ":" + FileName;
QueueURI(Desc);
+
+   // FIXME: this points to a C++ string that goes out of scope
Mode = decompProg.c_str();
}
/*}}}*/
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
- struct stat Buf;
- if (stat(Final.c_str(),&Buf) == 0)
+ if (RealFileExists(Final) == true)
{
// File was already in place. It needs to be re-downloaded/verified
- // because Release might have changed, we do give it a differnt
+ // because Release might have changed, we do give it a different
// name than DestFile because otherwise the http method will
// send If-Range requests and there are too many broken servers
// out there that do not understand them
}
QueueURI(Desc);
+}
+ /*}}}*/
+pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
+{
+   // if the file was never queued, undo the file changes done in the constructor
+ if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false &&
+ LastGoodSig.empty() == false)
+ {
+ string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+ if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
+ Rename(LastGoodSig, Final);
+ }
+
}
/*}}}*/
// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
-void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5,
+void pkgAcqMetaSig::Done(string Message,unsigned long long Size,string MD5,
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,MD5,Cfg);
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
-void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*/
+void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,string Hash, /*{{{*/
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,Hash,Cfg);
if (SigFile == "")
{
// There was no signature file, so we are finished. Download
- // the indexes without verification.
+      // the indexes, doing only hashsum verification where possible
+ MetaIndexParser->Load(DestFile);
QueueIndexes(false);
}
else
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);
if (SigFile == DestFile)
+ {
SigFile = FinalFile;
+ // constructor of pkgAcqMetaClearSig moved it out of the way,
+ // now move it back in on IMS hit for the 'old' file
+ string const OldClearSig = DestFile + ".reverify";
+ if (RealFileExists(OldClearSig) == true)
+ Rename(OldClearSig, FinalFile);
+ }
DestFile = FinalFile;
}
Complete = true;
return;
}
#endif
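+   // check whether the Release file lists any Translation-* entries at all:
+   // if it does not, a missing entry for an optional translation target proves nothing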
+ bool transInRelease = false;
+ {
+ std::vector<std::string> const keys = MetaIndexParser->MetaKeys();
+ for (std::vector<std::string>::const_iterator k = keys.begin(); k != keys.end(); ++k)
+	 // FIXME: Feels wrong to check for a hardcoded string here, but what else should we do…
+ if (k->find("Translation-") != std::string::npos)
+ {
+ transInRelease = true;
+ break;
+ }
+ }
+
for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
Target != IndexTargets->end();
- Target++)
+ ++Target)
{
HashString ExpectedIndexHash;
- if (verify)
+ const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
+ bool compressedAvailable = false;
+ if (Record == NULL)
{
- const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
- if (Record == NULL)
+ if ((*Target)->IsOptional() == true)
{
- if ((*Target)->IsOptional() == false)
- {
- Status = StatAuthError;
- strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str());
- return;
- }
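+	    // the optional target is not listed uncompressed, but a compressed variant might be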
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+ for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+ if (MetaIndexParser->Exists((*Target)->MetaKey + "." + *t) == true)
+ {
+ compressedAvailable = true;
+ break;
+ }
}
- else
+ else if (verify == true)
{
- ExpectedIndexHash = Record->Hash;
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
- std::cerr << "Queueing: " << (*Target)->URI << std::endl;
- std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
- }
- if (ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false)
- {
- Status = StatAuthError;
- strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
- return;
- }
+ Status = StatAuthError;
+ strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str());
+ return;
+ }
+ }
+ else
+ {
+ ExpectedIndexHash = Record->Hash;
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ {
+ std::cerr << "Queueing: " << (*Target)->URI << std::endl;
+ std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
+ std::cerr << "For: " << Record->MetaKeyFilename << std::endl;
+ }
+ if (verify == true && ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false)
+ {
+ Status = StatAuthError;
+ strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
+ return;
}
}
if ((*Target)->IsSubIndex() == true)
new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description,
(*Target)->ShortDesc, ExpectedIndexHash);
- else
- new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ else if (transInRelease == false || Record != NULL || compressedAvailable == true)
+ {
+ if (_config->FindB("Acquire::PDiffs",true) == true && transInRelease == true &&
+ MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)
+ new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
+ (*Target)->ShortDesc, ExpectedIndexHash);
+ else
+ new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ }
continue;
}
in the Meta-Index file. Ideal would be if pkgAcqDiffIndex would test this
   instead, but passing the required info to it is too much hassle */
if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
- MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true))
+ MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true))
new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
(*Target)->ShortDesc, ExpectedIndexHash);
else
missingkeys += (Fingerprint);
}
if(!missingkeys.empty())
- _error->Warning("%s", string(msg+missingkeys).c_str());
+ _error->Warning("%s", (msg + missingkeys).c_str());
string Transformed = MetaIndexParser->GetExpectedDist();
// TRANSLATOR: The first %s is the URL of the bad Release file, the second is
// the time since when the file is invalid - formatted in the same way as in
// the download progress display (e.g. 7d 3h 42min 1s)
- return _error->Error(_("Release file expired, ignoring %s (invalid since %s)"),
- RealURI.c_str(), TimeToStr(invalid_since).c_str());
+ return _error->Error(
+ _("Release file for %s is expired (invalid since %s). "
+ "Updates for this repository will not be applied."),
+ RealURI.c_str(), TimeToStr(invalid_since).c_str());
}
if (_config->FindB("Debug::pkgAcquire::Auth", false))
// pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)
{
if (AuthPass == true)
{
// gpgv method failed, if we have a good signature
- string LastGoodSigFile = _config->FindDir("Dir::State::lists");
- if (DestFile == SigFile)
- LastGoodSigFile.append(URItoFileName(RealURI));
- else
- LastGoodSigFile.append("partial/").append(URItoFileName(RealURI)).append(".gpg.reverify");
+ string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI));
+ if (DestFile != SigFile)
+ LastGoodSigFile.append(".gpg");
+ LastGoodSigFile.append(".reverify");
if(FileExists(LastGoodSigFile))
{
+ string VerifiedSigFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
if (DestFile != SigFile)
- {
- string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
- URItoFileName(RealURI) + ".gpg";
- Rename(LastGoodSigFile,VerifiedSigFile);
- }
+ VerifiedSigFile.append(".gpg");
+ Rename(LastGoodSigFile, VerifiedSigFile);
Status = StatTransientNetworkError;
- _error->Warning(_("A error occurred during the signature "
+ _error->Warning(_("An error occurred during the signature "
"verification. The repository is not updated "
"and the previous index files will be used. "
"GPG error: %s: %s\n"),
MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc)
{
SigFile = DestFile;
+
+   // keep the old InRelease around in case of transient network errors
+ string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+ if (RealFileExists(Final) == true)
+ {
+ string const LastGoodSig = DestFile + ".reverify";
+ Rename(Final,LastGoodSig);
+ }
+}
+ /*}}}*/
+pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
+{
+   // if the file was never queued, undo the file changes done in the constructor
+ if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false)
+ {
+ string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+ string const LastGoodSig = DestFile + ".reverify";
+ if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
+ Rename(LastGoodSig, Final);
+ }
}
/*}}}*/
// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
- return "\nIndex-File: true\nFail-Ignore: true\n";
+ {
+ Final = DestFile + ".reverify";
+ if (stat(Final.c_str(),&Buf) != 0)
+ return "\nIndex-File: true\nFail-Ignore: true\n";
+ }
return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
{
if (AuthPass == false)
{
+      // Remove the 'old' InRelease file if we try Release.gpg now, as otherwise
+      // the file will stay around and give a false-auth impression (CVE-2012-0214)
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile.append(URItoFileName(RealURI));
+ if (FileExists(FinalFile))
+ unlink(FinalFile.c_str());
+
new pkgAcqMetaSig(Owner,
MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
_error->Error(_("I wasn't able to locate a file for the %s package. "
"This might mean you need to manually fix this package. "
"(due to missing arch)"),
- Version.ParentPkg().Name());
+ Version.ParentPkg().FullName().c_str());
return;
}
assumption here that all the available sources for this version share
the same extension.. */
// Skip not source sources, they do not have file fields.
- for (; Vf.end() == false; Vf++)
+ for (; Vf.end() == false; ++Vf)
{
if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
continue;
}
   // check if we have one trusted source for the package. If so, switch
- // to "TrustedOnly" mode
- for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; i++)
+ // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode
+ bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false);
+ bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false);
+ bool seenUntrusted = false;
+ for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
{
pkgIndexFile *Index;
if (Sources->FindIndex(i.File(),Index) == false)
continue;
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
+
+ if (debugAuth == true)
std::cerr << "Checking index: " << Index->Describe()
- << "(Trusted=" << Index->IsTrusted() << ")\n";
- }
- if (Index->IsTrusted()) {
+ << "(Trusted=" << Index->IsTrusted() << ")" << std::endl;
+
+ if (Index->IsTrusted() == true)
+ {
Trusted = true;
- break;
+ if (allowUnauth == false)
+ break;
}
+ else
+ seenUntrusted = true;
}
// "allow-unauthenticated" restores apts old fetching behaviour
// that means that e.g. unauthenticated file:// uris are higher
// priority than authenticated http:// uris
- if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
+ if (allowUnauth == true && seenUntrusted == true)
Trusted = false;
// Select a source
if (QueueNext() == false && _error->PendingError() == false)
- _error->Error(_("I wasn't able to locate a file for the %s package. "
- "This might mean you need to manually fix this package."),
- Version.ParentPkg().Name());
+ _error->Error(_("Can't find a source to download version '%s' of '%s'"),
+ Version.VerStr(), Version.ParentPkg().FullName(false).c_str());
}
/*}}}*/
// AcqArchive::QueueNext - Queue the next file source /*{{{*/
bool pkgAcqArchive::QueueNext()
{
string const ForceHash = _config->Find("Acquire::ForceHash");
- for (; Vf.end() == false; Vf++)
+ for (; Vf.end() == false; ++Vf)
{
// Ignore not source sources
if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
{
if(stringcasecmp(ForceHash, "sha512") == 0)
ExpectedHash = HashString("SHA512", Parse.SHA512Hash());
- if(stringcasecmp(ForceHash, "sha256") == 0)
+ else if(stringcasecmp(ForceHash, "sha256") == 0)
ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
else if (stringcasecmp(ForceHash, "sha1") == 0)
ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
Desc.URI = Index->ArchiveURI(PkgFile);
Desc.Description = Index->ArchiveInfo(Version);
Desc.Owner = this;
- Desc.ShortDesc = Version.ParentPkg().Name();
+ Desc.ShortDesc = Version.ParentPkg().FullName(true);
// See if we already have the file. (Legacy filenames)
FileSize = Version->Size;
if (stat(FinalFile.c_str(),&Buf) == 0)
{
// Make sure the size matches
- if ((unsigned)Buf.st_size == Version->Size)
+ if ((unsigned long long)Buf.st_size == Version->Size)
{
Complete = true;
Local = true;
if (stat(FinalFile.c_str(),&Buf) == 0)
{
// Make sure the size matches
- if ((unsigned)Buf.st_size == Version->Size)
+ if ((unsigned long long)Buf.st_size == Version->Size)
{
Complete = true;
Local = true;
return true;
}
- /* Hmm, we have a file and its size does not match, this shouldnt
+ /* Hmm, we have a file and its size does not match, this shouldn't
happen.. */
unlink(FinalFile.c_str());
}
if (stat(DestFile.c_str(),&Buf) == 0)
{
// Hmm, the partial file is too big, erase it
- if ((unsigned)Buf.st_size > Version->Size)
+ if ((unsigned long long)Buf.st_size > Version->Size)
unlink(DestFile.c_str());
else
PartialSize = Buf.st_size;
}
-
+
+ // Disables download of archives - useful if no real installation follows,
+ // e.g. if we are just interested in proposed installation order
+ if (_config->FindB("Debug::pkgAcqArchive::NoQueue", false) == true)
+ {
+ Complete = true;
+ Local = true;
+ Status = StatDone;
+ StoreFilename = DestFile = FinalFile;
+ return true;
+ }
+
// Create the item
Local = false;
- Desc.URI = Index->ArchiveURI(PkgFile);
- Desc.Description = Index->ArchiveInfo(Version);
- Desc.Owner = this;
- Desc.ShortDesc = Version.ParentPkg().Name();
QueueURI(Desc);
- Vf++;
+ ++Vf;
return true;
}
return false;
// AcqArchive::Done - Finished fetching /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqArchive::Done(string Message,unsigned long Size,string CalcHash,
+void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,CalcHash,Cfg);
// Check the size
if (Size != Version->Size)
{
- Status = StatError;
- ErrorText = _("Size mismatch");
+ RenameOnError(SizeMismatch);
return;
}
// Check the hash
if(ExpectedHash.toStr() != CalcHash)
{
- Status = StatError;
- ErrorText = _("Hash Sum mismatch");
- if(FileExists(DestFile))
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
return;
}
StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
{
// Vf = Version.FileList();
- while (Vf.end() == false) Vf++;
+ while (Vf.end() == false) ++Vf;
StoreFilename = string();
Item::Failed(Message,Cnf);
return;
/*}}}*/
// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
// ---------------------------------------------------------------------
-bool pkgAcqArchive::IsTrusted()
+APT_PURE bool pkgAcqArchive::IsTrusted()
{
return Trusted;
}
// ---------------------------------------------------------------------
/* The file is added to the queue */
pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
- unsigned long Size,string Dsc,string ShortDesc,
+ unsigned long long Size,string Dsc,string ShortDesc,
const string &DestDir, const string &DestFilename,
bool IsIndexFile) :
Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
if (stat(DestFile.c_str(),&Buf) == 0)
{
// Hmm, the partial file is too big, erase it
- if ((unsigned)Buf.st_size > Size)
+ if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
unlink(DestFile.c_str());
else
PartialSize = Buf.st_size;
// AcqFile::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash,
+void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash,
pkgAcquire::MethodConfig *Cnf)
{
Item::Done(Message,Size,CalcHash,Cnf);
// Check the hash
if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
{
- Status = StatError;
- ErrorText = _("Hash Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
return;
}
return "";
}
/*}}}*/
-bool IndexTarget::IsOptional() const {
- if (strncmp(ShortDesc.c_str(), "Translation", 11) != 0)
- return false;
- return true;
-}
-bool IndexTarget::IsSubIndex() const {
- if (ShortDesc != "TranslationIndex")
- return false;
- return true;
-}