#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
-#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/tagfile.h>
#include <apt-pkg/indexrecords.h>
-#include <apt-pkg/metaindex.h>
-
+#include <apt-pkg/acquire.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/indexfile.h>
+#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/cacheiterators.h>
+#include <apt-pkg/pkgrecords.h>
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <string.h>
+#include <iostream>
+#include <vector>
#include <sys/stat.h>
#include <unistd.h>
#include <errno.h>
using namespace std;
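+// Helper: dump the expected vs. actual hashes of a file to stderr when
+// Debug::Acquire::HashSumMismatch is enabled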
+static void printHashSumComparision(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/
+{
+ if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false)
+ return;
+ std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl;
+ for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs)
+ std::cerr << "\t- " << hs->toStr() << std::endl;
+ std::cerr << " Actual Hash: " << std::endl;
+ for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs)
+ std::cerr << "\t- " << hs->toStr() << std::endl;
+}
+ /*}}}*/
+
// Acquire::Item::Item - Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-pkgAcquire::Item::Item(pkgAcquire *Owner) : Owner(Owner), FileSize(0),
- PartialSize(0), Mode(0), ID(0), Complete(false),
- Local(false), QueueCounter(0)
+pkgAcquire::Item::Item(pkgAcquire *Owner,
+ HashStringList const &ExpectedHashes,
+ unsigned long TransactionID)
+ : Owner(Owner), FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false),
+ Local(false), QueueCounter(0), TransactionID(TransactionID),
+ ExpectedAdditionalItems(0), ExpectedHashes(ExpectedHashes)
{
Owner->Add(this);
Status = StatIdle;
void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{
Status = StatIdle;
- ErrorText = LookupTag(Message,"Message");
+ if(ErrorText == "")
+ ErrorText = LookupTag(Message,"Message");
UsedMirror = LookupTag(Message,"UsedMirror");
if (QueueCounter <= 1)
{
// Acquire::Item::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcquire::Item::Done(string Message,unsigned long long Size,string Hash,
- pkgAcquire::MethodConfig *Cnf)
+void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringList const &/*Hash*/,
+ pkgAcquire::MethodConfig * /*Cnf*/)
{
// We just downloaded something..
string FileName = LookupTag(Message,"Filename");
- UsedMirror = LookupTag(Message,"UsedMirror");
+ UsedMirror = LookupTag(Message,"UsedMirror");
if (Complete == false && !Local && FileName == DestFile)
{
if (Owner->Log != 0)
/*}}}*/
// Acquire::Item::Rename - Rename a file /*{{{*/
// ---------------------------------------------------------------------
-/* This helper function is used by alot of item methods as thier final
+/* This helper function is used by a lot of item methods as their final
step */
void pkgAcquire::Item::Rename(string From,string To)
{
// ---------------------------------------------------------------------
/* Get a sub-index file based on checksums from a 'master' file and
possibly query additional files */
-pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,
- string const &URIDesc, string const &ShortDesc,
- HashString const &ExpectedHash)
- : Item(Owner), ExpectedHash(ExpectedHash)
+pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner,
+ unsigned long TransactionID,
+ string const &URI,
+ string const &URIDesc, string const &ShortDesc,
+ HashStringList const &ExpectedHashes)
+ : Item(Owner, ExpectedHashes, TransactionID)
{
/* XXX: Beware: Currently this class does nothing (of value) anymore ! */
Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false);
// AcqSubIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqSubIndex::Custom600Headers()
+string pkgAcqSubIndex::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(Desc.URI);
return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
-void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
{
if(Debug)
- std::clog << "pkgAcqSubIndex failed: " << Desc.URI << std::endl;
+ std::clog << "pkgAcqSubIndex failed: " << Desc.URI << " with " << Message << std::endl;
Complete = false;
Status = StatDone;
// No good Index is provided
}
/*}}}*/
-void pkgAcqSubIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqSubIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
return;
}
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message, Size, Hashes, Cnf);
string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI);
return;
}
- // sucess in downloading the index
+ // success in downloading the index
// rename the index
if(Debug)
std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl;
/*}}}*/
// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
-/* Get the DiffIndex file first and see if there are patches availabe
+/* Get the DiffIndex file first and see if there are patches available
* If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
* patches. If anything goes wrong in that process, it will fall back to
* the original packages file
*/
pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
- string URI,string URIDesc,string ShortDesc,
- HashString ExpectedHash)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
- Description(URIDesc)
+ unsigned long TransactionID,
+ IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser)
+ : pkgAcqBaseIndex(Owner, TransactionID, Target, ExpectedHashes,
+ MetaIndexParser)
{
Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
- Desc.Description = URIDesc + "/DiffIndex";
+ RealURI = Target->URI;
Desc.Owner = this;
- Desc.ShortDesc = ShortDesc;
- Desc.URI = URI + ".diff/Index";
+ Desc.Description = Target->Description + "/DiffIndex";
+ Desc.ShortDesc = Target->ShortDesc;
+ Desc.URI = Target->URI + ".diff/Index";
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI) + string(".DiffIndex");
+ DestFile += URItoFileName(Target->URI) + string(".DiffIndex");
if(Debug)
std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
return;
}
- if(Debug)
- std::clog << "pkgAcqIndexDiffs::pkgAcqIndexDiffs(): "
- << CurrentPackagesFile << std::endl;
-
+ if(Debug)
+ std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): "
+ << CurrentPackagesFile << std::endl;
+
QueueURI(Desc);
}
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqDiffIndex::Custom600Headers()
+string pkgAcqDiffIndex::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI) + string(".IndexDiff");
bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
{
if(Debug)
- std::clog << "pkgAcqIndexDiffs::ParseIndexDiff() " << IndexDiffFile
- << std::endl;
+ std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
+ << std::endl;
pkgTagSection Tags;
string ServerSha1;
SHA1.AddFD(fd);
string const local_sha1 = SHA1.Result();
- if(local_sha1 == ServerSha1)
+ if(local_sha1 == ServerSha1)
{
- // we have the same sha1 as the server
+ // we have the same sha1 as the server so we are done here
if(Debug)
std::clog << "Package file is up-to-date" << std::endl;
- // set found to true, this will queue a pkgAcqIndexDiffs with
- // a empty availabe_patches
- found = true;
- }
- else
+ // list cleanup needs to know that this file as well as the already
+ // present index is ours, so we create an empty diff to save it for us
+ new pkgAcqIndexDiffs(Owner, TransactionID, Target,
+ ExpectedHashes, MetaIndexParser,
+ ServerSha1, available_patches);
+ return true;
+ }
+ else
{
if(Debug)
std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl;
}
// we have something, queue the next diff
- if(found)
+ if(found)
{
// queue the diffs
string::size_type const last_space = Description.rfind(" ");
if(last_space != string::npos)
Description.erase(last_space, Description.size()-last_space);
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
+
+ /* decide if we should download patches one by one or in one go:
+ The first is good if the server merges patches, but many don't, so client
+ based merging can be attempted, in which case the second is better.
+ "bad things" will happen if patches are merged on the server,
+ but client side merging is attempted as well */
+ bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
+ if (pdiff_merge == true)
+ {
+ // reprepro adds this flag if it has merged patches on the server
+ std::string const precedence = Tags.FindS("X-Patch-Precedence");
+ pdiff_merge = (precedence != "merged");
+ }
+
+ if (pdiff_merge == false)
+ {
+ new pkgAcqIndexDiffs(Owner, TransactionID, Target, ExpectedHashes,
+ MetaIndexParser,
+ ServerSha1, available_patches);
+ }
+ else
+ {
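+ // queue all diffs at once; every pkgAcqIndexMergeDiffs knows about the
+ // others via the shared vector, so the last one to finish can hand the
+ // whole batch to the rred method for merging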
+ std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
+ for(size_t i = 0; i < available_patches.size(); ++i)
+ (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner,
+ TransactionID,
+ Target,
+ ExpectedHashes,
+ MetaIndexParser,
+ available_patches[i],
+ diffs);
+ }
+
Complete = false;
Status = StatDone;
Dequeue();
return false;
}
/*}}}*/
-void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
{
if(Debug)
- std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << std::endl
- << "Falling back to normal index file aquire" << std::endl;
+ std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
+ << "Falling back to normal index file acquire" << std::endl;
- new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash);
+ new pkgAcqIndex(Owner, TransactionID, Target, ExpectedHashes, MetaIndexParser);
Complete = false;
Status = StatDone;
Dequeue();
}
/*}}}*/
-void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl;
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message, Size, Hashes, Cnf);
string FinalFile;
FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
- // sucess in downloading the index
+ // success in downloading the index
// rename the index
FinalFile += string(".IndexDiff");
if(Debug)
* for each diff and the index
*/
pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
- string URI,string URIDesc,string ShortDesc,
- HashString ExpectedHash,
+ unsigned long TransactionID,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser,
string ServerSha1,
vector<DiffInfo> diffs)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+ : pkgAcqBaseIndex(Owner, TransactionID, Target, ExpectedHashes, MetaIndexParser),
available_patches(diffs), ServerSha1(ServerSha1)
{
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
+ DestFile += URItoFileName(Target->URI);
Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
- Description = URIDesc;
+ RealURI = Target->URI;
Desc.Owner = this;
- Desc.ShortDesc = ShortDesc;
+ Description = Target->Description;
+ Desc.ShortDesc = Target->ShortDesc;
if(available_patches.empty() == true)
{
}
}
/*}}}*/
-void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
{
if(Debug)
- std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << std::endl
- << "Falling back to normal index file aquire" << std::endl;
- new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
- ExpectedHash);
+ std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
+ << "Falling back to normal index file acquire" << std::endl;
+ new pkgAcqIndex(Owner, TransactionID, Target, ExpectedHashes, MetaIndexParser);
Finish();
}
/*}}}*/
DestFile = _config->FindDir("Dir::State::lists");
DestFile += URItoFileName(RealURI);
- if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
+ if(HashSums().usable() && !HashSums().VerifyFile(DestFile))
{
RenameOnError(HashSumMismatch);
Dequeue();
}
// queue the right diff
- Desc.URI = string(RealURI) + ".diff/" + available_patches[0].file + ".gz";
+ Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz";
Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file);
return true;
}
/*}}}*/
-void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl;
- Item::Done(Message,Size,Md5Hash,Cnf);
+ Item::Done(Message, Size, Hashes, Cnf);
string FinalFile;
FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
- // sucess in downloading a diff, enter ApplyDiff state
+ // success in downloading a diff, enter ApplyDiff state
if(State == StateFetchDiff)
{
{
// remove the just applied patch
available_patches.erase(available_patches.begin());
+ unlink((FinalFile + ".ed").c_str());
// move into place
if(Debug)
// see if there is more to download
if(available_patches.empty() == false) {
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
+ new pkgAcqIndexDiffs(Owner, TransactionID, Target,
+ ExpectedHashes, MetaIndexParser,
+ ServerSha1, available_patches);
return Finish();
} else
return Finish(true);
}
}
/*}}}*/
+// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
+pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
+ unsigned long TransactionID,
+ struct IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser,
+ DiffInfo const &patch,
+ std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
+ : pkgAcqBaseIndex(Owner, TransactionID, Target, ExpectedHashes, MetaIndexParser),
+ patch(patch), allPatches(allPatches), State(StateFetchDiff)
+{
+
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(Target->URI);
+
+ Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
+
+ RealURI = Target->URI;
+ Desc.Owner = this;
+ Description = Target->Description;
+ Desc.ShortDesc = Target->ShortDesc;
+
+ Desc.URI = RealURI + ".diff/" + patch.file + ".gz";
+ Desc.Description = Description + " " + patch.file + string(".pdiff");
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(RealURI + ".diff/" + patch.file);
+
+ if(Debug)
+ std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
+
+ QueueURI(Desc);
+}
+ /*}}}*/
+void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
+{
+ if(Debug)
+ std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+
+ // check if we are the first to fail, otherwise we are done here
+ State = StateDoneDiff;
+ for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+ I != allPatches->end(); ++I)
+ if ((*I)->State == StateErrorDiff)
+ return;
+
+ // first failure means we should fallback
+ State = StateErrorDiff;
+ std::clog << "Falling back to normal index file acquire" << std::endl;
+ new pkgAcqIndex(Owner, TransactionID, Target, ExpectedHashes, MetaIndexParser);
+}
+ /*}}}*/
+void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
+ pkgAcquire::MethodConfig *Cnf)
+{
+ if(Debug)
+ std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
+
+ Item::Done(Message,Size,Hashes,Cnf);
+
+ string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+
+ if (State == StateFetchDiff)
+ {
+ // rred expects the patch as $FinalFile.ed.$patchname.gz
+ Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz");
+
+ // check if this is the last completed diff
+ State = StateDoneDiff;
+ for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+ I != allPatches->end(); ++I)
+ if ((*I)->State != StateDoneDiff)
+ {
+ if(Debug)
+ std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl;
+ return;
+ }
+
+ // this is the last completed diff, so we are ready to apply now
+ State = StateApplyDiff;
+
+ if(Debug)
+ std::clog << "Sending to rred method: " << FinalFile << std::endl;
+
+ Local = true;
+ Desc.URI = "rred:" + FinalFile;
+ QueueURI(Desc);
+ Mode = "rred";
+ return;
+ }
+ // success in downloading and applying all diffs, clean up
+ else if (State == StateApplyDiff)
+ {
+ // see if we really got the expected file
+ if(ExpectedHashes.usable() && !ExpectedHashes.VerifyFile(DestFile))
+ {
+ RenameOnError(HashSumMismatch);
+ return;
+ }
+
+ // move the result into place
+ if(Debug)
+ std::clog << "Moving patched file in place: " << std::endl
+ << DestFile << " -> " << FinalFile << std::endl;
+ Rename(DestFile, FinalFile);
+ chmod(FinalFile.c_str(), 0644);
+
+ // otherwise lists cleanup will eat the file
+ DestFile = FinalFile;
+
+ // ensure the ed's are gone regardless of list-cleanup
+ for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+ I != allPatches->end(); ++I)
+ {
+ std::string patch = FinalFile + ".ed." + (*I)->patch.file + ".gz";
+ unlink(patch.c_str());
+ }
+
+ // all set and done
+ Complete = true;
+ if(Debug)
+ std::clog << "allDone: " << DestFile << "\n" << std::endl;
+ }
+}
+ /*}}}*/
// AcqIndex::AcqIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The package file is added to the queue and a second class is
instantiated to fetch the revision file */
pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
string URI,string URIDesc,string ShortDesc,
- HashString ExpectedHash, string comprExt)
- : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
+ HashStringList const &ExpectedHash, string comprExt)
+ : pkgAcqBaseIndex(Owner, 0, NULL, ExpectedHash, NULL), RealURI(URI)
{
- if(comprExt.empty() == true)
- {
- // autoselect the compression method
- std::vector<std::string> types = APT::Configuration::getCompressionTypes();
- for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
- comprExt.append(*t).append(" ");
- if (comprExt.empty() == false)
- comprExt.erase(comprExt.end()-1);
- }
- CompressionExtension = comprExt;
-
+ AutoSelectCompression();
Init(URI, URIDesc, ShortDesc);
+
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "New pkgIndex with TransactionID "
+ << TransactionID << std::endl;
}
-pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
- : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash)
+ /*}}}*/
+// AcqIndex::AcqIndex - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
+ unsigned long TransactionID,
+ IndexTarget const *Target,
+ HashStringList const &ExpectedHash,
+ indexRecords *MetaIndexParser)
+ : pkgAcqBaseIndex(Owner, TransactionID, Target, ExpectedHash,
+ MetaIndexParser), RealURI(Target->URI)
{
// autoselect the compression method
+ AutoSelectCompression();
+ Init(Target->URI, Target->Description, Target->ShortDesc);
+
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "New pkgIndex with TransactionID "
+ << TransactionID << std::endl;
+}
+ /*}}}*/
+// AcqIndex::AutoSelectCompression - Select compression /*{{{*/
+// ---------------------------------------------------------------------
+void pkgAcqIndex::AutoSelectCompression()
+{
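+ // build the space separated list of compression extensions to try for
+ // this index (limited to what the Release file lists if we have hashes)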
std::vector<std::string> types = APT::Configuration::getCompressionTypes();
CompressionExtension = "";
- if (ExpectedHash.empty() == false)
+ if (ExpectedHashes.usable())
{
for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true)
}
if (CompressionExtension.empty() == false)
CompressionExtension.erase(CompressionExtension.end()-1);
-
- // only verify non-optional targets, see acquire-item.h for a FIXME
- // to make this more flexible
- if (Target->IsOptional())
- Verify = false;
- else
- Verify = true;
-
- Init(Target->URI, Target->Description, Target->ShortDesc);
}
- /*}}}*/
// AcqIndex::Init - defered Constructor /*{{{*/
void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) {
Decompression = false;
std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
if (comprExt == "uncompressed")
+ {
Desc.URI = URI;
+ if(Target)
+ MetaKey = string(Target->MetaKey);
+ }
else
+ {
Desc.URI = URI + '.' + comprExt;
+ if(Target)
+ MetaKey = string(Target->MetaKey) + '.' + comprExt;
+ }
+
+ // load the filesize
+ if(MetaIndexParser)
+ {
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
+ if(Record)
+ FileSize = Record->Size;
+
+ InitByHashIfNeeded(MetaKey);
+ }
Desc.Description = URIDesc;
Desc.Owner = this;
QueueURI(Desc);
}
/*}}}*/
+// AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey)
+{
+ // TODO:
+ // - (maybe?) add support for by-hash into the sources.list as a flag
+ // - make apt-ftparchive generate the hashes (and expire?)
+ std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash";
+ if(_config->FindB("APT::Acquire::By-Hash", false) == true ||
+ _config->FindB(HostKnob, false) == true ||
+ MetaIndexParser->GetSupportsAcquireByHash())
+ {
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
+ if(Record)
+ {
+ // FIXME: should we really use the best hash here? or a fixed one?
+ const HashString *TargetHash = Record->Hashes.find("");
+ std::string ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
+ size_t trailing_slash = Desc.URI.find_last_of("/");
+ Desc.URI = Desc.URI.replace(
+ trailing_slash,
+ Desc.URI.substr(trailing_slash+1).size()+1,
+ ByHash);
+ } else {
+ _error->Warning(
+ "Fetching ByHash requested but can not find record for %s",
+ MetaKey.c_str());
+ }
+ }
+}
+ /*}}}*/
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqIndex::Custom600Headers()
+string pkgAcqIndex::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
return msg;
}
/*}}}*/
+// pkgAcqIndex::Failed - getting the indexfile failed /*{{{*/
+// ---------------------------------------------------------------------
+/* */
void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
{
size_t const nextExt = CompressionExtension.find(' ');
}
Item::Failed(Message,Cnf);
+
+ /// cancel the entire transaction
+ Owner->AbortTransaction(TransactionID);
}
/*}}}*/
+// pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+std::string pkgAcqIndex::GetFinalFilename(std::string const &URI,
+ std::string const &compExt)
+{
+ std::string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(URI);
+ if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz")
+ FinalFile += ".gz";
+ return FinalFile;
+}
+ /*}}}*/
+// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgAcqIndex::ReverifyAfterIMS(std::string const &FileName)
+{
+ std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+ if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz")
+ DestFile += ".gz";
+
+ // copy FinalFile into partial/ so that we check the hash again
+ string FinalFile = GetFinalFilename(RealURI, compExt);
+ Decompression = true;
+ Desc.URI = "copy:" + FinalFile;
+ QueueURI(Desc);
+}
+ /*}}}*/
// AcqIndex::Done - Finished a fetch /*{{{*/
// ---------------------------------------------------------------------
/* This goes through a number of states.. On the initial fetch the
to the uncompressed version of the file. If this is so the file
is copied into the partial directory. In all other cases the file
is decompressed with a gzip uri. */
-void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
+void pkgAcqIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,Hash,Cfg);
+ Item::Done(Message,Size,Hashes,Cfg);
+ std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+
if (Decompression == true)
{
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
- std::cerr << std::endl << RealURI << ": Computed Hash: " << Hash;
- std::cerr << " Expected Hash: " << ExpectedHash.toStr() << std::endl;
- }
-
- if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
+ if (ExpectedHashes.usable() && ExpectedHashes != Hashes)
{
RenameOnError(HashSumMismatch);
+ printHashSumComparision(RealURI, ExpectedHashes, Hashes);
+ Failed(Message, Cfg);
return;
}
- /* Verify the index file for correctness (all indexes must
- * have a Package field) (LP: #346386) (Closes: #627642) */
- if (Verify == true)
+ // FIXME: this can go away once we only ever download stuff that
+ // has a valid hash and we never do GET based probing
+ //
+ /* Always verify the index file for correctness (all indexes must
+ * have a Package field) (LP: #346386) (Closes: #627642)
+ */
+ FileFd fd(DestFile, FileFd::ReadOnly);
+ // Only test for correctness if the file is not empty (empty is ok)
+ if (fd.FileSize() > 0)
{
- FileFd fd(DestFile, FileFd::ReadOnly);
- // Only test for correctness if the file is not empty (empty is ok)
- if (fd.FileSize() > 0)
- {
- pkgTagSection sec;
- pkgTagFile tag(&fd);
-
- // all our current indexes have a field 'Package' in each section
- if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false)
- {
- RenameOnError(InvalidFormat);
- return;
- }
+ pkgTagSection sec;
+ pkgTagFile tag(&fd);
+
+ // all our current indexes have a field 'Package' in each section
+ if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false)
+ {
+ RenameOnError(InvalidFormat);
+ Failed(Message, Cfg);
+ return;
}
}
- // Done, move it into position
- string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(RealURI);
- Rename(DestFile,FinalFile);
- chmod(FinalFile.c_str(),0644);
-
+ // Done, queue for rename once the transaction is finished
+ PartialFile = DestFile;
+
+#if 1 // FIXME: waaaay too complicated
/* We restore the original name to DestFile so that the clean operation
will work OK */
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
// Remove the compressed version.
if (Erase == true)
unlink(DestFile.c_str());
+#endif
+
+ // Done, queue for rename once the transaction is finished
+ DestFile = GetFinalFilename(RealURI, compExt);
+
return;
+ } else {
+ // FIXME: use the same method to find
+ // check the compressed hash too
+ if(MetaKey != "" && Hashes.size() > 0)
+ {
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
+ if(Record && Record->Hashes.usable() && Hashes != Record->Hashes)
+ {
+ RenameOnError(HashSumMismatch);
+ printHashSumComparision(RealURI, Record->Hashes, Hashes);
+ Failed(Message, Cfg);
+ return;
+ }
+ }
}
Erase = false;
{
// The files timestamp matches
if (StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false) == true)
+ {
+ ReverifyAfterIMS(FileName);
return;
+ }
Decompression = true;
Local = true;
DestFile += ".decomp";
ErrorText = "Method gave a blank filename";
}
- std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
-
// The files timestamp matches
- if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) {
- if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz")
- // Update DestFile for .gz suffix so that the clean operation keeps it
- DestFile += ".gz";
+ if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
+ {
+ ReverifyAfterIMS(FileName);
return;
}
/* The Translation file is added to the queue */
pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
string URI,string URIDesc,string ShortDesc)
- : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
+ : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashStringList(), "")
{
}
-pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const *Target,
- HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
- : pkgAcqIndex(Owner, Target, ExpectedHash, MetaIndexParser)
+ /*}}}*/
+pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
+ unsigned long TransactionID,
+ IndexTarget const * const Target,
+ HashStringList const &ExpectedHashes,
+ indexRecords *MetaIndexParser)
+ : pkgAcqIndex(Owner, TransactionID, Target, ExpectedHashes, MetaIndexParser)
{
+ // load the filesize
+ indexRecords::checkSum *Record = MetaIndexParser->Lookup(string(Target->MetaKey));
+ if(Record)
+ FileSize = Record->Size;
}
/*}}}*/
// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
-string pkgAcqIndexTrans::Custom600Headers()
+string pkgAcqIndexTrans::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
return;
}
+ // FIXME: this is used often (e.g. in pkgAcqIndexTrans) so refactor
if (Cnf->LocalOnly == true ||
StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
{
Item::Failed(Message,Cnf);
}
/*}}}*/
-pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/
+pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/
+ unsigned long TransactionID,
string URI,string URIDesc,string ShortDesc,
- string MetaIndexURI, string MetaIndexURIDesc,
- string MetaIndexShortDesc,
+ string MetaIndexFile,
const vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser) :
- Item(Owner), RealURI(URI), MetaIndexURI(MetaIndexURI),
- MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
- MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets)
+ Item(Owner, HashStringList(), TransactionID), RealURI(URI),
+ MetaIndexParser(MetaIndexParser), MetaIndexFile(MetaIndexFile),
+ IndexTargets(IndexTargets), AuthPass(false), IMSHit(false)
{
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
DestFile += URItoFileName(URI);
// partial download anyway
unlink(DestFile.c_str());
+ // debug output for the used TransactionID
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "New pkgAcqMetaSig with TransactionID "
+ << TransactionID << std::endl;
+
// Create the item
Desc.Description = URIDesc;
Desc.Owner = this;
Desc.ShortDesc = ShortDesc;
Desc.URI = URI;
-
- string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(RealURI);
- if (RealFileExists(Final) == true)
- {
- // File was already in place. It needs to be re-downloaded/verified
- // because Release might have changed, we do give it a differnt
- // name than DestFile because otherwise the http method will
- // send If-Range requests and there are too many broken servers
- // out there that do not understand them
- LastGoodSig = DestFile+".reverify";
- Rename(Final,LastGoodSig);
- }
QueueURI(Desc);
}
/*}}}*/
pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
{
- // if the file was never queued undo file-changes done in the constructor
- if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false &&
- LastGoodSig.empty() == false)
- {
- string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
- if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
- Rename(LastGoodSig, Final);
- }
-
}
/*}}}*/
// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqMetaSig::Custom600Headers()
+string pkgAcqMetaSig::Custom600Headers() const
{
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+
struct stat Buf;
- if (stat(LastGoodSig.c_str(),&Buf) != 0)
+ if (stat(FinalFile.c_str(),&Buf) != 0)
return "\nIndex-File: true";
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
-void pkgAcqMetaSig::Done(string Message,unsigned long long Size,string MD5,
+void pkgAcqMetaSig::Done(string Message,unsigned long long Size, HashStringList const &Hashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,MD5,Cfg);
+ Item::Done(Message, Size, Hashes, Cfg);
string FileName = LookupTag(Message,"Filename");
if (FileName.empty() == true)
return;
}
- Complete = true;
-
- // put the last known good file back on i-m-s hit (it will
- // be re-verified again)
- // Else do nothing, we have the new file in DestFile then
if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
- Rename(LastGoodSig, DestFile);
+ IMSHit = true;
+
+ // adjust paths if it's an ims-hit
+ if(IMSHit)
+ {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+
+ DestFile = PartialFile = FinalFile;
+ }
+
+ // queue for verify
+ if(AuthPass == false)
+ {
+ AuthPass = true;
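+ // second (auth) pass: hand the fetched signature to the gpgv method,
+ // with the already downloaded meta index as the file to verify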
+ Desc.URI = "gpgv:" + DestFile;
+ DestFile = MetaIndexFile;
+ QueueURI(Desc);
+ return;
+ }
- // queue a pkgAcqMetaIndex to be verified against the sig we just retrieved
- new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc,
- MetaIndexShortDesc, DestFile, IndexTargets,
- MetaIndexParser);
+ // queue to copy the file in place if it was not an ims hit; on an ims
+ // hit the file is already at the right place
+ if(IMSHit == false)
+ {
+ PartialFile = _config->FindDir("Dir::State::lists") + "partial/";
+ PartialFile += URItoFileName(RealURI);
+
+ DestFile = _config->FindDir("Dir::State::lists");
+ DestFile += URItoFileName(RealURI);
+ }
+
+ Complete = true;
}
/*}}}*/
{
string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
- // if we get a network error we fail gracefully
- if(Status == StatTransientNetworkError)
- {
- Item::Failed(Message,Cnf);
- // move the sigfile back on transient network failures
- if(FileExists(LastGoodSig))
- Rename(LastGoodSig,Final);
-
- // set the status back to , Item::Failed likes to reset it
- Status = pkgAcquire::Item::StatTransientNetworkError;
- return;
- }
-
- // Delete any existing sigfile when the acquire failed
- unlink(Final.c_str());
-
- // queue a pkgAcqMetaIndex with no sigfile
- new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
- "", IndexTargets, MetaIndexParser);
+ // this ensures that any file in the lists/ dir is removed by the
+ // transaction
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(RealURI);
+ PartialFile = "";
+ // FIXME: this is used often (e.g. in pkgAcqIndexTrans) so refactor
if (Cnf->LocalOnly == true ||
StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
{
Dequeue();
return;
}
-
Item::Failed(Message,Cnf);
}
/*}}}*/
pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/
+ unsigned long TransactionID,
string URI,string URIDesc,string ShortDesc,
- string SigFile,
- const vector<struct IndexTarget*>* IndexTargets,
+ string MetaIndexSigURI,string MetaIndexSigURIDesc, string MetaIndexSigShortDesc,
+ const vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser) :
- Item(Owner), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets),
- MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false)
+ Item(Owner, HashStringList(), TransactionID), RealURI(URI), IndexTargets(IndexTargets),
+ MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false),
+ MetaIndexSigURI(MetaIndexSigURI), MetaIndexSigURIDesc(MetaIndexSigURIDesc),
+ MetaIndexSigShortDesc(MetaIndexSigShortDesc)
+{
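+ // no transaction to join, so start our own (the object address doubles
+ // as a unique transaction id)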
+ if(TransactionID == 0)
+ this->TransactionID = (unsigned long)this;
+
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "New pkgAcqMetaIndex with TransactionID "
+ << TransactionID << std::endl;
+
+ Init(URIDesc, ShortDesc);
+}
+ /*}}}*/
+// pkgAcqMetaIndex::Init - Delayed constructor /*{{{*/
+void pkgAcqMetaIndex::Init(std::string URIDesc, std::string ShortDesc)
{
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
- DestFile += URItoFileName(URI);
+ DestFile += URItoFileName(RealURI);
// Create the item
Desc.Description = URIDesc;
Desc.Owner = this;
Desc.ShortDesc = ShortDesc;
- Desc.URI = URI;
+ Desc.URI = RealURI;
+ // we expect more items
+ ExpectedAdditionalItems = IndexTargets->size();
QueueURI(Desc);
}
- /*}}}*/
// pkgAcqMetaIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqMetaIndex::Custom600Headers()
+string pkgAcqMetaIndex::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
-void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,string Hash, /*{{{*/
+void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,Hash,Cfg);
+ Item::Done(Message,Size,Hashes,Cfg);
// MetaIndexes are done in two passes: one to download the
// metaindex with an appropriate method, and a second to verify it
if (SigFile == "")
{
- // There was no signature file, so we are finished. Download
- // the indexes and do only hashsum verification if possible
+ // load indexes, the signature will be downloaded afterwards
MetaIndexParser->Load(DestFile);
- QueueIndexes(false);
+ QueueIndexes(true);
}
else
{
// There was a signature file, so pass it to gpgv for
// verification
-
if (_config->FindB("Debug::pkgAcquire::Auth", false))
std::cerr << "Metaindex acquired, queueing gpg verification ("
<< SigFile << "," << DestFile << ")\n";
FinalFile += URItoFileName(RealURI);
if (SigFile == DestFile)
SigFile = FinalFile;
- Rename(DestFile,FinalFile);
- chmod(FinalFile.c_str(),0644);
+ // queue for copy in place
+ PartialFile = DestFile;
DestFile = FinalFile;
}
}
if (SigFile == DestFile)
{
SigFile = FinalFile;
+#if 0
// constructor of pkgAcqMetaClearSig moved it out of the way,
// now move it back in on IMS hit for the 'old' file
string const OldClearSig = DestFile + ".reverify";
if (RealFileExists(OldClearSig) == true)
Rename(OldClearSig, FinalFile);
+#endif
}
DestFile = FinalFile;
}
+
+ // queue a signature
+ if(SigFile != DestFile)
+ new pkgAcqMetaSig(Owner, TransactionID,
+ MetaIndexSigURI, MetaIndexSigURIDesc,
+ MetaIndexSigShortDesc, DestFile, IndexTargets,
+ MetaIndexParser);
+
Complete = true;
}
/*}}}*/
// Download further indexes with verification
QueueIndexes(true);
+#if 0
// is it a clearsigned MetaIndex file?
if (DestFile == SigFile)
return;
URItoFileName(RealURI) + ".gpg";
Rename(SigFile,VerifiedSigFile);
chmod(VerifiedSigFile.c_str(),0644);
+#endif
}
/*}}}*/
void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
{
-#if 0
- /* Reject invalid, existing Release files (LP: #346386) (Closes: #627642)
- * FIXME: Disabled; it breaks unsigned repositories without hashes */
- if (!verify && FileExists(DestFile) && !MetaIndexParser->Load(DestFile))
- {
- Status = StatError;
- ErrorText = MetaIndexParser->ErrorText;
- return;
- }
-#endif
bool transInRelease = false;
{
std::vector<std::string> const keys = MetaIndexParser->MetaKeys();
}
}
- for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
+ // at this point the real Items are loaded in the fetcher
+ ExpectedAdditionalItems = 0;
+ for (vector <IndexTarget*>::const_iterator Target = IndexTargets->begin();
Target != IndexTargets->end();
++Target)
{
- HashString ExpectedIndexHash;
+ HashStringList ExpectedIndexHashes;
const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
bool compressedAvailable = false;
if (Record == NULL)
{
std::vector<std::string> types = APT::Configuration::getCompressionTypes();
for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
- if (MetaIndexParser->Exists(string((*Target)->MetaKey).append(".").append(*t)) == true)
+ if (MetaIndexParser->Exists((*Target)->MetaKey + "." + *t) == true)
{
compressedAvailable = true;
break;
}
else
{
- ExpectedIndexHash = Record->Hash;
+ ExpectedIndexHashes = Record->Hashes;
if (_config->FindB("Debug::pkgAcquire::Auth", false))
{
- std::cerr << "Queueing: " << (*Target)->URI << std::endl;
- std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
+ std::cerr << "Queueing: " << (*Target)->URI << std::endl
+ << "Expected Hash:" << std::endl;
+ for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs)
+ std::cerr << "\t- " << hs->toStr() << std::endl;
std::cerr << "For: " << Record->MetaKeyFilename << std::endl;
}
- if (verify == true && ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false)
+ if (verify == true && ExpectedIndexHashes.empty() == true && (*Target)->IsOptional() == false)
{
Status = StatAuthError;
strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
if ((*Target)->IsOptional() == true)
{
if ((*Target)->IsSubIndex() == true)
- new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ new pkgAcqSubIndex(Owner, TransactionID,
+ (*Target)->URI, (*Target)->Description,
+ (*Target)->ShortDesc, ExpectedIndexHashes);
else if (transInRelease == false || Record != NULL || compressedAvailable == true)
{
if (_config->FindB("Acquire::PDiffs",true) == true && transInRelease == true &&
- MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true)
- new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)
+ new pkgAcqDiffIndex(Owner, TransactionID, *Target, ExpectedIndexHashes, MetaIndexParser);
else
- new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ new pkgAcqIndexTrans(Owner, TransactionID, *Target, ExpectedIndexHashes, MetaIndexParser);
}
continue;
}
in the Meta-Index file. Ideal would be if pkgAcqDiffIndex would test this
instead, but passing the required info to it is to much hassle */
if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
- MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true))
- new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true))
+ new pkgAcqDiffIndex(Owner, TransactionID, *Target, ExpectedIndexHashes, MetaIndexParser);
else
- new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ new pkgAcqIndex(Owner, TransactionID, *Target, ExpectedIndexHashes, MetaIndexParser);
}
}
/*}}}*/
missingkeys += (Fingerprint);
}
if(!missingkeys.empty())
- _error->Warning("%s", string(msg+missingkeys).c_str());
+ _error->Warning("%s", (msg + missingkeys).c_str());
string Transformed = MetaIndexParser->GetExpectedDist();
// pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+void pkgAcqMetaIndex::Failed(string Message,
+ pkgAcquire::MethodConfig * /*Cnf*/)
{
if (AuthPass == true)
{
// gpgv method failed, if we have a good signature
- string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI));
+ string LastGoodSigFile = _config->FindDir("Dir::State::lists");
+ LastGoodSigFile += URItoFileName(RealURI);
if (DestFile != SigFile)
LastGoodSigFile.append(".gpg");
- LastGoodSigFile.append(".reverify");
if(FileExists(LastGoodSigFile))
{
- string VerifiedSigFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
- if (DestFile != SigFile)
- VerifiedSigFile.append(".gpg");
- Rename(LastGoodSigFile, VerifiedSigFile);
Status = StatTransientNetworkError;
_error->Warning(_("An error occurred during the signature "
"verification. The repository is not updated "
_error->Error(_("GPG error: %s: %s"),
Desc.Description.c_str(),
LookupTag(Message,"Message").c_str());
+ Status = StatError;
return;
} else {
_error->Warning(_("GPG error: %s: %s"),
"Release");
SigFile = FinalFile;
}
- Rename(DestFile,FinalFile);
- chmod(FinalFile.c_str(),0644);
+ // Done, queue for rename once the transaction is finished
+ PartialFile = DestFile;
DestFile = FinalFile;
}
QueueIndexes(false);
}
/*}}}*/
+
+void pkgAcqMetaIndex::Finished()
+{
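+ // commit the pending transaction if none of its items failed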
+ if(_config->FindB("Debug::Acquire::Transaction", false) == true)
+ std::clog << "Finished: " << DestFile <<std::endl;
+ if(Owner->TransactionHasError(TransactionID) == false &&
+ TransactionID > 0)
+ Owner->CommitTransaction(TransactionID);
+}
+
+
pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/
string const &URI, string const &URIDesc, string const &ShortDesc,
string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc,
string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc,
- const vector<struct IndexTarget*>* IndexTargets,
+ const vector<IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser) :
- pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser),
- MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
- MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc)
+ pkgAcqMetaIndex(Owner, (unsigned long)this, URI, URIDesc, ShortDesc, MetaSigURI, MetaSigURIDesc,MetaSigShortDesc, IndexTargets, MetaIndexParser),
+ MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
+ MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc)
{
SigFile = DestFile;
+ // index targets + (worst case:) Release/Release.gpg
+ ExpectedAdditionalItems = IndexTargets->size() + 2;
+
+#if 0
// keep the old InRelease around in case of transistent network errors
string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
if (RealFileExists(Final) == true)
string const LastGoodSig = DestFile + ".reverify";
Rename(Final,LastGoodSig);
}
+#endif
}
/*}}}*/
pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
{
+#if 0
// if the file was never queued undo file-changes done in the constructor
if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false)
{
if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
Rename(LastGoodSig, Final);
}
+#endif
}
/*}}}*/
// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
// FIXME: this can go away once the InRelease file is used widely
-string pkgAcqMetaClearSig::Custom600Headers()
+string pkgAcqMetaClearSig::Custom600Headers() const
{
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
{
- Final = DestFile + ".reverify";
if (stat(Final.c_str(),&Buf) != 0)
return "\nIndex-File: true\nFail-Ignore: true\n";
}
/*}}}*/
void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
{
+ // we failed, we will not get additional items from this method
+ ExpectedAdditionalItems = 0;
+
+ // if we expect a ClearTextSignature (InRelease), ensure that
+ // this is what we get and if not fail to queue a
+ // Release/Release.gpg, see #346386
+ if (!StartsWithGPGClearTextSignature(DestFile))
+ {
+ //_error->Error(_("Does not start with a clear sign signature"));
+ pkgAcquire::Item::Failed(Message, Cnf);
+ return;
+ }
+
+
if (AuthPass == false)
{
- // Remove the 'old' InRelease file if we try Release.gpg now as otherwise
- // the file will stay around and gives a false-auth impression (CVE-2012-0214)
+ // Queue the 'old' InRelease file for removal if we try Release.gpg,
+ // as otherwise the file would stay around and give a false-auth
+ // impression (CVE-2012-0214)
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile.append(URItoFileName(RealURI));
- if (FileExists(FinalFile))
- unlink(FinalFile.c_str());
+ PartialFile = "";
+ DestFile = FinalFile;
- new pkgAcqMetaSig(Owner,
- MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
+ new pkgAcqMetaIndex(Owner, TransactionID,
MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
+ MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
IndexTargets, MetaIndexParser);
if (Cnf->LocalOnly == true ||
StringToBool(LookupTag(Message, "Transient-Failure"), false) == false)
pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
pkgRecords *Recs,pkgCache::VerIterator const &Version,
string &StoreFilename) :
- Item(Owner), Version(Version), Sources(Sources), Recs(Recs),
+ Item(Owner, HashStringList()), Version(Version), Sources(Sources), Recs(Recs),
StoreFilename(StoreFilename), Vf(Version.FileList()),
Trusted(false)
{
_error->Error(_("I wasn't able to locate a file for the %s package. "
"This might mean you need to manually fix this package. "
"(due to missing arch)"),
- Version.ParentPkg().Name());
+ Version.ParentPkg().FullName().c_str());
return;
}
checking later. */
bool pkgAcqArchive::QueueNext()
{
- string const ForceHash = _config->Find("Acquire::ForceHash");
for (; Vf.end() == false; ++Vf)
{
// Ignore not source sources
pkgRecords::Parser &Parse = Recs->Lookup(Vf);
if (_error->PendingError() == true)
return false;
-
+
string PkgFile = Parse.FileName();
- if (ForceHash.empty() == false)
- {
- if(stringcasecmp(ForceHash, "sha512") == 0)
- ExpectedHash = HashString("SHA512", Parse.SHA512Hash());
- else if(stringcasecmp(ForceHash, "sha256") == 0)
- ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
- else if (stringcasecmp(ForceHash, "sha1") == 0)
- ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
- else
- ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
- }
- else
- {
- string Hash;
- if ((Hash = Parse.SHA512Hash()).empty() == false)
- ExpectedHash = HashString("SHA512", Hash);
- else if ((Hash = Parse.SHA256Hash()).empty() == false)
- ExpectedHash = HashString("SHA256", Hash);
- else if ((Hash = Parse.SHA1Hash()).empty() == false)
- ExpectedHash = HashString("SHA1", Hash);
- else
- ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
- }
+ ExpectedHashes = Parse.Hashes();
+
if (PkgFile.empty() == true)
return _error->Error(_("The package index files are corrupted. No Filename: "
"field for package %s."),
Desc.URI = Index->ArchiveURI(PkgFile);
Desc.Description = Index->ArchiveInfo(Version);
Desc.Owner = this;
- Desc.ShortDesc = Version.ParentPkg().Name();
+ Desc.ShortDesc = Version.ParentPkg().FullName(true);
// See if we already have the file. (Legacy filenames)
FileSize = Version->Size;
return true;
}
- /* Hmm, we have a file and its size does not match, this shouldnt
+ /* Hmm, we have a file and its size does not match, this shouldn't
happen.. */
unlink(FinalFile.c_str());
}
// Create the item
Local = false;
- Desc.URI = Index->ArchiveURI(PkgFile);
- Desc.Description = Index->ArchiveInfo(Version);
- Desc.Owner = this;
- Desc.ShortDesc = Version.ParentPkg().Name();
QueueURI(Desc);
++Vf;
// AcqArchive::Done - Finished fetching /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
+void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList const &CalcHashes,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,CalcHash,Cfg);
+ Item::Done(Message, Size, CalcHashes, Cfg);
// Check the size
if (Size != Version->Size)
RenameOnError(SizeMismatch);
return;
}
-
- // Check the hash
- if(ExpectedHash.toStr() != CalcHash)
+
+ // FIXME: could this empty() check impose *any* sort of security issue?
+ if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes)
{
RenameOnError(HashSumMismatch);
+ printHashSumComparision(DestFile, ExpectedHashes, CalcHashes);
return;
}
/*}}}*/
// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
// ---------------------------------------------------------------------
-bool pkgAcqArchive::IsTrusted()
+APT_PURE bool pkgAcqArchive::IsTrusted() const
{
return Trusted;
}
// AcqFile::pkgAcqFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The file is added to the queue */
-pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
+pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashes,
unsigned long long Size,string Dsc,string ShortDesc,
const string &DestDir, const string &DestFilename,
bool IsIndexFile) :
- Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
+ Item(Owner, Hashes), IsIndexFile(IsIndexFile)
{
Retries = _config->FindI("Acquire::Retries",0);
if (stat(DestFile.c_str(),&Buf) == 0)
{
// Hmm, the partial file is too big, erase it
- if ((unsigned long long)Buf.st_size > Size)
+ if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
unlink(DestFile.c_str());
else
PartialSize = Buf.st_size;
// AcqFile::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash,
+void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList const &CalcHashes,
pkgAcquire::MethodConfig *Cnf)
{
- Item::Done(Message,Size,CalcHash,Cnf);
+ Item::Done(Message,Size,CalcHashes,Cnf);
// Check the hash
- if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
+ if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes)
{
RenameOnError(HashSumMismatch);
+ printHashSumComparision(DestFile, ExpectedHashes, CalcHashes);
return;
}
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
-string pkgAcqFile::Custom600Headers()
+string pkgAcqFile::Custom600Headers() const
{
if (IsIndexFile)
return "\nIndex-File: true";