##################################################################### */
/*}}}*/
// Include Files /*{{{*/
+#include <config.h>
+
#include <apt-pkg/acquire-item.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/sourcelist.h>
-#include <apt-pkg/vendorlist.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/fileutl.h>
-#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/tagfile.h>
-
-#include <apti18n.h>
-
+#include <apt-pkg/indexrecords.h>
+#include <apt-pkg/acquire.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/indexfile.h>
+#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/cacheiterators.h>
+#include <apt-pkg/pkgrecords.h>
+
+#include <stddef.h>
+#include <stdlib.h>
+#include <string.h>
+#include <iostream>
+#include <vector>
#include <sys/stat.h>
#include <unistd.h>
#include <errno.h>
#include <sstream>
#include <stdio.h>
#include <ctime>
+
+#include <apti18n.h>
/*}}}*/
using namespace std;
// ---------------------------------------------------------------------
/* Stash status and the file size. Note that setting Complete means
   sub-phases of the acquire process such as decompression are operating */
-void pkgAcquire::Item::Start(string /*Message*/,unsigned long Size)
+void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size)
{
Status = StatFetching;
if (FileSize == 0 && Complete == false)
// Acquire::Item::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,
- pkgAcquire::MethodConfig *Cnf)
+void pkgAcquire::Item::Done(string Message,unsigned long long Size,string /*Hash*/,
+ pkgAcquire::MethodConfig * /*Cnf*/)
{
// We just downloaded something..
string FileName = LookupTag(Message,"Filename");
/*}}}*/
// Acquire::Item::Rename - Rename a file /*{{{*/
// ---------------------------------------------------------------------
-/* This helper function is used by alot of item methods as thier final
+/* This helper function is used by a lot of item methods as their final
step */
void pkgAcquire::Item::Rename(string From,string To)
{
}
}
/*}}}*/
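+// Acquire::Item::RenameOnError - Rename failed file and set error state
+// ---------------------------------------------------------------------
+/* Helper used by the various Done() implementations: it moves a broken
+   download out of the way (DestFile.FAILED) and maps the failure kind to
+   the matching Status and translated ErrorText */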
+bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
+{
+ if(FileExists(DestFile))
+ Rename(DestFile, DestFile + ".FAILED");
+
+ switch (error)
+ {
+ case HashSumMismatch:
+ ErrorText = _("Hash Sum mismatch");
+ Status = StatAuthError;
+ ReportMirrorFailure("HashChecksumFailure");
+ break;
+ case SizeMismatch:
+ ErrorText = _("Size mismatch");
+ Status = StatAuthError;
+ ReportMirrorFailure("SizeFailure");
+ break;
+ case InvalidFormat:
+ ErrorText = _("Invalid file format");
+ Status = StatError;
+      // do not report it, as usually it is not the mirror's fault but a portal/proxy
+ break;
+ }
+ return false;
+}
+ /*}}}*/
// Acquire::Item::ReportMirrorFailure /*{{{*/
// ---------------------------------------------------------------------
void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
}
}
/*}}}*/
+// AcqSubIndex::AcqSubIndex - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* Get a sub-index file based on checksums from a 'master' file and
+ possibly query additional files */
+pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,
+ string const &URIDesc, string const &ShortDesc,
+ HashString const &ExpectedHash)
+ : Item(Owner), ExpectedHash(ExpectedHash)
+{
+   /* XXX: Beware: Currently this class does nothing (of value) anymore! */
+ Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false);
+
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(URI);
+
+ Desc.URI = URI;
+ Desc.Description = URIDesc;
+ Desc.Owner = this;
+ Desc.ShortDesc = ShortDesc;
+
+ QueueURI(Desc);
+
+ if(Debug)
+ std::clog << "pkgAcqSubIndex: " << Desc.URI << std::endl;
+}
+ /*}}}*/
+// AcqSubIndex::Custom600Headers - Insert custom request headers /*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+string pkgAcqSubIndex::Custom600Headers()
+{
+ string Final = _config->FindDir("Dir::State::lists");
+ Final += URItoFileName(Desc.URI);
+
+ struct stat Buf;
+ if (stat(Final.c_str(),&Buf) != 0)
+ return "\nIndex-File: true\nFail-Ignore: true\n";
+ return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+}
+ /*}}}*/
+void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
+{
+ if(Debug)
+ std::clog << "pkgAcqSubIndex failed: " << Desc.URI << " with " << Message << std::endl;
+
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+
+ // No good Index is provided
+}
+ /*}}}*/
+void pkgAcqSubIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+ pkgAcquire::MethodConfig *Cnf)
+{
+ if(Debug)
+ std::clog << "pkgAcqSubIndex::Done(): " << Desc.URI << std::endl;
+
+ string FileName = LookupTag(Message,"Filename");
+ if (FileName.empty() == true)
+ {
+ Status = StatError;
+ ErrorText = "Method gave a blank filename";
+ return;
+ }
+
+ if (FileName != DestFile)
+ {
+ Local = true;
+ Desc.URI = "copy:" + FileName;
+ QueueURI(Desc);
+ return;
+ }
+
+ Item::Done(Message,Size,Md5Hash,Cnf);
+
+ string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI);
+
+ /* Downloaded invalid transindex => Error (LP: #346386) (Closes: #627642) */
+ indexRecords SubIndexParser;
+ if (FileExists(DestFile) == true && !SubIndexParser.Load(DestFile)) {
+ Status = StatError;
+ ErrorText = SubIndexParser.ErrorText;
+ return;
+ }
+
+ // success in downloading the index
+ // rename the index
+ if(Debug)
+ std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl;
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+ DestFile = FinalFile;
+
+ if(ParseIndex(DestFile) == false)
+ return Failed("", NULL);
+
+ Complete = true;
+ Status = StatDone;
+ Dequeue();
+ return;
+}
+ /*}}}*/
+bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/
+{
+ indexRecords SubIndexParser;
+ if (FileExists(IndexFile) == false || SubIndexParser.Load(IndexFile) == false)
+ return false;
+   // do something with the downloaded index
+ return true;
+}
+ /*}}}*/
// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
-/* Get the DiffIndex file first and see if there are patches availabe
+/* Get the DiffIndex file first and see if there are patches available
* If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
* patches. If anything goes wrong in that process, it will fall back to
* the original packages file
return;
}
- if(Debug)
- std::clog << "pkgAcqIndexDiffs::pkgAcqIndexDiffs(): "
- << CurrentPackagesFile << std::endl;
-
+ if(Debug)
+ std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): "
+ << CurrentPackagesFile << std::endl;
+
QueueURI(Desc);
}
bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
{
if(Debug)
- std::clog << "pkgAcqIndexDiffs::ParseIndexDiff() " << IndexDiffFile
- << std::endl;
+ std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
+ << std::endl;
pkgTagSection Tags;
string ServerSha1;
FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
SHA1Summation SHA1;
- SHA1.AddFD(fd.Fd(), fd.Size());
+ SHA1.AddFD(fd);
string const local_sha1 = SHA1.Result();
- if(local_sha1 == ServerSha1)
+ if(local_sha1 == ServerSha1)
{
- // we have the same sha1 as the server
+ // we have the same sha1 as the server so we are done here
if(Debug)
std::clog << "Package file is up-to-date" << std::endl;
- // set found to true, this will queue a pkgAcqIndexDiffs with
- // a empty availabe_patches
- found = true;
- }
- else
+ // list cleanup needs to know that this file as well as the already
+ // present index is ours, so we create an empty diff to save it for us
+ new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
+ ExpectedHash, ServerSha1, available_patches);
+ return true;
+ }
+ else
{
if(Debug)
std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl;
}
// we have something, queue the next diff
- if(found)
+ if(found)
{
// queue the diffs
string::size_type const last_space = Description.rfind(" ");
if(last_space != string::npos)
Description.erase(last_space, Description.size()-last_space);
- new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedHash, ServerSha1, available_patches);
+
+   /* decide if we should download patches one by one or in one go:
+      The first is good if the server merges patches, but many don't, so
+      client-based merging can be attempted, in which case the second is better.
+      "bad things" will happen if patches are merged on the server,
+      but client-side merging is attempted as well */
+ bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
+ if (pdiff_merge == true)
+ {
+ // reprepro adds this flag if it has merged patches on the server
+ std::string const precedence = Tags.FindS("X-Patch-Precedence");
+ pdiff_merge = (precedence != "merged");
+ }
+
+ if (pdiff_merge == false)
+ new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
+ ExpectedHash, ServerSha1, available_patches);
+ else
+ {
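+      // queue all diffs at once; each item gets the shared vector so that the
+      // last finished download can trigger the final merge via the rred method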
+ std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
+ for(size_t i = 0; i < available_patches.size(); ++i)
+ (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, RealURI, Description, Desc.ShortDesc, ExpectedHash,
+ available_patches[i], diffs);
+ }
+
Complete = false;
Status = StatDone;
Dequeue();
return false;
}
/*}}}*/
-void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
{
if(Debug)
- std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << std::endl
- << "Falling back to normal index file aquire" << std::endl;
+ std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
+ << "Falling back to normal index file acquire" << std::endl;
new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc,
ExpectedHash);
Dequeue();
}
/*}}}*/
-void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
string FinalFile;
FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
- // sucess in downloading the index
+ // success in downloading the index
// rename the index
FinalFile += string(".IndexDiff");
if(Debug)
Desc.Owner = this;
Desc.ShortDesc = ShortDesc;
- if(available_patches.size() == 0)
+ if(available_patches.empty() == true)
{
// we are done (yeah!)
Finish(true);
}
}
/*}}}*/
-void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
{
if(Debug)
- std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << std::endl
- << "Falling back to normal index file aquire" << std::endl;
+ std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
+ << "Falling back to normal index file acquire" << std::endl;
new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
ExpectedHash);
Finish();
if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
{
- Status = StatAuthError;
- ErrorText = _("MD5Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
Dequeue();
return;
}
FileFd fd(FinalFile, FileFd::ReadOnly);
SHA1Summation SHA1;
- SHA1.AddFD(fd.Fd(), fd.Size());
+ SHA1.AddFD(fd);
string local_sha1 = string(SHA1.Result());
if(Debug)
std::clog << "QueueNextDiff: "
// remove all patches until the next matching patch is found
// this requires the Index file to be ordered
for(vector<DiffInfo>::iterator I=available_patches.begin();
- available_patches.size() > 0 &&
+ available_patches.empty() == false &&
I != available_patches.end() &&
- (*I).sha1 != local_sha1;
- I++)
+ I->sha1 != local_sha1;
+ ++I)
{
available_patches.erase(I);
}
// error checking and falling back if no patch was found
- if(available_patches.size() == 0)
- {
+ if(available_patches.empty() == true)
+ {
Failed("", NULL);
return false;
}
// queue the right diff
- Desc.URI = string(RealURI) + ".diff/" + available_patches[0].file + ".gz";
+ Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz";
Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file);
return true;
}
/*}}}*/
-void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
string FinalFile;
FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
- // sucess in downloading a diff, enter ApplyDiff state
- if(State == StateFetchDiff)
- {
-
- if(Debug)
- std::clog << "Sending to gzip method: " << FinalFile << std::endl;
-
- string FileName = LookupTag(Message,"Filename");
- State = StateUnzipDiff;
- Local = true;
- Desc.URI = "gzip:" + FileName;
- DestFile += ".decomp";
- QueueURI(Desc);
- Mode = "gzip";
- return;
- }
-
- // sucess in downloading a diff, enter ApplyDiff state
- if(State == StateUnzipDiff)
+ // success in downloading a diff, enter ApplyDiff state
+ if(State == StateFetchDiff)
{
      // rred expects the patch as $FinalFile.ed
{
// remove the just applied patch
available_patches.erase(available_patches.begin());
+ unlink((FinalFile + ".ed").c_str());
// move into place
if(Debug)
chmod(FinalFile.c_str(),0644);
// see if there is more to download
- if(available_patches.size() > 0) {
+ if(available_patches.empty() == false) {
new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
ExpectedHash, ServerSha1, available_patches);
return Finish();
}
}
/*}}}*/
+// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
+pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
+ string const &URI, string const &URIDesc,
+ string const &ShortDesc, HashString const &ExpectedHash,
+ DiffInfo const &patch,
+ std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
+ : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+ patch(patch),allPatches(allPatches), State(StateFetchDiff)
+{
+
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(URI);
+
+ Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
+
+ Description = URIDesc;
+ Desc.Owner = this;
+ Desc.ShortDesc = ShortDesc;
+
+ Desc.URI = RealURI + ".diff/" + patch.file + ".gz";
+ Desc.Description = Description + " " + patch.file + string(".pdiff");
+ DestFile = _config->FindDir("Dir::State::lists") + "partial/";
+ DestFile += URItoFileName(RealURI + ".diff/" + patch.file);
+
+ if(Debug)
+ std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
+
+ QueueURI(Desc);
+}
+ /*}}}*/
+void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
+{
+ if(Debug)
+ std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+
+ // check if we are the first to fail, otherwise we are done here
+ State = StateDoneDiff;
+ for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+ I != allPatches->end(); ++I)
+ if ((*I)->State == StateErrorDiff)
+ return;
+
+ // first failure means we should fallback
+ State = StateErrorDiff;
+ std::clog << "Falling back to normal index file acquire" << std::endl;
+ new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
+ ExpectedHash);
+}
+ /*}}}*/
+void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
+ pkgAcquire::MethodConfig *Cnf)
+{
+ if(Debug)
+ std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
+
+ Item::Done(Message,Size,Md5Hash,Cnf);
+
+ string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+
+ if (State == StateFetchDiff)
+ {
+ // rred expects the patch as $FinalFile.ed.$patchname.gz
+ Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz");
+
+ // check if this is the last completed diff
+ State = StateDoneDiff;
+ for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+ I != allPatches->end(); ++I)
+ if ((*I)->State != StateDoneDiff)
+ {
+ if(Debug)
+ std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl;
+ return;
+ }
+
+ // this is the last completed diff, so we are ready to apply now
+ State = StateApplyDiff;
+
+ if(Debug)
+ std::clog << "Sending to rred method: " << FinalFile << std::endl;
+
+ Local = true;
+ Desc.URI = "rred:" + FinalFile;
+ QueueURI(Desc);
+ Mode = "rred";
+ return;
+ }
+ // success in download/apply all diffs, clean up
+ else if (State == StateApplyDiff)
+ {
+ // see if we really got the expected file
+ if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
+ {
+ RenameOnError(HashSumMismatch);
+ return;
+ }
+
+ // move the result into place
+ if(Debug)
+ std::clog << "Moving patched file in place: " << std::endl
+ << DestFile << " -> " << FinalFile << std::endl;
+ Rename(DestFile, FinalFile);
+ chmod(FinalFile.c_str(), 0644);
+
+ // otherwise lists cleanup will eat the file
+ DestFile = FinalFile;
+
+      // ensure the .ed patch files are gone regardless of list-cleanup
+ for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
+ I != allPatches->end(); ++I)
+ {
+ std::string patch = FinalFile + ".ed." + (*I)->patch.file + ".gz";
+ unlink(patch.c_str());
+ }
+
+ // all set and done
+ Complete = true;
+ if(Debug)
+ std::clog << "allDone: " << DestFile << "\n" << std::endl;
+ }
+}
+ /*}}}*/
// AcqIndex::AcqIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The package file is added to the queue and a second class is
HashString ExpectedHash, string comprExt)
: Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
{
+ if(comprExt.empty() == true)
+ {
+ // autoselect the compression method
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+ for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+ comprExt.append(*t).append(" ");
+ if (comprExt.empty() == false)
+ comprExt.erase(comprExt.end()-1);
+ }
+ CompressionExtension = comprExt;
+
+ Verify = true;
+
+ Init(URI, URIDesc, ShortDesc);
+}
+pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target,
+ HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
+ : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash)
+{
+ // autoselect the compression method
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+ CompressionExtension = "";
+ if (ExpectedHash.empty() == false)
+ {
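+      // with hashes from the Release file only offer compression types the
+      // archive actually provides (plus the uncompressed variant)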
+ for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+ if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true)
+ CompressionExtension.append(*t).append(" ");
+ }
+ else
+ {
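+      // without hashes we cannot know what is available, so try all
+      // configured compression types in order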
+ for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+ CompressionExtension.append(*t).append(" ");
+ }
+ if (CompressionExtension.empty() == false)
+ CompressionExtension.erase(CompressionExtension.end()-1);
+
+ // only verify non-optional targets, see acquire-item.h for a FIXME
+ // to make this more flexible
+ if (Target->IsOptional())
+ Verify = false;
+ else
+ Verify = true;
+
+ Init(Target->URI, Target->Description, Target->ShortDesc);
+}
+ /*}}}*/
+// AcqIndex::Init - deferred Constructor /*{{{*/
+void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) {
Decompression = false;
Erase = false;
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
DestFile += URItoFileName(URI);
- if(comprExt.empty())
- {
- // autoselect the compression method
- std::vector<std::string> types = APT::Configuration::getCompressionTypes();
- if (types.empty() == true)
- comprExt = "plain";
- else
- comprExt = "." + types[0];
- }
- CompressionExtension = ((comprExt == "plain" || comprExt == ".") ? "" : comprExt);
-
- Desc.URI = URI + CompressionExtension;
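+   // CompressionExtension is a space separated priority list: start with the
+   // first entry; Failed() will move on to the next one if the fetch fails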
+ std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
+ if (comprExt == "uncompressed")
+ Desc.URI = URI;
+ else
+ Desc.URI = URI + '.' + comprExt;
Desc.Description = URIDesc;
Desc.Owner = this;
Desc.ShortDesc = ShortDesc;
-
+
QueueURI(Desc);
}
/*}}}*/
if (_config->FindB("Acquire::GzipIndexes",false))
Final += ".gz";
+ string msg = "\nIndex-File: true";
+ // FIXME: this really should use "IndexTarget::IsOptional()" but that
+ // seems to be difficult without breaking ABI
+ if (ShortDesc().find("Translation") != 0)
+ msg += "\nFail-Ignore: true";
struct stat Buf;
- if (stat(Final.c_str(),&Buf) != 0)
- return "\nIndex-File: true";
- return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ if (stat(Final.c_str(),&Buf) == 0)
+ msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+
+ return msg;
}
/*}}}*/
void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
{
- std::vector<std::string> types = APT::Configuration::getCompressionTypes();
-
- for (std::vector<std::string>::const_iterator t = types.begin();
- t != types.end(); t++)
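+   // strip the compression type we just failed with from the priority list
+   // and requeue the index with the next one (if any is left)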
+ size_t const nextExt = CompressionExtension.find(' ');
+ if (nextExt != std::string::npos)
{
- // jump over all already tried compression types
- const unsigned int nameLen = Desc.URI.size() - (*t).size();
- if(Desc.URI.substr(nameLen) != *t)
- continue;
-
- // we want to try it with the next extension (and make sure to
- // not skip over the end)
- t++;
- if (t == types.end())
- break;
-
- // queue new download
- Desc.URI = Desc.URI.substr(0, nameLen) + *t;
- new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc,
- ExpectedHash, string(".").append(*t));
-
- Status = StatDone;
- Complete = false;
- Dequeue();
+ CompressionExtension = CompressionExtension.substr(nextExt+1);
+ Init(RealURI, Desc.Description, Desc.ShortDesc);
return;
}
// on decompression failure, remove bad versions in partial/
- if(Decompression && Erase) {
+ if (Decompression && Erase) {
string s = _config->FindDir("Dir::State::lists") + "partial/";
- s += URItoFileName(RealURI);
+ s.append(URItoFileName(RealURI));
unlink(s.c_str());
}
to the uncompressed version of the file. If this is so the file
is copied into the partial directory. In all other cases the file
is decompressed with a gzip uri. */
-void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
+void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,Hash,Cfg);
if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
{
- Status = StatAuthError;
- ErrorText = _("Hash Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
- ReportMirrorFailure("HashChecksumFailure");
+ RenameOnError(HashSumMismatch);
return;
}
+
+ /* Verify the index file for correctness (all indexes must
+ * have a Package field) (LP: #346386) (Closes: #627642) */
+ if (Verify == true)
+ {
+ FileFd fd(DestFile, FileFd::ReadOnly);
+ // Only test for correctness if the file is not empty (empty is ok)
+ if (fd.FileSize() > 0)
+ {
+ pkgTagSection sec;
+ pkgTagFile tag(&fd);
+
+ // all our current indexes have a field 'Package' in each section
+ if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false)
+ {
+ RenameOnError(InvalidFormat);
+ return;
+ }
+ }
+ }
+
// Done, move it into position
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);
Status = StatError;
ErrorText = "Method gave a blank filename";
}
-
- string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
+
+ std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
   // The file's timestamp matches
if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) {
   // get the binary name for the compression type in use
decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
if(decompProg.empty() == false);
- // flExtensions returns the full name if no extension is found
- // this is why we have this complicated compare operation here
- // FIMXE: add a new flJustExtension() that return "" if no
- // extension is found and use that above so that it can
- // be tested against ""
- else if(compExt == flNotDir(URI(Desc.URI).Path))
+ else if(compExt == "uncompressed")
decompProg = "copy";
else {
_error->Error("Unsupported extension: %s", compExt.c_str());
DestFile += ".decomp";
Desc.URI = decompProg + ":" + FileName;
QueueURI(Desc);
+
+ // FIXME: this points to a c++ string that goes out of scope
Mode = decompProg.c_str();
}
/*}}}*/
string URI,string URIDesc,string ShortDesc)
: pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
{
+}
+pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const *Target,
+ HashString const &ExpectedHash, indexRecords const *MetaIndexParser)
+ : pkgAcqIndex(Owner, Target, ExpectedHash, MetaIndexParser)
+{
}
/*}}}*/
// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/
struct stat Buf;
if (stat(Final.c_str(),&Buf) != 0)
- return "\nFail-Ignore: true";
- return "\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ return "\nFail-Ignore: true\nIndex-File: true";
+ return "\nFail-Ignore: true\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
// AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/
/* */
void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
{
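+   // a translation might only be provided in another compression type,
+   // so try the next one before considering the file as missing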
+ size_t const nextExt = CompressionExtension.find(' ');
+ if (nextExt != std::string::npos)
+ {
+ CompressionExtension = CompressionExtension.substr(nextExt+1);
+ Init(RealURI, Desc.Description, Desc.ShortDesc);
+ Status = StatIdle;
+ return;
+ }
+
if (Cnf->LocalOnly == true ||
StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
{
Dequeue();
return;
}
-
+
Item::Failed(Message,Cnf);
}
/*}}}*/
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
- struct stat Buf;
- if (stat(Final.c_str(),&Buf) == 0)
+ if (RealFileExists(Final) == true)
{
// File was already in place. It needs to be re-downloaded/verified
- // because Release might have changed, we do give it a differnt
+ // because Release might have changed, we do give it a different
// name than DestFile because otherwise the http method will
// send If-Range requests and there are too many broken servers
// out there that do not understand them
}
QueueURI(Desc);
+}
+ /*}}}*/
+pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
+{
+   // if the file was never queued, undo the file changes done in the constructor
+ if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false &&
+ LastGoodSig.empty() == false)
+ {
+ string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+ if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
+ Rename(LastGoodSig, Final);
+ }
+
}
/*}}}*/
// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
-void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5,
+void pkgAcqMetaSig::Done(string Message,unsigned long long Size,string MD5,
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,MD5,Cfg);
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
-void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*/
+void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,string Hash, /*{{{*/
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,Hash,Cfg);
// all cool, move Release file into place
Complete = true;
-
- string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(RealURI);
- Rename(DestFile,FinalFile);
- chmod(FinalFile.c_str(),0644);
- DestFile = FinalFile;
}
else
{
if (SigFile == "")
{
// There was no signature file, so we are finished. Download
- // the indexes without verification.
+      // the indexes and only do hashsum verification if possible
+ MetaIndexParser->Load(DestFile);
QueueIndexes(false);
}
else
Desc.URI = "gpgv:" + SigFile;
QueueURI(Desc);
Mode = "gpgv";
+ return;
}
}
+
+ if (Complete == true)
+ {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ if (SigFile == DestFile)
+ SigFile = FinalFile;
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+ DestFile = FinalFile;
+ }
}
/*}}}*/
void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/
{
string FinalFile = _config->FindDir("Dir::State::lists");
FinalFile += URItoFileName(RealURI);
+ if (SigFile == DestFile)
+ {
+ SigFile = FinalFile;
+ // constructor of pkgAcqMetaClearSig moved it out of the way,
+ // now move it back in on IMS hit for the 'old' file
+ string const OldClearSig = DestFile + ".reverify";
+ if (RealFileExists(OldClearSig) == true)
+ Rename(OldClearSig, FinalFile);
+ }
DestFile = FinalFile;
}
Complete = true;
// Download further indexes with verification
QueueIndexes(true);
+ // is it a clearsigned MetaIndex file?
+ if (DestFile == SigFile)
+ return;
+
// Done, move signature file into position
string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
URItoFileName(RealURI) + ".gpg";
/*}}}*/
void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
{
+#if 0
+ /* Reject invalid, existing Release files (LP: #346386) (Closes: #627642)
+ * FIXME: Disabled; it breaks unsigned repositories without hashes */
+ if (!verify && FileExists(DestFile) && !MetaIndexParser->Load(DestFile))
+ {
+ Status = StatError;
+ ErrorText = MetaIndexParser->ErrorText;
+ return;
+ }
+#endif
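+   // check if the Release file lists any Translation-* files at all, so we
+   // can decide later whether a missing translation is worth querying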
+ bool transInRelease = false;
+ {
+ std::vector<std::string> const keys = MetaIndexParser->MetaKeys();
+ for (std::vector<std::string>::const_iterator k = keys.begin(); k != keys.end(); ++k)
+      // FIXME: Feels wrong to check for a hardcoded string here, but what else could we do…
+ if (k->find("Translation-") != std::string::npos)
+ {
+ transInRelease = true;
+ break;
+ }
+ }
+
for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
Target != IndexTargets->end();
- Target++)
+ ++Target)
{
HashString ExpectedIndexHash;
- if (verify)
+ const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
+ bool compressedAvailable = false;
+ if (Record == NULL)
{
- const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
- if (!Record)
- {
- Status = StatAuthError;
- ErrorText = "Unable to find expected entry "
- + (*Target)->MetaKey + " in Meta-index file (malformed Release file?)";
- return;
- }
- ExpectedIndexHash = Record->Hash;
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
- std::cerr << "Queueing: " << (*Target)->URI << std::endl;
- std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
- }
- if (ExpectedIndexHash.empty())
- {
- Status = StatAuthError;
- ErrorText = "Unable to find hash sum for "
- + (*Target)->MetaKey + " in Meta-index file";
- return;
- }
+ if ((*Target)->IsOptional() == true)
+ {
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+ for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
+ if (MetaIndexParser->Exists((*Target)->MetaKey + "." + *t) == true)
+ {
+ compressedAvailable = true;
+ break;
+ }
+ }
+ else if (verify == true)
+ {
+ Status = StatAuthError;
+ strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str());
+ return;
+ }
+ }
+ else
+ {
+ ExpectedIndexHash = Record->Hash;
+ if (_config->FindB("Debug::pkgAcquire::Auth", false))
+ {
+ std::cerr << "Queueing: " << (*Target)->URI << std::endl;
+ std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
+ std::cerr << "For: " << Record->MetaKeyFilename << std::endl;
+ }
+ if (verify == true && ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false)
+ {
+ Status = StatAuthError;
+ strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
+ return;
+ }
+ }
+
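+      // optional targets (e.g. Translation files) are only queued if the
+      // Release file either knows nothing about translations or lists this
+      // file (or a compressed variant of it)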
+ if ((*Target)->IsOptional() == true)
+ {
+ if ((*Target)->IsSubIndex() == true)
+ new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description,
+ (*Target)->ShortDesc, ExpectedIndexHash);
+ else if (transInRelease == false || Record != NULL || compressedAvailable == true)
+ {
+ if (_config->FindB("Acquire::PDiffs",true) == true && transInRelease == true &&
+ MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)
+ new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
+ (*Target)->ShortDesc, ExpectedIndexHash);
+ else
+ new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
+ }
+ continue;
}
/* Queue Packages file (either diff or full packages files, depending
in the Meta-Index file. Ideal would be if pkgAcqDiffIndex would test this
   instead, but passing the required info to it is too much hassle */
if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
- MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true))
+ MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true))
new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
(*Target)->ShortDesc, ExpectedIndexHash);
else
- new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexHash);
+ new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser);
}
}
/*}}}*/
bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/
{
-// // Maybe this should be made available from above so we don't have
-// // to read and parse it every time?
-// pkgVendorList List;
-// List.ReadMainList();
-
-// const Vendor* Vndr = NULL;
-// for (std::vector<string>::const_iterator I = GPGVOutput.begin(); I != GPGVOutput.end(); I++)
-// {
-// string::size_type pos = (*I).find("VALIDSIG ");
-// if (_config->FindB("Debug::Vendor", false))
-// std::cerr << "Looking for VALIDSIG in \"" << (*I) << "\": pos " << pos
-// << std::endl;
-// if (pos != std::string::npos)
-// {
-// string Fingerprint = (*I).substr(pos+sizeof("VALIDSIG"));
-// if (_config->FindB("Debug::Vendor", false))
-// std::cerr << "Looking for \"" << Fingerprint << "\" in vendor..." <<
-// std::endl;
-// Vndr = List.FindVendor(Fingerprint) != "";
-// if (Vndr != NULL);
-// break;
-// }
-// }
string::size_type pos;
   // check for missing sigs (that were not fatal because otherwise we had
missingkeys += (Fingerprint);
}
if(!missingkeys.empty())
- _error->Warning("%s", string(msg+missingkeys).c_str());
+ _error->Warning("%s", (msg + missingkeys).c_str());
string Transformed = MetaIndexParser->GetExpectedDist();
// TRANSLATOR: The first %s is the URL of the bad Release file, the second is
// the time since when the file has been invalid - formatted in the same way as in
// the download progress display (e.g. 7d 3h 42min 1s)
- return _error->Error(_("Release file expired, ignoring %s (invalid since %s)"),
- RealURI.c_str(), TimeToStr(invalid_since).c_str());
+ return _error->Error(
+ _("Release file for %s is expired (invalid since %s). "
+ "Updates for this repository will not be applied."),
+ RealURI.c_str(), TimeToStr(invalid_since).c_str());
}
if (_config->FindB("Debug::pkgAcquire::Auth", false))
// pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)
{
if (AuthPass == true)
{
// gpgv method failed, if we have a good signature
- string LastGoodSigFile = _config->FindDir("Dir::State::lists") +
- "partial/" + URItoFileName(RealURI) + ".gpg.reverify";
+ string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI));
+ if (DestFile != SigFile)
+ LastGoodSigFile.append(".gpg");
+ LastGoodSigFile.append(".reverify");
+
if(FileExists(LastGoodSigFile))
{
- string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
- URItoFileName(RealURI) + ".gpg";
- Rename(LastGoodSigFile,VerifiedSigFile);
+ string VerifiedSigFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+ if (DestFile != SigFile)
+ VerifiedSigFile.append(".gpg");
+ Rename(LastGoodSigFile, VerifiedSigFile);
Status = StatTransientNetworkError;
- _error->Warning(_("A error occurred during the signature "
+ _error->Warning(_("An error occurred during the signature "
"verification. The repository is not updated "
- "and the previous index files will be used."
+ "and the previous index files will be used. "
"GPG error: %s: %s\n"),
Desc.Description.c_str(),
LookupTag(Message,"Message").c_str());
RunScripts("APT::Update::Auth-Failure");
return;
+ } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) {
+ /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */
+ _error->Error(_("GPG error: %s: %s"),
+ Desc.Description.c_str(),
+ LookupTag(Message,"Message").c_str());
+ return;
} else {
_error->Warning(_("GPG error: %s: %s"),
Desc.Description.c_str(),
ReportMirrorFailure("GPGFailure");
}
+ /* Always move the meta index, even if gpgv failed. This ensures
+ * that PackageFile objects are correctly filled in */
+ if (FileExists(DestFile)) {
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+ /* InRelease files become Release files, otherwise
+ * they would be considered as trusted later on */
+ if (SigFile == DestFile) {
+ RealURI = RealURI.replace(RealURI.rfind("InRelease"), 9,
+ "Release");
+ FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9,
+ "Release");
+ SigFile = FinalFile;
+ }
+ Rename(DestFile,FinalFile);
+ chmod(FinalFile.c_str(),0644);
+
+ DestFile = FinalFile;
+ }
+
// No Release file was present, or verification failed, so fall
// back to queueing Packages files without verification
QueueIndexes(false);
}
/*}}}*/
+pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/
+ string const &URI, string const &URIDesc, string const &ShortDesc,
+ string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc,
+ string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc,
+ const vector<struct IndexTarget*>* IndexTargets,
+ indexRecords* MetaIndexParser) :
+ pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser),
+ MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
+ MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc)
+{
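+   // a clearsigned file carries data and signature in one file,
+   // so the downloaded file acts as its own signature file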
+ SigFile = DestFile;
+
+   // keep the old InRelease around in case of transient network errors
+ string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+ if (RealFileExists(Final) == true)
+ {
+ string const LastGoodSig = DestFile + ".reverify";
+ Rename(Final,LastGoodSig);
+ }
+}
+ /*}}}*/
+pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
+{
+   // if the file was never queued, undo the file changes done in the constructor
+ if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false)
+ {
+ string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+ string const LastGoodSig = DestFile + ".reverify";
+ if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
+ Rename(LastGoodSig, Final);
+ }
+}
+ /*}}}*/
+// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
+// ---------------------------------------------------------------------
+// FIXME: this can go away once the InRelease file is used widely
+string pkgAcqMetaClearSig::Custom600Headers()
+{
+ string Final = _config->FindDir("Dir::State::lists");
+ Final += URItoFileName(RealURI);
+
+ struct stat Buf;
+ if (stat(Final.c_str(),&Buf) != 0)
+ {
+ Final = DestFile + ".reverify";
+ if (stat(Final.c_str(),&Buf) != 0)
+ return "\nIndex-File: true\nFail-Ignore: true\n";
+ }
+
+ return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+}
+ /*}}}*/
+void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
+{
+ if (AuthPass == false)
+ {
+ // Remove the 'old' InRelease file if we try Release.gpg now as otherwise
+      // the file will stay around and give a false-auth impression (CVE-2012-0214)
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile.append(URItoFileName(RealURI));
+ if (FileExists(FinalFile))
+ unlink(FinalFile.c_str());
+
+ new pkgAcqMetaSig(Owner,
+ MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
+ MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
+ IndexTargets, MetaIndexParser);
+ if (Cnf->LocalOnly == true ||
+ StringToBool(LookupTag(Message, "Transient-Failure"), false) == false)
+ Dequeue();
+ }
+ else
+ pkgAcqMetaIndex::Failed(Message, Cnf);
+}
+ /*}}}*/
// AcqArchive::AcqArchive - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* This just sets up the initial fetch environment and queues the first
_error->Error(_("I wasn't able to locate a file for the %s package. "
"This might mean you need to manually fix this package. "
"(due to missing arch)"),
- Version.ParentPkg().Name());
+ Version.ParentPkg().FullName().c_str());
return;
}
assumption here that all the available sources for this version share
the same extension.. */
// Skip not source sources, they do not have file fields.
- for (; Vf.end() == false; Vf++)
+ for (; Vf.end() == false; ++Vf)
{
if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
continue;
}
// check if we have one trusted source for the package. if so, switch
- // to "TrustedOnly" mode
- for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; i++)
+ // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode
+ bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false);
+ bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false);
+ bool seenUntrusted = false;
+ for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
{
pkgIndexFile *Index;
if (Sources->FindIndex(i.File(),Index) == false)
continue;
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
+
+ if (debugAuth == true)
std::cerr << "Checking index: " << Index->Describe()
- << "(Trusted=" << Index->IsTrusted() << ")\n";
- }
- if (Index->IsTrusted()) {
+ << "(Trusted=" << Index->IsTrusted() << ")" << std::endl;
+
+ if (Index->IsTrusted() == true)
+ {
Trusted = true;
- break;
+ if (allowUnauth == false)
+ break;
}
+ else
+ seenUntrusted = true;
}
   // "allow-unauthenticated" restores apt's old fetching behaviour
// that means that e.g. unauthenticated file:// uris are higher
// priority than authenticated http:// uris
- if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
+ if (allowUnauth == true && seenUntrusted == true)
Trusted = false;
// Select a source
if (QueueNext() == false && _error->PendingError() == false)
- _error->Error(_("I wasn't able to locate file for the %s package. "
- "This might mean you need to manually fix this package."),
- Version.ParentPkg().Name());
+ _error->Error(_("Can't find a source to download version '%s' of '%s'"),
+ Version.VerStr(), Version.ParentPkg().FullName(false).c_str());
}
/*}}}*/
// AcqArchive::QueueNext - Queue the next file source /*{{{*/
bool pkgAcqArchive::QueueNext()
{
string const ForceHash = _config->Find("Acquire::ForceHash");
- for (; Vf.end() == false; Vf++)
+ for (; Vf.end() == false; ++Vf)
{
// Ignore not source sources
if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
string PkgFile = Parse.FileName();
if (ForceHash.empty() == false)
{
- if(stringcasecmp(ForceHash, "sha256") == 0)
+ if(stringcasecmp(ForceHash, "sha512") == 0)
+ ExpectedHash = HashString("SHA512", Parse.SHA512Hash());
+ else if(stringcasecmp(ForceHash, "sha256") == 0)
ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
else if (stringcasecmp(ForceHash, "sha1") == 0)
ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
else
{
string Hash;
- if ((Hash = Parse.SHA256Hash()).empty() == false)
+ if ((Hash = Parse.SHA512Hash()).empty() == false)
+ ExpectedHash = HashString("SHA512", Hash);
+ else if ((Hash = Parse.SHA256Hash()).empty() == false)
ExpectedHash = HashString("SHA256", Hash);
else if ((Hash = Parse.SHA1Hash()).empty() == false)
ExpectedHash = HashString("SHA1", Hash);
Desc.URI = Index->ArchiveURI(PkgFile);
Desc.Description = Index->ArchiveInfo(Version);
Desc.Owner = this;
- Desc.ShortDesc = Version.ParentPkg().Name();
+ Desc.ShortDesc = Version.ParentPkg().FullName(true);
// See if we already have the file. (Legacy filenames)
FileSize = Version->Size;
if (stat(FinalFile.c_str(),&Buf) == 0)
{
// Make sure the size matches
- if ((unsigned)Buf.st_size == Version->Size)
+ if ((unsigned long long)Buf.st_size == Version->Size)
{
Complete = true;
Local = true;
if (stat(FinalFile.c_str(),&Buf) == 0)
{
// Make sure the size matches
- if ((unsigned)Buf.st_size == Version->Size)
+ if ((unsigned long long)Buf.st_size == Version->Size)
{
Complete = true;
Local = true;
return true;
}
- /* Hmm, we have a file and its size does not match, this shouldnt
+ /* Hmm, we have a file and its size does not match, this shouldn't
happen.. */
unlink(FinalFile.c_str());
}
if (stat(DestFile.c_str(),&Buf) == 0)
{
// Hmm, the partial file is too big, erase it
- if ((unsigned)Buf.st_size > Version->Size)
+ if ((unsigned long long)Buf.st_size > Version->Size)
unlink(DestFile.c_str());
else
PartialSize = Buf.st_size;
}
-
+
+ // Disables download of archives - useful if no real installation follows,
+ // e.g. if we are just interested in proposed installation order
+ if (_config->FindB("Debug::pkgAcqArchive::NoQueue", false) == true)
+ {
+ Complete = true;
+ Local = true;
+ Status = StatDone;
+ StoreFilename = DestFile = FinalFile;
+ return true;
+ }
+
// Create the item
Local = false;
- Desc.URI = Index->ArchiveURI(PkgFile);
- Desc.Description = Index->ArchiveInfo(Version);
- Desc.Owner = this;
- Desc.ShortDesc = Version.ParentPkg().Name();
QueueURI(Desc);
- Vf++;
+ ++Vf;
return true;
}
return false;
// AcqArchive::Done - Finished fetching /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqArchive::Done(string Message,unsigned long Size,string CalcHash,
+void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,CalcHash,Cfg);
// Check the size
if (Size != Version->Size)
{
- Status = StatError;
- ErrorText = _("Size mismatch");
+ RenameOnError(SizeMismatch);
return;
}
// Check the hash
if(ExpectedHash.toStr() != CalcHash)
{
- Status = StatError;
- ErrorText = _("Hash Sum mismatch");
- if(FileExists(DestFile))
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
return;
}
StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
{
// Vf = Version.FileList();
- while (Vf.end() == false) Vf++;
+ while (Vf.end() == false) ++Vf;
StoreFilename = string();
Item::Failed(Message,Cnf);
return;
/*}}}*/
// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
// ---------------------------------------------------------------------
-bool pkgAcqArchive::IsTrusted()
+APT_PURE bool pkgAcqArchive::IsTrusted()
{
return Trusted;
}
// ---------------------------------------------------------------------
/* The file is added to the queue */
pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
- unsigned long Size,string Dsc,string ShortDesc,
+ unsigned long long Size,string Dsc,string ShortDesc,
const string &DestDir, const string &DestFilename,
bool IsIndexFile) :
Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
if (stat(DestFile.c_str(),&Buf) == 0)
{
// Hmm, the partial file is too big, erase it
- if ((unsigned)Buf.st_size > Size)
+ if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
unlink(DestFile.c_str());
else
PartialSize = Buf.st_size;
// AcqFile::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash,
+void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash,
pkgAcquire::MethodConfig *Cnf)
{
Item::Done(Message,Size,CalcHash,Cnf);
// Check the hash
if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
{
- Status = StatError;
- ErrorText = _("Hash Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
return;
}