// Include Files /*{{{*/
#include <apt-pkg/acquire-item.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/vendorlist.h>
#include <apt-pkg/error.h>
// Acquire::Item::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcquire::Item::Done(string Message,unsigned long Size,string,
+void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,
pkgAcquire::MethodConfig *Cnf)
{
// We just downloaded something..
}
}
/*}}}*/
-
-
-// AcqDiffIndex::AcqDiffIndex - Constructor
+// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Get the DiffIndex file first and see if there are patches available.
 * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
 * patches.
 */
pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
string URI,string URIDesc,string ShortDesc,
- string ExpectedMD5)
- : Item(Owner), RealURI(URI), ExpectedMD5(ExpectedMD5), Description(URIDesc)
+ HashString ExpectedHash)
+ : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+ Description(URIDesc)
{
Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
QueueURI(Desc);
}
-
+ /*}}}*/
// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
/* The only header we use is the last-modified header. */
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
-
-
-bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)
+ /*}}}*/
+bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
{
if(Debug)
std::clog << "pkgAcqIndexDiffs::ParseIndexDiff() " << IndexDiffFile
if(TF.Step(Tags) == true)
{
- string local_sha1;
bool found = false;
DiffInfo d;
string size;
- string tmp = Tags.FindS("SHA1-Current");
+ string const tmp = Tags.FindS("SHA1-Current");
std::stringstream ss(tmp);
- ss >> ServerSha1;
+ ss >> ServerSha1 >> size;
+ unsigned long const ServerSize = atol(size.c_str());
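+ // SHA1-Current carries the hash and the size of the fully patched file;
+ // the size is reused below for the PDiffs::SizeLimit check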
FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
SHA1Summation SHA1;
SHA1.AddFD(fd.Fd(), fd.Size());
- local_sha1 = string(SHA1.Result());
+ string const local_sha1 = SHA1.Result();
if(local_sha1 == ServerSha1)
{
std::clog << "SHA1-Current: " << ServerSha1 << std::endl;
// check the history and see what patches we need
- string history = Tags.FindS("SHA1-History");
+ string const history = Tags.FindS("SHA1-History");
std::stringstream hist(history);
- while(hist >> d.sha1 >> size >> d.file)
+ while(hist >> d.sha1 >> size >> d.file)
{
- d.size = atoi(size.c_str());
// read until the first match is found
+ // from that point on, we probably need all diffs
if(d.sha1 == local_sha1)
found=true;
- // from that point on, we probably need all diffs
- if(found)
+ else if (found == false)
+ continue;
+
+ if(Debug)
+ std::clog << "Need to get diff: " << d.file << std::endl;
+ available_patches.push_back(d);
+ }
+
+ if (available_patches.empty() == false)
+ {
+ // patching with too many files is rather slow compared to a fast download
+ unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
+ if (fileLimit != 0 && fileLimit < available_patches.size())
{
- if(Debug)
- std::clog << "Need to get diff: " << d.file << std::endl;
- available_patches.push_back(d);
+ if (Debug)
+ std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
+ << ") so fallback to complete download" << std::endl;
+ return false;
+ }
+
+ // see if the patches are too big
+ found = false; // it was true above; reset it, it becomes true again once the first needed patch is seen
+ d = *available_patches.begin();
+ string const firstPatch = d.file;
+ unsigned long patchesSize = 0;
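+ // walk SHA1-Patches and add up the sizes of the patches we would download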
+ std::stringstream patches(Tags.FindS("SHA1-Patches"));
+ while(patches >> d.sha1 >> size >> d.file)
+ {
+ if (firstPatch == d.file)
+ found = true;
+ else if (found == false)
+ continue;
+
+ patchesSize += atol(size.c_str());
+ }
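+ // SizeLimit is a percentage of the target file size (default 100%),
+ // hence the division by 100 in the comparison below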
+ unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
+ if (sizeLimit > 0 && (sizeLimit/100) < patchesSize)
+ {
+ if (Debug)
+ std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
+ << ") so fallback to complete download" << std::endl;
+ return false;
}
}
}
if(found)
{
// queue the diffs
- unsigned int last_space = Description.rfind(" ");
+ string::size_type const last_space = Description.rfind(" ");
if(last_space != string::npos)
Description.erase(last_space, Description.size()-last_space);
new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedMD5, available_patches);
+ ExpectedHash, ServerSha1, available_patches);
Complete = false;
Status = StatDone;
Dequeue();
std::clog << "Can't find a patch in the index file" << std::endl;
return false;
}
-
-void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+ /*}}}*/
+void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
{
if(Debug)
std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << std::endl
<< "Falling back to normal index file aquire" << std::endl;
new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedMD5);
+ ExpectedHash);
Complete = false;
Status = StatDone;
Dequeue();
}
-
-void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash,
+ /*}}}*/
+void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
Dequeue();
return;
}
-
-
-
-// AcqIndexDiffs::AcqIndexDiffs - Constructor
+ /*}}}*/
+// AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The package diff is added to the queue. One object is constructed
* for each diff and the index
*/
pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
string URI,string URIDesc,string ShortDesc,
- string ExpectedMD5, vector<DiffInfo> diffs)
- : Item(Owner), RealURI(URI), ExpectedMD5(ExpectedMD5),
- available_patches(diffs)
+ HashString ExpectedHash,
+ string ServerSha1,
+ vector<DiffInfo> diffs)
+ : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+ available_patches(diffs), ServerSha1(ServerSha1)
{
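+ // ServerSha1 is the hash of the fully patched file; QueueNextDiff()
+ // uses it to detect when no further patches are needed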
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
QueueNextDiff();
}
}
-
-
-void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+ /*}}}*/
+void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
{
if(Debug)
std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << std::endl
<< "Falling back to normal index file aquire" << std::endl;
new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
- ExpectedMD5);
+ ExpectedHash);
Finish();
}
-
-
-// helper that cleans the item out of the fetcher queue
+ /*}}}*/
+// Finish - helper that cleans the item out of the fetcher queue /*{{{*/
void pkgAcqIndexDiffs::Finish(bool allDone)
{
// we restore the original name, this is required, otherwise
DestFile = _config->FindDir("Dir::State::lists");
DestFile += URItoFileName(RealURI);
- // do the final md5sum checking
- MD5Summation sum;
- FileFd Fd(DestFile, FileFd::ReadOnly);
- sum.AddFD(Fd.Fd(), Fd.Size());
- Fd.Close();
- string MD5 = (string)sum.Result();
-
- if (!ExpectedMD5.empty() && MD5 != ExpectedMD5)
+ if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
{
Status = StatAuthError;
ErrorText = _("MD5Sum mismatch");
Dequeue();
return;
}
-
-
-
-bool pkgAcqIndexDiffs::QueueNextDiff()
+ /*}}}*/
+bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
{
// calc sha1 of the just patched file
std::clog << "QueueNextDiff: "
<< FinalFile << " (" << local_sha1 << ")"<<std::endl;
+ // final file reached before all patches are applied
+ if(local_sha1 == ServerSha1)
+ {
+ Finish(true);
+ return true;
+ }
+
// remove all patches until the next matching patch is found
// this requires the Index file to be ordered
for(vector<DiffInfo>::iterator I=available_patches.begin();
return true;
}
-
-
-
-void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,
+ /*}}}*/
+void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
// see if there is more to download
if(available_patches.size() > 0) {
new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
- ExpectedMD5, available_patches);
+ ExpectedHash, ServerSha1, available_patches);
return Finish();
} else
return Finish(true);
}
}
-
-
+ /*}}}*/
// AcqIndex::AcqIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The package file is added to the queue and a second class is
instantiated to fetch the revision file */
pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
string URI,string URIDesc,string ShortDesc,
- string ExpectedMD5, string comprExt)
- : Item(Owner), RealURI(URI), ExpectedMD5(ExpectedMD5)
+ HashString ExpectedHash, string comprExt)
+ : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
{
Decompression = false;
Erase = false;
if(comprExt.empty())
{
// autoselect the compression method
- if(FileExists("/bin/bzip2"))
- CompressionExtension = ".bz2";
- else
- CompressionExtension = ".gz";
- } else {
- CompressionExtension = comprExt;
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+ if (types.empty() == true)
+ comprExt = "plain";
+ else
+ comprExt = "." + types[0];
}
- Desc.URI = URI + CompressionExtension;
+ CompressionExtension = ((comprExt == "plain" || comprExt == ".") ? "" : comprExt);
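+ // "plain" (no compression types configured) and "." both map to an empty
+ // extension, i.e. the uncompressed index is requested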
+
+ Desc.URI = URI + CompressionExtension;
Desc.Description = URIDesc;
Desc.Owner = this;
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
-
-void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
{
- // no .bz2 found, retry with .gz
- if(Desc.URI.substr(Desc.URI.size()-3) == "bz2") {
- Desc.URI = Desc.URI.substr(0,Desc.URI.size()-3) + "gz";
+ std::vector<std::string> types = APT::Configuration::getCompressionTypes();
+
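+ // figure out which compression type just failed and retry the
+ // download with the next type in the configured order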
+ for (std::vector<std::string>::const_iterator t = types.begin();
+ t != types.end(); t++)
+ {
+ // jump over all already tried compression types
+ const unsigned int nameLen = Desc.URI.size() - (*t).size();
+ if(Desc.URI.substr(nameLen) != *t)
+ continue;
- // retry with a gzip one
- new pkgAcqIndex(Owner, RealURI, Desc.Description,Desc.ShortDesc,
- ExpectedMD5, string(".gz"));
+ // we want to try it with the next extension (and make sure to
+ // not skip over the end)
+ t++;
+ if (t == types.end())
+ break;
+
+ // queue new download
+ Desc.URI = Desc.URI.substr(0, nameLen) + *t;
+ new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc,
+ ExpectedHash, string(".").append(*t));
+
Status = StatDone;
Complete = false;
Dequeue();
return;
- }
-
+ }
+
// on decompression failure, remove bad versions in partial/
if(Decompression && Erase) {
string s = _config->FindDir("Dir::State::lists") + "partial/";
Item::Failed(Message,Cnf);
}
-
-
+ /*}}}*/
// AcqIndex::Done - Finished a fetch /*{{{*/
// ---------------------------------------------------------------------
/* This goes through a number of states.. On the initial fetch the
to the uncompressed version of the file. If this is so the file
is copied into the partial directory. In all other cases the file
is decompressed with a gzip uri. */
-void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
+void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,MD5,Cfg);
+ Item::Done(Message,Size,Hash,Cfg);
if (Decompression == true)
{
if (_config->FindB("Debug::pkgAcquire::Auth", false))
{
- std::cerr << std::endl << RealURI << ": Computed MD5: " << MD5;
- std::cerr << " Expected MD5: " << ExpectedMD5 << std::endl;
- }
-
- if (MD5.empty())
- {
- MD5Summation sum;
- FileFd Fd(DestFile, FileFd::ReadOnly);
- sum.AddFD(Fd.Fd(), Fd.Size());
- Fd.Close();
- MD5 = (string)sum.Result();
+ std::cerr << std::endl << RealURI << ": Computed Hash: " << Hash;
+ std::cerr << " Expected Hash: " << ExpectedHash.toStr() << std::endl;
}
- if (!ExpectedMD5.empty() && MD5 != ExpectedMD5)
+ if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
{
Status = StatAuthError;
- ErrorText = _("MD5Sum mismatch");
+ ErrorText = _("Hash Sum mismatch");
Rename(DestFile,DestFile + ".FAILED");
return;
}
{
// The files timestamp matches
if (StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false) == true)
- {
- unlink(FileName.c_str());
return;
- }
Decompression = true;
Local = true;
DestFile += ".decomp";
// The files timestamp matches
if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
- {
- unlink(FileName.c_str());
return;
- }
if (FileName == DestFile)
Erase = true;
else
Local = true;
- string compExt = Desc.URI.substr(Desc.URI.size()-3);
- char *decompProg;
- if(compExt == "bz2")
- decompProg = "bzip2";
- else if(compExt == ".gz")
- decompProg = "gzip";
+ string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
+ string decompProg;
+
+ // get the binary name for the used compression type
+ decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
+ if(decompProg.empty() == false)
+ ; // the configured program is used as-is (intentionally empty branch)
+ // flExtension returns the full name if no extension is found,
+ // which is why we have this complicated compare operation here.
+ // FIXME: add a new flJustExtension() that returns "" if no
+ // extension is found and use that above so that it can
+ // be tested against ""
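+ // a file without a compression extension needs no unpacking, so hand
+ // it to the "copy" method instead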
+ else if(compExt == flNotDir(URI(Desc.URI).Path))
+ decompProg = "copy";
else {
_error->Error("Unsupported extension: %s", compExt.c_str());
return;
Decompression = true;
DestFile += ".decomp";
- Desc.URI = string(decompProg) + ":" + FileName;
+ Desc.URI = decompProg + ":" + FileName;
QueueURI(Desc);
- Mode = decompProg;
+ Mode = decompProg.c_str();
}
-
+ /*}}}*/
// AcqIndexTrans::pkgAcqIndexTrans - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The Translation file is added to the queue */
pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
- string URI,string URIDesc,string ShortDesc) :
- pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, "", "")
+ string URI,string URIDesc,string ShortDesc)
+ : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
{
}
-
/*}}}*/
// AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/
// ---------------------------------------------------------------------
Item::Failed(Message,Cnf);
}
/*}}}*/
-
-pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,
+pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/
string URI,string URIDesc,string ShortDesc,
string MetaIndexURI, string MetaIndexURIDesc,
string MetaIndexShortDesc,
Desc.Owner = this;
Desc.ShortDesc = ShortDesc;
Desc.URI = URI;
-
string Final = _config->FindDir("Dir::State::lists");
Final += URItoFileName(RealURI);
struct stat Buf;
if (stat(Final.c_str(),&Buf) == 0)
{
- // File was already in place. It needs to be re-verified
- // because Release might have changed, so Move it into partial
- Rename(Final,DestFile);
+ // File was already in place. It needs to be re-downloaded/verified
+ // because Release might have changed, so we give it a different
+ // name than DestFile because otherwise the http method will
+ // send If-Range requests and there are too many broken servers
+ // out there that do not understand them
+ LastGoodSig = DestFile+".reverify";
+ Rename(Final,LastGoodSig);
}
QueueURI(Desc);
string pkgAcqMetaSig::Custom600Headers()
{
struct stat Buf;
- if (stat(DestFile.c_str(),&Buf) != 0)
+ if (stat(LastGoodSig.c_str(),&Buf) != 0)
return "\nIndex-File: true";
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
Complete = true;
+ // on an IMS hit, put the last known good file back in place (it will
+ // be re-verified); otherwise do nothing, the new file is already in DestFile
+ if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
+ Rename(LastGoodSig, DestFile);
+
// queue a pkgAcqMetaIndex to be verified against the sig we just retrieved
new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
DestFile, IndexTargets, MetaIndexParser);
}
/*}}}*/
-void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
{
string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
Item::Failed(Message,Cnf);
// move the sigfile back on transient network failures
if(FileExists(DestFile))
- Rename(DestFile,Final);
+ Rename(LastGoodSig,Final);
// set the status back to StatTransientNetworkError, Item::Failed likes to reset it
Status = pkgAcquire::Item::StatTransientNetworkError;
Item::Failed(Message,Cnf);
}
-
-pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner,
+ /*}}}*/
+pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/
string URI,string URIDesc,string ShortDesc,
string SigFile,
const vector<struct IndexTarget*>* IndexTargets,
QueueURI(Desc);
}
-
/*}}}*/
// pkgAcqMetaIndex::Custom600Headers - Insert custom request headers /*{{{*/
// ---------------------------------------------------------------------
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
-
-void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string MD5,
+ /*}}}*/
+void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*/
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,MD5,Cfg);
+ Item::Done(Message,Size,Hash,Cfg);
// MetaIndexes are done in two passes: one to download the
// metaindex with an appropriate method, and a second to verify it
}
}
}
-
-void pkgAcqMetaIndex::RetrievalDone(string Message)
+ /*}}}*/
+void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/
{
// We have just finished downloading a Release file (it is not
// verified yet)
chmod(FinalFile.c_str(),0644);
DestFile = FinalFile;
}
-
-void pkgAcqMetaIndex::AuthDone(string Message)
+ /*}}}*/
+void pkgAcqMetaIndex::AuthDone(string Message) /*{{{*/
{
// At this point, the gpgv method has succeeded, so there is a
// valid signature from a key in the trusted keyring. We
Rename(SigFile,VerifiedSigFile);
chmod(VerifiedSigFile.c_str(),0644);
}
-
-void pkgAcqMetaIndex::QueueIndexes(bool verify)
+ /*}}}*/
+void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
{
for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
Target != IndexTargets->end();
Target++)
{
- string ExpectedIndexMD5;
+ HashString ExpectedIndexHash;
if (verify)
{
const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
+ (*Target)->MetaKey + " in Meta-index file (malformed Release file?)";
return;
}
- ExpectedIndexMD5 = Record->MD5Hash;
+ ExpectedIndexHash = Record->Hash;
if (_config->FindB("Debug::pkgAcquire::Auth", false))
{
std::cerr << "Queueing: " << (*Target)->URI << std::endl;
- std::cerr << "Expected MD5: " << ExpectedIndexMD5 << std::endl;
+ std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
}
- if (ExpectedIndexMD5.empty())
+ if (ExpectedIndexHash.empty())
{
Status = StatAuthError;
- ErrorText = "Unable to find MD5 sum for "
+ ErrorText = "Unable to find hash sum for "
+ (*Target)->MetaKey + " in Meta-index file";
return;
}
}
-
- // Queue Packages file (either diff or full packages files, depending
- // on the users option)
- if(_config->FindB("Acquire::PDiffs",true) == true)
+
+ /* Queue the Packages file (either diff or full packages file, depending
+ on the user's option) - we also check if the PDiff Index file is listed
+ in the Meta-Index file. Ideally pkgAcqDiffIndex would test this
+ instead, but passing the required info to it is too much hassle */
+ if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
+ MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true))
new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexMD5);
- else
+ (*Target)->ShortDesc, ExpectedIndexHash);
+ else
new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
- (*Target)->ShortDesc, ExpectedIndexMD5);
+ (*Target)->ShortDesc, ExpectedIndexHash);
}
}
-
-bool pkgAcqMetaIndex::VerifyVendor(string Message)
+ /*}}}*/
+bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/
{
// // Maybe this should be made available from above so we don't have
// // to read and parse it every time?
return true;
}
- /*}}}*/
-// pkgAcqMetaIndex::Failed - no Release file present or no signature
-// file present /*{{{*/
+ /*}}}*/
+// pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/
// ---------------------------------------------------------------------
/* */
void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
// back to queueing Packages files without verification
QueueIndexes(false);
}
-
/*}}}*/
-
// AcqArchive::AcqArchive - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* This just sets up the initial fetch environment and queues the first
the archive is already available in the cache and stashes the hash for
checking later. */
bool pkgAcqArchive::QueueNext()
-{
+{
+ string const ForceHash = _config->Find("Acquire::ForceHash");
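+ // Acquire::ForceHash lets the user pin the hash algorithm used for
+ // verification; otherwise the strongest hash offered by the index is
+ // preferred (SHA256, then SHA1, then MD5)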
for (; Vf.end() == false; Vf++)
{
// Ignore not source sources
return false;
string PkgFile = Parse.FileName();
- MD5 = Parse.MD5Hash();
+ if (ForceHash.empty() == false)
+ {
+ if(stringcasecmp(ForceHash, "sha256") == 0)
+ ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
+ else if (stringcasecmp(ForceHash, "sha1") == 0)
+ ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
+ else
+ ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+ }
+ else
+ {
+ string Hash;
+ if ((Hash = Parse.SHA256Hash()).empty() == false)
+ ExpectedHash = HashString("SHA256", Hash);
+ else if ((Hash = Parse.SHA1Hash()).empty() == false)
+ ExpectedHash = HashString("SHA1", Hash);
+ else
+ ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
+ }
if (PkgFile.empty() == true)
return _error->Error(_("The package index files are corrupted. No Filename: "
"field for package %s."),
// AcqArchive::Done - Finished fetching /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash,
+void pkgAcqArchive::Done(string Message,unsigned long Size,string CalcHash,
pkgAcquire::MethodConfig *Cfg)
{
- Item::Done(Message,Size,Md5Hash,Cfg);
+ Item::Done(Message,Size,CalcHash,Cfg);
// Check the size
if (Size != Version->Size)
return;
}
- // Check the md5
- if (Md5Hash.empty() == false && MD5.empty() == false)
+ // Check the hash
+ if(ExpectedHash.toStr() != CalcHash)
{
- if (Md5Hash != MD5)
- {
- Status = StatError;
- ErrorText = _("MD5Sum mismatch");
- if(FileExists(DestFile))
- Rename(DestFile,DestFile + ".FAILED");
- return;
- }
+ Status = StatError;
+ ErrorText = _("Hash Sum mismatch");
+ if(FileExists(DestFile))
+ Rename(DestFile,DestFile + ".FAILED");
+ return;
}
// Grab the output filename
}
}
/*}}}*/
-// AcqArchive::IsTrusted - Determine whether this archive comes from a
-// trusted source /*{{{*/
+// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
// ---------------------------------------------------------------------
bool pkgAcqArchive::IsTrusted()
{
return Trusted;
}
-
+ /*}}}*/
// AcqArchive::Finished - Fetching has finished, tidy up /*{{{*/
// ---------------------------------------------------------------------
/* */
StoreFilename = string();
}
/*}}}*/
-
// AcqFile::pkgAcqFile - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* The file is added to the queue */
-pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string MD5,
+pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
unsigned long Size,string Dsc,string ShortDesc,
- const string &DestDir, const string &DestFilename) :
- Item(Owner), Md5Hash(MD5)
+ const string &DestDir, const string &DestFilename,
+ bool IsIndexFile) :
+ Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
{
Retries = _config->FindI("Acquire::Retries",0);
// AcqFile::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqFile::Done(string Message,unsigned long Size,string MD5,
+void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash,
pkgAcquire::MethodConfig *Cnf)
{
- // Check the md5
- if (Md5Hash.empty() == false && MD5.empty() == false)
+ Item::Done(Message,Size,CalcHash,Cnf);
+
+ // Check the hash
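+ // (an empty ExpectedHash means the caller did not ask for verification)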
+ if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
{
- if (Md5Hash != MD5)
- {
- Status = StatError;
- ErrorText = "MD5Sum mismatch";
- Rename(DestFile,DestFile + ".FAILED");
- return;
- }
+ Status = StatError;
+ ErrorText = "Hash Sum mismatch";
+ Rename(DestFile,DestFile + ".FAILED");
+ return;
}
- Item::Done(Message,Size,MD5,Cnf);
-
string FileName = LookupTag(Message,"Filename");
if (FileName.empty() == true)
{
Item::Failed(Message,Cnf);
}
/*}}}*/
+// AcqFile::Custom600Headers - Insert custom request headers /*{{{*/
+// ---------------------------------------------------------------------
+/* The only header we use is the last-modified header. */
+string pkgAcqFile::Custom600Headers()
+{
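+ // the Index-File flag lets the transport method apply its index-specific
+ // handling (e.g. cache control)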
+ if (IsIndexFile)
+ return "\nIndex-File: true";
+ return "";
+}
+ /*}}}*/