X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/2dafadd9d8b28dbe3d4a489764c67db90e70a29d..4a3b5e9d609ad7c5437fa34c20f8404d07f8d11b:/apt-pkg/acquire-item.cc

diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index cbccfbfae..d2aca597e 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -13,11 +13,9 @@
    ##################################################################### */
                                                                         /*}}}*/
 // Include Files                                                        /*{{{*/
-#ifdef __GNUG__
-#pragma implementation "apt-pkg/acquire-item.h"
-#endif
 #include
 #include
+#include
 #include
 #include
 #include
@@ -98,7 +96,7 @@ void pkgAcquire::Item::Start(string /*Message*/,unsigned long Size)
 // Acquire::Item::Done - Item downloaded OK                             /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-void pkgAcquire::Item::Done(string Message,unsigned long Size,string,
+void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,
                             pkgAcquire::MethodConfig *Cnf)
 {
    // We just downloaded something..
@@ -134,9 +132,7 @@ void pkgAcquire::Item::Rename(string From,string To)
    }
 }
                                                                         /*}}}*/
-
-
-// AcqDiffIndex::AcqDiffIndex - Constructor
+// AcqDiffIndex::AcqDiffIndex - Constructor                             /*{{{*/
 // ---------------------------------------------------------------------
 /* Get the DiffIndex file first and see if there are patches availabe
  * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
@@ -145,8 +141,9 @@
  */
 pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
                                  string URI,string URIDesc,string ShortDesc,
-                                 string ExpectedMD5)
-   : Item(Owner), RealURI(URI), ExpectedMD5(ExpectedMD5), Description(URIDesc)
+                                 HashString ExpectedHash)
+   : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
+     Description(URIDesc)
 {
 
    Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
@@ -186,7 +183,7 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
    QueueURI(Desc);
 
 }
-
+                                                                        /*}}}*/
 // AcqIndex::Custom600Headers - Insert custom request headers           /*{{{*/
 // ---------------------------------------------------------------------
 /* The only header we use is the last-modified header. */
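
The recurring change in this diff is the switch from passing bare MD5 strings around to the HashString wrapper, which carries the hash type together with the value. Below is a minimal usage sketch, not part of the diff: the <apt-pkg/hashes.h> header name, the sample digest and the file path are assumptions, while the calls themselves (the two-argument constructor, empty(), toStr(), VerifyFile()) are the ones the hunks in this file use. Building it requires linking against libapt-pkg.

#include <apt-pkg/hashes.h>   // assumed header for HashString
#include <iostream>
#include <string>

int main()
{
   // type + value, as built in pkgAcqArchive::QueueNext() further down
   HashString Expected("SHA256", "0123abcd...");            // placeholder digest

   if (Expected.empty() == true)
      std::cout << "no hash known, verification is skipped" << std::endl;

   // string comparison, as done in pkgAcqIndex::Done()
   std::string const Calculated = "SHA256:0123abcd...";     // what a method would report
   if (Expected.toStr() != Calculated)
      std::cout << "Hash Sum mismatch" << std::endl;

   // direct file check, as done in pkgAcqIndexDiffs::Finish()
   if (Expected.VerifyFile("/var/lib/apt/lists/example_Packages") == false)
      std::cout << "file does not match the expected hash" << std::endl;
   return 0;
}
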
@@ -204,9 +201,8 @@ string pkgAcqDiffIndex::Custom600Headers()
 
    return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
 }
-
-
-bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)
+                                                                        /*}}}*/
+bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)              /*{{{*/
 {
    if(Debug)
       std::clog << "pkgAcqIndexDiffs::ParseIndexDiff() " << IndexDiffFile
@@ -223,19 +219,19 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)
 
    if(TF.Step(Tags) == true)
    {
-      string local_sha1;
       bool found = false;
       DiffInfo d;
       string size;
 
-      string tmp = Tags.FindS("SHA1-Current");
+      string const tmp = Tags.FindS("SHA1-Current");
       std::stringstream ss(tmp);
-      ss >> ServerSha1;
+      ss >> ServerSha1 >> size;
+      unsigned long const ServerSize = atol(size.c_str());
 
       FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
       SHA1Summation SHA1;
       SHA1.AddFD(fd.Fd(), fd.Size());
-      local_sha1 = string(SHA1.Result());
+      string const local_sha1 = SHA1.Result();
 
       if(local_sha1 == ServerSha1)
      {
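
ParseDiffIndex compares the SHA1 of the Packages file already on disk against the SHA1-Current field of the downloaded diff Index. A small standalone sketch of that local hashing step, using the same FileFd/SHA1Summation calls as the hunk above (the header names and the file path are assumptions; needs libapt-pkg):

#include <apt-pkg/fileutl.h>   // FileFd (assumed header)
#include <apt-pkg/sha1.h>      // SHA1Summation (assumed header)
#include <iostream>
#include <string>

int main()
{
   // assumed example path; the real code uses the list file for the target index
   std::string const CurrentPackagesFile =
      "/var/lib/apt/lists/example_dists_sid_main_binary-amd64_Packages";

   FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
   SHA1Summation SHA1;
   SHA1.AddFD(fd.Fd(), fd.Size());
   std::string const local_sha1 = SHA1.Result();   // hex digest of the local file

   std::cout << "local SHA1: " << local_sha1 << std::endl;
   return 0;
}
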
@@ -252,62 +248,99 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)
            std::clog << "SHA1-Current: " << ServerSha1 << std::endl;
 
          // check the historie and see what patches we need
-         string history = Tags.FindS("SHA1-History");
+         string const history = Tags.FindS("SHA1-History");
          std::stringstream hist(history);
-         while(hist >> d.sha1 >> size >> d.file)
+         while(hist >> d.sha1 >> size >> d.file)
          {
-            d.size = atoi(size.c_str());
             // read until the first match is found
+            // from that point on, we probably need all diffs
             if(d.sha1 == local_sha1)
                found=true;
-            // from that point on, we probably need all diffs
-            if(found)
+            else if (found == false)
+               continue;
+
+            if(Debug)
+               std::clog << "Need to get diff: " << d.file << std::endl;
+            available_patches.push_back(d);
+         }
+
+         if (available_patches.empty() == false)
+         {
+            // patching with too many files is rather slow compared to a fast download
+            unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
+            if (fileLimit != 0 && fileLimit < available_patches.size())
+            {
+               if (Debug)
+                  std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
+                        << ") so fallback to complete download" << std::endl;
+               return false;
+            }
+
+            // see if the patches are too big
+            found = false; // it was true and it will be true again at the end
+            d = *available_patches.begin();
+            string const firstPatch = d.file;
+            unsigned long patchesSize = 0;
+            std::stringstream patches(Tags.FindS("SHA1-Patches"));
+            while(patches >> d.sha1 >> size >> d.file)
+            {
+               if (firstPatch == d.file)
+                  found = true;
+               else if (found == false)
+                  continue;
+
+               patchesSize += atol(size.c_str());
+            }
+            unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
+            if (sizeLimit > 0 && (sizeLimit/100) < patchesSize)
             {
-               if(Debug)
-                  std::clog << "Need to get diff: " << d.file << std::endl;
-               available_patches.push_back(d);
+               if (Debug)
+                  std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
+                        << ") so fallback to complete download" << std::endl;
+               return false;
             }
          }
       }
 
-      // no information how to get the patches, bail out
-      if(!found)
-      {
-         if(Debug)
-            std::clog << "Can't find a patch in the index file" << std::endl;
-         // Failed will queue a big package file
-         Failed("", NULL);
-      }
-      else
+      // we have something, queue the next diff
+      if(found)
       {
          // queue the diffs
+         string::size_type const last_space = Description.rfind(" ");
+         if(last_space != string::npos)
+            Description.erase(last_space, Description.size()-last_space);
         new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
-                              ExpectedMD5, available_patches);
+                              ExpectedHash, available_patches);
         Complete = false;
         Status = StatDone;
         Dequeue();
         return true;
      }
    }
-   
+
+   // Nothing found, report and return false
+   // Failing here is ok, if we return false later, the full
+   // IndexFile is queued
+   if(Debug)
+      std::clog << "Can't find a patch in the index file" << std::endl;
+
    return false;
 }
-
-void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+                                                                        /*}}}*/
+void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)      /*{{{*/
 {
    if(Debug)
       std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << std::endl
                 << "Falling back to normal index file aquire" << std::endl;
 
    new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc,
-                   ExpectedMD5);
+                   ExpectedHash);
 
    Complete = false;
    Status = StatDone;
    Dequeue();
 }
-
-void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash,
+                                                                        /*}}}*/
+void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash,    /*{{{*/
                            pkgAcquire::MethodConfig *Cnf)
 {
    if(Debug)
@@ -336,18 +369,17 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash,
    Dequeue();
    return;
 }
-
-
-
-// AcqIndexDiffs::AcqIndexDiffs - Constructor
+                                                                        /*}}}*/
+// AcqIndexDiffs::AcqIndexDiffs - Constructor                           /*{{{*/
 // ---------------------------------------------------------------------
 /* The package diff is added to the queue. one object is constructed
  * for each diff and the index
  */
 pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
                                    string URI,string URIDesc,string ShortDesc,
-                                   string ExpectedMD5, vector<DiffInfo> diffs)
-   : Item(Owner), RealURI(URI), ExpectedMD5(ExpectedMD5),
+                                   HashString ExpectedHash,
+                                   vector<DiffInfo> diffs)
+   : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash),
      available_patches(diffs)
 {
 
@@ -356,7 +388,7 @@ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
 
    Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
 
-   Desc.Description = URIDesc;
+   Description = URIDesc;
    Desc.Owner = this;
    Desc.ShortDesc = ShortDesc;
 
@@ -372,20 +404,18 @@ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
       QueueNextDiff();
    }
 }
-
-
-void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+                                                                        /*}}}*/
+void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf)     /*{{{*/
 {
    if(Debug)
       std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << std::endl
                 << "Falling back to normal index file aquire" << std::endl;
    new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc,
-                   ExpectedMD5);
+                   ExpectedHash);
    Finish();
 }
-
-
-// helper that cleans the item out of the fetcher queue
+                                                                        /*}}}*/
+// Finish - helper that cleans the item out of the fetcher queue        /*{{{*/
 void pkgAcqIndexDiffs::Finish(bool allDone)
 {
    // we restore the original name, this is required, otherwise
@@ -395,14 +425,7 @@ void pkgAcqIndexDiffs::Finish(bool allDone)
       DestFile = _config->FindDir("Dir::State::lists");
       DestFile += URItoFileName(RealURI);
 
-      // do the final md5sum checking
-      MD5Summation sum;
-      FileFd Fd(DestFile, FileFd::ReadOnly);
-      sum.AddFD(Fd.Fd(), Fd.Size());
-      Fd.Close();
-      string MD5 = (string)sum.Result();
-
-      if (!ExpectedMD5.empty() && MD5 != ExpectedMD5)
+      if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
      {
         Status = StatAuthError;
         ErrorText = _("MD5Sum mismatch");
@@ -427,10 +450,8 @@ void pkgAcqIndexDiffs::Finish(bool allDone)
    Dequeue();
    return;
 }
-
-
-
-bool pkgAcqIndexDiffs::QueueNextDiff()
+                                                                        /*}}}*/
+bool pkgAcqIndexDiffs::QueueNextDiff()                                  /*{{{*/
 {
 
    // calc sha1 of the just patched file
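
The two new limits above decide when pdiffs are not worth it: more patch files than Acquire::PDiffs::FileLimit, or a combined patch size above Acquire::PDiffs::SizeLimit percent of the full index, fall back to a complete download. A standalone stand-in for that decision (function name and parameters are invented for the sketch; the config keys and defaults are the ones in the hunk):

#include <cstddef>
#include <iostream>

// Decide whether applying the pdiffs is cheaper than fetching the whole index.
static bool UsePatches(std::size_t patchCount, unsigned long patchesSize,
                       unsigned long serverSize,
                       unsigned long fileLimit /* Acquire::PDiffs::FileLimit, 0 = no limit */,
                       unsigned long sizeLimit /* Acquire::PDiffs::SizeLimit, percent of serverSize */)
{
   if (fileLimit != 0 && fileLimit < patchCount)
      return false;                           // too many patches, full download is faster
   unsigned long const limitBytes = serverSize * sizeLimit;
   if (limitBytes > 0 && (limitBytes / 100) < patchesSize)
      return false;                           // patches sum up to more than sizeLimit% of the index
   return true;
}

int main()
{
   // 12 patches, 300 KB of patches, 4 MB index, no file limit, 100% size limit
   std::cout << (UsePatches(12, 300*1024, 4*1024*1024, 0, 100) ? "patch" : "full download")
             << std::endl;
   return 0;
}
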
@@ -465,8 +486,7 @@ bool pkgAcqIndexDiffs::QueueNextDiff()
 
    // queue the right diff
    Desc.URI = string(RealURI) + ".diff/" + available_patches[0].file + ".gz";
-   Desc.Description = available_patches[0].file + string(".pdiff");
-
+   Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
    DestFile = _config->FindDir("Dir::State::lists") + "partial/";
    DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file);
 
@@ -477,10 +497,8 @@ bool pkgAcqIndexDiffs::QueueNextDiff()
 
    return true;
 }
-
-
-
-void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,
+                                                                        /*}}}*/
+void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,   /*{{{*/
                             pkgAcquire::MethodConfig *Cnf)
 {
    if(Debug)
@@ -540,26 +558,26 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash,
                    << DestFile << " -> " << FinalFile << std::endl;
       }
       Rename(DestFile,FinalFile);
+      chmod(FinalFile.c_str(),0644);
 
       // see if there is more to download
       if(available_patches.size() > 0) {
         new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
-                              ExpectedMD5, available_patches);
+                              ExpectedHash, available_patches);
         return Finish();
       } else
         return Finish(true);
    }
 }
-
-
+                                                                        /*}}}*/
 // AcqIndex::AcqIndex - Constructor                                     /*{{{*/
 // ---------------------------------------------------------------------
 /* The package file is added to the queue and a second class is
    instantiated to fetch the revision file */
 pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
                          string URI,string URIDesc,string ShortDesc,
-                         string ExpectedMD5, string comprExt)
-   : Item(Owner), RealURI(URI), ExpectedMD5(ExpectedMD5)
+                         HashString ExpectedHash, string comprExt)
+   : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash)
 {
    Decompression = false;
    Erase = false;
"" : comprExt); + + Desc.URI = URI + CompressionExtension; Desc.Description = URIDesc; Desc.Owner = this; @@ -601,27 +620,45 @@ string pkgAcqIndex::Custom600Headers() return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); } /*}}}*/ - -void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ { - // no .bz2 found, retry with .gz - if(Desc.URI.substr(Desc.URI.size()-3) == "bz2") { - Desc.URI = Desc.URI.substr(0,Desc.URI.size()-3) + "gz"; + std::vector types = APT::Configuration::getCompressionTypes(); + + for (std::vector::const_iterator t = types.begin(); + t != types.end(); t++) + { + // jump over all already tried compression types + const unsigned int nameLen = Desc.URI.size() - (*t).size(); + if(Desc.URI.substr(nameLen) != *t) + continue; - // retry with a gzip one - new pkgAcqIndex(Owner, RealURI, Desc.Description,Desc.ShortDesc, - ExpectedMD5, string(".gz")); + // we want to try it with the next extension (and make sure to + // not skip over the end) + t++; + if (t == types.end()) + break; + + // queue new download + Desc.URI = Desc.URI.substr(0, nameLen) + *t; + new pkgAcqIndex(Owner, RealURI, Desc.Description, Desc.ShortDesc, + ExpectedHash, string(".").append(*t)); + Status = StatDone; Complete = false; Dequeue(); return; } - + // on decompression failure, remove bad versions in partial/ + if(Decompression && Erase) { + string s = _config->FindDir("Dir::State::lists") + "partial/"; + s += URItoFileName(RealURI); + unlink(s.c_str()); + } + Item::Failed(Message,Cnf); } - - + /*}}}*/ // AcqIndex::Done - Finished a fetch /*{{{*/ // --------------------------------------------------------------------- /* This goes through a number of states.. On the initial fetch the @@ -629,32 +666,23 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) to the uncompressed version of the file. If this is so the file is copied into the partial directory. In all other cases the file is decompressed with a gzip uri. 
@@ -629,32 +666,23 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
    to the uncompressed version of the file. If this is so the file
    is copied into the partial directory. In all other cases the file
    is decompressed with a gzip uri. */
-void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
+void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
                        pkgAcquire::MethodConfig *Cfg)
 {
-   Item::Done(Message,Size,MD5,Cfg);
+   Item::Done(Message,Size,Hash,Cfg);
 
    if (Decompression == true)
    {
       if (_config->FindB("Debug::pkgAcquire::Auth", false))
       {
-         std::cerr << std::endl << RealURI << ": Computed MD5: " << MD5;
-         std::cerr << "  Expected MD5: " << ExpectedMD5 << std::endl;
-      }
-
-      if (MD5.empty())
-      {
-         MD5Summation sum;
-         FileFd Fd(DestFile, FileFd::ReadOnly);
-         sum.AddFD(Fd.Fd(), Fd.Size());
-         Fd.Close();
-         MD5 = (string)sum.Result();
+         std::cerr << std::endl << RealURI << ": Computed Hash: " << Hash;
+         std::cerr << "  Expected Hash: " << ExpectedHash.toStr() << std::endl;
       }
 
-      if (!ExpectedMD5.empty() && MD5 != ExpectedMD5)
+      if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
       {
          Status = StatAuthError;
-         ErrorText = _("MD5Sum mismatch");
+         ErrorText = _("Hash Sum mismatch");
          Rename(DestFile,DestFile + ".FAILED");
         return;
       }
@@ -685,7 +713,6 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
 
       // The files timestamp matches
       if (StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false) == true)
        return;
-
       Decompression = true;
       Local = true;
       DestFile += ".decomp";
@@ -711,12 +738,19 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
    else
       Local = true;
 
-   string compExt = Desc.URI.substr(Desc.URI.size()-3);
-   char *decompProg;
-   if(compExt == "bz2")
-      decompProg = "bzip2";
-   else if(compExt == ".gz")
-      decompProg = "gzip";
+   string compExt = flExtension(flNotDir(URI(Desc.URI).Path));
+   string decompProg;
+
+   // get the binary name for your used compression type
+   decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
+   if(decompProg.empty() == false);
+   // flExtensions returns the full name if no extension is found
+   // this is why we have this complicated compare operation here
+   // FIMXE: add a new flJustExtension() that return "" if no
+   //        extension is found and use that above so that it can
+   //        be tested against ""
+   else if(compExt == flNotDir(URI(Desc.URI).Path))
+      decompProg = "copy";
    else {
       _error->Error("Unsupported extension: %s", compExt.c_str());
       return;
@@ -724,12 +758,39 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
 
    Decompression = true;
    DestFile += ".decomp";
-   Desc.URI = string(decompProg) + ":" + FileName;
+   Desc.URI = decompProg + ":" + FileName;
    QueueURI(Desc);
-   Mode = decompProg;
+   Mode = decompProg.c_str();
 }
-
-pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,
+                                                                        /*}}}*/
+// AcqIndexTrans::pkgAcqIndexTrans - Constructor                        /*{{{*/
+// ---------------------------------------------------------------------
+/* The Translation file is added to the queue */
+pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
+                            string URI,string URIDesc,string ShortDesc)
+  : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "")
+{
+}
+                                                                        /*}}}*/
+// AcqIndexTrans::Failed - Silence failure messages for missing files   /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+{
+   if (Cnf->LocalOnly == true ||
+       StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
+   {
+      // Ignore this
+      Status = StatDone;
+      Complete = false;
+      Dequeue();
+      return;
+   }
+
+   Item::Failed(Message,Cnf);
+}
+                                                                        /*}}}*/
+pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,                         /*{{{*/
                              string URI,string URIDesc,string ShortDesc,
                              string MetaIndexURI, string MetaIndexURIDesc,
                              string MetaIndexShortDesc,
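
With the hunk above the decompressor is no longer hard-coded to bzip2/gzip: the extension of the fetched file is looked up under Acquire::CompressionTypes::<ext>, and a name without any extension is handed to the copy method. A stand-in sketch of that mapping, using a std::map in place of apt's configuration tree (illustrative only, not the real lookup):

#include <iostream>
#include <map>
#include <string>

int main()
{
   // stand-in for the Acquire::CompressionTypes::* configuration entries
   std::map<std::string, std::string> compressionTypes;
   compressionTypes["gz"]   = "gzip";
   compressionTypes["bz2"]  = "bzip2";
   compressionTypes["lzma"] = "lzma";

   std::string const file = "Packages.bz2";                 // basename of the fetched URI
   std::string::size_type const dot = file.rfind('.');
   // mirror flExtension(): the full name stands in when there is no extension
   std::string const ext = (dot == std::string::npos) ? file : file.substr(dot + 1);

   std::string decompProg;
   if (compressionTypes.find(ext) != compressionTypes.end())
      decompProg = compressionTypes[ext];
   else if (ext == file)            // no extension at all -> uncompressed, just copy it
      decompProg = "copy";
   else
   {
      std::cerr << "Unsupported extension: " << ext << std::endl;
      return 1;
   }
   std::cout << decompProg << ":" << file << std::endl;     // e.g. "bzip2:Packages.bz2"
   return 0;
}
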
@@ -742,8 +803,9 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,
 
    DestFile = _config->FindDir("Dir::State::lists") + "partial/";
    DestFile += URItoFileName(URI);
 
-   // remove any partial downloaded sig-file. it may confuse proxies
-   // and is too small to warrant a partial download anyway
+   // remove any partial downloaded sig-file in partial/.
+   // it may confuse proxies and is too small to warrant a
+   // partial download anyway
    unlink(DestFile.c_str());
 
    // Create the item
@@ -751,16 +813,19 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,
    Desc.Owner = this;
    Desc.ShortDesc = ShortDesc;
    Desc.URI = URI;
-
    string Final = _config->FindDir("Dir::State::lists");
    Final += URItoFileName(RealURI);
    struct stat Buf;
    if (stat(Final.c_str(),&Buf) == 0)
    {
-      // File was already in place. It needs to be re-verified
-      // because Release might have changed, so Move it into partial
-      Rename(Final,DestFile);
+      // File was already in place. It needs to be re-downloaded/verified
+      // because Release might have changed, we do give it a differnt
+      // name than DestFile because otherwise the http method will
+      // send If-Range requests and there are too many broken servers
+      // out there that do not understand them
+      LastGoodSig = DestFile+".reverify";
+      Rename(Final,LastGoodSig);
    }
 
    QueueURI(Desc);
@@ -772,7 +837,7 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,
 string pkgAcqMetaSig::Custom600Headers()
 {
    struct stat Buf;
-   if (stat(DestFile.c_str(),&Buf) != 0)
+   if (stat(LastGoodSig.c_str(),&Buf) != 0)
       return "\nIndex-File: true";
 
    return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
@@ -802,25 +867,36 @@ void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5,
 
    Complete = true;
 
+   // put the last known good file back on i-m-s hit (it will
+   // be re-verified again)
+   // Else do nothing, we have the new file in DestFile then
+   if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
+      Rename(LastGoodSig, DestFile);
+
    // queue a pkgAcqMetaIndex to be verified against the sig we just retrieved
    new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
                        DestFile, IndexTargets, MetaIndexParser);
 
 }
                                                                         /*}}}*/
-void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
 {
+   string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
+
    // if we get a network error we fail gracefully
-   if(LookupTag(Message,"FailReason") == "Timeout" ||
-      LookupTag(Message,"FailReason") == "TmpResolveFailure" ||
-      LookupTag(Message,"FailReason") == "ConnectionRefused") {
+   if(Status == StatTransientNetworkError)
+   {
       Item::Failed(Message,Cnf);
+      // move the sigfile back on transient network failures
+      if(FileExists(DestFile))
+         Rename(LastGoodSig,Final);
+
+      // set the status back to , Item::Failed likes to reset it
+      Status = pkgAcquire::Item::StatTransientNetworkError;
       return;
    }
 
    // Delete any existing sigfile when the acquire failed
-   string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
    unlink(Final.c_str());
 
    // queue a pkgAcqMetaIndex with no sigfile
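
The Release.gpg handling above parks the known-good signature under DestFile + ".reverify" instead of reusing DestFile, so the http method never issues If-Range requests against it; on an IMS hit (or a transient network failure) the saved copy is simply moved back. A rough filesystem-level sketch of that save/restore idea (paths and flow are invented for illustration, the real code builds them from Dir::State::lists):

#include <cstdio>    // std::rename
#include <iostream>
#include <string>

int main()
{
   std::string const Final    = "lists/example_Release.gpg";           // verified copy
   std::string const DestFile = "lists/partial/example_Release.gpg";   // download target
   std::string const LastGoodSig = DestFile + ".reverify";

   // constructor: park the old signature under a different name so the
   // next request is a plain GET (no If-Range against a partial file)
   std::rename(Final.c_str(), LastGoodSig.c_str());

   bool const imsHit = true;   // pretend the server answered "Not Modified"
   if (imsHit == true)
      // Done(): put the last known good file back; it gets re-verified
      std::rename(LastGoodSig.c_str(), DestFile.c_str());

   std::cout << "signature staged at " << DestFile << std::endl;
   return 0;
}
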
@@ -839,14 +915,14 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
 
    Item::Failed(Message,Cnf);
 }
-
-pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner,
+                                                                        /*}}}*/
+pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner,                     /*{{{*/
                                  string URI,string URIDesc,string ShortDesc,
                                  string SigFile,
                                  const vector<IndexTarget*>* IndexTargets,
                                  indexRecords* MetaIndexParser) :
-   Item(Owner), RealURI(URI), SigFile(SigFile), AuthPass(false),
-   MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets), IMSHit(false)
+   Item(Owner), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets),
+   MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false)
 {
    DestFile = _config->FindDir("Dir::State::lists") + "partial/";
    DestFile += URItoFileName(URI);
@@ -859,7 +935,6 @@ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner,
 
    QueueURI(Desc);
 }
-
                                                                         /*}}}*/
 // pkgAcqMetaIndex::Custom600Headers - Insert custom request headers    /*{{{*/
 // ---------------------------------------------------------------------
@@ -875,11 +950,11 @@ string pkgAcqMetaIndex::Custom600Headers()
 
    return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
 }
-
-void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string MD5,
+                                                                        /*}}}*/
+void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash,       /*{{{*/
                            pkgAcquire::MethodConfig *Cfg)
 {
-   Item::Done(Message,Size,MD5,Cfg);
+   Item::Done(Message,Size,Hash,Cfg);
 
    // MetaIndexes are done in two passes: one to download the
    // metaindex with an appropriate method, and a second to verify it
@@ -917,8 +992,8 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash,
       }
    }
 }
-
-void pkgAcqMetaIndex::RetrievalDone(string Message)
+                                                                        /*}}}*/
+void pkgAcqMetaIndex::RetrievalDone(string Message)                     /*{{{*/
 {
    // We have just finished downloading a Release file (it is not
    // verified yet)
@@ -941,22 +1016,23 @@ void pkgAcqMetaIndex::RetrievalDone(string Message)
 
    // see if the download was a IMSHit
    IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false);
-
    Complete = true;
 
    string FinalFile = _config->FindDir("Dir::State::lists");
    FinalFile += URItoFileName(RealURI);
 
-   // The files timestamp matches
-   if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == false)
-   {
-      // Move it into position
+   // If we get a IMS hit we can remove the empty file in partial
+   // othersie we move the file in place
+   if (IMSHit)
+      unlink(DestFile.c_str());
+   else
       Rename(DestFile,FinalFile);
-   }
+
+   chmod(FinalFile.c_str(),0644);
 
    DestFile = FinalFile;
 }
-
-void pkgAcqMetaIndex::AuthDone(string Message)
+                                                                        /*}}}*/
+void pkgAcqMetaIndex::AuthDone(string Message)                          /*{{{*/
 {
    // At this point, the gpgv method has succeeded, so there is a
    // valid signature from a key in the trusted keyring.  We
@@ -989,14 +1065,14 @@ void pkgAcqMetaIndex::AuthDone(string Message)
 
    Rename(SigFile,VerifiedSigFile);
    chmod(VerifiedSigFile.c_str(),0644);
 }
-
-void pkgAcqMetaIndex::QueueIndexes(bool verify)
+                                                                        /*}}}*/
+void pkgAcqMetaIndex::QueueIndexes(bool verify)                         /*{{{*/
 {
    for (vector <IndexTarget*>::const_iterator Target = IndexTargets->begin();
        Target != IndexTargets->end();
        Target++)
   {
-      string ExpectedIndexMD5;
+      HashString ExpectedIndexHash;
      if (verify)
      {
         const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
@@ -1007,16 +1083,16 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify)
               + (*Target)->MetaKey + " in Meta-index file (malformed Release file?)";
            return;
         }
-         ExpectedIndexMD5 = Record->MD5Hash;
+         ExpectedIndexHash = Record->Hash;
         if (_config->FindB("Debug::pkgAcquire::Auth", false))
         {
            std::cerr << "Queueing: " << (*Target)->URI << std::endl;
-            std::cerr << "Expected MD5: " << ExpectedIndexMD5 << std::endl;
+            std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
         }
-         if (ExpectedIndexMD5.empty())
+         if (ExpectedIndexHash.empty())
         {
            Status = StatAuthError;
-            ErrorText = "Unable to find MD5 sum for "
+            ErrorText = "Unable to find hash sum for "
               + (*Target)->MetaKey + " in Meta-index file";
            return;
         }
@@ -1024,16 +1100,16 @@ void pkgAcqMetaIndex::QueueIndexes(bool verify)
 
       // Queue Packages file (either diff or full packages files, depending
       // on the users option)
-      if(_config->FindB("Acquire::PDiffs",false) == false)
+      if(_config->FindB("Acquire::PDiffs",true) == true)
         new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description,
-                             (*Target)->ShortDesc, ExpectedIndexMD5);
+                             (*Target)->ShortDesc, ExpectedIndexHash);
      else
         new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
-                         (*Target)->ShortDesc, ExpectedIndexMD5);
+                         (*Target)->ShortDesc, ExpectedIndexHash);
   }
 }
-
-bool pkgAcqMetaIndex::VerifyVendor(string Message)
+                                                                        /*}}}*/
+bool pkgAcqMetaIndex::VerifyVendor(string Message)                      /*{{{*/
 {
 //    // Maybe this should be made available from above so we don't have
 //    // to read and parse it every time?
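
QueueIndexes() now takes its expected hash from the Release record for each target and, as the default flip above shows, pdiffs are on unless the user disables them. A small sketch of reading the relevant options with their new defaults through apt's configuration interface (the header names and the need to link against libapt-pkg are assumptions; the keys and defaults are taken from the hunks above and from ParseDiffIndex()):

#include <apt-pkg/configuration.h>   // Configuration, _config (assumed header)
#include <apt-pkg/init.h>            // pkgInitConfig (assumed header)
#include <iostream>

int main()
{
   pkgInitConfig(*_config);   // load apt.conf so the defaults below can be overridden

   bool const usePDiffs = _config->FindB("Acquire::PDiffs", true);
   int const fileLimit  = _config->FindI("Acquire::PDiffs::FileLimit", 0);
   int const sizeLimit  = _config->FindI("Acquire::PDiffs::SizeLimit", 100);

   std::cout << "PDiffs: " << (usePDiffs ? "enabled" : "disabled")
             << "  FileLimit: " << fileLimit
             << "  SizeLimit: " << sizeLimit << "%" << std::endl;
   return 0;
}
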
@@ -1063,7 +1139,7 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message)
    // check for missing sigs (that where not fatal because otherwise we had
    // bombed earlier)
    string missingkeys;
-   string msg = _("There are no public key available for the "
+   string msg = _("There is no public key available for the "
                   "following key IDs:\n");
    pos = Message.find("NO_PUBKEY ");
    if (pos != std::string::npos)
@@ -1119,9 +1195,8 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message)
 
    return true;
 }
-                                                                        /*}}}*/
-// pkgAcqMetaIndex::Failed - no Release file present or no signature
-// file present                                                         /*{{{*/
+                                                                        /*}}}*/
+// pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/
 // ---------------------------------------------------------------------
 /* */
 void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
@@ -1158,9 +1233,7 @@ void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
    // back to queueing Packages files without verification
    QueueIndexes(false);
 }
-
                                                                         /*}}}*/
-
 // AcqArchive::AcqArchive - Constructor                                 /*{{{*/
 // ---------------------------------------------------------------------
 /* This just sets up the initial fetch environment and queues the first
@@ -1269,7 +1342,12 @@ bool pkgAcqArchive::QueueNext()
         return false;
 
       string PkgFile = Parse.FileName();
-      MD5 = Parse.MD5Hash();
+      if(Parse.SHA256Hash() != "")
+         ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
+      else if (Parse.SHA1Hash() != "")
+         ExpectedHash = HashString("SHA1", Parse.SHA1Hash());
+      else
+         ExpectedHash = HashString("MD5Sum", Parse.MD5Hash());
      if (PkgFile.empty() == true)
        return _error->Error(_("The package index files are corrupted. No Filename: "
                               "field for package %s."),
@@ -1349,10 +1427,10 @@ bool pkgAcqArchive::QueueNext()
 // AcqArchive::Done - Finished fetching                                 /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash,
+void pkgAcqArchive::Done(string Message,unsigned long Size,string CalcHash,
                          pkgAcquire::MethodConfig *Cfg)
 {
-   Item::Done(Message,Size,Md5Hash,Cfg);
+   Item::Done(Message,Size,CalcHash,Cfg);
 
    // Check the size
    if (Size != Version->Size)
@@ -1362,17 +1440,14 @@ void pkgAcqArchive::Done(string Message,unsigned long Size,string CalcHash,
       return;
    }
 
-   // Check the md5
-   if (Md5Hash.empty() == false && MD5.empty() == false)
+   // Check the hash
+   if(ExpectedHash.toStr() != CalcHash)
    {
-      if (Md5Hash != MD5)
-      {
-         Status = StatError;
-         ErrorText = _("MD5Sum mismatch");
-         if(FileExists(DestFile))
-            Rename(DestFile,DestFile + ".FAILED");
-         return;
-      }
+      Status = StatError;
+      ErrorText = _("Hash Sum mismatch");
+      if(FileExists(DestFile))
+         Rename(DestFile,DestFile + ".FAILED");
+      return;
   }
 
    // Grab the output filename
@@ -1441,14 +1516,13 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
    }
 }
                                                                         /*}}}*/
-// AcqArchive::IsTrusted - Determine whether this archive comes from a
-// trusted source                                                       /*{{{*/
+// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
 // ---------------------------------------------------------------------
 bool pkgAcqArchive::IsTrusted()
 {
    return Trusted;
 }
-
+                                                                        /*}}}*/
 // AcqArchive::Finished - Fetching has finished, tidy up                /*{{{*/
 // ---------------------------------------------------------------------
 /* */
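
pkgAcqArchive::QueueNext() above now prefers the strongest checksum the package record offers: SHA256 if present, then SHA1, then MD5Sum. A minimal sketch of that priority rule (the <apt-pkg/hashes.h> header name, the helper and the sample values are assumptions; the two-argument HashString constructor is the one used in the hunk):

#include <apt-pkg/hashes.h>   // HashString (assumed header, links against libapt-pkg)
#include <iostream>
#include <string>

// Stand-in for the values returned by pkgRecords::Parser::SHA256Hash()/SHA1Hash()/MD5Hash()
static HashString PickStrongest(std::string const &sha256,
                                std::string const &sha1,
                                std::string const &md5)
{
   if (sha256.empty() == false)
      return HashString("SHA256", sha256);
   else if (sha1.empty() == false)
      return HashString("SHA1", sha1);
   return HashString("MD5Sum", md5);
}

int main()
{
   // record carries only a SHA1 value -> SHA1 wins over MD5Sum
   HashString const Expected = PickStrongest("", "3f786850e387550fdab836ed7e6dc881de23001b", "");
   std::cout << Expected.toStr() << std::endl;   // prints the type-prefixed hash
   return 0;
}
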
@@ -1460,14 +1534,13 @@ void pkgAcqArchive::Finished()
    StoreFilename = string();
 }
                                                                         /*}}}*/
-
 // AcqFile::pkgAcqFile - Constructor                                    /*{{{*/
 // ---------------------------------------------------------------------
 /* The file is added to the queue */
-pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string MD5,
+pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
                        unsigned long Size,string Dsc,string ShortDesc,
                        const string &DestDir, const string &DestFilename) :
-   Item(Owner), Md5Hash(MD5)
+   Item(Owner), ExpectedHash(Hash)
 {
    Retries = _config->FindI("Acquire::Retries",0);
 
@@ -1504,23 +1577,20 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
 // AcqFile::Done - Item downloaded OK                                   /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-void pkgAcqFile::Done(string Message,unsigned long Size,string MD5,
+void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash,
                       pkgAcquire::MethodConfig *Cnf)
 {
-   // Check the md5
-   if (Md5Hash.empty() == false && MD5.empty() == false)
+   Item::Done(Message,Size,CalcHash,Cnf);
+
+   // Check the hash
+   if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
    {
-      if (Md5Hash != MD5)
-      {
-         Status = StatError;
-         ErrorText = "MD5Sum mismatch";
-         Rename(DestFile,DestFile + ".FAILED");
-         return;
-      }
+      Status = StatError;
+      ErrorText = "Hash Sum mismatch";
+      Rename(DestFile,DestFile + ".FAILED");
+      return;
   }
 
-   Item::Done(Message,Size,MD5,Cnf);
-
    string FileName = LookupTag(Message,"Filename");
    if (FileName.empty() == true)
    {
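
Callers of pkgAcqFile now pass the expected checksum as a single typed string which is fed straight into the ExpectedHash member; an empty string keeps the old "no verification" behaviour. A small usage sketch of that calling convention (the header name and the comparison flow are assumptions; the digest shown is the well-known SHA256 of an empty file, and the single-argument constructor is the one used by the initializer list above):

#include <apt-pkg/hashes.h>   // HashString (assumed header)
#include <iostream>
#include <string>

int main()
{
   // what a caller of pkgAcqFile would now hand over instead of a bare MD5 string
   std::string const Hash = "SHA256:"
      "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";

   HashString const Expected(Hash);     // string is expected to carry both type and value
   std::string const Calculated = Hash; // pretend the download method reported the same value

   if (Expected.empty() == false && Expected.toStr() != Calculated)
      std::cout << "Hash Sum mismatch" << std::endl;
   else
      std::cout << "file accepted" << std::endl;
   return 0;
}
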