X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/b6f0036a8ed8fb5278d6fb9da601edc254eb03fe..bda94cb8432b3905f5757e92573fd16e73cb183f:/apt-pkg/acquire-item.cc?ds=sidebyside diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index 94288a132..cf88ded7b 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -13,17 +13,16 @@ ##################################################################### */ /*}}}*/ // Include Files /*{{{*/ -#ifdef __GNUG__ -#pragma implementation "apt-pkg/acquire-item.h" -#endif #include #include +#include #include -#include #include #include #include #include +#include +#include #include @@ -31,7 +30,9 @@ #include #include #include +#include #include +#include /*}}}*/ using namespace std; @@ -63,6 +64,7 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) { Status = StatIdle; ErrorText = LookupTag(Message,"Message"); + UsedMirror = LookupTag(Message,"UsedMirror"); if (QueueCounter <= 1) { /* This indicates that the file is not available right now but might @@ -79,6 +81,13 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) Status = StatError; Dequeue(); } + + // report mirror failure back to LP if we actually use a mirror + string FailReason = LookupTag(Message, "FailReason"); + if(FailReason.size() != 0) + ReportMirrorFailure(FailReason); + else + ReportMirrorFailure(ErrorText); } /*}}}*/ // Acquire::Item::Start - Item has begun to download /*{{{*/ @@ -95,12 +104,13 @@ void pkgAcquire::Item::Start(string /*Message*/,unsigned long Size) // Acquire::Item::Done - Item downloaded OK /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcquire::Item::Done(string Message,unsigned long Size,string, +void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash, pkgAcquire::MethodConfig *Cnf) { // We just downloaded something.. 
string FileName = LookupTag(Message,"Filename"); - if (Complete == false && FileName == DestFile) + UsedMirror = LookupTag(Message,"UsedMirror"); + if (Complete == false && !Local && FileName == DestFile) { if (Owner->Log != 0) Owner->Log->Fetched(Size,atoi(LookupTag(Message,"Resume-Point","0").c_str())); @@ -108,7 +118,6 @@ void pkgAcquire::Item::Done(string Message,unsigned long Size,string, if (FileSize == 0) FileSize= Size; - Status = StatDone; ErrorText = string(); Owner->Dequeue(this); @@ -130,49 +139,258 @@ void pkgAcquire::Item::Rename(string From,string To) } } /*}}}*/ - -// AcqIndex::AcqIndex - Constructor /*{{{*/ +// Acquire::Item::ReportMirrorFailure /*{{{*/ // --------------------------------------------------------------------- -/* The package file is added to the queue and a second class is - instantiated to fetch the revision file */ -pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc, - string ExpectedMD5, string comprExt) : - Item(Owner), RealURI(URI), ExpectedMD5(ExpectedMD5) +void pkgAcquire::Item::ReportMirrorFailure(string FailCode) { - Decompression = false; - Erase = false; + // we only act if a mirror was used at all + if(UsedMirror.empty()) + return; +#if 0 + std::cerr << "\nReportMirrorFailure: " + << UsedMirror + << " Uri: " << DescURI() + << " FailCode: " + << FailCode << std::endl; +#endif + const char *Args[40]; + unsigned int i = 0; + string report = _config->Find("Methods::Mirror::ProblemReporting", + "/usr/lib/apt/apt-report-mirror-failure"); + if(!FileExists(report)) + return; + Args[i++] = report.c_str(); + Args[i++] = UsedMirror.c_str(); + Args[i++] = DescURI().c_str(); + Args[i++] = FailCode.c_str(); + Args[i++] = NULL; + pid_t pid = ExecFork(); + if(pid < 0) + { + _error->Error("ReportMirrorFailure Fork failed"); + return; + } + else if(pid == 0) + { + execvp(Args[0], (char**)Args); + std::cerr << "Could not exec " << Args[0] << std::endl; + _exit(100); + } + if(!ExecWait(pid, "report-mirror-failure")) + { + _error->Warning("Couldn't report problem to '%s'", + _config->Find("Methods::Mirror::ProblemReporting").c_str()); + } +} + /*}}}*/ +// AcqSubIndex::AcqSubIndex - Constructor /*{{{*/ +// --------------------------------------------------------------------- +/* Get the Index file first and see if there are languages available + * If so, create a pkgAcqIndexTrans for the found language(s). + */ +pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI, + string const &URIDesc, string const &ShortDesc, + HashString const &ExpectedHash) + : Item(Owner), ExpectedHash(ExpectedHash) +{ + Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false); DestFile = _config->FindDir("Dir::State::lists") + "partial/"; DestFile += URItoFileName(URI); - if(comprExt.empty()) + Desc.URI = URI; + Desc.Description = URIDesc; + Desc.Owner = this; + Desc.ShortDesc = ShortDesc; + + QueueURI(Desc); + + if(Debug) + std::clog << "pkgAcqSubIndex: " << Desc.URI << std::endl; +} + /*}}}*/ +// AcqSubIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +/* The only header we use is the last-modified header. 
*/ +string pkgAcqSubIndex::Custom600Headers() +{ + string Final = _config->FindDir("Dir::State::lists"); + Final += URItoFileName(Desc.URI); + + struct stat Buf; + if (stat(Final.c_str(),&Buf) != 0) + return "\nIndex-File: true\nFail-Ignore: true\n"; + return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); +} + /*}}}*/ +void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +{ + if(Debug) + std::clog << "pkgAcqSubIndex failed: " << Desc.URI << std::endl; + + Complete = false; + Status = StatDone; + Dequeue(); + + // No good Index is provided, so try guessing + std::vector langs = APT::Configuration::getLanguages(true); + for (std::vector::const_iterator l = langs.begin(); + l != langs.end(); ++l) { - // autoselect the compression method - if(FileExists("/bin/bzip2")) - CompressionExtension = ".bz2"; - else - CompressionExtension = ".gz"; - } else { - CompressionExtension = comprExt; + if (*l == "none") continue; + string const file = "Translation-" + *l; + new pkgAcqIndexTrans(Owner, Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file), + Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file), + file); } - Desc.URI = URI + CompressionExtension; +} + /*}}}*/ +void pkgAcqSubIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/ + pkgAcquire::MethodConfig *Cnf) +{ + if(Debug) + std::clog << "pkgAcqSubIndex::Done(): " << Desc.URI << std::endl; - Desc.Description = URIDesc; + string FileName = LookupTag(Message,"Filename"); + if (FileName.empty() == true) + { + Status = StatError; + ErrorText = "Method gave a blank filename"; + return; + } + + if (FileName != DestFile) + { + Local = true; + Desc.URI = "copy:" + FileName; + QueueURI(Desc); + return; + } + + Item::Done(Message,Size,Md5Hash,Cnf); + + string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI); + + // sucess in downloading the index + // rename the index + if(Debug) + std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl; + Rename(DestFile,FinalFile); + chmod(FinalFile.c_str(),0644); + DestFile = FinalFile; + + if(ParseIndex(DestFile) == false) + return Failed("", NULL); + + Complete = true; + Status = StatDone; + Dequeue(); + return; +} + /*}}}*/ +bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/ +{ + indexRecords SubIndexParser; + if (FileExists(IndexFile) == false || SubIndexParser.Load(IndexFile) == false) + return false; + + std::vector lang = APT::Configuration::getLanguages(true); + for (std::vector::const_iterator l = lang.begin(); + l != lang.end(); ++l) + { + if (*l == "none") + continue; + + string file = "Translation-" + *l; + indexRecords::checkSum const *Record = SubIndexParser.Lookup(file); + HashString expected; + if (Record == NULL) + { + // FIXME: the Index file provided by debian currently only includes bz2 records + Record = SubIndexParser.Lookup(file + ".bz2"); + if (Record == NULL) + continue; + } + else + { + expected = Record->Hash; + if (expected.empty() == true) + continue; + } + + IndexTarget target; + target.Description = Desc.Description.erase(Desc.Description.rfind(' ')+1).append(file); + target.MetaKey = file; + target.ShortDesc = file; + target.URI = Desc.URI.substr(0, Desc.URI.rfind('/')+1).append(file); + new pkgAcqIndexTrans(Owner, &target, expected, &SubIndexParser); + } + return true; +} + /*}}}*/ +// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/ +// --------------------------------------------------------------------- +/* Get the DiffIndex file 
first and see if there are patches availabe + * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the + * patches. If anything goes wrong in that process, it will fall back to + * the original packages file + */ +pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, + string URI,string URIDesc,string ShortDesc, + HashString ExpectedHash) + : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), + Description(URIDesc) +{ + + Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); + + Desc.Description = URIDesc + "/DiffIndex"; Desc.Owner = this; Desc.ShortDesc = ShortDesc; - + Desc.URI = URI + ".diff/Index"; + + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(URI) + string(".DiffIndex"); + + if(Debug) + std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl; + + // look for the current package file + CurrentPackagesFile = _config->FindDir("Dir::State::lists"); + CurrentPackagesFile += URItoFileName(RealURI); + + // FIXME: this file:/ check is a hack to prevent fetching + // from local sources. this is really silly, and + // should be fixed cleanly as soon as possible + if(!FileExists(CurrentPackagesFile) || + Desc.URI.substr(0,strlen("file:/")) == "file:/") + { + // we don't have a pkg file or we don't want to queue + if(Debug) + std::clog << "No index file, local or canceld by user" << std::endl; + Failed("", NULL); + return; + } + + if(Debug) + std::clog << "pkgAcqIndexDiffs::pkgAcqIndexDiffs(): " + << CurrentPackagesFile << std::endl; + QueueURI(Desc); + } /*}}}*/ // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. */ -string pkgAcqIndex::Custom600Headers() +string pkgAcqDiffIndex::Custom600Headers() { string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI); + Final += URItoFileName(RealURI) + string(".IndexDiff"); + if(Debug) + std::clog << "Custom600Header-IMS: " << Final << std::endl; + struct stat Buf; if (stat(Final.c_str(),&Buf) != 0) return "\nIndex-File: true"; @@ -180,27 +398,474 @@ string pkgAcqIndex::Custom600Headers() return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); } /*}}}*/ +bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ +{ + if(Debug) + std::clog << "pkgAcqIndexDiffs::ParseIndexDiff() " << IndexDiffFile + << std::endl; + + pkgTagSection Tags; + string ServerSha1; + vector available_patches; + + FileFd Fd(IndexDiffFile,FileFd::ReadOnly); + pkgTagFile TF(&Fd); + if (_error->PendingError() == true) + return false; + + if(TF.Step(Tags) == true) + { + bool found = false; + DiffInfo d; + string size; + + string const tmp = Tags.FindS("SHA1-Current"); + std::stringstream ss(tmp); + ss >> ServerSha1 >> size; + unsigned long const ServerSize = atol(size.c_str()); + + FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); + SHA1Summation SHA1; + SHA1.AddFD(fd.Fd(), fd.Size()); + string const local_sha1 = SHA1.Result(); + + if(local_sha1 == ServerSha1) + { + // we have the same sha1 as the server + if(Debug) + std::clog << "Package file is up-to-date" << std::endl; + // set found to true, this will queue a pkgAcqIndexDiffs with + // a empty availabe_patches + found = true; + } + else + { + if(Debug) + std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl; + + // check the historie and see what patches we need + string const history 
= Tags.FindS("SHA1-History"); + std::stringstream hist(history); + while(hist >> d.sha1 >> size >> d.file) + { + // read until the first match is found + // from that point on, we probably need all diffs + if(d.sha1 == local_sha1) + found=true; + else if (found == false) + continue; + + if(Debug) + std::clog << "Need to get diff: " << d.file << std::endl; + available_patches.push_back(d); + } + + if (available_patches.empty() == false) + { + // patching with too many files is rather slow compared to a fast download + unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0); + if (fileLimit != 0 && fileLimit < available_patches.size()) + { + if (Debug) + std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit + << ") so fallback to complete download" << std::endl; + return false; + } + + // see if the patches are too big + found = false; // it was true and it will be true again at the end + d = *available_patches.begin(); + string const firstPatch = d.file; + unsigned long patchesSize = 0; + std::stringstream patches(Tags.FindS("SHA1-Patches")); + while(patches >> d.sha1 >> size >> d.file) + { + if (firstPatch == d.file) + found = true; + else if (found == false) + continue; + + patchesSize += atol(size.c_str()); + } + unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); + if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) + { + if (Debug) + std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100 + << ") so fallback to complete download" << std::endl; + return false; + } + } + } + + // we have something, queue the next diff + if(found) + { + // queue the diffs + string::size_type const last_space = Description.rfind(" "); + if(last_space != string::npos) + Description.erase(last_space, Description.size()-last_space); + new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, + ExpectedHash, ServerSha1, available_patches); + Complete = false; + Status = StatDone; + Dequeue(); + return true; + } + } + + // Nothing found, report and return false + // Failing here is ok, if we return false later, the full + // IndexFile is queued + if(Debug) + std::clog << "Can't find a patch in the index file" << std::endl; + return false; +} + /*}}}*/ +void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +{ + if(Debug) + std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << std::endl + << "Falling back to normal index file aquire" << std::endl; + + new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc, + ExpectedHash); + + Complete = false; + Status = StatDone; + Dequeue(); +} + /*}}}*/ +void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/ + pkgAcquire::MethodConfig *Cnf) +{ + if(Debug) + std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl; + + Item::Done(Message,Size,Md5Hash,Cnf); -void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) + string FinalFile; + FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI); + + // sucess in downloading the index + // rename the index + FinalFile += string(".IndexDiff"); + if(Debug) + std::clog << "Renaming: " << DestFile << " -> " << FinalFile + << std::endl; + Rename(DestFile,FinalFile); + chmod(FinalFile.c_str(),0644); + DestFile = FinalFile; + + if(!ParseDiffIndex(DestFile)) + return Failed("", NULL); + + Complete = true; + Status = StatDone; + Dequeue(); + return; +} + /*}}}*/ +// AcqIndexDiffs::AcqIndexDiffs - Constructor 
/*{{{*/ +// --------------------------------------------------------------------- +/* The package diff is added to the queue. one object is constructed + * for each diff and the index + */ +pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner, + string URI,string URIDesc,string ShortDesc, + HashString ExpectedHash, + string ServerSha1, + vector diffs) + : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), + available_patches(diffs), ServerSha1(ServerSha1) +{ + + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(URI); + + Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); + + Description = URIDesc; + Desc.Owner = this; + Desc.ShortDesc = ShortDesc; + + if(available_patches.size() == 0) + { + // we are done (yeah!) + Finish(true); + } + else + { + // get the next diff + State = StateFetchDiff; + QueueNextDiff(); + } +} + /*}}}*/ +void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +{ + if(Debug) + std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << std::endl + << "Falling back to normal index file aquire" << std::endl; + new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc, + ExpectedHash); + Finish(); +} + /*}}}*/ +// Finish - helper that cleans the item out of the fetcher queue /*{{{*/ +void pkgAcqIndexDiffs::Finish(bool allDone) { - // no .bz2 found, retry with .gz - if(Desc.URI.substr(Desc.URI.size()-3) == "bz2") { - Desc.URI = Desc.URI.substr(0,Desc.URI.size()-3) + "gz"; + // we restore the original name, this is required, otherwise + // the file will be cleaned + if(allDone) + { + DestFile = _config->FindDir("Dir::State::lists"); + DestFile += URItoFileName(RealURI); - // retry with a gzip one - new pkgAcqIndex(Owner, RealURI, Desc.Description,Desc.ShortDesc, - ExpectedMD5, string(".gz")); + if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile)) + { + Status = StatAuthError; + ErrorText = _("MD5Sum mismatch"); + Rename(DestFile,DestFile + ".FAILED"); + Dequeue(); + return; + } + + // this is for the "real" finish + Complete = true; Status = StatDone; - Complete = false; Dequeue(); + if(Debug) + std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl; return; } + if(Debug) + std::clog << "Finishing: " << Desc.URI << std::endl; + Complete = false; + Status = StatDone; + Dequeue(); + return; +} + /*}}}*/ +bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ +{ + + // calc sha1 of the just patched file + string FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile += URItoFileName(RealURI); + + FileFd fd(FinalFile, FileFd::ReadOnly); + SHA1Summation SHA1; + SHA1.AddFD(fd.Fd(), fd.Size()); + string local_sha1 = string(SHA1.Result()); + if(Debug) + std::clog << "QueueNextDiff: " + << FinalFile << " (" << local_sha1 << ")"<::iterator I=available_patches.begin(); + available_patches.size() > 0 && + I != available_patches.end() && + (*I).sha1 != local_sha1; + I++) + { + available_patches.erase(I); + } + + // error checking and falling back if no patch was found + if(available_patches.size() == 0) + { + Failed("", NULL); + return false; + } + + // queue the right diff + Desc.URI = string(RealURI) + ".diff/" + available_patches[0].file + ".gz"; + Desc.Description = Description + " " + available_patches[0].file + string(".pdiff"); + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file); + + if(Debug) + std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl; - 
Item::Failed(Message,Cnf); + QueueURI(Desc); + + return true; } + /*}}}*/ +void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/ + pkgAcquire::MethodConfig *Cnf) +{ + if(Debug) + std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl; + + Item::Done(Message,Size,Md5Hash,Cnf); + + string FinalFile; + FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI); + + // sucess in downloading a diff, enter ApplyDiff state + if(State == StateFetchDiff) + { + + // rred excepts the patch as $FinalFile.ed + Rename(DestFile,FinalFile+".ed"); + + if(Debug) + std::clog << "Sending to rred method: " << FinalFile << std::endl; + + State = StateApplyDiff; + Local = true; + Desc.URI = "rred:" + FinalFile; + QueueURI(Desc); + Mode = "rred"; + return; + } + // success in download/apply a diff, queue next (if needed) + if(State == StateApplyDiff) + { + // remove the just applied patch + available_patches.erase(available_patches.begin()); + + // move into place + if(Debug) + { + std::clog << "Moving patched file in place: " << std::endl + << DestFile << " -> " << FinalFile << std::endl; + } + Rename(DestFile,FinalFile); + chmod(FinalFile.c_str(),0644); + + // see if there is more to download + if(available_patches.size() > 0) { + new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, + ExpectedHash, ServerSha1, available_patches); + return Finish(); + } else + return Finish(true); + } +} + /*}}}*/ +// AcqIndex::AcqIndex - Constructor /*{{{*/ +// --------------------------------------------------------------------- +/* The package file is added to the queue and a second class is + instantiated to fetch the revision file */ +pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, + string URI,string URIDesc,string ShortDesc, + HashString ExpectedHash, string comprExt) + : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash) +{ + if(comprExt.empty() == true) + { + // autoselect the compression method + std::vector types = APT::Configuration::getCompressionTypes(); + for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t) + comprExt.append(*t).append(" "); + if (comprExt.empty() == false) + comprExt.erase(comprExt.end()-1); + } + CompressionExtension = comprExt; + + Init(URI, URIDesc, ShortDesc); +} +pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target, + HashString const &ExpectedHash, indexRecords const *MetaIndexParser) + : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash) +{ + // autoselect the compression method + std::vector types = APT::Configuration::getCompressionTypes(); + CompressionExtension = ""; + if (ExpectedHash.empty() == false) + { + for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t) + if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true) + CompressionExtension.append(*t).append(" "); + } + else + { + for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t) + CompressionExtension.append(*t).append(" "); + } + if (CompressionExtension.empty() == false) + CompressionExtension.erase(CompressionExtension.end()-1); + + Init(Target->URI, Target->Description, Target->ShortDesc); +} + /*}}}*/ +// AcqIndex::Init - defered Constructor /*{{{*/ +void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) { + Decompression = false; + Erase = false; + + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(URI); + + std::string const comprExt = 
CompressionExtension.substr(0, CompressionExtension.find(' ')); + if (comprExt == "uncompressed") + Desc.URI = URI; + else + Desc.URI = URI + '.' + comprExt; + + Desc.Description = URIDesc; + Desc.Owner = this; + Desc.ShortDesc = ShortDesc; + + QueueURI(Desc); +} + /*}}}*/ +// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +/* The only header we use is the last-modified header. */ +string pkgAcqIndex::Custom600Headers() +{ + string Final = _config->FindDir("Dir::State::lists"); + Final += URItoFileName(RealURI); + if (_config->FindB("Acquire::GzipIndexes",false)) + Final += ".gz"; + + string msg = "\nIndex-File: true"; + // FIXME: this really should use "IndexTarget::IsOptional()" but that + // seems to be difficult without breaking ABI + if (ShortDesc().find("Translation") != 0) + msg += "\nFail-Ignore: true"; + struct stat Buf; + if (stat(Final.c_str(),&Buf) != 0) + msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + + return msg; +} + /*}}}*/ +void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +{ + size_t const nextExt = CompressionExtension.find(' '); + if (nextExt != std::string::npos) + { + CompressionExtension = CompressionExtension.substr(nextExt+1); + Init(RealURI, Desc.Description, Desc.ShortDesc); + return; + } + + // on decompression failure, remove bad versions in partial/ + if (Decompression && Erase) { + string s = _config->FindDir("Dir::State::lists") + "partial/"; + s.append(URItoFileName(RealURI)); + unlink(s.c_str()); + } + + Item::Failed(Message,Cnf); +} + /*}}}*/ // AcqIndex::Done - Finished a fetch /*{{{*/ // --------------------------------------------------------------------- /* This goes through a number of states.. On the initial fetch the @@ -208,33 +873,25 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) to the uncompressed version of the file. If this is so the file is copied into the partial directory. In all other cases the file is decompressed with a gzip uri. 
*/ -void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5, +void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash, pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,MD5,Cfg); + Item::Done(Message,Size,Hash,Cfg); if (Decompression == true) { if (_config->FindB("Debug::pkgAcquire::Auth", false)) { - std::cerr << std::endl << RealURI << ": Computed MD5: " << MD5; - std::cerr << " Expected MD5: " << ExpectedMD5 << std::endl; - } - - if (MD5.empty()) - { - MD5Summation sum; - FileFd Fd(DestFile, FileFd::ReadOnly); - sum.AddFD(Fd.Fd(), Fd.Size()); - Fd.Close(); - MD5 = (string)sum.Result(); + std::cerr << std::endl << RealURI << ": Computed Hash: " << Hash; + std::cerr << " Expected Hash: " << ExpectedHash.toStr() << std::endl; } - if (!ExpectedMD5.empty() && MD5 != ExpectedMD5) + if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash) { Status = StatAuthError; - ErrorText = _("MD5Sum mismatch"); + ErrorText = _("Hash Sum mismatch"); Rename(DestFile,DestFile + ".FAILED"); + ReportMirrorFailure("HashChecksumFailure"); return; } // Done, move it into position @@ -264,7 +921,6 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5, // The files timestamp matches if (StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false) == true) return; - Decompression = true; Local = true; DestFile += ".decomp"; @@ -280,22 +936,42 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5, Status = StatError; ErrorText = "Method gave a blank filename"; } - + + std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' ')); + // The files timestamp matches - if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) + if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) { + if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz") + // Update DestFile for .gz suffix so that the clean operation keeps it + DestFile += ".gz"; return; + } if (FileName == DestFile) Erase = true; else Local = true; - string compExt = Desc.URI.substr(Desc.URI.size()-3); - char *decompProg; - if(compExt == "bz2") - decompProg = "bzip2"; - else if(compExt == ".gz") - decompProg = "gzip"; + string decompProg; + + // If we enable compressed indexes and already have gzip, keep it + if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz" && !Local) { + string FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile += URItoFileName(RealURI) + ".gz"; + Rename(DestFile,FinalFile); + chmod(FinalFile.c_str(),0644); + + // Update DestFile for .gz suffix so that the clean operation keeps it + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(RealURI) + ".gz"; + return; + } + + // get the binary name for your used compression type + decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),""); + if(decompProg.empty() == false); + else if(compExt == "uncompressed") + decompProg = "copy"; else { _error->Error("Unsupported extension: %s", compExt.c_str()); return; @@ -303,26 +979,52 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5, Decompression = true; DestFile += ".decomp"; - Desc.URI = string(decompProg) + ":" + FileName; + Desc.URI = decompProg + ":" + FileName; QueueURI(Desc); - Mode = decompProg; + Mode = decompProg.c_str(); } - + /*}}}*/ // AcqIndexTrans::pkgAcqIndexTrans - Constructor /*{{{*/ // --------------------------------------------------------------------- /* The Translation file is added to the queue */ 
pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc) : - pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, "", "") + string URI,string URIDesc,string ShortDesc) + : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "") +{ +} +pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const *Target, + HashString const &ExpectedHash, indexRecords const *MetaIndexParser) + : pkgAcqIndex(Owner, Target, ExpectedHash, MetaIndexParser) { } + /*}}}*/ +// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +string pkgAcqIndexTrans::Custom600Headers() +{ + string Final = _config->FindDir("Dir::State::lists"); + Final += URItoFileName(RealURI); + struct stat Buf; + if (stat(Final.c_str(),&Buf) != 0) + return "\nFail-Ignore: true\nIndex-File: true"; + return "\nFail-Ignore: true\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); +} /*}}}*/ // AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/ // --------------------------------------------------------------------- /* */ void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf) { + size_t const nextExt = CompressionExtension.find(' '); + if (nextExt != std::string::npos) + { + CompressionExtension = CompressionExtension.substr(nextExt+1); + Init(RealURI, Desc.Description, Desc.ShortDesc); + Status = StatIdle; + return; + } + if (Cnf->LocalOnly == true || StringToBool(LookupTag(Message,"Transient-Failure"),false) == false) { @@ -332,12 +1034,11 @@ void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf) Dequeue(); return; } - + Item::Failed(Message,Cnf); } /*}}}*/ - -pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, +pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/ string URI,string URIDesc,string ShortDesc, string MetaIndexURI, string MetaIndexURIDesc, string MetaIndexShortDesc, @@ -360,16 +1061,19 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, Desc.Owner = this; Desc.ShortDesc = ShortDesc; Desc.URI = URI; - string Final = _config->FindDir("Dir::State::lists"); Final += URItoFileName(RealURI); struct stat Buf; if (stat(Final.c_str(),&Buf) == 0) { - // File was already in place. It needs to be re-verified - // because Release might have changed, so Move it into partial - Rename(Final,DestFile); + // File was already in place. 
It needs to be re-downloaded/verified + // because Release might have changed, we do give it a differnt + // name than DestFile because otherwise the http method will + // send If-Range requests and there are too many broken servers + // out there that do not understand them + LastGoodSig = DestFile+".reverify"; + Rename(Final,LastGoodSig); } QueueURI(Desc); @@ -381,7 +1085,7 @@ pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, string pkgAcqMetaSig::Custom600Headers() { struct stat Buf; - if (stat(DestFile.c_str(),&Buf) != 0) + if (stat(LastGoodSig.c_str(),&Buf) != 0) return "\nIndex-File: true"; return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); @@ -411,13 +1115,20 @@ void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5, Complete = true; + // put the last known good file back on i-m-s hit (it will + // be re-verified again) + // Else do nothing, we have the new file in DestFile then + if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) + Rename(LastGoodSig, DestFile); + // queue a pkgAcqMetaIndex to be verified against the sig we just retrieved - new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc, - DestFile, IndexTargets, MetaIndexParser); + new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, + MetaIndexShortDesc, DestFile, IndexTargets, + MetaIndexParser); } /*}}}*/ -void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ { string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); @@ -425,9 +1136,9 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) if(Status == StatTransientNetworkError) { Item::Failed(Message,Cnf); - // move the sigfile back on network failures (and re-authenticated?) 
- if(FileExists(DestFile)) - Rename(DestFile,Final); + // move the sigfile back on transient network failures + if(FileExists(LastGoodSig)) + Rename(LastGoodSig,Final); // set the status back to , Item::Failed likes to reset it Status = pkgAcquire::Item::StatTransientNetworkError; @@ -453,14 +1164,14 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) Item::Failed(Message,Cnf); } - -pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, + /*}}}*/ +pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/ string URI,string URIDesc,string ShortDesc, string SigFile, const vector* IndexTargets, indexRecords* MetaIndexParser) : - Item(Owner), RealURI(URI), SigFile(SigFile), AuthPass(false), - MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets), IMSHit(false) + Item(Owner), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets), + MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false) { DestFile = _config->FindDir("Dir::State::lists") + "partial/"; DestFile += URItoFileName(URI); @@ -473,7 +1184,6 @@ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, QueueURI(Desc); } - /*}}}*/ // pkgAcqMetaIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- @@ -489,11 +1199,11 @@ string pkgAcqMetaIndex::Custom600Headers() return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); } - -void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string MD5, + /*}}}*/ +void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*/ pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,MD5,Cfg); + Item::Done(Message,Size,Hash,Cfg); // MetaIndexes are done in two passes: one to download the // metaindex with an appropriate method, and a second to verify it @@ -502,6 +1212,9 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string MD5, if (AuthPass == true) { AuthDone(Message); + + // all cool, move Release file into place + Complete = true; } else { @@ -528,11 +1241,23 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string MD5, Desc.URI = "gpgv:" + SigFile; QueueURI(Desc); Mode = "gpgv"; + return; } } -} -void pkgAcqMetaIndex::RetrievalDone(string Message) + if (Complete == true) + { + string FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile += URItoFileName(RealURI); + if (SigFile == DestFile) + SigFile = FinalFile; + Rename(DestFile,FinalFile); + chmod(FinalFile.c_str(),0644); + DestFile = FinalFile; + } +} + /*}}}*/ +void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/ { // We have just finished downloading a Release file (it is not // verified yet) @@ -553,24 +1278,20 @@ void pkgAcqMetaIndex::RetrievalDone(string Message) return; } - // see if the download was a IMSHit + // make sure to verify against the right file on I-M-S hit IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false); - - Complete = true; - - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI); - - // The files timestamp matches - if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == false) + if(IMSHit) { - // Move it into position - Rename(DestFile,FinalFile); + string FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile += URItoFileName(RealURI); + if (SigFile == DestFile) + SigFile = FinalFile; + DestFile = FinalFile; } - DestFile = FinalFile; + Complete = true; } - -void pkgAcqMetaIndex::AuthDone(string Message) + /*}}}*/ +void pkgAcqMetaIndex::AuthDone(string 
Message) /*{{{*/ { // At this point, the gpgv method has succeeded, so there is a // valid signature from a key in the trusted keyring. We @@ -596,77 +1317,78 @@ void pkgAcqMetaIndex::AuthDone(string Message) // Download further indexes with verification QueueIndexes(true); - // Done, move signature file into position + // is it a clearsigned MetaIndex file? + if (DestFile == SigFile) + return; + // Done, move signature file into position string VerifiedSigFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI) + ".gpg"; Rename(SigFile,VerifiedSigFile); chmod(VerifiedSigFile.c_str(),0644); } - -void pkgAcqMetaIndex::QueueIndexes(bool verify) + /*}}}*/ +void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/ { for (vector ::const_iterator Target = IndexTargets->begin(); Target != IndexTargets->end(); Target++) { - string ExpectedIndexMD5; + HashString ExpectedIndexHash; if (verify) { - const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey); - if (!Record) - { - Status = StatAuthError; - ErrorText = "Unable to find expected entry " - + (*Target)->MetaKey + " in Meta-index file (malformed Release file?)"; - return; - } - ExpectedIndexMD5 = Record->MD5Hash; - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - { - std::cerr << "Queueing: " << (*Target)->URI << std::endl; - std::cerr << "Expected MD5: " << ExpectedIndexMD5 << std::endl; - } - if (ExpectedIndexMD5.empty()) - { - Status = StatAuthError; - ErrorText = "Unable to find MD5 sum for " - + (*Target)->MetaKey + " in Meta-index file"; - return; - } + const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey); + if (Record == NULL) + { + if ((*Target)->IsOptional() == false) + { + Status = StatAuthError; + strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str()); + return; + } + } + else + { + ExpectedIndexHash = Record->Hash; + if (_config->FindB("Debug::pkgAcquire::Auth", false)) + { + std::cerr << "Queueing: " << (*Target)->URI << std::endl; + std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl; + } + if (ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false) + { + Status = StatAuthError; + strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str()); + return; + } + } } - - // Queue Packages file - new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description, - (*Target)->ShortDesc, ExpectedIndexMD5); + + if ((*Target)->IsOptional() == true) + { + if ((*Target)->IsSubIndex() == true) + new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description, + (*Target)->ShortDesc, ExpectedIndexHash); + else + new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser); + continue; + } + + /* Queue Packages file (either diff or full packages files, depending + on the users option) - we also check if the PDiff Index file is listed + in the Meta-Index file. 
Ideal would be if pkgAcqDiffIndex would test this + instead, but passing the required info to it is to much hassle */ + if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false || + MetaIndexParser->Exists(string((*Target)->MetaKey).append(".diff/Index")) == true)) + new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description, + (*Target)->ShortDesc, ExpectedIndexHash); + else + new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser); } } - -bool pkgAcqMetaIndex::VerifyVendor(string Message) + /*}}}*/ +bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/ { -// // Maybe this should be made available from above so we don't have -// // to read and parse it every time? -// pkgVendorList List; -// List.ReadMainList(); - -// const Vendor* Vndr = NULL; -// for (std::vector::const_iterator I = GPGVOutput.begin(); I != GPGVOutput.end(); I++) -// { -// string::size_type pos = (*I).find("VALIDSIG "); -// if (_config->FindB("Debug::Vendor", false)) -// std::cerr << "Looking for VALIDSIG in \"" << (*I) << "\": pos " << pos -// << std::endl; -// if (pos != std::string::npos) -// { -// string Fingerprint = (*I).substr(pos+sizeof("VALIDSIG")); -// if (_config->FindB("Debug::Vendor", false)) -// std::cerr << "Looking for \"" << Fingerprint << "\" in vendor..." << -// std::endl; -// Vndr = List.FindVendor(Fingerprint) != ""; -// if (Vndr != NULL); -// break; -// } -// } string::size_type pos; // check for missing sigs (that where not fatal because otherwise we had @@ -702,6 +1424,17 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message) Transformed = ""; } + if (_config->FindB("Acquire::Check-Valid-Until", true) == true && + MetaIndexParser->GetValidUntil() > 0) { + time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil(); + if (invalid_since > 0) + // TRANSLATOR: The first %s is the URL of the bad Release file, the second is + // the time since then the file is invalid - formated in the same way as in + // the download progress display (e.g. 7d 3h 42min 1s) + return _error->Error(_("Release file expired, ignoring %s (invalid since %s)"), + RealURI.c_str(), TimeToStr(invalid_since).c_str()); + } + if (_config->FindB("Debug::pkgAcquire::Auth", false)) { std::cerr << "Got Codename: " << MetaIndexParser->GetDist() << std::endl; @@ -719,7 +1452,7 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message) // return false; if (!Transformed.empty()) { - _error->Warning("Conflicting distribution: %s (expected %s but got %s)", + _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"), Desc.Description.c_str(), Transformed.c_str(), MetaIndexParser->GetDist().c_str()); @@ -728,48 +1461,96 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message) return true; } - /*}}}*/ -// pkgAcqMetaIndex::Failed - no Release file present or no signature -// file present /*{{{*/ + /*}}}*/ +// pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/ // --------------------------------------------------------------------- /* */ void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) { if (AuthPass == true) { - // if we fail the authentication but got the file via a IMS-Hit - // this means that the file wasn't downloaded and that it might be - // just stale (server problem, proxy etc). 
we delete what we have - // queue it again without i-m-s - // alternatively we could just unlink the file and let the user try again - if (IMSHit) - { - Complete = false; - Local = false; - AuthPass = false; - unlink(DestFile.c_str()); + // gpgv method failed, if we have a good signature + string LastGoodSigFile = _config->FindDir("Dir::State::lists"); + if (DestFile == SigFile) + LastGoodSigFile.append(URItoFileName(RealURI)); + else + LastGoodSigFile.append("partial/").append(URItoFileName(RealURI)).append(".gpg.reverify"); - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI); - Desc.URI = RealURI; - QueueURI(Desc); + if(FileExists(LastGoodSigFile)) + { + if (DestFile != SigFile) + { + string VerifiedSigFile = _config->FindDir("Dir::State::lists") + + URItoFileName(RealURI) + ".gpg"; + Rename(LastGoodSigFile,VerifiedSigFile); + } + Status = StatTransientNetworkError; + _error->Warning(_("A error occurred during the signature " + "verification. The repository is not updated " + "and the previous index files will be used. " + "GPG error: %s: %s\n"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + RunScripts("APT::Update::Auth-Failure"); return; + } else { + _error->Warning(_("GPG error: %s: %s"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); } - // gpgv method failed - _error->Warning("GPG error: %s: %s", - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); - + ReportMirrorFailure("GPGFailure"); } // No Release file was present, or verification failed, so fall // back to queueing Packages files without verification QueueIndexes(false); } - /*}}}*/ +pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/ + string const &URI, string const &URIDesc, string const &ShortDesc, + string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc, + string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc, + const vector* IndexTargets, + indexRecords* MetaIndexParser) : + pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser), + MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc), + MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc) +{ + SigFile = DestFile; +} + /*}}}*/ +// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +// FIXME: this can go away once the InRelease file is used widely +string pkgAcqMetaClearSig::Custom600Headers() +{ + string Final = _config->FindDir("Dir::State::lists"); + Final += URItoFileName(RealURI); + + struct stat Buf; + if (stat(Final.c_str(),&Buf) != 0) + return "\nIndex-File: true\nFail-Ignore: true\n"; + return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); +} + /*}}}*/ +void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +{ + if (AuthPass == false) + { + new pkgAcqMetaSig(Owner, + MetaSigURI, MetaSigURIDesc, MetaSigShortDesc, + MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc, + IndexTargets, MetaIndexParser); + if (Cnf->LocalOnly == true || + StringToBool(LookupTag(Message, "Transient-Failure"), false) == false) + Dequeue(); + } + else + pkgAcqMetaIndex::Failed(Message, Cnf); +} + /*}}}*/ // AcqArchive::AcqArchive - Constructor /*{{{*/ // 
--------------------------------------------------------------------- /* This just sets up the initial fetch environment and queues the first @@ -855,7 +1636,8 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, the archive is already available in the cache and stashs the MD5 for checking later. */ bool pkgAcqArchive::QueueNext() -{ +{ + string const ForceHash = _config->Find("Acquire::ForceHash"); for (; Vf.end() == false; Vf++) { // Ignore not source sources @@ -878,7 +1660,25 @@ bool pkgAcqArchive::QueueNext() return false; string PkgFile = Parse.FileName(); - MD5 = Parse.MD5Hash(); + if (ForceHash.empty() == false) + { + if(stringcasecmp(ForceHash, "sha256") == 0) + ExpectedHash = HashString("SHA256", Parse.SHA256Hash()); + else if (stringcasecmp(ForceHash, "sha1") == 0) + ExpectedHash = HashString("SHA1", Parse.SHA1Hash()); + else + ExpectedHash = HashString("MD5Sum", Parse.MD5Hash()); + } + else + { + string Hash; + if ((Hash = Parse.SHA256Hash()).empty() == false) + ExpectedHash = HashString("SHA256", Hash); + else if ((Hash = Parse.SHA1Hash()).empty() == false) + ExpectedHash = HashString("SHA1", Hash); + else + ExpectedHash = HashString("MD5Sum", Parse.MD5Hash()); + } if (PkgFile.empty() == true) return _error->Error(_("The package index files are corrupted. No Filename: " "field for package %s."), @@ -958,10 +1758,10 @@ bool pkgAcqArchive::QueueNext() // AcqArchive::Done - Finished fetching /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash, +void pkgAcqArchive::Done(string Message,unsigned long Size,string CalcHash, pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,Md5Hash,Cfg); + Item::Done(Message,Size,CalcHash,Cfg); // Check the size if (Size != Version->Size) @@ -971,17 +1771,14 @@ void pkgAcqArchive::Done(string Message,unsigned long Size,string Md5Hash, return; } - // Check the md5 - if (Md5Hash.empty() == false && MD5.empty() == false) + // Check the hash + if(ExpectedHash.toStr() != CalcHash) { - if (Md5Hash != MD5) - { - Status = StatError; - ErrorText = _("MD5Sum mismatch"); - if(FileExists(DestFile)) - Rename(DestFile,DestFile + ".FAILED"); - return; - } + Status = StatError; + ErrorText = _("Hash Sum mismatch"); + if(FileExists(DestFile)) + Rename(DestFile,DestFile + ".FAILED"); + return; } // Grab the output filename @@ -1050,14 +1847,13 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf) } } /*}}}*/ -// AcqArchive::IsTrusted - Determine whether this archive comes from a -// trusted source /*{{{*/ +// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/ // --------------------------------------------------------------------- bool pkgAcqArchive::IsTrusted() { return Trusted; } - + /*}}}*/ // AcqArchive::Finished - Fetching has finished, tidy up /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -1069,14 +1865,14 @@ void pkgAcqArchive::Finished() StoreFilename = string(); } /*}}}*/ - // AcqFile::pkgAcqFile - Constructor /*{{{*/ // --------------------------------------------------------------------- /* The file is added to the queue */ -pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string MD5, +pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash, unsigned long Size,string Dsc,string ShortDesc, - const string &DestDir, const string &DestFilename) : - Item(Owner), Md5Hash(MD5) + const string &DestDir, const string 
&DestFilename, + bool IsIndexFile) : + Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile) { Retries = _config->FindI("Acquire::Retries",0); @@ -1113,23 +1909,20 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string MD5, // AcqFile::Done - Item downloaded OK /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcqFile::Done(string Message,unsigned long Size,string MD5, +void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash, pkgAcquire::MethodConfig *Cnf) { - // Check the md5 - if (Md5Hash.empty() == false && MD5.empty() == false) + Item::Done(Message,Size,CalcHash,Cnf); + + // Check the hash + if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash) { - if (Md5Hash != MD5) - { - Status = StatError; - ErrorText = "MD5Sum mismatch"; - Rename(DestFile,DestFile + ".FAILED"); - return; - } + Status = StatError; + ErrorText = _("Hash Sum mismatch"); + Rename(DestFile,DestFile + ".FAILED"); + return; } - Item::Done(Message,Size,MD5,Cnf); - string FileName = LookupTag(Message,"Filename"); if (FileName.empty() == true) { @@ -1194,3 +1987,23 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf) Item::Failed(Message,Cnf); } /*}}}*/ +// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +/* The only header we use is the last-modified header. */ +string pkgAcqFile::Custom600Headers() +{ + if (IsIndexFile) + return "\nIndex-File: true"; + return ""; +} + /*}}}*/ +bool IndexTarget::IsOptional() const { + if (strncmp(ShortDesc.c_str(), "Translation", 11) != 0) + return false; + return true; +} +bool IndexTarget::IsSubIndex() const { + if (ShortDesc != "TranslationIndex") + return false; + return true; +}
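
The new pkgAcqDiffIndex::ParseDiffIndex above decides between pdiff patching and a full download by hashing the local Packages file, comparing it against SHA1-Current, and then walking the SHA1-History field until it reaches the entry whose SHA1 matches the local file; that entry and every later one become the patch queue handed to pkgAcqIndexDiffs, and an empty result means "fall back to the complete file". The following is a rough, self-contained model of just that selection step; the NeededPatches() helper and the history data are invented for illustration and are not part of the apt API.

// Standalone sketch of the patch-selection logic in ParseDiffIndex (hypothetical
// helper, invented data; mirrors the "find the first matching SHA1, then take
// every following entry" behaviour of the hunk above).
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

struct DiffInfo { std::string sha1; unsigned long size; std::string file; };

static std::vector<DiffInfo> NeededPatches(std::string const &localSha1,
                                           std::string const &sha1History)
{
   std::vector<DiffInfo> patches;
   std::istringstream hist(sha1History);
   DiffInfo d;
   bool found = false;
   while (hist >> d.sha1 >> d.size >> d.file)
   {
      if (d.sha1 == localSha1)      // our file matches this historic state...
         found = true;              // ...so this patch and all later ones are needed
      else if (found == false)
         continue;
      patches.push_back(d);
   }
   return patches;                  // empty => queue the full index instead
}

int main()
{
   // Made-up SHA1-History contents: "<sha1> <size> <patch name>" per line.
   std::string const history =
      "aaaa 1000 2010-01-01-0000.00\n"
      "bbbb 1100 2010-01-02-0000.00\n"
      "cccc 1200 2010-01-03-0000.00\n";
   for (std::vector<DiffInfo>::const_iterator p = NeededPatches("bbbb", history).begin(),
        e = NeededPatches("bbbb", history).end(); false; ) ; // (see loop below)
   std::vector<DiffInfo> const need = NeededPatches("bbbb", history);
   for (std::vector<DiffInfo>::const_iterator p = need.begin(); p != need.end(); ++p)
      std::cout << "queue " << p->file << ".gz (" << p->size << " bytes)" << std::endl;
   return 0;
}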
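
The pkgAcqIndex rework above also replaces the hard-coded .bz2-then-.gz retry with a space-separated list of candidate compression extensions taken from APT::Configuration::getCompressionTypes(): Init() always fetches with the first extension in the list, and Failed() strips that extension and re-runs Init() with the next one until nothing is left. A minimal sketch of that fallback loop, outside of apt, could look like the code below; the IndexFetch type, the example URL and the extension list are invented for illustration, not part of the apt API.

// Hypothetical sketch of the ordered compression fallback used by pkgAcqIndex:
// keep the remaining candidates in one space-separated string, fetch with the
// first one, and drop it on failure until the list is exhausted.
#include <iostream>
#include <string>

struct IndexFetch
{
   std::string RealURI;               // e.g. ".../dists/sid/main/binary-i386/Packages"
   std::string CompressionExtension;  // e.g. "bz2 gz uncompressed"

   std::string CurrentURI() const
   {
      std::string const ext =
         CompressionExtension.substr(0, CompressionExtension.find(' '));
      return ext == "uncompressed" ? RealURI : RealURI + '.' + ext;
   }

   // Returns true if another compression type is left to try.
   bool NextOnFailure()
   {
      std::string::size_type const nextExt = CompressionExtension.find(' ');
      if (nextExt == std::string::npos)
         return false;                // nothing left, propagate the failure
      CompressionExtension = CompressionExtension.substr(nextExt + 1);
      return true;                    // caller re-queues CurrentURI()
   }
};

int main()
{
   IndexFetch f = { "http://example.org/dists/sid/main/binary-i386/Packages",
                    "bz2 gz uncompressed" };
   do
      std::cout << "trying " << f.CurrentURI() << std::endl;
   while (f.NextOnFailure());         // in apt this loop is driven by Failed()
   return 0;
}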