X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/0dfd2728dea9b0a6a92f1272ecaa475adc2f9276..baec76f5f0f9fcbd71f6e2afaa7fc85543bd624c:/ftparchive/apt-ftparchive.cc

diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
index 287f40a10..ab6f48d61 100644
--- a/ftparchive/apt-ftparchive.cc
+++ b/ftparchive/apt-ftparchive.cc
@@ -1,42 +1,50 @@
 // -*- mode: cpp; mode: fold -*-
 // Description								/*{{{*/
-// $Id: apt-ftparchive.cc,v 1.11 2003/12/26 22:50:52 mdz Exp $
+// $Id: apt-ftparchive.cc,v 1.8.2.3 2004/01/02 22:01:48 mdz Exp $
 /* ######################################################################
 
-   apt-scanpackages - Efficient work-alike for dpkg-scanpackages
+   apt-ftparchive - Efficient work-alike for dpkg-scanpackages
 
   Let contents be disabled from the conf
 
   ##################################################################### */
 									/*}}}*/
 // Include Files							/*{{{*/
-#ifdef __GNUG__
-#pragma implementation "apt-ftparchive.h"
-#endif
+#include
 
-#include "apt-ftparchive.h"
-
 #include
 #include
 #include
 #include
-#include
-#include
-#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
 #include
-#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
 
-#include "contents.h"
+#include "cachedb.h"
+#include "override.h"
+#include "apt-ftparchive.h"
 #include "multicompress.h"
-#include "writer.h"
+#include "writer.h"
+
+#include
 									/*}}}*/
 
-using namespace std;
-ostream c0out(0);
-ostream c1out(0);
-ostream c2out(0);
-ofstream devnull("/dev/null");
+using namespace std;
 unsigned Quiet = 0;
 
@@ -54,14 +62,23 @@ struct PackageMap
    // Stuff for the Package File
    string PkgFile;
    string BinCacheDB;
+   string SrcCacheDB;
    string BinOverride;
    string ExtraOverride;
+
+   // We generate for this given arch
+   string Arch;
+   bool IncludeArchAll;
 
    // Stuff for the Source File
    string SrcFile;
    string SrcOverride;
   string SrcExtraOverride;
+   // Translation master file
+   bool LongDesc;
+   TranslationWriter *TransWriter;
+
   // Contents
   string Contents;
   string ContentsHead;
@@ -91,6 +108,12 @@ struct PackageMap
      inline bool operator() (const PackageMap &x,const PackageMap &y)
      {return x.BinCacheDB < y.BinCacheDB;};
   };
+
+   struct SrcDBCompare : public binary_function
+   {
+      inline bool operator() (const PackageMap &x,const PackageMap &y)
+      {return x.SrcCacheDB < y.SrcCacheDB;};
+   };
 
   void GetGeneral(Configuration &Setup,Configuration &Block);
   bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
@@ -100,8 +123,9 @@ struct PackageMap
 		    vector::iterator End, unsigned long &Left);
 
-   PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false),
-                  PkgDone(false), SrcDone(false), ContentsMTime(0) {};
+   PackageMap() : IncludeArchAll(true), LongDesc(true), TransWriter(NULL),
+                  DeLinkLimit(0), Permissions(1), ContentsDone(false),
+                  PkgDone(false), SrcDone(false), ContentsMTime(0) {};
 };
 									/*}}}*/
 
@@ -113,7 +137,7 @@ void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
    PathPrefix = Block.Find("PathPrefix");
 
   if (Block.FindB("External-Links",true) == false)
-      DeLinkLimit = Setup.FindI("Default::DeLinkLimit",UINT_MAX);
+      DeLinkLimit = Setup.FindI("Default::DeLinkLimit", std::numeric_limits::max());
   else
      DeLinkLimit = 0;
 
@@ -130,7 +154,7 @@ void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
 			Setup.Find("Default::Packages::Extensions",".deb").c_str());
 
   Permissions = Setup.FindI("Default::FileMode",0644);
-   
+
   if (FLFile.empty() == false)
      FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);
 
@@ -156,27 +180,28 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
   PkgDone = true;
 
   // Create a package writer object.
-   PackagesWriter Packages(flCombine(CacheDir,BinCacheDB),
+   MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
+		      PkgCompress,Permissions);
+   PackagesWriter Packages(&Comp.Input, TransWriter, flCombine(CacheDir,BinCacheDB),
 			   flCombine(OverrideDir,BinOverride),
-			   flCombine(OverrideDir,ExtraOverride));
+			   flCombine(OverrideDir,ExtraOverride),
+			   Arch, IncludeArchAll);
   if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
      return _error->Error(_("Package extension list is too long"));
   if (_error->PendingError() == true)
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
 
   Packages.PathPrefix = PathPrefix;
   Packages.DirStrip = ArchiveDir;
   Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+   Packages.LongDescription = LongDesc;
+
   Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
   Packages.DeLinkLimit = DeLinkLimit;
-
-   // Create a compressor object
-   MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
-		      PkgCompress,Permissions);
-   Packages.Output = Comp.Input;
   if (_error->PendingError() == true)
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
 
   c0out << ' ' << BaseDir << ":" << flush;
 
@@ -195,11 +220,11 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
   Packages.Output = 0;      // Just in case
 
   // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
   if (Comp.Finalize(Size) == false)
   {
      c0out << endl;
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
   }
 
   if (Size != 0)
@@ -212,11 +237,14 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
   gettimeofday(&NewTime,0);
   double Delta = NewTime.tv_sec - StartTime.tv_sec +
                  (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
-   
+
   c0out << Packages.Stats.Packages << " files " <<
/*	    SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */
     SizeToStr(Packages.Stats.Bytes) << "B " <<
     TimeToStr((long)Delta) << endl;
+
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
 
   Stats.Add(Packages.Stats);
   Stats.DeLinkBytes = Packages.Stats.DeLinkBytes;
@@ -243,13 +271,16 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
   SrcDone = true;
 
   // Create a package writer object.
-   SourcesWriter Sources(flCombine(OverrideDir,BinOverride),
+   MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
+		      SrcCompress,Permissions);
+   SourcesWriter Sources(&Comp.Input, flCombine(CacheDir, SrcCacheDB),
+			 flCombine(OverrideDir,BinOverride),
 			 flCombine(OverrideDir,SrcOverride),
 			 flCombine(OverrideDir,SrcExtraOverride));
   if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
      return _error->Error(_("Source extension list is too long"));
 
   if (_error->PendingError() == true)
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
 
   Sources.PathPrefix = PathPrefix;
   Sources.DirStrip = ArchiveDir;
@@ -257,13 +288,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
   Sources.DeLinkLimit = DeLinkLimit;
   Sources.Stats.DeLinkBytes = Stats.DeLinkBytes;
-
-   // Create a compressor object
-   MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
-		      SrcCompress,Permissions);
-   Sources.Output = Comp.Input;
+
   if (_error->PendingError() == true)
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
 
   c0out << ' ' << BaseDir << ":" << flush;
 
@@ -281,11 +308,11 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
   Sources.Output = 0;      // Just in case
 
   // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
   if (Comp.Finalize(Size) == false)
   {
      c0out << endl;
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
   }
 
   if (Size != 0)
@@ -302,6 +329,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
   c0out << Sources.Stats.Packages << " pkgs in " <<
     TimeToStr((long)Delta) << endl;
 
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Sources.Stats.Misses << endl;
+
   Stats.Add(Sources.Stats);
   Stats.DeLinkBytes = Sources.Stats.DeLinkBytes;
 
@@ -332,16 +362,15 @@ bool PackageMap::GenContents(Configuration &Setup,
   gettimeofday(&StartTime,0);
 
   // Create a package writer object.
-   ContentsWriter Contents("");
+   MultiCompress Comp(flCombine(ArchiveDir,this->Contents),
+		      CntCompress,Permissions);
+   Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60;
+   ContentsWriter Contents(&Comp.Input, "", Arch, IncludeArchAll);
   if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
      return _error->Error(_("Package extension list is too long"));
   if (_error->PendingError() == true)
      return false;
 
-   MultiCompress Comp(flCombine(ArchiveDir,this->Contents),
-		      CntCompress,Permissions);
-   Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60;
-   Contents.Output = Comp.Input;
   if (_error->PendingError() == true)
      return false;
 
@@ -351,30 +380,30 @@ bool PackageMap::GenContents(Configuration &Setup,
     FileFd Head(flCombine(OverrideDir,ContentsHead),FileFd::ReadOnly);
     if (_error->PendingError() == true)
        return false;
-      
-      unsigned long Size = Head.Size();
+
+      unsigned long long Size = Head.Size();
     unsigned char Buf[4096];
     while (Size != 0)
     {
-         unsigned long ToRead = Size;
+         unsigned long long ToRead = Size;
        if (Size > sizeof(Buf))
          ToRead = sizeof(Buf);
-         
+
       if (Head.Read(Buf,ToRead) == false)
         return false;
-         
-         if (fwrite(Buf,1,ToRead,Comp.Input) != ToRead)
+
+         if (Comp.Input.Write(Buf, ToRead) == false)
        return _error->Errno("fwrite",_("Error writing header to contents file"));
-         
+
       Size -= ToRead;
-      }
-   }
-   
+      }
+   }
+
  /* Go over all the package file records and parse all the package files
    associated with this contents file into one great big honking memory
    structure, then dump the sorted version */
   c0out << ' ' << this->Contents << ":" << flush;
-   for (vector::iterator I = Begin; I != End; I++)
+   for (vector::iterator I = Begin; I != End; ++I)
   {
     if (I->Contents != this->Contents)
        continue;
@@ -390,11 +419,11 @@ bool PackageMap::GenContents(Configuration &Setup,
   Contents.Finish();
 
   // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
   if (Comp.Finalize(Size) == false || _error->PendingError() == true)
   {
     c0out << endl;
-      return _error->Error(_("Error Processing Contents %s"),
+      return _error->Error(_("Error processing contents %s"),
			   this->Contents.c_str());
   }
 
@@ -414,6 +443,9 @@ bool PackageMap::GenContents(Configuration &Setup,
   double Delta = NewTime.tv_sec - StartTime.tv_sec +
                  (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
 
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Contents.Stats.Misses<< endl;
+
   c0out << Contents.Stats.Packages << " files " <<
     SizeToStr(Contents.Stats.Bytes) << "B " <<
     TimeToStr((long)Delta) << endl;
@@ -426,7 +458,7 @@ bool PackageMap::GenContents(Configuration &Setup,
 // ---------------------------------------------------------------------
 /* This populates the PkgList with all the possible permutations of the
   section/arch lists. */
-void LoadTree(vector &PkgList,Configuration &Setup)
+static void LoadTree(vector &PkgList, std::vector &TransList, Configuration &Setup)
 {
   // Load the defaults
   string DDir = Setup.Find("TreeDefault::Directory",
@@ -435,18 +467,30 @@ void LoadTree(vector &PkgList,Configuration &Setup)
			    "$(DIST)/$(SECTION)/source/");
   string DPkg = Setup.Find("TreeDefault::Packages",
			    "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
+   string DTrans = Setup.Find("TreeDefault::Translation",
+			      "$(DIST)/$(SECTION)/i18n/Translation-en");
   string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
			    "$(DIST)/$(SECTION)/");
   string DContents = Setup.Find("TreeDefault::Contents",
-			    "$(DIST)/Contents-$(ARCH)");
+			    "$(DIST)/$(SECTION)/Contents-$(ARCH)");
   string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
   string DBCache = Setup.Find("TreeDefault::BinCacheDB",
			       "packages-$(ARCH).db");
+   string SrcDBCache = Setup.Find("TreeDefault::SrcCacheDB",
+				  "sources-$(SECTION).db");
   string DSources = Setup.Find("TreeDefault::Sources",
			       "$(DIST)/$(SECTION)/source/Sources");
   string DFLFile = Setup.Find("TreeDefault::FileList", "");
   string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");
+   mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
+   bool const LongDescription = Setup.FindB("Default::LongDescription",
+					_config->FindB("APT::FTPArchive::LongDescription", true));
+   string const TranslationCompress = Setup.Find("Default::Translation::Compress",". gzip").c_str();
+   bool const ConfIncludeArchAllExists = _config->Exists("APT::FTPArchive::IncludeArchitectureAll");
+   bool const ConfIncludeArchAll = _config->FindB("APT::FTPArchive::IncludeArchitectureAll", true);
+
   // Process 'tree' type sections
   const Configuration::Item *Top = Setup.Tree("tree");
   for (Top = (Top == 0?0:Top->Child); Top != 0;)
@@ -460,21 +504,32 @@ void LoadTree(vector &PkgList,Configuration &Setup)
      string Section;
      while (ParseQuoteWord(Sections,Section) == true)
      {
-	 string Tmp2 = Block.Find("Architectures");
-	 string Arch;
-	 const char *Archs = Tmp2.c_str();
-	 while (ParseQuoteWord(Archs,Arch) == true)
+	 struct SubstVar Vars[] = {{"$(DIST)",&Dist},
+				   {"$(SECTION)",&Section},
+				   {"$(ARCH)",nullptr},
+				   {nullptr, nullptr}};
+	 mode_t const Perms = Block.FindI("FileMode", Permissions);
+	 bool const LongDesc = Block.FindB("LongDescription", LongDescription);
+	 TranslationWriter *TransWriter = nullptr;
+
+	 std::string Tmp2 = Block.Find("Architectures");
+	 std::transform(Tmp2.begin(), Tmp2.end(), Tmp2.begin(), ::tolower);
+	 std::vector const Archs = VectorizeString(Tmp2, ' ');
+	 bool IncludeArchAll;
+	 if (ConfIncludeArchAllExists == true)
+	    IncludeArchAll = ConfIncludeArchAll;
+	 else
+	    IncludeArchAll = std::find(Archs.begin(), Archs.end(), "all") == Archs.end();
+	 for (auto const& Arch: Archs)
	 {
-	    struct SubstVar Vars[] = {{"$(DIST)",&Dist},
-				      {"$(SECTION)",&Section},
-				      {"$(ARCH)",&Arch},
-				      {}};
+	    if (Arch.empty()) continue;
+	    Vars[2].Contents = &Arch;
	    PackageMap Itm;
-	    
+	    Itm.Permissions = Perms;
	    Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
	    Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
-	    if (stringcasecmp(Arch,"source") == 0)
+	    if (Arch == "source")
	    {
	       Itm.SrcOverride = SubstVar(Block.Find("SrcOverride"),Vars);
	       Itm.BaseDir = SubstVar(Block.Find("SrcDirectory",DSDir.c_str()),Vars);
@@ -482,6 +537,7 @@ void LoadTree(vector &PkgList,Configuration &Setup)
	       Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
	       Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
	       Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars);
+	       Itm.SrcCacheDB = SubstVar(Block.Find("SrcCacheDB",SrcDBCache.c_str()),Vars);
	    }
	    else
	    {
@@ -489,26 +545,46 @@ void LoadTree(vector &PkgList,Configuration &Setup)
	       Itm.BaseDir = SubstVar(Block.Find("Directory",DDir.c_str()),Vars);
	       Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
	       Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
+	       Itm.Arch = Arch;
+	       Itm.IncludeArchAll = IncludeArchAll;
+	       Itm.LongDesc = LongDesc;
+	       if (TransWriter == NULL && DTrans.empty() == false && LongDesc == false && DTrans != "/dev/null")
+	       {
+		  string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"),
+			SubstVar(Block.Find("Translation", DTrans.c_str()), Vars));
+		  string const TransCompress = Block.Find("Translation::Compress", TranslationCompress);
+		  TransWriter = new TranslationWriter(TranslationFile, TransCompress, Perms);
+		  TransList.push_back(TransWriter);
+	       }
+	       Itm.TransWriter = TransWriter;
	       Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
	       Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
	       Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
	       Itm.ExtraOverride = SubstVar(Block.Find("ExtraOverride"),Vars);
	    }
-	    Itm.GetGeneral(Setup,Block);
+	    Itm.GetGeneral(Setup,Block);
	    PkgList.push_back(Itm);
	 }
      }
-      
+
      Top = Top->Next;
-   }      
+   }
+}
+									/*}}}*/
+static void UnloadTree(std::vector const &Trans)			/*{{{*/
+{
+   for (std::vector::const_reverse_iterator T = Trans.rbegin(); T != Trans.rend(); ++T)
+      delete *T;
 }
 									/*}}}*/
 // LoadBinDir - Load a 'bindirectory' section from the Generate Config	/*{{{*/
 // ---------------------------------------------------------------------
 /* */
-void LoadBinDir(vector &PkgList,Configuration &Setup)
+static void LoadBinDir(vector &PkgList,Configuration &Setup)
 {
+   mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
   // Process 'bindirectory' type sections
   const Configuration::Item *Top = Setup.Tree("bindirectory");
   for (Top = (Top == 0?0:Top->Child); Top != 0;)
@@ -519,6 +595,7 @@ void LoadBinDir(vector &PkgList,Configuration &Setup)
     Itm.PkgFile = Block.Find("Packages");
     Itm.SrcFile = Block.Find("Sources");
     Itm.BinCacheDB = Block.Find("BinCacheDB");
+      Itm.SrcCacheDB = Block.Find("SrcCacheDB");
     Itm.BinOverride = Block.Find("BinOverride");
     Itm.ExtraOverride = Block.Find("ExtraOverride");
     Itm.SrcExtraOverride = Block.Find("SrcExtraOverride");
@@ -528,6 +605,7 @@ void LoadBinDir(vector &PkgList,Configuration &Setup)
     Itm.InternalPrefix = Block.Find("InternalPrefix",Top->Tag.c_str());
     Itm.Contents = Block.Find("Contents");
     Itm.ContentsHead = Block.Find("Contents::Header");
+      Itm.Permissions = Block.FindI("FileMode", Permissions);
 
     Itm.GetGeneral(Setup,Block);
     PkgList.push_back(Itm);
@@ -537,17 +615,9 @@ void LoadBinDir(vector &PkgList,Configuration &Setup)
 }
 									/*}}}*/
 
-// ShowHelp - Show the help text					/*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool ShowHelp(CommandLine &CmdL)
+static bool ShowHelp(CommandLine &)					/*{{{*/
 {
-   ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
-	    COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
-   if (_config->FindB("version") == true)
-      return true;
-
-   cout <<
+   std::cout <<
    _("Usage: apt-ftparchive [options] command\n"
     "Commands: packages binarypath [overridefile [pathprefix]]\n"
    "          sources srcpath [overridefile [pathprefix]]\n"
@@ -585,15 +655,14 @@ bool ShowHelp(CommandLine &CmdL)
   "  --no-delink Enable delinking debug mode\n"
   "  --contents  Control contents file generation\n"
   "  -c=?  Read this configuration file\n"
-   "  -o=?  Set an arbitary configuration option") << endl;
-   
+   "  -o=?  Set an arbitrary configuration option") << endl;
   return true;
 }
 									/*}}}*/
 // SimpleGenPackages - Generate a Packages file for a directory tree	/*{{{*/
 // ---------------------------------------------------------------------
 /* This emulates dpkg-scanpackages's command line interface. 'mostly' */
-bool SimpleGenPackages(CommandLine &CmdL)
+static bool SimpleGenPackages(CommandLine &CmdL)
 {
   if (CmdL.FileSize() < 2)
     return ShowHelp(CmdL);
@@ -603,8 +672,9 @@ bool SimpleGenPackages(CommandLine &CmdL)
     Override = CmdL.FileList[2];
 
  // Create a package writer object.
-   PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
-			   Override, "");
+   PackagesWriter Packages(NULL, NULL, _config->Find("APT::FTPArchive::DB"),
+			   Override, "", _config->Find("APT::FTPArchive::Architecture"),
+			   _config->FindB("APT::FTPArchive::IncludeArchitectureAll", true));
  if (_error->PendingError() == true)
    return false;
 
@@ -615,19 +685,23 @@ bool SimpleGenPackages(CommandLine &CmdL)
  if (Packages.RecursiveScan(CmdL.FileList[1]) == false)
    return false;
 
+   // Give some stats if asked for
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
+
  return true;
 }
 									/*}}}*/
 // SimpleGenContents - Generate a Contents listing			/*{{{*/
 // ---------------------------------------------------------------------
 /* */
-bool SimpleGenContents(CommandLine &CmdL)
+static bool SimpleGenContents(CommandLine &CmdL)
 {
  if (CmdL.FileSize() < 2)
    return ShowHelp(CmdL);
 
  // Create a package writer object.
-   ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"));
+   ContentsWriter Contents(NULL, _config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture"));
 
  if (_error->PendingError() == true)
    return false;
 
@@ -643,7 +717,7 @@ bool SimpleGenContents(CommandLine &CmdL)
 // SimpleGenSources - Generate a Sources file for a directory tree	/*{{{*/
 // ---------------------------------------------------------------------
 /* This emulates dpkg-scanpackages's command line interface. 'mostly' */
-bool SimpleGenSources(CommandLine &CmdL)
+static bool SimpleGenSources(CommandLine &CmdL)
 {
  if (CmdL.FileSize() < 2)
    return ShowHelp(CmdL);
@@ -660,7 +734,7 @@ bool SimpleGenSources(CommandLine &CmdL)
		      SOverride.c_str());
 
  // Create a package writer object.
-   SourcesWriter Sources(Override,SOverride);
+   SourcesWriter Sources(NULL, _config->Find("APT::FTPArchive::DB"),Override,SOverride);
  if (_error->PendingError() == true)
    return false;
 
@@ -671,21 +745,29 @@ bool SimpleGenSources(CommandLine &CmdL)
  if (Sources.RecursiveScan(CmdL.FileList[1]) == false)
    return false;
 
+   // Give some stats if asked for
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Sources.Stats.Misses<< endl;
+
  return true;
 }
 									/*}}}*/
 // SimpleGenRelease - Generate a Release file for a directory tree	/*{{{*/
 // ---------------------------------------------------------------------
-bool SimpleGenRelease(CommandLine &CmdL)
+static bool SimpleGenRelease(CommandLine &CmdL)
 {
  if (CmdL.FileSize() < 2)
    return ShowHelp(CmdL);
 
-   ReleaseWriter Release("");
+   string Dir = CmdL.FileList[1];
+
+   ReleaseWriter Release(NULL, "");
+   Release.DirStrip = Dir;
+
  if (_error->PendingError() == true)
    return false;
 
-   if (Release.RecursiveScan(CmdL.FileList[1]) == false)
+   if (Release.RecursiveScan(Dir) == false)
    return false;
 
  Release.Finish();
@@ -694,38 +776,20 @@ bool SimpleGenRelease(CommandLine &CmdL)
 }
 									/*}}}*/
 
-// Generate - Full generate, using a config file			/*{{{*/
+// DoGeneratePackagesAndSources - Helper for Generate			/*{{{*/
 // ---------------------------------------------------------------------
-/* */
-bool Generate(CommandLine &CmdL)
+static bool DoGeneratePackagesAndSources(Configuration &Setup,
+					 vector &PkgList,
+					 struct CacheDB::Stats &SrcStats,
+					 struct CacheDB::Stats &Stats,
+					 CommandLine &CmdL)
 {
-   struct CacheDB::Stats SrcStats;
-   if (CmdL.FileSize() < 2)
-      return ShowHelp(CmdL);
-
-   struct timeval StartTime;
-   gettimeofday(&StartTime,0);
-   struct CacheDB::Stats Stats;
-
-   // Read the configuration file.
-   Configuration Setup;
-   if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
-      return false;
-
-   vector PkgList;
-   LoadTree(PkgList,Setup);
-   LoadBinDir(PkgList,Setup);
-
-   // Sort by cache DB to improve IO locality.
-   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
-
-   // Generate packages
  if (CmdL.FileSize() <= 2)
  {
-      for (vector::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
	 if (I->GenPackages(Setup,Stats) == false)
	    _error->DumpErrors();
-      for (vector::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
	 if (I->GenSources(Setup,SrcStats) == false)
	    _error->DumpErrors();
  }
@@ -734,7 +798,7 @@ bool Generate(CommandLine &CmdL)
	 // Make a choice list out of the package list..
	 RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
	 RxChoiceList *End = List;
-	 for (vector::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+	 for (vector::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
	 {
	    End->UserData = &(*I);
	    End->Str = I->BaseDir.c_str();
@@ -755,12 +819,12 @@ bool Generate(CommandLine &CmdL)
	    _error->DumpErrors();
 
	 // Do the generation for Packages
-	 for (End = List; End->Str != 0; End++)
+	 for (End = List; End->Str != 0; ++End)
	 {
	    if (End->Hit == false)
	       continue;
 
-	    PackageMap *I = (PackageMap *)End->UserData;
+	    PackageMap * const I = static_cast(End->UserData);
	    if (I->PkgDone == true)
	       continue;
	    if (I->GenPackages(Setup,Stats) == false)
@@ -768,12 +832,12 @@ bool Generate(CommandLine &CmdL)
	 }
 
	 // Do the generation for Sources
-	 for (End = List; End->Str != 0; End++)
+	 for (End = List; End->Str != 0; ++End)
	 {
	    if (End->Hit == false)
	       continue;
 
-	    PackageMap *I = (PackageMap *)End->UserData;
+	    PackageMap * const I = static_cast(End->UserData);
	    if (I->SrcDone == true)
	       continue;
	    if (I->GenSources(Setup,SrcStats) == false)
@@ -782,15 +846,21 @@ bool Generate(CommandLine &CmdL)
	 delete [] List;
      }
-   
-   if (_config->FindB("APT::FTPArchive::Contents",true) == false)
-      return true;
-   
-   c1out << "Done Packages, Starting contents." << endl;
+   return true;
+}
+									/*}}}*/
+// DoGenerateContents - Helper for Generate to generate the Contents	/*{{{*/
+// ---------------------------------------------------------------------
+static bool DoGenerateContents(Configuration &Setup,
+			       vector &PkgList,
+			       CommandLine &CmdL)
+{
+   c1out << "Packages done, Starting contents." << endl;
 
  // Sort the contents file list by date
  string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
-   for (vector::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+   for (vector::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
  {
    struct stat A;
    if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
@@ -806,10 +876,11 @@ bool Generate(CommandLine &CmdL)
    that describe the debs it indexes. Since the package files contain
    hashes of the .debs this means they have not changed either so the
    contents must be up to date. */
-   unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
-   for (vector::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+   unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",
+						 std::numeric_limits::max())*1024;
+   for (vector::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
  {
-      // This record is not relevent
+      // This record is not relevant
    if (I->ContentsDone == true ||
	  I->Contents.empty() == true)
	 continue;
@@ -842,21 +913,74 @@ bool Generate(CommandLine &CmdL)
	 break;
      }
  }
+
+   return true;
+}
+
+									/*}}}*/
+// Generate - Full generate, using a config file			/*{{{*/
+// ---------------------------------------------------------------------
+/* */
+static bool Generate(CommandLine &CmdL)
+{
+   struct CacheDB::Stats SrcStats;
+   if (CmdL.FileSize() < 2)
+      return ShowHelp(CmdL);
+
+   struct timeval StartTime;
+   gettimeofday(&StartTime,0);
+   struct CacheDB::Stats Stats;
+   // Read the configuration file.
+   Configuration Setup;
+   if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+      return false;
+
+   vector PkgList;
+   std::vector TransList;
+   LoadTree(PkgList, TransList, Setup);
+   LoadBinDir(PkgList,Setup);
+
+   // Sort by cache DB to improve IO locality.
+   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
+
+   // Generate packages
+   if (_config->FindB("APT::FTPArchive::ContentsOnly", false) == false)
+   {
+      if(DoGeneratePackagesAndSources(Setup, PkgList, SrcStats, Stats, CmdL) == false)
+      {
+	 UnloadTree(TransList);
+	 return false;
+      }
+   } else {
+      c1out << "Skipping Packages/Sources generation" << endl;
+   }
+
+   // do Contents if needed
+   if (_config->FindB("APT::FTPArchive::Contents", true) == true)
+      if (DoGenerateContents(Setup, PkgList, CmdL) == false)
+      {
+	 UnloadTree(TransList);
+	 return false;
+      }
+
  struct timeval NewTime;
-   gettimeofday(&NewTime,0);
-   double Delta = NewTime.tv_sec - StartTime.tv_sec +
+   gettimeofday(&NewTime,0);
+   double Delta = NewTime.tv_sec - StartTime.tv_sec +
                  (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
-   c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
+   c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
        << " archives. Took " << TimeToStr((long)Delta) << endl;
-   
+
+   UnloadTree(TransList);
  return true;
 }
-									/*}}}*/
+
+									/*}}}*/
 // Clean - Clean out the databases					/*{{{*/
 // ---------------------------------------------------------------------
 /* */
-bool Clean(CommandLine &CmdL)
+static bool Clean(CommandLine &CmdL)
 {
  if (CmdL.FileSize() != 2)
    return ShowHelp(CmdL);
@@ -865,91 +989,69 @@ bool Clean(CommandLine &CmdL)
  Configuration Setup;
  if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
    return false;
+   // we don't need translation creation here
+   Setup.Set("TreeDefault::Translation", "/dev/null");
 
  vector PkgList;
-   LoadTree(PkgList,Setup);
+   std::vector TransList;
+   LoadTree(PkgList, TransList, Setup);
  LoadBinDir(PkgList,Setup);
 
  // Sort by cache DB to improve IO locality.
  stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
 
  string CacheDir = Setup.FindDir("Dir::CacheDir");
 
  for (vector::iterator I = PkgList.begin(); I != PkgList.end(); )
  {
-      c0out << I->BinCacheDB << endl;
+      if(I->BinCacheDB != "")
+         c0out << I->BinCacheDB << endl;
+      if(I->SrcCacheDB != "")
+         c0out << I->SrcCacheDB << endl;
    CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
+      CacheDB DB_SRC(flCombine(CacheDir,I->SrcCacheDB));
    if (DB.Clean() == false)
	 _error->DumpErrors();
-      
+      if (DB_SRC.Clean() == false)
+	 _error->DumpErrors();
+
    string CacheDB = I->BinCacheDB;
-      for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+      string SrcCacheDB = I->SrcCacheDB;
+      while(I != PkgList.end() &&
+	    I->BinCacheDB == CacheDB &&
+	    I->SrcCacheDB == SrcCacheDB)
+	 ++I;
  }
-  
+
+   return true;
 }
 									/*}}}*/
 
-int main(int argc, const char *argv[])
+static std::vector GetCommands()					/*{{{*/
+{
+   return {
+      {"packages",&SimpleGenPackages, nullptr},
+      {"contents",&SimpleGenContents, nullptr},
+      {"sources",&SimpleGenSources, nullptr},
+      {"release",&SimpleGenRelease, nullptr},
+      {"generate",&Generate, nullptr},
+      {"clean",&Clean, nullptr},
+      {nullptr, nullptr, nullptr}
+   };
+}
+									/*}}}*/
+int main(int argc, const char *argv[])					/*{{{*/
 {
-   CommandLine::Args Args[] = {
-      {'h',"help","help",0},
-      {0,"md5","APT::FTPArchive::MD5",0},
-      {'v',"version","version",0},
-      {'d',"db","APT::FTPArchive::DB",CommandLine::HasArg},
-      {'s',"source-override","APT::FTPArchive::SourceOverride",CommandLine::HasArg},
-      {'q',"quiet","quiet",CommandLine::IntLevel},
-      {'q',"silent","quiet",CommandLine::IntLevel},
-      {0,"delink","APT::FTPArchive::DeLinkAct",0},
-      {0,"readonly","APT::FTPArchive::ReadOnlyDB",0},
-      {0,"contents","APT::FTPArchive::Contents",0},
-      {'c',"config-file",0,CommandLine::ConfigFile},
-      {'o',"option",0,CommandLine::ArbItem},
-      {0,0,0,0}};
-   CommandLine::Dispatch Cmds[] = {{"packages",&SimpleGenPackages},
-                                   {"contents",&SimpleGenContents},
-                                   {"sources",&SimpleGenSources},
-                                   {"release",&SimpleGenRelease},
-                                   {"generate",&Generate},
-                                   {"clean",&Clean},
-                                   {"help",&ShowHelp},
-                                   {0,0}};
-   // Parse the command line and initialize the package library
-   CommandLine CmdL(Args,_config);
-   if (CmdL.Parse(argc,argv) == false)
-   {
-      _error->DumpErrors();
-      return 100;
-   }
-
-   // See if the help should be shown
-   if (_config->FindB("help") == true ||
-       _config->FindB("version") == true ||
-       CmdL.FileSize() == 0)
-   {
-      ShowHelp(CmdL);
-      return 0;
-   }
-
-   // Setup the output streams
-   c0out.rdbuf(clog.rdbuf());
-   c1out.rdbuf(clog.rdbuf());
-   c2out.rdbuf(clog.rdbuf());
+   CommandLine CmdL;
+   auto const Cmds = ParseCommandLine(CmdL, APT_CMD::APT_FTPARCHIVE, &_config, NULL, argc, argv, ShowHelp, &GetCommands);
+
+   _config->CndSet("quiet",0);
  Quiet = _config->FindI("quiet",0);
-   if (Quiet > 0)
-      c0out.rdbuf(devnull.rdbuf());
-   if (Quiet > 1)
-      c1out.rdbuf(devnull.rdbuf());
-
-   // Match the operation
-   CmdL.DispatchArg(Cmds);
-
-   if (_error->empty() == false)
-   {
-      bool Errors = _error->PendingError();
-      _error->DumpErrors();
-      return Errors == true?100:0;
-   }
-   return 0;
+   InitOutput(clog.rdbuf());
+
+   return DispatchCommandLine(CmdL, Cmds);
 }
+									/*}}}*/
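
Usage note (a hedged sketch, not part of the commit): the hunks above introduce TreeDefault::SrcCacheDB, per-tree LongDescription/Translation writing, APT::FTPArchive::IncludeArchitectureAll, APT::FTPArchive::ShowCacheMisses and APT::FTPArchive::ContentsOnly. A minimal generate configuration exercising them could look like the following; the paths, distribution name and file names are illustrative assumptions, only the option names come from the diff itself.

   // ftparchive.conf -- illustrative sketch only
   Dir {
      ArchiveDir "/srv/repo";              // read via Setup.FindDir("Dir::ArchiveDir")
      CacheDir "/srv/repo/cache";          // read via Setup.FindDir("Dir::CacheDir")
   };
   Default {
      FileMode "0644";                     // Default::FileMode
      LongDescription "false";             // with a Translation target, enables the new TranslationWriter
      Translation::Compress ". gzip";      // Default::Translation::Compress
   };
   TreeDefault {
      BinCacheDB "packages-$(ARCH).db";
      SrcCacheDB "sources-$(SECTION).db";  // new per-section source cache database
      Translation "$(DIST)/$(SECTION)/i18n/Translation-en";
      Contents "$(DIST)/$(SECTION)/Contents-$(ARCH)";
   };
   tree "dists/unstable" {
      Sections "main";
      Architectures "amd64 source";        // "all" handling follows APT::FTPArchive::IncludeArchitectureAll
   };

   // possible invocations (option names taken from the diff; file names assumed):
   //   apt-ftparchive -o APT::FTPArchive::ShowCacheMisses=true generate ftparchive.conf
   //   apt-ftparchive -o APT::FTPArchive::ContentsOnly=true generate ftparchive.conf
   //   apt-ftparchive clean ftparchive.conf    // now also cleans the sources-*.db caches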