X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/b2e465d6d32d2dc884f58b94acb7e35f671a87fe..98cc7fd2c1d397623960baf69ae3cec04a87a23e:/ftparchive/apt-ftparchive.cc?ds=inline

diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
index 055d876d0..cf667483c 100644
--- a/ftparchive/apt-ftparchive.cc
+++ b/ftparchive/apt-ftparchive.cc
@@ -1,41 +1,49 @@
 // -*- mode: cpp; mode: fold -*-
 // Description								/*{{{*/
-// $Id: apt-ftparchive.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+// $Id: apt-ftparchive.cc,v 1.8.2.3 2004/01/02 22:01:48 mdz Exp $
 /* ######################################################################
 
-   apt-scanpackages - Efficient work-alike for dpkg-scanpackages
+   apt-ftparchive - Efficient work-alike for dpkg-scanpackages
 
    Let contents be disabled from the conf
 
   ##################################################################### */
 									/*}}}*/
 // Include Files							/*{{{*/
-#ifdef __GNUG__
-#pragma implementation "apt-ftparchive.h"
-#endif
+#include
-#include "apt-ftparchive.h"
-
 #include
 #include
 #include
 #include
-#include
-#include
-#include
+#include
+#include
+#include
+#include
+
+#include
+#include
 #include
-#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
-#include "contents.h"
+#include "cachedb.h"
+#include "override.h"
+#include "apt-ftparchive.h"
 #include "multicompress.h"
-#include "writer.h"
+#include "writer.h"
+
+#include
 									/*}}}*/
-ostream c0out;
-ostream c1out;
-ostream c2out;
-ofstream devnull("/dev/null");
+using namespace std;
 
 unsigned Quiet = 0;
 
 // struct PackageMap - List of all package files in the config file	/*{{{*/
@@ -53,11 +61,21 @@ struct PackageMap
    // Stuff for the Package File
    string PkgFile;
    string BinCacheDB;
+   string SrcCacheDB;
    string BinOverride;
+   string ExtraOverride;
+
+   // We generate for this given arch
+   string Arch;
 
    // Stuff for the Source File
    string SrcFile;
    string SrcOverride;
+   string SrcExtraOverride;
+
+   // Translation master file
+   bool LongDesc;
+   TranslationWriter *TransWriter;
 
    // Contents
    string Contents;
@@ -88,16 +106,24 @@ struct PackageMap
       inline bool operator() (const PackageMap &x,const PackageMap &y)
      {return x.BinCacheDB < y.BinCacheDB;};
    };
+
+   struct SrcDBCompare : public binary_function<PackageMap,PackageMap,bool>
+   {
+      inline bool operator() (const PackageMap &x,const PackageMap &y)
+     {return x.SrcCacheDB < y.SrcCacheDB;};
+   };
 
    void GetGeneral(Configuration &Setup,Configuration &Block);
    bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
    bool GenSources(Configuration &Setup,struct CacheDB::Stats &Stats);
    bool GenContents(Configuration &Setup,
-		    PackageMap *Begin,PackageMap *End,
+		    vector<PackageMap>::iterator Begin,
+		    vector<PackageMap>::iterator End,
 		    unsigned long &Left);
 
-   PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false),
-                  PkgDone(false), SrcDone(false), ContentsMTime(0) {};
+   PackageMap() : LongDesc(true), TransWriter(NULL), DeLinkLimit(0), Permissions(1),
+                  ContentsDone(false), PkgDone(false), SrcDone(false),
+                  ContentsMTime(0) {};
 };
 									/*}}}*/
@@ -126,7 +152,7 @@ void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
 			     Setup.Find("Default::Packages::Extensions",".deb").c_str());
 
    Permissions = Setup.FindI("Default::FileMode",0644);
-   
+
    if (FLFile.empty() == false)
       FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);
 
@@ -152,26 +178,28 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    PkgDone = true;
 
    // Create a package writer object.
-   PackagesWriter Packages(flCombine(CacheDir,BinCacheDB),
-			   flCombine(OverrideDir,BinOverride));
+   MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
+		      PkgCompress,Permissions);
+   PackagesWriter Packages(&Comp.Input, TransWriter, flCombine(CacheDir,BinCacheDB),
+			   flCombine(OverrideDir,BinOverride),
+			   flCombine(OverrideDir,ExtraOverride),
+			   Arch);
    if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
-      return _error->Error("Package extension list is too long");
+      return _error->Error(_("Package extension list is too long"));
    if (_error->PendingError() == true)
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
 
    Packages.PathPrefix = PathPrefix;
    Packages.DirStrip = ArchiveDir;
    Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+   Packages.LongDescription = LongDesc;
+
    Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
    Packages.DeLinkLimit = DeLinkLimit;
-   
-   // Create a compressor object
-   MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
-		      PkgCompress,Permissions);
-   Packages.Output = Comp.Input;
+
    if (_error->PendingError() == true)
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
 
    c0out << ' ' << BaseDir << ":" << flush;
 
@@ -190,11 +218,11 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    Packages.Output = 0;      // Just in case
 
    // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
    if (Comp.Finalize(Size) == false)
    {
       c0out << endl;
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
    }
 
    if (Size != 0)
@@ -207,19 +235,23 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    gettimeofday(&NewTime,0);
    double Delta = NewTime.tv_sec - StartTime.tv_sec +
                   (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
-   
+
    c0out << Packages.Stats.Packages << " files " <<
/*      SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */
      SizeToStr(Packages.Stats.Bytes) << "B " <<
      TimeToStr((long)Delta) << endl;
+
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
 
    Stats.Add(Packages.Stats);
    Stats.DeLinkBytes = Packages.Stats.DeLinkBytes;
 
    return !_error->PendingError();
 }
+
 									/*}}}*/
-// PackageMap::GenSources - Actually generate a Package file		/*{{{*/
+// PackageMap::GenSources - Actually generate a Source file		/*{{{*/
 // ---------------------------------------------------------------------
 /* This generates the Sources File described by this object. */
 bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
@@ -237,12 +269,16 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    SrcDone = true;
 
    // Create a package writer object.
-   SourcesWriter Sources(flCombine(OverrideDir,BinOverride),
-			 flCombine(OverrideDir,SrcOverride));
+   MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
+		      SrcCompress,Permissions);
+   SourcesWriter Sources(&Comp.Input, flCombine(CacheDir, SrcCacheDB),
+			 flCombine(OverrideDir,BinOverride),
+			 flCombine(OverrideDir,SrcOverride),
+			 flCombine(OverrideDir,SrcExtraOverride));
    if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
-      return _error->Error("Source extension list is too long");
+      return _error->Error(_("Source extension list is too long"));
    if (_error->PendingError() == true)
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
 
    Sources.PathPrefix = PathPrefix;
    Sources.DirStrip = ArchiveDir;
@@ -250,13 +286,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
 
    Sources.DeLinkLimit = DeLinkLimit;
    Sources.Stats.DeLinkBytes = Stats.DeLinkBytes;
-   
-   // Create a compressor object
-   MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
-		      SrcCompress,Permissions);
-   Sources.Output = Comp.Input;
+
    if (_error->PendingError() == true)
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
 
    c0out << ' ' << BaseDir << ":" << flush;
 
@@ -274,11 +306,11 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    Sources.Output = 0;      // Just in case
 
    // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
    if (Comp.Finalize(Size) == false)
    {
       c0out << endl;
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
    }
 
    if (Size != 0)
@@ -295,6 +327,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    c0out << Sources.Stats.Packages << " pkgs in " <<
      TimeToStr((long)Delta) << endl;
 
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Sources.Stats.Misses << endl;
+
    Stats.Add(Sources.Stats);
    Stats.DeLinkBytes = Sources.Stats.DeLinkBytes;
 
@@ -307,8 +342,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    It searches the given iterator range for other package files that map
    into this contents file and includes their data as well when building. */
 bool PackageMap::GenContents(Configuration &Setup,
-			     PackageMap *Begin,PackageMap *End,
-			     unsigned long &Left)
+			     vector<PackageMap>::iterator Begin,
+			     vector<PackageMap>::iterator End,
+			     unsigned long &Left)
 {
    if (Contents.empty() == true)
       return true;
 
@@ -324,16 +360,15 @@ bool PackageMap::GenContents(Configuration &Setup,
    gettimeofday(&StartTime,0);
 
    // Create a package writer object.
-   ContentsWriter Contents("");
+   MultiCompress Comp(flCombine(ArchiveDir,this->Contents),
+		      CntCompress,Permissions);
+   Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60;
+   ContentsWriter Contents(&Comp.Input, "", Arch);
    if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
-      return _error->Error("Package extension list is too long");
+      return _error->Error(_("Package extension list is too long"));
    if (_error->PendingError() == true)
       return false;
 
-   MultiCompress Comp(flCombine(ArchiveDir,this->Contents),
-		      CntCompress,Permissions);
-   Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60;
-   Contents.Output = Comp.Input;
    if (_error->PendingError() == true)
       return false;
 
@@ -343,30 +378,30 @@ bool PackageMap::GenContents(Configuration &Setup,
       FileFd Head(flCombine(OverrideDir,ContentsHead),FileFd::ReadOnly);
       if (_error->PendingError() == true)
 	 return false;
-      
-      unsigned long Size = Head.Size();
+
+      unsigned long long Size = Head.Size();
       unsigned char Buf[4096];
       while (Size != 0)
      {
-	 unsigned long ToRead = Size;
+	 unsigned long long ToRead = Size;
 	 if (Size > sizeof(Buf))
 	    ToRead = sizeof(Buf);
-	 
+
 	 if (Head.Read(Buf,ToRead) == false)
 	    return false;
-	 
-	 if (fwrite(Buf,1,ToRead,Comp.Input) != ToRead)
-	    return _error->Errno("fwrite","Error writing header to contents file");
-	 
+
+	 if (Comp.Input.Write(Buf, ToRead) == false)
+	    return _error->Errno("fwrite",_("Error writing header to contents file"));
+
 	 Size -= ToRead;
-      }
-   }
-      
+      }
+   }
+
    /* Go over all the package file records and parse all the package
      files associated with this contents file into one great big honking
     memory structure, then dump the sorted version */
    c0out << ' ' << this->Contents << ":" << flush;
-   for (PackageMap *I = Begin; I != End; I++)
+   for (vector<PackageMap>::iterator I = Begin; I != End; ++I)
    {
       if (I->Contents != this->Contents)
 	 continue;
 
@@ -382,11 +417,11 @@ bool PackageMap::GenContents(Configuration &Setup,
    Contents.Finish();
 
    // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
    if (Comp.Finalize(Size) == false || _error->PendingError() == true)
    {
       c0out << endl;
-      return _error->Error("Error Processing Contents %s",
+      return _error->Error(_("Error processing contents %s"),
			   this->Contents.c_str());
    }
 
@@ -406,6 +441,9 @@ bool PackageMap::GenContents(Configuration &Setup,
    double Delta = NewTime.tv_sec - StartTime.tv_sec +
                   (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
 
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Contents.Stats.Misses<< endl;
+
    c0out << Contents.Stats.Packages << " files " <<
      SizeToStr(Contents.Stats.Bytes) << "B " <<
      TimeToStr((long)Delta) << endl;
 
@@ -418,7 +456,7 @@ bool PackageMap::GenContents(Configuration &Setup,
 // ---------------------------------------------------------------------
 /* This populates the PkgList with all the possible permutations of the
    section/arch lists. */
-void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
+static void LoadTree(vector<PackageMap> &PkgList, std::vector<TranslationWriter*> &TransList, Configuration &Setup)
 {
    // Load the defaults
    string DDir = Setup.Find("TreeDefault::Directory",
@@ -427,18 +465,28 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
 			    "$(DIST)/$(SECTION)/source/");
    string DPkg = Setup.Find("TreeDefault::Packages",
 			    "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
+   string DTrans = Setup.Find("TreeDefault::Translation",
+			    "$(DIST)/$(SECTION)/i18n/Translation-en");
    string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
 			    "$(DIST)/$(SECTION)/");
    string DContents = Setup.Find("TreeDefault::Contents",
-			    "$(DIST)/Contents-$(ARCH)");
+			    "$(DIST)/$(SECTION)/Contents-$(ARCH)");
    string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
    string DBCache = Setup.Find("TreeDefault::BinCacheDB",
 			       "packages-$(ARCH).db");
+   string SrcDBCache = Setup.Find("TreeDefault::SrcCacheDB",
+			       "sources-$(SECTION).db");
    string DSources = Setup.Find("TreeDefault::Sources",
 			    "$(DIST)/$(SECTION)/source/Sources");
    string DFLFile = Setup.Find("TreeDefault::FileList", "");
    string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");
 
+   mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
+   bool const LongDescription = Setup.FindB("Default::LongDescription",
+					_config->FindB("APT::FTPArchive::LongDescription", true));
+   string const TranslationCompress = Setup.Find("Default::Translation::Compress",". gzip").c_str();
+
    // Process 'tree' type sections
    const Configuration::Item *Top = Setup.Tree("tree");
    for (Top = (Top == 0?0:Top->Child); Top != 0;)
@@ -447,20 +495,26 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
      string Dist = Top->Tag;
 
      // Parse the sections
-      const char *Sections = Block.Find("Sections").c_str();
+      string Tmp = Block.Find("Sections");
+      const char *Sections = Tmp.c_str();
      string Section;
      while (ParseQuoteWord(Sections,Section) == true)
      {
-	 const char *Archs = Block.Find("Architectures").c_str();
 	 string Arch;
+	 struct SubstVar const Vars[] = {{"$(DIST)",&Dist},
+					 {"$(SECTION)",&Section},
+					 {"$(ARCH)",&Arch},
+					 {NULL, NULL}};
+	 mode_t const Perms = Block.FindI("FileMode", Permissions);
+	 bool const LongDesc = Block.FindB("LongDescription", LongDescription);
+	 TranslationWriter *TransWriter = NULL;
+
+	 string const Tmp2 = Block.Find("Architectures");
+	 const char *Archs = Tmp2.c_str();
 	 while (ParseQuoteWord(Archs,Arch) == true)
 	 {
-	    struct SubstVar Vars[] = {{"$(DIST)",&Dist},
-				      {"$(SECTION)",&Section},
-				      {"$(ARCH)",&Arch},
-				      {}};
 	    PackageMap Itm;
-	    
+	    Itm.Permissions = Perms;
 	    Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
 	    Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
 
@@ -471,6 +525,8 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
 	       Itm.SrcFile = SubstVar(Block.Find("Sources",DSources.c_str()),Vars);
 	       Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
 	       Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
+	       Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars);
+	       Itm.SrcCacheDB = SubstVar(Block.Find("SrcCacheDB",SrcDBCache.c_str()),Vars);
 	    }
 	    else
 	    {
@@ -478,25 +534,45 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
 	       Itm.BaseDir = SubstVar(Block.Find("Directory",DDir.c_str()),Vars);
 	       Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
 	       Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
+	       Itm.Arch = Arch;
+	       Itm.LongDesc = LongDesc;
"/dev/null") + { + string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"), + SubstVar(Block.Find("Translation", DTrans.c_str()), Vars)); + string const TransCompress = Block.Find("Translation::Compress", TranslationCompress); + TransWriter = new TranslationWriter(TranslationFile, TransCompress, Perms); + TransList.push_back(TransWriter); + } + Itm.TransWriter = TransWriter; Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars); Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars); Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars); + Itm.ExtraOverride = SubstVar(Block.Find("ExtraOverride"),Vars); } - Itm.GetGeneral(Setup,Block); + Itm.GetGeneral(Setup,Block); PkgList.push_back(Itm); } } - + Top = Top->Next; - } + } +} + /*}}}*/ +static void UnloadTree(std::vector const &Trans) /*{{{*/ +{ + for (std::vector::const_reverse_iterator T = Trans.rbegin(); T != Trans.rend(); ++T) + delete *T; } /*}}}*/ // LoadBinDir - Load a 'bindirectory' section from the Generate Config /*{{{*/ // --------------------------------------------------------------------- /* */ -void LoadBinDir(vector &PkgList,Configuration &Setup) +static void LoadBinDir(vector &PkgList,Configuration &Setup) { + mode_t const Permissions = Setup.FindI("Default::FileMode",0644); + // Process 'bindirectory' type sections const Configuration::Item *Top = Setup.Tree("bindirectory"); for (Top = (Top == 0?0:Top->Child); Top != 0;) @@ -507,13 +583,17 @@ void LoadBinDir(vector &PkgList,Configuration &Setup) Itm.PkgFile = Block.Find("Packages"); Itm.SrcFile = Block.Find("Sources"); Itm.BinCacheDB = Block.Find("BinCacheDB"); + Itm.SrcCacheDB = Block.Find("SrcCacheDB"); Itm.BinOverride = Block.Find("BinOverride"); + Itm.ExtraOverride = Block.Find("ExtraOverride"); + Itm.SrcExtraOverride = Block.Find("SrcExtraOverride"); Itm.SrcOverride = Block.Find("SrcOverride"); Itm.BaseDir = Top->Tag; Itm.FLFile = Block.Find("FileList"); Itm.InternalPrefix = Block.Find("InternalPrefix",Top->Tag.c_str()); Itm.Contents = Block.Find("Contents"); Itm.ContentsHead = Block.Find("Contents::Header"); + Itm.Permissions = Block.FindI("FileMode", Permissions); Itm.GetGeneral(Setup,Block); PkgList.push_back(Itm); @@ -526,18 +606,18 @@ void LoadBinDir(vector &PkgList,Configuration &Setup) // ShowHelp - Show the help text /*{{{*/ // --------------------------------------------------------------------- /* */ -bool ShowHelp(CommandLine &CmdL) +static bool ShowHelp(CommandLine &) { - ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION, - COMMON_OS,COMMON_CPU,__DATE__,__TIME__); + ioprintf(cout, "%s %s (%s)\n", PACKAGE, PACKAGE_VERSION, COMMON_ARCH); if (_config->FindB("version") == true) return true; cout << - "Usage: apt-ftparchive [options] command\n" - "Commands: packges binarypath [overridefile [pathprefix]]\n" + _("Usage: apt-ftparchive [options] command\n" + "Commands: packages binarypath [overridefile [pathprefix]]\n" " sources srcpath [overridefile [pathprefix]]\n" " contents path\n" + " release path\n" " generate config [groups]\n" " clean config\n" "\n" @@ -555,9 +635,9 @@ bool ShowHelp(CommandLine &CmdL) "\n" "The 'packages' and 'sources' command should be run in the root of the\n" "tree. BinaryPath should point to the base of the recursive search and \n" - "override file should contian the override flags. Pathprefix is\n" + "override file should contain the override flags. Pathprefix is\n" "appended to the filename fields if present. 
     "appended to the filename fields if present. Example usage from the \n"
-    "debian archive:\n"
+    "Debian archive:\n"
     "   apt-ftparchive packages dists/potato/main/binary-i386/ > \\\n"
     "     dists/potato/main/binary-i386/Packages\n"
     "\n"
@@ -570,7 +650,7 @@ bool ShowHelp(CommandLine &CmdL)
     "  --no-delink Enable delinking debug mode\n"
     "  --contents  Control contents file generation\n"
     "  -c=?  Read this configuration file\n"
-    "  -o=?  Set an arbitary configuration option" << endl;
+    "  -o=?  Set an arbitrary configuration option") << endl;
 
    return true;
 }
@@ -578,7 +658,7 @@ bool ShowHelp(CommandLine &CmdL)
 // SimpleGenPackages - Generate a Packages file for a directory tree	/*{{{*/
 // ---------------------------------------------------------------------
 /* This emulates dpkg-scanpackages's command line interface. 'mostly' */
-bool SimpleGenPackages(CommandLine &CmdL)
+static bool SimpleGenPackages(CommandLine &CmdL)
 {
    if (CmdL.FileSize() < 2)
      return ShowHelp(CmdL);
 
@@ -588,8 +668,8 @@ bool SimpleGenPackages(CommandLine &CmdL)
      Override = CmdL.FileList[2];
 
    // Create a package writer object.
-   PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
-			   Override);
+   PackagesWriter Packages(NULL, NULL, _config->Find("APT::FTPArchive::DB"),
+			   Override, "", _config->Find("APT::FTPArchive::Architecture"));
    if (_error->PendingError() == true)
      return false;
 
@@ -600,19 +680,23 @@ bool SimpleGenPackages(CommandLine &CmdL)
    if (Packages.RecursiveScan(CmdL.FileList[1]) == false)
      return false;
 
+   // Give some stats if asked for
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
+
    return true;
 }
 									/*}}}*/
 // SimpleGenContents - Generate a Contents listing			/*{{{*/
 // ---------------------------------------------------------------------
 /* */
-bool SimpleGenContents(CommandLine &CmdL)
+static bool SimpleGenContents(CommandLine &CmdL)
 {
    if (CmdL.FileSize() < 2)
     return ShowHelp(CmdL);
 
   // Create a package writer object.
-   ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"));
+   ContentsWriter Contents(NULL, _config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture"));
   if (_error->PendingError() == true)
     return false;
 
@@ -628,7 +712,7 @@ bool SimpleGenContents(CommandLine &CmdL)
 // SimpleGenSources - Generate a Sources file for a directory tree	/*{{{*/
 // ---------------------------------------------------------------------
 /* This emulates dpkg-scanpackages's command line interface. 'mostly' */
-bool SimpleGenSources(CommandLine &CmdL)
+static bool SimpleGenSources(CommandLine &CmdL)
 {
    if (CmdL.FileSize() < 2)
     return ShowHelp(CmdL);
 
@@ -645,7 +729,7 @@ bool SimpleGenSources(CommandLine &CmdL)
		     SOverride.c_str());
 
   // Create a package writer object.
-   SourcesWriter Sources(Override,SOverride);
+   SourcesWriter Sources(NULL, _config->Find("APT::FTPArchive::DB"),Override,SOverride);
   if (_error->PendingError() == true)
     return false;
 
@@ -656,41 +740,51 @@ bool SimpleGenSources(CommandLine &CmdL)
   if (Sources.RecursiveScan(CmdL.FileList[1]) == false)
     return false;
 
+   // Give some stats if asked for
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+     c0out << " Misses in Cache: " << Sources.Stats.Misses<< endl;
+
   return true;
 }
 									/*}}}*/
-// Generate - Full generate, using a config file			/*{{{*/
+// SimpleGenRelease - Generate a Release file for a directory tree	/*{{{*/
 // ---------------------------------------------------------------------
-/* */
-bool Generate(CommandLine &CmdL)
+static bool SimpleGenRelease(CommandLine &CmdL)
 {
-   struct CacheDB::Stats SrcStats;
   if (CmdL.FileSize() < 2)
      return ShowHelp(CmdL);
 
-   struct timeval StartTime;
-   gettimeofday(&StartTime,0);
-   struct CacheDB::Stats Stats;
-   
-   // Read the configuration file.
-   Configuration Setup;
-   if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+   string Dir = CmdL.FileList[1];
+
+   ReleaseWriter Release(NULL, "");
+   Release.DirStrip = Dir;
+
+   if (_error->PendingError() == true)
      return false;
 
-   vector<PackageMap> PkgList;
-   LoadTree(PkgList,Setup);
-   LoadBinDir(PkgList,Setup);
+   if (Release.RecursiveScan(Dir) == false)
+      return false;
 
-   // Sort by cache DB to improve IO locality.
-   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
-   
-   // Generate packages
+   Release.Finish();
+
+   return true;
+}
+
+									/*}}}*/
+// DoGeneratePackagesAndSources - Helper for Generate			/*{{{*/
+// ---------------------------------------------------------------------
+static bool DoGeneratePackagesAndSources(Configuration &Setup,
+					 vector<PackageMap> &PkgList,
+					 struct CacheDB::Stats &SrcStats,
+					 struct CacheDB::Stats &Stats,
+					 CommandLine &CmdL)
+{
   if (CmdL.FileSize() <= 2)
   {
-      for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
	 if (I->GenPackages(Setup,Stats) == false)
	    _error->DumpErrors();
-      for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
	 if (I->GenSources(Setup,SrcStats) == false)
	    _error->DumpErrors();
   }
@@ -699,13 +793,13 @@ bool Generate(CommandLine &CmdL)
      // Make a choice list out of the package list..
      RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
      RxChoiceList *End = List;
-      for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
      {
-	 End->UserData = I;
+	 End->UserData = &(*I);
	 End->Str = I->BaseDir.c_str();
	 End++;
 
-	 End->UserData = I;
+	 End->UserData = &(*I);
	 End->Str = I->Tag.c_str();
	 End++;
      }
@@ -715,7 +809,7 @@ bool Generate(CommandLine &CmdL)
      if (RegexChoice(List,CmdL.FileList + 2,CmdL.FileList + CmdL.FileSize()) == 0)
      {
	 delete [] List;
-	 return _error->Error("No selections matched");
+	 return _error->Error(_("No selections matched"));
      }
      _error->DumpErrors();
 
@@ -747,15 +841,21 @@ bool Generate(CommandLine &CmdL)
 
      delete [] List;
   }
-   
-   if (_config->FindB("APT::FTPArchive::Contents",true) == false)
-      return true;
-   
-   c1out << "Done Packages, Starting contents." << endl;
+   return true;
+}
+
+									/*}}}*/
+// DoGenerateContents - Helper for Generate to generate the Contents	/*{{{*/
+// ---------------------------------------------------------------------
+static bool DoGenerateContents(Configuration &Setup,
+			       vector<PackageMap> &PkgList,
+			       CommandLine &CmdL)
+{
+   c1out << "Packages done, Starting contents." << endl;
 
   // Sort the contents file list by date
   string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
-   for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
   {
      struct stat A;
      if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
@@ -772,9 +872,9 @@ bool Generate(CommandLine &CmdL)
     hashes of the .debs this means they have not changed either so
     the contents must be up to date. */
   unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
-   for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
   {
-      // This record is not relevent
+      // This record is not relevant
      if (I->ContentsDone == true || I->Contents.empty() == true)
	 continue;
 
@@ -788,7 +888,7 @@ bool Generate(CommandLine &CmdL)
      {
	 if (MultiCompress::GetStat(flCombine(ArchiveDir,I->PkgFile),I->PkgCompress,B) == false)
	 {
-	    _error->Warning("Some files are missing in the package file group `%s'",I->PkgFile.c_str());
+	    _error->Warning(_("Some files are missing in the package file group `%s'"),I->PkgFile.c_str());
	    continue;
	 }
 
@@ -807,21 +907,74 @@ bool Generate(CommandLine &CmdL)
	 break;
      }
   }
+
+   return true;
+}
+
+									/*}}}*/
+// Generate - Full generate, using a config file			/*{{{*/
+// ---------------------------------------------------------------------
+/* */
+static bool Generate(CommandLine &CmdL)
+{
+   struct CacheDB::Stats SrcStats;
+   if (CmdL.FileSize() < 2)
+      return ShowHelp(CmdL);
+
+   struct timeval StartTime;
+   gettimeofday(&StartTime,0);
+   struct CacheDB::Stats Stats;
+
+   // Read the configuration file.
+   Configuration Setup;
+   if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+      return false;
+
+   vector<PackageMap> PkgList;
+   std::vector<TranslationWriter*> TransList;
+   LoadTree(PkgList, TransList, Setup);
+   LoadBinDir(PkgList,Setup);
+
+   // Sort by cache DB to improve IO locality.
+   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
+
+   // Generate packages
+   if (_config->FindB("APT::FTPArchive::ContentsOnly", false) == false)
+   {
+      if(DoGeneratePackagesAndSources(Setup, PkgList, SrcStats, Stats, CmdL) == false)
+      {
+         UnloadTree(TransList);
+         return false;
+      }
+   } else {
+      c1out << "Skipping Packages/Sources generation" << endl;
+   }
+
+   // do Contents if needed
+   if (_config->FindB("APT::FTPArchive::Contents", true) == true)
+      if (DoGenerateContents(Setup, PkgList, CmdL) == false)
+      {
+         UnloadTree(TransList);
+         return false;
+      }
+
   struct timeval NewTime;
-   gettimeofday(&NewTime,0);
-   double Delta = NewTime.tv_sec - StartTime.tv_sec +
+   gettimeofday(&NewTime,0);
+   double Delta = NewTime.tv_sec - StartTime.tv_sec +
     (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
-   c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
+   c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
Took " << TimeToStr((long)Delta) << endl; - + + UnloadTree(TransList); return true; } - /*}}}*/ + + /*}}}*/ // Clean - Clean out the databases /*{{{*/ // --------------------------------------------------------------------- /* */ -bool Clean(CommandLine &CmdL) +static bool Clean(CommandLine &CmdL) { if (CmdL.FileSize() != 2) return ShowHelp(CmdL); @@ -830,36 +983,54 @@ bool Clean(CommandLine &CmdL) Configuration Setup; if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false) return false; + // we don't need translation creation here + Setup.Set("TreeDefault::Translation", "/dev/null"); vector PkgList; - LoadTree(PkgList,Setup); + std::vector TransList; + LoadTree(PkgList, TransList, Setup); LoadBinDir(PkgList,Setup); // Sort by cache DB to improve IO locality. stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare()); + stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare()); string CacheDir = Setup.FindDir("Dir::CacheDir"); - for (PackageMap *I = PkgList.begin(); I != PkgList.end(); ) + for (vector::iterator I = PkgList.begin(); I != PkgList.end(); ) { - c0out << I->BinCacheDB << endl; + if(I->BinCacheDB != "") + c0out << I->BinCacheDB << endl; + if(I->SrcCacheDB != "") + c0out << I->SrcCacheDB << endl; CacheDB DB(flCombine(CacheDir,I->BinCacheDB)); + CacheDB DB_SRC(flCombine(CacheDir,I->SrcCacheDB)); if (DB.Clean() == false) _error->DumpErrors(); - + if (DB_SRC.Clean() == false) + _error->DumpErrors(); + string CacheDB = I->BinCacheDB; - for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++); + string SrcCacheDB = I->SrcCacheDB; + while(I != PkgList.end() && + I->BinCacheDB == CacheDB && + I->SrcCacheDB == SrcCacheDB) + ++I; } - + + return true; } /*}}}*/ int main(int argc, const char *argv[]) { + setlocale(LC_ALL, ""); CommandLine::Args Args[] = { {'h',"help","help",0}, {0,"md5","APT::FTPArchive::MD5",0}, + {0,"sha1","APT::FTPArchive::SHA1",0}, + {0,"sha256","APT::FTPArchive::SHA256",0}, {'v',"version","version",0}, {'d',"db","APT::FTPArchive::DB",CommandLine::HasArg}, {'s',"source-override","APT::FTPArchive::SourceOverride",CommandLine::HasArg}, @@ -868,12 +1039,14 @@ int main(int argc, const char *argv[]) {0,"delink","APT::FTPArchive::DeLinkAct",0}, {0,"readonly","APT::FTPArchive::ReadOnlyDB",0}, {0,"contents","APT::FTPArchive::Contents",0}, + {'a',"arch","APT::FTPArchive::Architecture",CommandLine::HasArg}, {'c',"config-file",0,CommandLine::ConfigFile}, {'o',"option",0,CommandLine::ArbItem}, {0,0,0,0}}; CommandLine::Dispatch Cmds[] = {{"packages",&SimpleGenPackages}, {"contents",&SimpleGenContents}, {"sources",&SimpleGenSources}, + {"release",&SimpleGenRelease}, {"generate",&Generate}, {"clean",&Clean}, {"help",&ShowHelp}, @@ -881,31 +1054,12 @@ int main(int argc, const char *argv[]) // Parse the command line and initialize the package library CommandLine CmdL(Args,_config); - if (CmdL.Parse(argc,argv) == false) - { - _error->DumpErrors(); - return 100; - } - - // See if the help should be shown - if (_config->FindB("help") == true || - _config->FindB("version") == true || - CmdL.FileSize() == 0) - { - ShowHelp(CmdL); - return 0; - } - - // Setup the output streams - c0out.rdbuf(cout.rdbuf()); - c1out.rdbuf(cout.rdbuf()); - c2out.rdbuf(cout.rdbuf()); + ParseCommandLine(CmdL, Cmds, Args, &_config, NULL, argc, argv, ShowHelp); + + _config->CndSet("quiet",0); Quiet = _config->FindI("quiet",0); - if (Quiet > 0) - c0out.rdbuf(devnull.rdbuf()); - if (Quiet > 1) - c1out.rdbuf(devnull.rdbuf()); - + InitOutput(clog.rdbuf()); + // Match the operation 
CmdL.DispatchArg(Cmds);
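
The `generate` and `clean` paths above are driven by an apt.conf-style configuration file read through `ReadConfigFile` and queried via `Setup.Find`/`Block.Find`. As a rough sketch of the keys this revision touches (`TreeDefault::SrcCacheDB`, `Default::LongDescription`, `Default::Translation::Compress`, the per-`tree` `Sections`/`Architectures` lists), something like the following could be used; the directory paths, distribution and section names here are invented for illustration and are not taken from the diff:

    // ftparchive.conf (hypothetical example)
    Dir {
       ArchiveDir "/srv/archive";          // read as Setup.FindDir("Dir::ArchiveDir")
       CacheDir "/srv/archive/cache";      // read as Setup.FindDir("Dir::CacheDir")
    };
    Default {
       LongDescription "false";            // false enables the new Translation-en writer
       Translation::Compress ". gzip";     // Default::Translation::Compress, as in LoadTree
    };
    TreeDefault {
       BinCacheDB "packages-$(ARCH).db";   // existing per-arch binary cache default
       SrcCacheDB "sources-$(SECTION).db"; // per-section source cache added by this change
       Contents "$(DIST)/$(SECTION)/Contents-$(ARCH)";
    };
    tree "dists/unstable" {                // tag becomes $(DIST); each Section/Arch pair yields a PackageMap
       Sections "main contrib";
       Architectures "i386 amd64 source";
    };

Run as `apt-ftparchive generate ftparchive.conf`; `apt-ftparchive clean ftparchive.conf` walks the same PackageMap list to prune both the binary and the new source cache databases.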