X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/5aa5a030bb4f2d46a30526377c5309dc20dbe2af..98cc7fd2c1d397623960baf69ae3cec04a87a23e:/ftparchive/apt-ftparchive.cc

diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
index 3b1e80631..cf667483c 100644
--- a/ftparchive/apt-ftparchive.cc
+++ b/ftparchive/apt-ftparchive.cc
@@ -3,37 +3,47 @@
 // $Id: apt-ftparchive.cc,v 1.8.2.3 2004/01/02 22:01:48 mdz Exp $
 /* ######################################################################

-   apt-scanpackages - Efficient work-alike for dpkg-scanpackages
+   apt-ftparchive - Efficient work-alike for dpkg-scanpackages

    Let contents be disabled from the conf

    ##################################################################### */
                                                                        /*}}}*/
 // Include Files                                                       /*{{{*/
-#include "apt-ftparchive.h"
-
+#include
+
 #include
 #include
 #include
 #include
-#include
-#include
-#include
+#include
+#include
+
+#include
+#include
+#include
 #include
 #include
-#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include

-#include "contents.h"
+#include "cachedb.h"
+#include "override.h"
+#include "apt-ftparchive.h"
 #include "multicompress.h"
-#include "writer.h"
+#include "writer.h"
+
+#include
                                                                        /*}}}*/

-using namespace std;
-ostream c0out(0);
-ostream c1out(0);
-ostream c2out(0);
-ofstream devnull("/dev/null");
+using namespace std;
 unsigned Quiet = 0;

 // struct PackageMap - List of all package files in the config file    /*{{{*/
@@ -51,6 +61,7 @@ struct PackageMap
    // Stuff for the Package File
    string PkgFile;
    string BinCacheDB;
+   string SrcCacheDB;
    string BinOverride;
    string ExtraOverride;

@@ -62,6 +73,10 @@ struct PackageMap
    string SrcOverride;
    string SrcExtraOverride;

+   // Translation master file
+   bool LongDesc;
+   TranslationWriter *TransWriter;
+
    // Contents
    string Contents;
    string ContentsHead;
@@ -91,6 +106,12 @@ struct PackageMap
       inline bool operator() (const PackageMap &x,const PackageMap &y)
       {return x.BinCacheDB < y.BinCacheDB;};
    };
+
+   struct SrcDBCompare : public binary_function<PackageMap,PackageMap,bool>
+   {
+      inline bool operator() (const PackageMap &x,const PackageMap &y)
+      {return x.SrcCacheDB < y.SrcCacheDB;};
+   };

    void GetGeneral(Configuration &Setup,Configuration &Block);
    bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
@@ -100,8 +121,9 @@ struct PackageMap
                     vector<PackageMap>::iterator End,
                     unsigned long &Left);

-   PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false),
-                  PkgDone(false), SrcDone(false), ContentsMTime(0) {};
+   PackageMap() : LongDesc(true), TransWriter(NULL), DeLinkLimit(0), Permissions(1),
+                  ContentsDone(false), PkgDone(false), SrcDone(false),
+                  ContentsMTime(0) {};
 };
                                                                        /*}}}*/

@@ -130,7 +152,7 @@ void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
                      Setup.Find("Default::Packages::Extensions",".deb").c_str());

    Permissions = Setup.FindI("Default::FileMode",0644);
-   
+
    if (FLFile.empty() == false)
       FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);

@@ -156,7 +178,9 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    PkgDone = true;

    // Create a package writer object.
-   PackagesWriter Packages(flCombine(CacheDir,BinCacheDB),
+   MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
+                      PkgCompress,Permissions);
+   PackagesWriter Packages(&Comp.Input, TransWriter, flCombine(CacheDir,BinCacheDB),
                            flCombine(OverrideDir,BinOverride),
                            flCombine(OverrideDir,ExtraOverride),
                            Arch);
@@ -169,13 +193,11 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    Packages.DirStrip = ArchiveDir;
    Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);

+   Packages.LongDescription = LongDesc;
+
    Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
    Packages.DeLinkLimit = DeLinkLimit;

-   // Create a compressor object
-   MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
-                      PkgCompress,Permissions);
-   Packages.Output = Comp.Input;
    if (_error->PendingError() == true)
       return _error->Error(_("Error processing directory %s"),BaseDir.c_str());

@@ -196,7 +218,7 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    Packages.Output = 0;      // Just in case

    // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
    if (Comp.Finalize(Size) == false)
    {
       c0out << endl;
@@ -213,11 +235,14 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    gettimeofday(&NewTime,0);
    double Delta = NewTime.tv_sec - StartTime.tv_sec +
                   (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
-   
+
    c0out << Packages.Stats.Packages << " files " <<
/*      SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */
       SizeToStr(Packages.Stats.Bytes) << "B " <<
       TimeToStr((long)Delta) << endl;
+
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+      c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;

    Stats.Add(Packages.Stats);
    Stats.DeLinkBytes = Packages.Stats.DeLinkBytes;
@@ -244,7 +269,10 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    SrcDone = true;

    // Create a package writer object.
-   SourcesWriter Sources(flCombine(OverrideDir,BinOverride),
+   MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
+                      SrcCompress,Permissions);
+   SourcesWriter Sources(&Comp.Input, flCombine(CacheDir, SrcCacheDB),
+                         flCombine(OverrideDir,BinOverride),
                          flCombine(OverrideDir,SrcOverride),
                          flCombine(OverrideDir,SrcExtraOverride));
    if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
@@ -258,11 +286,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)

    Sources.DeLinkLimit = DeLinkLimit;
    Sources.Stats.DeLinkBytes = Stats.DeLinkBytes;
-
-   // Create a compressor object
-   MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
-                      SrcCompress,Permissions);
-   Sources.Output = Comp.Input;
+
    if (_error->PendingError() == true)
       return _error->Error(_("Error processing directory %s"),BaseDir.c_str());

@@ -282,7 +306,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    Sources.Output = 0;      // Just in case

    // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
    if (Comp.Finalize(Size) == false)
    {
       c0out << endl;
@@ -303,6 +327,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    c0out << Sources.Stats.Packages << " pkgs in " <<
       TimeToStr((long)Delta) << endl;

+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+      c0out << " Misses in Cache: " << Sources.Stats.Misses << endl;
+
    Stats.Add(Sources.Stats);
    Stats.DeLinkBytes = Sources.Stats.DeLinkBytes;

@@ -333,16 +360,15 @@ bool PackageMap::GenContents(Configuration &Setup,
    gettimeofday(&StartTime,0);

    // Create a package writer object.
- ContentsWriter Contents(""); + MultiCompress Comp(flCombine(ArchiveDir,this->Contents), + CntCompress,Permissions); + Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60; + ContentsWriter Contents(&Comp.Input, "", Arch); if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false) return _error->Error(_("Package extension list is too long")); if (_error->PendingError() == true) return false; - MultiCompress Comp(flCombine(ArchiveDir,this->Contents), - CntCompress,Permissions); - Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60; - Contents.Output = Comp.Input; if (_error->PendingError() == true) return false; @@ -352,30 +378,30 @@ bool PackageMap::GenContents(Configuration &Setup, FileFd Head(flCombine(OverrideDir,ContentsHead),FileFd::ReadOnly); if (_error->PendingError() == true) return false; - - unsigned long Size = Head.Size(); + + unsigned long long Size = Head.Size(); unsigned char Buf[4096]; while (Size != 0) { - unsigned long ToRead = Size; + unsigned long long ToRead = Size; if (Size > sizeof(Buf)) ToRead = sizeof(Buf); - + if (Head.Read(Buf,ToRead) == false) return false; - - if (fwrite(Buf,1,ToRead,Comp.Input) != ToRead) + + if (Comp.Input.Write(Buf, ToRead) == false) return _error->Errno("fwrite",_("Error writing header to contents file")); - + Size -= ToRead; - } - } - + } + } + /* Go over all the package file records and parse all the package files associated with this contents file into one great big honking memory structure, then dump the sorted version */ c0out << ' ' << this->Contents << ":" << flush; - for (vector::iterator I = Begin; I != End; I++) + for (vector::iterator I = Begin; I != End; ++I) { if (I->Contents != this->Contents) continue; @@ -391,7 +417,7 @@ bool PackageMap::GenContents(Configuration &Setup, Contents.Finish(); // Finish compressing - unsigned long Size; + unsigned long long Size; if (Comp.Finalize(Size) == false || _error->PendingError() == true) { c0out << endl; @@ -415,6 +441,9 @@ bool PackageMap::GenContents(Configuration &Setup, double Delta = NewTime.tv_sec - StartTime.tv_sec + (NewTime.tv_usec - StartTime.tv_usec)/1000000.0; + if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true) + c0out << " Misses in Cache: " << Contents.Stats.Misses<< endl; + c0out << Contents.Stats.Packages << " files " << SizeToStr(Contents.Stats.Bytes) << "B " << TimeToStr((long)Delta) << endl; @@ -427,7 +456,7 @@ bool PackageMap::GenContents(Configuration &Setup, // --------------------------------------------------------------------- /* This populates the PkgList with all the possible permutations of the section/arch lists. 
    section/arch lists. */
-void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
+static void LoadTree(vector<PackageMap> &PkgList, std::vector<TranslationWriter*> &TransList, Configuration &Setup)
 {
    // Load the defaults
    string DDir = Setup.Find("TreeDefault::Directory",
@@ -436,18 +465,28 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
                             "$(DIST)/$(SECTION)/source/");
    string DPkg = Setup.Find("TreeDefault::Packages",
                             "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
+   string DTrans = Setup.Find("TreeDefault::Translation",
+                              "$(DIST)/$(SECTION)/i18n/Translation-en");
    string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
                               "$(DIST)/$(SECTION)/");
    string DContents = Setup.Find("TreeDefault::Contents",
-                                 "$(DIST)/Contents-$(ARCH)");
+                                 "$(DIST)/$(SECTION)/Contents-$(ARCH)");
    string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
    string DBCache = Setup.Find("TreeDefault::BinCacheDB",
                                "packages-$(ARCH).db");
+   string SrcDBCache = Setup.Find("TreeDefault::SrcCacheDB",
+                                  "sources-$(SECTION).db");
    string DSources = Setup.Find("TreeDefault::Sources",
                                 "$(DIST)/$(SECTION)/source/Sources");
    string DFLFile = Setup.Find("TreeDefault::FileList", "");
    string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");

+   mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
+   bool const LongDescription = Setup.FindB("Default::LongDescription",
+                                       _config->FindB("APT::FTPArchive::LongDescription", true));
+   string const TranslationCompress = Setup.Find("Default::Translation::Compress",". gzip").c_str();
+
    // Process 'tree' type sections
    const Configuration::Item *Top = Setup.Tree("tree");
    for (Top = (Top == 0?0:Top->Child); Top != 0;)
@@ -461,17 +500,21 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
       string Section;
      while (ParseQuoteWord(Sections,Section) == true)
      {
-         string Tmp2 = Block.Find("Architectures");
          string Arch;
+         struct SubstVar const Vars[] = {{"$(DIST)",&Dist},
+                                         {"$(SECTION)",&Section},
+                                         {"$(ARCH)",&Arch},
+                                         {NULL, NULL}};
+         mode_t const Perms = Block.FindI("FileMode", Permissions);
+         bool const LongDesc = Block.FindB("LongDescription", LongDescription);
+         TranslationWriter *TransWriter = NULL;
+
+         string const Tmp2 = Block.Find("Architectures");
          const char *Archs = Tmp2.c_str();
          while (ParseQuoteWord(Archs,Arch) == true)
          {
-            struct SubstVar Vars[] = {{"$(DIST)",&Dist},
-                                      {"$(SECTION)",&Section},
-                                      {"$(ARCH)",&Arch},
-                                      {}};
             PackageMap Itm;
-            
+            Itm.Permissions = Perms;
            Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
            Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
@@ -483,6 +526,7 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
               Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
               Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
               Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars);
+              Itm.SrcCacheDB = SubstVar(Block.Find("SrcCacheDB",SrcDBCache.c_str()),Vars);
            }
            else
            {
@@ -491,26 +535,44 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
               Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
               Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
               Itm.Arch = Arch;
+              Itm.LongDesc = LongDesc;
+              if (TransWriter == NULL && DTrans.empty() == false && LongDesc == false && DTrans != "/dev/null")
+              {
+                 string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"),
+                       SubstVar(Block.Find("Translation", DTrans.c_str()), Vars));
+                 string const TransCompress = Block.Find("Translation::Compress", TranslationCompress);
+                 TransWriter = new TranslationWriter(TranslationFile, TransCompress, Perms);
+                 TransList.push_back(TransWriter);
+              }
+              Itm.TransWriter = TransWriter;
               Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
               Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
               Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
               Itm.ExtraOverride = SubstVar(Block.Find("ExtraOverride"),Vars);
            }

-            Itm.GetGeneral(Setup,Block);
+            Itm.GetGeneral(Setup,Block);
            PkgList.push_back(Itm);
         }
      }
-      
+
      Top = Top->Next;
-   }      
+   }
+}
+                                                                        /*}}}*/
+static void UnloadTree(std::vector<TranslationWriter*> const &Trans)    /*{{{*/
+{
+   for (std::vector<TranslationWriter*>::const_reverse_iterator T = Trans.rbegin(); T != Trans.rend(); ++T)
+      delete *T;
 }
                                                                        /*}}}*/
 // LoadBinDir - Load a 'bindirectory' section from the Generate Config /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
+static void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
 {
+   mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
    // Process 'bindirectory' type sections
    const Configuration::Item *Top = Setup.Tree("bindirectory");
    for (Top = (Top == 0?0:Top->Child); Top != 0;)
@@ -521,6 +583,7 @@ void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
       Itm.PkgFile = Block.Find("Packages");
       Itm.SrcFile = Block.Find("Sources");
       Itm.BinCacheDB = Block.Find("BinCacheDB");
+      Itm.SrcCacheDB = Block.Find("SrcCacheDB");
       Itm.BinOverride = Block.Find("BinOverride");
       Itm.ExtraOverride = Block.Find("ExtraOverride");
       Itm.SrcExtraOverride = Block.Find("SrcExtraOverride");
@@ -530,6 +593,7 @@ void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
       Itm.InternalPrefix = Block.Find("InternalPrefix",Top->Tag.c_str());
       Itm.Contents = Block.Find("Contents");
       Itm.ContentsHead = Block.Find("Contents::Header");
+      Itm.Permissions = Block.FindI("FileMode", Permissions);

       Itm.GetGeneral(Setup,Block);
       PkgList.push_back(Itm);
@@ -542,10 +606,9 @@ void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
 // ShowHelp - Show the help text                                       /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-bool ShowHelp(CommandLine &CmdL)
+static bool ShowHelp(CommandLine &)
 {
-   ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION,
-            COMMON_ARCH,__DATE__,__TIME__);
+   ioprintf(cout, "%s %s (%s)\n", PACKAGE, PACKAGE_VERSION, COMMON_ARCH);
    if (_config->FindB("version") == true)
       return true;
@@ -595,7 +658,7 @@ bool ShowHelp(CommandLine &CmdL)
 // SimpleGenPackages - Generate a Packages file for a directory tree   /*{{{*/
 // ---------------------------------------------------------------------
 /* This emulates dpkg-scanpackages's command line interface. 'mostly' */
-bool SimpleGenPackages(CommandLine &CmdL)
+static bool SimpleGenPackages(CommandLine &CmdL)
 {
    if (CmdL.FileSize() < 2)
       return ShowHelp(CmdL);
@@ -605,8 +668,8 @@ bool SimpleGenPackages(CommandLine &CmdL)
       Override = CmdL.FileList[2];

    // Create a package writer object.
-   PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
-                           Override, "");
+   PackagesWriter Packages(NULL, NULL, _config->Find("APT::FTPArchive::DB"),
+                           Override, "", _config->Find("APT::FTPArchive::Architecture"));
    if (_error->PendingError() == true)
       return false;

@@ -617,19 +680,23 @@ bool SimpleGenPackages(CommandLine &CmdL)
    if (Packages.RecursiveScan(CmdL.FileList[1]) == false)
       return false;

+   // Give some stats if asked for
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+      c0out << " Misses in Cache: " << Packages.Stats.Misses<< endl;
+
    return true;
 }
                                                                        /*}}}*/
 // SimpleGenContents - Generate a Contents listing                     /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-bool SimpleGenContents(CommandLine &CmdL)
+static bool SimpleGenContents(CommandLine &CmdL)
 {
    if (CmdL.FileSize() < 2)
       return ShowHelp(CmdL);

    // Create a package writer object.
-   ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"));
+   ContentsWriter Contents(NULL, _config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture"));
    if (_error->PendingError() == true)
       return false;

@@ -645,7 +712,7 @@ bool SimpleGenContents(CommandLine &CmdL)
 // SimpleGenSources - Generate a Sources file for a directory tree     /*{{{*/
 // ---------------------------------------------------------------------
 /* This emulates dpkg-scanpackages's command line interface. 'mostly' */
-bool SimpleGenSources(CommandLine &CmdL)
+static bool SimpleGenSources(CommandLine &CmdL)
 {
    if (CmdL.FileSize() < 2)
       return ShowHelp(CmdL);
@@ -662,7 +729,7 @@ bool SimpleGenSources(CommandLine &CmdL)
                       SOverride.c_str());

    // Create a package writer object.
-   SourcesWriter Sources(Override,SOverride);
+   SourcesWriter Sources(NULL, _config->Find("APT::FTPArchive::DB"),Override,SOverride);
    if (_error->PendingError() == true)
       return false;

@@ -673,19 +740,23 @@ bool SimpleGenSources(CommandLine &CmdL)
    if (Sources.RecursiveScan(CmdL.FileList[1]) == false)
       return false;

+   // Give some stats if asked for
+   if(_config->FindB("APT::FTPArchive::ShowCacheMisses", false) == true)
+      c0out << " Misses in Cache: " << Sources.Stats.Misses<< endl;
+
    return true;
 }
                                                                        /*}}}*/
 // SimpleGenRelease - Generate a Release file for a directory tree     /*{{{*/
 // ---------------------------------------------------------------------
-bool SimpleGenRelease(CommandLine &CmdL)
+static bool SimpleGenRelease(CommandLine &CmdL)
 {
    if (CmdL.FileSize() < 2)
       return ShowHelp(CmdL);

    string Dir = CmdL.FileList[1];

-   ReleaseWriter Release("");
+   ReleaseWriter Release(NULL, "");
    Release.DirStrip = Dir;

    if (_error->PendingError() == true)
@@ -700,38 +771,20 @@ bool SimpleGenRelease(CommandLine &CmdL)
 }
                                                                        /*}}}*/
-// Generate - Full generate, using a config file                       /*{{{*/
+// DoGeneratePackagesAndSources - Helper for Generate                  /*{{{*/
 // ---------------------------------------------------------------------
-/* */
-bool Generate(CommandLine &CmdL)
+static bool DoGeneratePackagesAndSources(Configuration &Setup,
+                                         vector<PackageMap> &PkgList,
+                                         struct CacheDB::Stats &SrcStats,
+                                         struct CacheDB::Stats &Stats,
+                                         CommandLine &CmdL)
 {
-   struct CacheDB::Stats SrcStats;
-   if (CmdL.FileSize() < 2)
-      return ShowHelp(CmdL);
-
-   struct timeval StartTime;
-   gettimeofday(&StartTime,0);
-   struct CacheDB::Stats Stats;
-
-   // Read the configuration file.
-   Configuration Setup;
-   if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
-      return false;
-
-   vector<PackageMap> PkgList;
-   LoadTree(PkgList,Setup);
-   LoadBinDir(PkgList,Setup);
-
-   // Sort by cache DB to improve IO locality.
-   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
-
-   // Generate packages
    if (CmdL.FileSize() <= 2)
    {
-      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
         if (I->GenPackages(Setup,Stats) == false)
            _error->DumpErrors();
-      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
         if (I->GenSources(Setup,SrcStats) == false)
            _error->DumpErrors();
    }
@@ -740,7 +793,7 @@ bool Generate(CommandLine &CmdL)
       // Make a choice list out of the package list..
       RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
       RxChoiceList *End = List;
-      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
       {
         End->UserData = &(*I);
         End->Str = I->BaseDir.c_str();
@@ -788,15 +841,21 @@ bool Generate(CommandLine &CmdL)

       delete [] List;
    }
-   
-   if (_config->FindB("APT::FTPArchive::Contents",true) == false)
-      return true;
-   
-   c1out << "Done Packages, Starting contents." << endl;
+   return true;
+}
+
+                                                                        /*}}}*/
+// DoGenerateContents - Helper for Generate to generate the Contents   /*{{{*/
+// ---------------------------------------------------------------------
+static bool DoGenerateContents(Configuration &Setup,
+                               vector<PackageMap> &PkgList,
+                               CommandLine &CmdL)
+{
+   c1out << "Packages done, Starting contents." << endl;

    // Sort the contents file list by date
    string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
-   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
    {
       struct stat A;
       if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
@@ -813,9 +872,9 @@ bool Generate(CommandLine &CmdL)
       hashes of the .debs this means they have not changed either so
       the contents must be up to date. */
    unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
-   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
    {
-      // This record is not relevent
+      // This record is not relevant
      if (I->ContentsDone == true || I->Contents.empty() == true)
         continue;

@@ -848,21 +907,74 @@ bool Generate(CommandLine &CmdL)
         break;
      }
    }
+
+   return true;
+}
+
+                                                                        /*}}}*/
+// Generate - Full generate, using a config file                       /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+static bool Generate(CommandLine &CmdL)
+{
+   struct CacheDB::Stats SrcStats;
+   if (CmdL.FileSize() < 2)
+      return ShowHelp(CmdL);
+
+   struct timeval StartTime;
+   gettimeofday(&StartTime,0);
+   struct CacheDB::Stats Stats;
+   // Read the configuration file.
+   Configuration Setup;
+   if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
+      return false;
+
+   vector<PackageMap> PkgList;
+   std::vector<TranslationWriter*> TransList;
+   LoadTree(PkgList, TransList, Setup);
+   LoadBinDir(PkgList,Setup);
+
+   // Sort by cache DB to improve IO locality.
+   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());
+
+   // Generate packages
+   if (_config->FindB("APT::FTPArchive::ContentsOnly", false) == false)
+   {
+      if(DoGeneratePackagesAndSources(Setup, PkgList, SrcStats, Stats, CmdL) == false)
+      {
+         UnloadTree(TransList);
+         return false;
+      }
+   } else {
+      c1out << "Skipping Packages/Sources generation" << endl;
+   }
+
+   // do Contents if needed
+   if (_config->FindB("APT::FTPArchive::Contents", true) == true)
+      if (DoGenerateContents(Setup, PkgList, CmdL) == false)
+      {
+         UnloadTree(TransList);
+         return false;
+      }
+
    struct timeval NewTime;
-   gettimeofday(&NewTime,0);
-   double Delta = NewTime.tv_sec - StartTime.tv_sec +
+   gettimeofday(&NewTime,0);
+   double Delta = NewTime.tv_sec - StartTime.tv_sec +
                   (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
-   c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
+   c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
         << " archives. Took " << TimeToStr((long)Delta) << endl;
-   
+
+   UnloadTree(TransList);
    return true;
 }
-                                                                        /*}}}*/
+
+                                                                        /*}}}*/
 // Clean - Clean out the databases                                      /*{{{*/
 // ---------------------------------------------------------------------
 /* */
-bool Clean(CommandLine &CmdL)
+static bool Clean(CommandLine &CmdL)
 {
    if (CmdL.FileSize() != 2)
       return ShowHelp(CmdL);
@@ -871,36 +983,54 @@ bool Clean(CommandLine &CmdL)
    Configuration Setup;
    if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
       return false;
+   // we don't need translation creation here
+   Setup.Set("TreeDefault::Translation", "/dev/null");

    vector<PackageMap> PkgList;
-   LoadTree(PkgList,Setup);
+   std::vector<TranslationWriter*> TransList;
+   LoadTree(PkgList, TransList, Setup);
    LoadBinDir(PkgList,Setup);

    // Sort by cache DB to improve IO locality.
    stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
+   stable_sort(PkgList.begin(),PkgList.end(),PackageMap::SrcDBCompare());

    string CacheDir = Setup.FindDir("Dir::CacheDir");

    for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); )
    {
-      c0out << I->BinCacheDB << endl;
+      if(I->BinCacheDB != "")
+         c0out << I->BinCacheDB << endl;
+      if(I->SrcCacheDB != "")
+         c0out << I->SrcCacheDB << endl;
       CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
+      CacheDB DB_SRC(flCombine(CacheDir,I->SrcCacheDB));
       if (DB.Clean() == false)
          _error->DumpErrors();
-      
+      if (DB_SRC.Clean() == false)
+         _error->DumpErrors();
+
       string CacheDB = I->BinCacheDB;
-      for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+      string SrcCacheDB = I->SrcCacheDB;
+      while(I != PkgList.end() &&
+            I->BinCacheDB == CacheDB &&
+            I->SrcCacheDB == SrcCacheDB)
+         ++I;
    }
-   
+
+   return true;
 }
                                                                        /*}}}*/

 int main(int argc, const char *argv[])
 {
+   setlocale(LC_ALL, "");
    CommandLine::Args Args[] = {
       {'h',"help","help",0},
       {0,"md5","APT::FTPArchive::MD5",0},
+      {0,"sha1","APT::FTPArchive::SHA1",0},
+      {0,"sha256","APT::FTPArchive::SHA256",0},
       {'v',"version","version",0},
       {'d',"db","APT::FTPArchive::DB",CommandLine::HasArg},
       {'s',"source-override","APT::FTPArchive::SourceOverride",CommandLine::HasArg},
@@ -909,6 +1039,7 @@ int main(int argc, const char *argv[])
       {0,"delink","APT::FTPArchive::DeLinkAct",0},
       {0,"readonly","APT::FTPArchive::ReadOnlyDB",0},
       {0,"contents","APT::FTPArchive::Contents",0},
+      {'a',"arch","APT::FTPArchive::Architecture",CommandLine::HasArg},
       {'c',"config-file",0,CommandLine::ConfigFile},
       {'o',"option",0,CommandLine::ArbItem},
       {0,0,0,0}};
@@ -923,31 +1054,12 @@ int main(int argc, const char *argv[])

    // Parse the command line and initialize the package library
    CommandLine CmdL(Args,_config);
-   if (CmdL.Parse(argc,argv) == false)
-   {
-      _error->DumpErrors();
-      return 100;
-   }
-   
-   // See if the help should be shown
-   if (_config->FindB("help") == true ||
-       _config->FindB("version") == true ||
-       CmdL.FileSize() == 0)
-   {
-      ShowHelp(CmdL);
-      return 0;
-   }
-   
-   // Setup the output streams
-   c0out.rdbuf(clog.rdbuf());
-   c1out.rdbuf(clog.rdbuf());
-   c2out.rdbuf(clog.rdbuf());
+   ParseCommandLine(CmdL, Cmds, Args, &_config, NULL, argc, argv, ShowHelp);
+
+   _config->CndSet("quiet",0);
    Quiet = _config->FindI("quiet",0);
-   if (Quiet > 0)
-      c0out.rdbuf(devnull.rdbuf());
-   if (Quiet > 1)
-      c1out.rdbuf(devnull.rdbuf());
-   
+   InitOutput(clog.rdbuf());
+
    // Match the operation
    CmdL.DispatchArg(Cmds);
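
The options wired up in this change are all driven from the generate configuration and the command line. Below is a minimal sketch of how they could be exercised; the archive paths, dist and section names are invented for illustration, and only the option names themselves come from the code above:

    Dir
    {
       ArchiveDir "/srv/archive";
       CacheDir "/srv/archive/cache";
    };
    Default
    {
       // push the long descriptions out into a Translation-en master file
       LongDescription "false";
       Translation::Compress ". gzip";
       FileMode 0644;
    };
    TreeDefault
    {
       BinCacheDB "packages-$(ARCH).db";
       // source records now get their own per-section cache
       SrcCacheDB "sources-$(SECTION).db";
    };
    Tree "dists/unstable"
    {
       Sections "main";
       Architectures "amd64 source";
    };

A run such as apt-ftparchive generate ftparchive.conf -o APT::FTPArchive::ShowCacheMisses=true would then report cache misses for each Packages/Sources/Contents file, -o APT::FTPArchive::ContentsOnly=true skips straight to Contents generation, and the new -a/--arch switch restricts the simple commands to one architecture, e.g. apt-ftparchive -a amd64 packages pool/.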