diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
index c811343b693685951b50589195f224e26ebe58bc..4b2c3ba12449b32707b02f982b30f18d9b7e61a8 100644
--- a/ftparchive/apt-ftparchive.cc
+++ b/ftparchive/apt-ftparchive.cc
@@ -1,35 +1,34 @@
 // -*- mode: cpp; mode: fold -*-
 // Description                                                         /*{{{*/
-// $Id: apt-ftparchive.cc,v 1.9 2003/12/26 20:08:56 mdz Exp $
+// $Id: apt-ftparchive.cc,v 1.8.2.3 2004/01/02 22:01:48 mdz Exp $
 /* ######################################################################
 
-   apt-scanpackages - Efficient work-alike for dpkg-scanpackages
+   apt-ftparchive - Efficient work-alike for dpkg-scanpackages
 
    Let contents be disabled from the conf
    
    ##################################################################### */
                                                                        /*}}}*/
 // Include Files                                                       /*{{{*/
-#ifdef __GNUG__
-#pragma implementation "apt-ftparchive.h"
-#endif
+#include <config.h>
 
-#include "apt-ftparchive.h"
-    
 #include <apt-pkg/error.h>
 #include <apt-pkg/configuration.h>
 #include <apt-pkg/cmndline.h>
 #include <apt-pkg/strutl.h>
-#include <config.h>
-#include <apti18n.h>
+#include <apt-pkg/init.h>
 #include <algorithm>
 
+#include <climits>
 #include <sys/time.h>
 #include <regex.h>
 
+#include "apt-ftparchive.h"
 #include "contents.h"
 #include "multicompress.h"
-#include "writer.h"    
+#include "writer.h"
+
+#include <apti18n.h>
                                                                        /*}}}*/
 
 using namespace std;    
@@ -56,12 +55,19 @@ struct PackageMap
    string BinCacheDB;
    string BinOverride;
    string ExtraOverride;
+
+   // We generate for this given arch
+   string Arch;
    
    // Stuff for the Source File
    string SrcFile;
    string SrcOverride;
    string SrcExtraOverride;
 
+   // Translation master file
+   bool LongDesc;
+   TranslationWriter *TransWriter;
+
    // Contents 
    string Contents;
    string ContentsHead;
@@ -100,8 +106,9 @@ struct PackageMap
                    vector<PackageMap>::iterator End,
                    unsigned long &Left);
    
-   PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false), 
-        PkgDone(false), SrcDone(false), ContentsMTime(0) {};
+   PackageMap() : LongDesc(true), TransWriter(NULL), DeLinkLimit(0), Permissions(1),
+                 ContentsDone(false), PkgDone(false), SrcDone(false),
+                 ContentsMTime(0) {};
 };
                                                                        /*}}}*/
 
@@ -130,7 +137,7 @@ void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
                       Setup.Find("Default::Packages::Extensions",".deb").c_str());
    
    Permissions = Setup.FindI("Default::FileMode",0644);
-   
+
    if (FLFile.empty() == false)
       FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);
    
@@ -158,16 +165,20 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    // Create a package writer object.
    PackagesWriter Packages(flCombine(CacheDir,BinCacheDB),
                           flCombine(OverrideDir,BinOverride),
-                          flCombine(OverrideDir,ExtraOverride));
+                          flCombine(OverrideDir,ExtraOverride),
+                          Arch);
    if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
       return _error->Error(_("Package extension list is too long"));
    if (_error->PendingError() == true)
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
    
    Packages.PathPrefix = PathPrefix;
    Packages.DirStrip = ArchiveDir;
    Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
 
+   Packages.TransWriter = TransWriter;
+   Packages.LongDescription = LongDesc;
+
    Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
    Packages.DeLinkLimit = DeLinkLimit;
 
@@ -176,7 +187,7 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
                      PkgCompress,Permissions);
    Packages.Output = Comp.Input;
    if (_error->PendingError() == true)
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
    
    c0out << ' ' << BaseDir << ":" << flush;
    
@@ -195,11 +206,11 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    Packages.Output = 0;      // Just in case
    
    // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
    if (Comp.Finalize(Size) == false)
    {
       c0out << endl;
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
    }
    
    if (Size != 0)
@@ -249,7 +260,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
       return _error->Error(_("Source extension list is too long"));
    if (_error->PendingError() == true)
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
    
    Sources.PathPrefix = PathPrefix;
    Sources.DirStrip = ArchiveDir;
@@ -263,7 +274,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
                      SrcCompress,Permissions);
    Sources.Output = Comp.Input;
    if (_error->PendingError() == true)
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
 
    c0out << ' ' << BaseDir << ":" << flush;
    
@@ -281,11 +292,11 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    Sources.Output = 0;      // Just in case
    
    // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
    if (Comp.Finalize(Size) == false)
    {
       c0out << endl;
-      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+      return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
    }
       
    if (Size != 0)
@@ -332,7 +343,7 @@ bool PackageMap::GenContents(Configuration &Setup,
    gettimeofday(&StartTime,0);   
    
    // Create a package writer object.
-   ContentsWriter Contents("");
+   ContentsWriter Contents("", Arch);
    if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
       return _error->Error(_("Package extension list is too long"));
    if (_error->PendingError() == true)
@@ -352,11 +363,11 @@ bool PackageMap::GenContents(Configuration &Setup,
       if (_error->PendingError() == true)
         return false;
       
-      unsigned long Size = Head.Size();
+      unsigned long long Size = Head.Size();
       unsigned char Buf[4096];
       while (Size != 0)
       {
-        unsigned long ToRead = Size;
+        unsigned long long ToRead = Size;
         if (Size > sizeof(Buf))
            ToRead = sizeof(Buf);
         
@@ -374,7 +385,7 @@ bool PackageMap::GenContents(Configuration &Setup,
       files associated with this contents file into one great big honking
       memory structure, then dump the sorted version */
    c0out << ' ' << this->Contents << ":" << flush;
-   for (vector<PackageMap>::iterator I = Begin; I != End; I++)
+   for (vector<PackageMap>::iterator I = Begin; I != End; ++I)
    {
       if (I->Contents != this->Contents)
         continue;
@@ -390,11 +401,11 @@ bool PackageMap::GenContents(Configuration &Setup,
    Contents.Finish();
    
    // Finish compressing
-   unsigned long Size;
+   unsigned long long Size;
    if (Comp.Finalize(Size) == false || _error->PendingError() == true)
    {
       c0out << endl;
-      return _error->Error(_("Error Processing Contents %s"),
+      return _error->Error(_("Error processing contents %s"),
                           this->Contents.c_str());
    }
    
@@ -435,10 +446,12 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
                            "$(DIST)/$(SECTION)/source/");
    string DPkg = Setup.Find("TreeDefault::Packages",
                            "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
+   string DTrans = Setup.Find("TreeDefault::Translation",
+                           "$(DIST)/$(SECTION)/i18n/Translation-en");
    string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
                            "$(DIST)/$(SECTION)/");
    string DContents = Setup.Find("TreeDefault::Contents",
-                           "$(DIST)/Contents-$(ARCH)");
+                           "$(DIST)/$(SECTION)/Contents-$(ARCH)");
    string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
    string DBCache = Setup.Find("TreeDefault::BinCacheDB",
                               "packages-$(ARCH).db");
@@ -447,6 +460,12 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
    string DFLFile = Setup.Find("TreeDefault::FileList", "");
    string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");
 
+   mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
+   bool const LongDescription = Setup.FindB("Default::LongDescription",
+                                       _config->FindB("APT::FTPArchive::LongDescription", true));
+   string const TranslationCompress = Setup.Find("Default::Translation::Compress",". gzip").c_str();
+
    // Process 'tree' type sections
    const Configuration::Item *Top = Setup.Tree("tree");
    for (Top = (Top == 0?0:Top->Child); Top != 0;)
@@ -460,17 +479,30 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
       string Section;
       while (ParseQuoteWord(Sections,Section) == true)
       {
-        string Tmp2 = Block.Find("Architectures");
         string Arch;
+        struct SubstVar const Vars[] = {{"$(DIST)",&Dist},
+                                        {"$(SECTION)",&Section},
+                                        {"$(ARCH)",&Arch},
+                                        {}};
+        mode_t const Perms = Block.FindI("FileMode", Permissions);
+        bool const LongDesc = Block.FindB("LongDescription", LongDescription);
+        TranslationWriter *TransWriter;
+        if (DTrans.empty() == false && LongDesc == false)
+        {
+           string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"),
+                       SubstVar(Block.Find("Translation", DTrans.c_str()), Vars));
+           string const TransCompress = Block.Find("Translation::Compress", TranslationCompress);
+           TransWriter = new TranslationWriter(TranslationFile, TransCompress, Perms);
+        }
+        else
+           TransWriter = NULL;
+
+        string const Tmp2 = Block.Find("Architectures");
         const char *Archs = Tmp2.c_str();
         while (ParseQuoteWord(Archs,Arch) == true)
         {
-           struct SubstVar Vars[] = {{"$(DIST)",&Dist},
-                                     {"$(SECTION)",&Section},
-                                     {"$(ARCH)",&Arch},
-                                     {}};
            PackageMap Itm;
-           
+           Itm.Permissions = Perms;
            Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
            Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
 
@@ -489,6 +521,13 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
               Itm.BaseDir = SubstVar(Block.Find("Directory",DDir.c_str()),Vars);
               Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
               Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
+              Itm.Arch = Arch;
+              Itm.LongDesc = LongDesc;
+              if (TransWriter != NULL)
+              {
+                 TransWriter->IncreaseRefCounter();
+                 Itm.TransWriter = TransWriter;
+              }
               Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
               Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
               Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
@@ -498,6 +537,9 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
            Itm.GetGeneral(Setup,Block);
            PkgList.push_back(Itm);
         }
+        // we didn't use this TransWriter, so we can release it
+        if (TransWriter != NULL && TransWriter->GetRefCounter() == 0)
+           delete TransWriter;
       }
       
       Top = Top->Next;
@@ -509,6 +551,8 @@ void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
 /* */
 void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
 {
+   mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
    // Process 'bindirectory' type sections
    const Configuration::Item *Top = Setup.Tree("bindirectory");
    for (Top = (Top == 0?0:Top->Child); Top != 0;)
@@ -528,6 +572,7 @@ void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
       Itm.InternalPrefix = Block.Find("InternalPrefix",Top->Tag.c_str());
       Itm.Contents = Block.Find("Contents");
       Itm.ContentsHead = Block.Find("Contents::Header");
+      Itm.Permissions = Block.FindI("FileMode", Permissions);
       
       Itm.GetGeneral(Setup,Block);
       PkgList.push_back(Itm);
@@ -542,8 +587,8 @@ void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
 /* */
 bool ShowHelp(CommandLine &CmdL)
 {
-   ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
-           COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
+   ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION,
+           COMMON_ARCH,__DATE__,__TIME__);
    if (_config->FindB("version") == true)
       return true;
 
@@ -552,6 +597,7 @@ bool ShowHelp(CommandLine &CmdL)
       "Commands: packages binarypath [overridefile [pathprefix]]\n"
       "          sources srcpath [overridefile [pathprefix]]\n"
       "          contents path\n"
+      "          release path\n"
       "          generate config [groups]\n"
       "          clean config\n"
       "\n"
@@ -584,7 +630,7 @@ bool ShowHelp(CommandLine &CmdL)
       "  --no-delink Enable delinking debug mode\n"
       "  --contents  Control contents file generation\n"
       "  -c=?  Read this configuration file\n"
-      "  -o=?  Set an arbitary configuration option") << endl;
+      "  -o=?  Set an arbitrary configuration option") << endl;
    
    return true;
 }
@@ -603,7 +649,7 @@ bool SimpleGenPackages(CommandLine &CmdL)
    
    // Create a package writer object.
    PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
-                          Override, "");   
+                          Override, "", _config->Find("APT::FTPArchive::Architecture"));
    if (_error->PendingError() == true)
       return false;
    
@@ -626,7 +672,7 @@ bool SimpleGenContents(CommandLine &CmdL)
       return ShowHelp(CmdL);
    
    // Create a package writer object.
-   ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"));
+   ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture"));
    if (_error->PendingError() == true)
       return false;
    
@@ -677,13 +723,22 @@ bool SimpleGenSources(CommandLine &CmdL)
 // ---------------------------------------------------------------------
 bool SimpleGenRelease(CommandLine &CmdL)
 {
+   if (CmdL.FileSize() < 2)
+      return ShowHelp(CmdL);
+
+   string Dir = CmdL.FileList[1];
+
    ReleaseWriter Release("");
+   Release.DirStrip = Dir;
+
    if (_error->PendingError() == true)
       return false;
 
-   if (Release.RecursiveScan(CmdL.FileList[1]) == false)
+   if (Release.RecursiveScan(Dir) == false)
       return false;
 
+   Release.Finish();
+
    return true;
 }
 
@@ -716,10 +771,10 @@ bool Generate(CommandLine &CmdL)
    // Generate packages
    if (CmdL.FileSize() <= 2)
    {
-      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
         if (I->GenPackages(Setup,Stats) == false)
            _error->DumpErrors();
-      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
         if (I->GenSources(Setup,SrcStats) == false)
            _error->DumpErrors();
    }
@@ -728,7 +783,7 @@ bool Generate(CommandLine &CmdL)
       // Make a choice list out of the package list..
       RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
       RxChoiceList *End = List;
-      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+      for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
       {
         End->UserData = &(*I);
         End->Str = I->BaseDir.c_str();
@@ -776,15 +831,20 @@ bool Generate(CommandLine &CmdL)
       
       delete [] List;
    }
-   
+
+   // close the Translation master files
+   for (vector<PackageMap>::reverse_iterator I = PkgList.rbegin(); I != PkgList.rend(); ++I)
+      if (I->TransWriter != NULL && I->TransWriter->DecreaseRefCounter() == 0)
+        delete I->TransWriter;
+
    if (_config->FindB("APT::FTPArchive::Contents",true) == false)
       return true;
    
-   c1out << "Done Packages, Starting contents." << endl;
+   c1out << "Packages done, Starting contents." << endl;
 
    // Sort the contents file list by date
    string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
-   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
    {
       struct stat A;
       if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
@@ -801,7 +861,7 @@ bool Generate(CommandLine &CmdL)
       hashes of the .debs this means they have not changed either so the 
       contents must be up to date. */
    unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
-   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+   for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
    {
       // This record is not relevent
       if (I->ContentsDone == true ||
@@ -877,7 +937,7 @@ bool Clean(CommandLine &CmdL)
         _error->DumpErrors();
       
       string CacheDB = I->BinCacheDB;
-      for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+      for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I);
    }
    
    return true;
@@ -886,9 +946,12 @@ bool Clean(CommandLine &CmdL)
 
 int main(int argc, const char *argv[])
 {
+   setlocale(LC_ALL, "");
    CommandLine::Args Args[] = {
       {'h',"help","help",0},
       {0,"md5","APT::FTPArchive::MD5",0},
+      {0,"sha1","APT::FTPArchive::SHA1",0},
+      {0,"sha256","APT::FTPArchive::SHA256",0},
       {'v',"version","version",0},
       {'d',"db","APT::FTPArchive::DB",CommandLine::HasArg},
       {'s',"source-override","APT::FTPArchive::SourceOverride",CommandLine::HasArg},
@@ -897,6 +960,7 @@ int main(int argc, const char *argv[])
       {0,"delink","APT::FTPArchive::DeLinkAct",0},
       {0,"readonly","APT::FTPArchive::ReadOnlyDB",0},
       {0,"contents","APT::FTPArchive::Contents",0},
+      {'a',"arch","APT::FTPArchive::Architecture",CommandLine::HasArg},
       {'c',"config-file",0,CommandLine::ConfigFile},
       {'o',"option",0,CommandLine::ArbItem},
       {0,0,0,0}};
@@ -911,7 +975,7 @@ int main(int argc, const char *argv[])
 
    // Parse the command line and initialize the package library
    CommandLine CmdL(Args,_config);
-   if (CmdL.Parse(argc,argv) == false)
+   if (pkgInitConfig(*_config) == false || CmdL.Parse(argc,argv) == false)
    {
       _error->DumpErrors();
       return 100;