// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: apt-ftparchive.cc,v 1.9 2003/12/26 20:08:56 mdz Exp $
+// $Id: apt-ftparchive.cc,v 1.8.2.3 2004/01/02 22:01:48 mdz Exp $
/* ######################################################################
- apt-scanpackages - Efficient work-alike for dpkg-scanpackages
+ apt-ftparchive - Efficient work-alike for dpkg-scanpackages
   Contents file generation can be disabled from the configuration
##################################################################### */
/*}}}*/
// Include Files /*{{{*/
-#ifdef __GNUG__
-#pragma implementation "apt-ftparchive.h"
-#endif
+#include <config.h>
-#include "apt-ftparchive.h"
-
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/cmndline.h>
#include <apt-pkg/strutl.h>
-#include <config.h>
-#include <apti18n.h>
+#include <apt-pkg/init.h>
#include <algorithm>
+#include <climits>
+#include <locale.h>
#include <sys/time.h>
#include <regex.h>
+#include "apt-ftparchive.h"
#include "contents.h"
#include "multicompress.h"
-#include "writer.h"
+#include "writer.h"
+
+#include <apti18n.h>
/*}}}*/
using namespace std;
string BinCacheDB;
string BinOverride;
string ExtraOverride;
+
+   // The architecture we generate the index files for
+   string Arch;
// Stuff for the Source File
string SrcFile;
string SrcOverride;
string SrcExtraOverride;
+   // Long description handling and the shared Translation master file
+   bool LongDesc;
+   TranslationWriter *TransWriter;
+
// Contents
string Contents;
string ContentsHead;
vector<PackageMap>::iterator End,
unsigned long &Left);
- PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false),
- PkgDone(false), SrcDone(false), ContentsMTime(0) {};
+ PackageMap() : LongDesc(true), TransWriter(NULL), DeLinkLimit(0), Permissions(1),
+ ContentsDone(false), PkgDone(false), SrcDone(false),
+ ContentsMTime(0) {};
};
/*}}}*/
Setup.Find("Default::Packages::Extensions",".deb").c_str());
Permissions = Setup.FindI("Default::FileMode",0644);
-
+
if (FLFile.empty() == false)
FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);
// Create a package writer object.
PackagesWriter Packages(flCombine(CacheDir,BinCacheDB),
flCombine(OverrideDir,BinOverride),
- flCombine(OverrideDir,ExtraOverride));
+ flCombine(OverrideDir,ExtraOverride),
+ Arch);
if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
return _error->Error(_("Package extension list is too long"));
if (_error->PendingError() == true)
- return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
Packages.PathPrefix = PathPrefix;
Packages.DirStrip = ArchiveDir;
Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
+ Packages.TransWriter = TransWriter;
+ Packages.LongDescription = LongDesc;
+
Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
Packages.DeLinkLimit = DeLinkLimit;
PkgCompress,Permissions);
Packages.Output = Comp.Input;
if (_error->PendingError() == true)
- return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
c0out << ' ' << BaseDir << ":" << flush;
Packages.Output = 0; // Just in case
// Finish compressing
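+   // 64bit size so values beyond 4 GiB are not truncated on 32bit systems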
- unsigned long Size;
+ unsigned long long Size;
if (Comp.Finalize(Size) == false)
{
c0out << endl;
- return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
}
if (Size != 0)
if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
return _error->Error(_("Source extension list is too long"));
if (_error->PendingError() == true)
- return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
Sources.PathPrefix = PathPrefix;
Sources.DirStrip = ArchiveDir;
SrcCompress,Permissions);
Sources.Output = Comp.Input;
if (_error->PendingError() == true)
- return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
c0out << ' ' << BaseDir << ":" << flush;
Sources.Output = 0; // Just in case
// Finish compressing
- unsigned long Size;
+ unsigned long long Size;
if (Comp.Finalize(Size) == false)
{
c0out << endl;
- return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
}
if (Size != 0)
gettimeofday(&StartTime,0);
// Create a package writer object.
- ContentsWriter Contents("");
+ ContentsWriter Contents("", Arch);
if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
return _error->Error(_("Package extension list is too long"));
if (_error->PendingError() == true)
if (_error->PendingError() == true)
return false;
- unsigned long Size = Head.Size();
+ unsigned long long Size = Head.Size();
unsigned char Buf[4096];
while (Size != 0)
{
- unsigned long ToRead = Size;
+ unsigned long long ToRead = Size;
if (Size > sizeof(Buf))
ToRead = sizeof(Buf);
files associated with this contents file into one great big honking
memory structure, then dump the sorted version */
c0out << ' ' << this->Contents << ":" << flush;
- for (vector<PackageMap>::iterator I = Begin; I != End; I++)
+ for (vector<PackageMap>::iterator I = Begin; I != End; ++I)
{
if (I->Contents != this->Contents)
continue;
Contents.Finish();
// Finish compressing
- unsigned long Size;
+ unsigned long long Size;
if (Comp.Finalize(Size) == false || _error->PendingError() == true)
{
c0out << endl;
- return _error->Error(_("Error Processing Contents %s"),
+ return _error->Error(_("Error processing contents %s"),
this->Contents.c_str());
}
"$(DIST)/$(SECTION)/source/");
string DPkg = Setup.Find("TreeDefault::Packages",
"$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
+ string DTrans = Setup.Find("TreeDefault::Translation",
+ "$(DIST)/$(SECTION)/i18n/Translation-en");
string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
"$(DIST)/$(SECTION)/");
string DContents = Setup.Find("TreeDefault::Contents",
- "$(DIST)/Contents-$(ARCH)");
+ "$(DIST)/$(SECTION)/Contents-$(ARCH)");
string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
string DBCache = Setup.Find("TreeDefault::BinCacheDB",
"packages-$(ARCH).db");
string DFLFile = Setup.Find("TreeDefault::FileList", "");
string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");
+ mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
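+   // If long descriptions are stripped from the Packages files they are
+   // written to a shared Translation master file instead (set up below)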
+ bool const LongDescription = Setup.FindB("Default::LongDescription",
+ _config->FindB("APT::FTPArchive::LongDescription", true));
+ string const TranslationCompress = Setup.Find("Default::Translation::Compress",". gzip").c_str();
+
// Process 'tree' type sections
const Configuration::Item *Top = Setup.Tree("tree");
for (Top = (Top == 0?0:Top->Child); Top != 0;)
string Section;
while (ParseQuoteWord(Sections,Section) == true)
{
- string Tmp2 = Block.Find("Architectures");
string Arch;
+ struct SubstVar const Vars[] = {{"$(DIST)",&Dist},
+ {"$(SECTION)",&Section},
+ {"$(ARCH)",&Arch},
+ {}};
+ mode_t const Perms = Block.FindI("FileMode", Permissions);
+ bool const LongDesc = Block.FindB("LongDescription", LongDescription);
+ TranslationWriter *TransWriter;
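+      // Create the Translation master writer only when a Translation file
+      // is configured and long descriptions are stripped from the Packages
+      // files; it is shared by all architectures of this section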
+ if (DTrans.empty() == false && LongDesc == false)
+ {
+ string const TranslationFile = flCombine(Setup.FindDir("Dir::ArchiveDir"),
+ SubstVar(Block.Find("Translation", DTrans.c_str()), Vars));
+ string const TransCompress = Block.Find("Translation::Compress", TranslationCompress);
+ TransWriter = new TranslationWriter(TranslationFile, TransCompress, Perms);
+ }
+ else
+ TransWriter = NULL;
+
+ string const Tmp2 = Block.Find("Architectures");
const char *Archs = Tmp2.c_str();
while (ParseQuoteWord(Archs,Arch) == true)
{
- struct SubstVar Vars[] = {{"$(DIST)",&Dist},
- {"$(SECTION)",&Section},
- {"$(ARCH)",&Arch},
- {}};
PackageMap Itm;
-
+ Itm.Permissions = Perms;
Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
Itm.BaseDir = SubstVar(Block.Find("Directory",DDir.c_str()),Vars);
Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
+ Itm.Arch = Arch;
+ Itm.LongDesc = LongDesc;
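+         // every PackageMap sharing the writer takes a reference; the last
+         // user to drop it in Generate() deletes the writer again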
+ if (TransWriter != NULL)
+ {
+ TransWriter->IncreaseRefCounter();
+ Itm.TransWriter = TransWriter;
+ }
Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
Itm.GetGeneral(Setup,Block);
PkgList.push_back(Itm);
}
+ // we didn't use this TransWriter, so we can release it
+ if (TransWriter != NULL && TransWriter->GetRefCounter() == 0)
+ delete TransWriter;
}
Top = Top->Next;
/* */
void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
{
+ mode_t const Permissions = Setup.FindI("Default::FileMode",0644);
+
// Process 'bindirectory' type sections
const Configuration::Item *Top = Setup.Tree("bindirectory");
for (Top = (Top == 0?0:Top->Child); Top != 0;)
Itm.InternalPrefix = Block.Find("InternalPrefix",Top->Tag.c_str());
Itm.Contents = Block.Find("Contents");
Itm.ContentsHead = Block.Find("Contents::Header");
+ Itm.Permissions = Block.FindI("FileMode", Permissions);
Itm.GetGeneral(Setup,Block);
PkgList.push_back(Itm);
/* */
bool ShowHelp(CommandLine &CmdL)
{
- ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
- COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
+ ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION,
+ COMMON_ARCH,__DATE__,__TIME__);
if (_config->FindB("version") == true)
return true;
"Commands: packages binarypath [overridefile [pathprefix]]\n"
" sources srcpath [overridefile [pathprefix]]\n"
" contents path\n"
+ " release path\n"
" generate config [groups]\n"
" clean config\n"
"\n"
" --no-delink Enable delinking debug mode\n"
" --contents Control contents file generation\n"
" -c=? Read this configuration file\n"
- " -o=? Set an arbitary configuration option") << endl;
+ " -o=? Set an arbitrary configuration option") << endl;
return true;
}
// Create a package writer object.
PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
- Override, "");
+ Override, "", _config->Find("APT::FTPArchive::Architecture"));
if (_error->PendingError() == true)
return false;
return ShowHelp(CmdL);
// Create a package writer object.
- ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"));
+ ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"), _config->Find("APT::FTPArchive::Architecture"));
if (_error->PendingError() == true)
return false;
// ---------------------------------------------------------------------
bool SimpleGenRelease(CommandLine &CmdL)
{
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ string Dir = CmdL.FileList[1];
+
ReleaseWriter Release("");
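+   // strip the scanned directory from the paths recorded in the Release file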
+ Release.DirStrip = Dir;
+
if (_error->PendingError() == true)
return false;
- if (Release.RecursiveScan(CmdL.FileList[1]) == false)
+ if (Release.RecursiveScan(Dir) == false)
return false;
+ Release.Finish();
+
return true;
}
// Generate packages
if (CmdL.FileSize() <= 2)
{
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
if (I->GenPackages(Setup,Stats) == false)
_error->DumpErrors();
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
if (I->GenSources(Setup,SrcStats) == false)
_error->DumpErrors();
}
// Make a choice list out of the package list..
RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
RxChoiceList *End = List;
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
{
End->UserData = &(*I);
End->Str = I->BaseDir.c_str();
delete [] List;
}
-
+
+ // close the Translation master files
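+   // each writer is shared between the architectures of a section, so only
+   // delete it once the last user has dropped its reference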
+ for (vector<PackageMap>::reverse_iterator I = PkgList.rbegin(); I != PkgList.rend(); ++I)
+ if (I->TransWriter != NULL && I->TransWriter->DecreaseRefCounter() == 0)
+ delete I->TransWriter;
+
if (_config->FindB("APT::FTPArchive::Contents",true) == false)
return true;
- c1out << "Done Packages, Starting contents." << endl;
+   c1out << "Packages done, starting contents." << endl;
// Sort the contents file list by date
string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
{
struct stat A;
if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
   hashes of the .debs, this means they have not changed either, so the
   contents must be up to date. */
unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
{
      // This record is not relevant
if (I->ContentsDone == true ||
_error->DumpErrors();
string CacheDB = I->BinCacheDB;
- for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+ for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I);
}
return true;
int main(int argc, const char *argv[])
{
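+   // respect the user's locale so translated (apti18n) messages are shown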
+ setlocale(LC_ALL, "");
CommandLine::Args Args[] = {
{'h',"help","help",0},
{0,"md5","APT::FTPArchive::MD5",0},
+ {0,"sha1","APT::FTPArchive::SHA1",0},
+ {0,"sha256","APT::FTPArchive::SHA256",0},
{'v',"version","version",0},
{'d',"db","APT::FTPArchive::DB",CommandLine::HasArg},
{'s',"source-override","APT::FTPArchive::SourceOverride",CommandLine::HasArg},
{0,"delink","APT::FTPArchive::DeLinkAct",0},
{0,"readonly","APT::FTPArchive::ReadOnlyDB",0},
{0,"contents","APT::FTPArchive::Contents",0},
+ {'a',"arch","APT::FTPArchive::Architecture",CommandLine::HasArg},
{'c',"config-file",0,CommandLine::ConfigFile},
{'o',"option",0,CommandLine::ArbItem},
{0,0,0,0}};
// Parse the command line and initialize the package library
CommandLine CmdL(Args,_config);
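+   // load the default configuration before parsing so that -c/-o and the
+   // other command line options can override it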
- if (CmdL.Parse(argc,argv) == false)
+ if (pkgInitConfig(*_config) == false || CmdL.Parse(argc,argv) == false)
{
_error->DumpErrors();
return 100;