// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
-// $Id: apt-ftparchive.cc,v 1.2 2001/02/20 07:03:18 jgg Exp $
+// $Id: apt-ftparchive.cc,v 1.8.2.3 2004/01/02 22:01:48 mdz Exp $
/* ######################################################################
apt-scanpackages - Efficient work-alike for dpkg-scanpackages
#include "writer.h"
/*}}}*/
-ostream c0out;
-ostream c1out;
-ostream c2out;
+using namespace std;
+ostream c0out(0);
+ostream c1out(0);
+ostream c2out(0);
ofstream devnull("/dev/null");
unsigned Quiet = 0;
string PkgFile;
string BinCacheDB;
string BinOverride;
+ string ExtraOverride;
// Stuff for the Source File
string SrcFile;
string SrcOverride;
+ string SrcExtraOverride;
// Contents
string Contents;
bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
bool GenSources(Configuration &Setup,struct CacheDB::Stats &Stats);
bool GenContents(Configuration &Setup,
- PackageMap *Begin,PackageMap *End,
+ vector<PackageMap>::iterator Begin,
+ vector<PackageMap>::iterator End,
unsigned long &Left);
PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false),
// Create a package writer object.
PackagesWriter Packages(flCombine(CacheDir,BinCacheDB),
- flCombine(OverrideDir,BinOverride));
+ flCombine(OverrideDir,BinOverride),
+ flCombine(OverrideDir,ExtraOverride));
if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
- return _error->Error("Package extension list is too long");
+ return _error->Error(_("Package extension list is too long"));
if (_error->PendingError() == true)
- return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
Packages.PathPrefix = PathPrefix;
Packages.DirStrip = ArchiveDir;
Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
Packages.DeLinkLimit = DeLinkLimit;
-
+
// Create a compressor object
MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
PkgCompress,Permissions);
Packages.Output = Comp.Input;
if (_error->PendingError() == true)
- return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
c0out << ' ' << BaseDir << ":" << flush;
if (Comp.Finalize(Size) == false)
{
c0out << endl;
- return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
}
if (Size != 0)
return !_error->PendingError();
}
+
/*}}}*/
-// PackageMap::GenSources - Actually generate a Package file /*{{{*/
+// PackageMap::GenSources - Actually generate a Source file /*{{{*/
// ---------------------------------------------------------------------
/* This generates the Sources File described by this object. */
bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
// Create a package writer object.
SourcesWriter Sources(flCombine(OverrideDir,BinOverride),
- flCombine(OverrideDir,SrcOverride));
+ flCombine(OverrideDir,SrcOverride),
+ flCombine(OverrideDir,SrcExtraOverride));
if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
- return _error->Error("Source extension list is too long");
+ return _error->Error(_("Source extension list is too long"));
if (_error->PendingError() == true)
- return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
Sources.PathPrefix = PathPrefix;
Sources.DirStrip = ArchiveDir;
SrcCompress,Permissions);
Sources.Output = Comp.Input;
if (_error->PendingError() == true)
- return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
c0out << ' ' << BaseDir << ":" << flush;
if (Comp.Finalize(Size) == false)
{
c0out << endl;
- return _error->Error("Error Processing directory %s",BaseDir.c_str());
+ return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
}
if (Size != 0)
It searches the given iterator range for other package files that map
into this contents file and includes their data as well when building. */
bool PackageMap::GenContents(Configuration &Setup,
- PackageMap *Begin,PackageMap *End,
- unsigned long &Left)
+ vector<PackageMap>::iterator Begin,
+ vector<PackageMap>::iterator End,
+ unsigned long &Left)
{
if (Contents.empty() == true)
return true;
// Create a package writer object.
ContentsWriter Contents("");
if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
- return _error->Error("Package extension list is too long");
+ return _error->Error(_("Package extension list is too long"));
if (_error->PendingError() == true)
return false;
return false;
if (fwrite(Buf,1,ToRead,Comp.Input) != ToRead)
- return _error->Errno("fwrite","Error writing header to contents file");
+ return _error->Errno("fwrite",_("Error writing header to contents file"));
Size -= ToRead;
}
files associated with this contents file into one great big honking
memory structure, then dump the sorted version */
c0out << ' ' << this->Contents << ":" << flush;
- for (PackageMap *I = Begin; I != End; I++)
+ for (vector<PackageMap>::iterator I = Begin; I != End; I++)
{
if (I->Contents != this->Contents)
continue;
if (Comp.Finalize(Size) == false || _error->PendingError() == true)
{
c0out << endl;
- return _error->Error("Error Processing Contents %s",
+ return _error->Error(_("Error processing contents %s"),
this->Contents.c_str());
}
string Dist = Top->Tag;
// Parse the sections
- const char *Sections = Block.Find("Sections").c_str();
+ string Tmp = Block.Find("Sections");
+ const char *Sections = Tmp.c_str();
string Section;
while (ParseQuoteWord(Sections,Section) == true)
{
- const char *Archs = Block.Find("Architectures").c_str();
+ string Tmp2 = Block.Find("Architectures");
string Arch;
+ const char *Archs = Tmp2.c_str();
while (ParseQuoteWord(Archs,Arch) == true)
{
struct SubstVar Vars[] = {{"$(DIST)",&Dist},
Itm.SrcFile = SubstVar(Block.Find("Sources",DSources.c_str()),Vars);
Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
+ Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars);
}
else
{
Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
+ Itm.ExtraOverride = SubstVar(Block.Find("ExtraOverride"),Vars);
}
Itm.GetGeneral(Setup,Block);
Itm.SrcFile = Block.Find("Sources");
Itm.BinCacheDB = Block.Find("BinCacheDB");
Itm.BinOverride = Block.Find("BinOverride");
+ Itm.ExtraOverride = Block.Find("ExtraOverride");
+ Itm.SrcExtraOverride = Block.Find("SrcExtraOverride");
Itm.SrcOverride = Block.Find("SrcOverride");
Itm.BaseDir = Top->Tag;
Itm.FLFile = Block.Find("FileList");
return true;
cout <<
- "Usage: apt-ftparchive [options] command\n"
- "Commands: packges binarypath [overridefile [pathprefix]]\n"
+ _("Usage: apt-ftparchive [options] command\n"
+ "Commands: packages binarypath [overridefile [pathprefix]]\n"
" sources srcpath [overridefile [pathprefix]]\n"
" contents path\n"
+ " release path\n"
" generate config [groups]\n"
" clean config\n"
"\n"
"\n"
"The 'packages' and 'sources' command should be run in the root of the\n"
"tree. BinaryPath should point to the base of the recursive search and \n"
- "override file should contian the override flags. Pathprefix is\n"
+ "override file should contain the override flags. Pathprefix is\n"
"appended to the filename fields if present. Example usage from the \n"
- "debian archive:\n"
+ "Debian archive:\n"
" apt-ftparchive packages dists/potato/main/binary-i386/ > \\\n"
" dists/potato/main/binary-i386/Packages\n"
"\n"
" --no-delink Enable delinking debug mode\n"
" --contents Control contents file generation\n"
" -c=? Read this configuration file\n"
- " -o=? Set an arbitary configuration option" << endl;
+ " -o=? Set an arbitrary configuration option") << endl;
return true;
}
// Create a package writer object.
PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
- Override);
+ Override, "");
if (_error->PendingError() == true)
return false;
return true;
}
+ /*}}}*/
+// SimpleGenRelease - Generate a Release file for a directory tree /*{{{*/
+// ---------------------------------------------------------------------
+bool SimpleGenRelease(CommandLine &CmdL)
+{
+ // "release" requires a directory argument; without one, show usage.
+ if (CmdL.FileSize() < 2)
+ return ShowHelp(CmdL);
+
+ string Dir = CmdL.FileList[1];
+
+ // Empty-string constructor argument mirrors ContentsWriter("") above;
+ // DirStrip presumably removes the scanned directory prefix from the
+ // names recorded in the Release file — NOTE(review): semantics assumed
+ // from the Packages/Sources writers' DirStrip usage; confirm in writer.h.
+ ReleaseWriter Release("");
+ Release.DirStrip = Dir;
+
+ // Bail out if construction already queued an error.
+ if (_error->PendingError() == true)
+ return false;
+
+ // Recursively scan the tree rooted at Dir; abort on failure.
+ if (Release.RecursiveScan(Dir) == false)
+ return false;
+
+ Release.Finish();
+
+ return true;
+}
+
/*}}}*/
// Generate - Full generate, using a config file /*{{{*/
// ---------------------------------------------------------------------
// Generate packages
if (CmdL.FileSize() <= 2)
{
- for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
if (I->GenPackages(Setup,Stats) == false)
_error->DumpErrors();
- for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
if (I->GenSources(Setup,SrcStats) == false)
_error->DumpErrors();
}
// Make a choice list out of the package list..
RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
RxChoiceList *End = List;
- for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
{
- End->UserData = I;
+ End->UserData = &(*I);
End->Str = I->BaseDir.c_str();
End++;
- End->UserData = I;
+ End->UserData = &(*I);
End->Str = I->Tag.c_str();
End++;
}
if (RegexChoice(List,CmdL.FileList + 2,CmdL.FileList + CmdL.FileSize()) == 0)
{
delete [] List;
- return _error->Error("No selections matched");
+ return _error->Error(_("No selections matched"));
}
_error->DumpErrors();
// Sort the contents file list by date
string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
- for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
{
struct stat A;
if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
hashes of the .debs this means they have not changed either so the
contents must be up to date. */
unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
- for (PackageMap *I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
{
      // This record is not relevant
if (I->ContentsDone == true ||
{
if (MultiCompress::GetStat(flCombine(ArchiveDir,I->PkgFile),I->PkgCompress,B) == false)
{
- _error->Warning("Some files are missing in the package file group `%s'",I->PkgFile.c_str());
+ _error->Warning(_("Some files are missing in the package file group `%s'"),I->PkgFile.c_str());
continue;
}
string CacheDir = Setup.FindDir("Dir::CacheDir");
- for (PackageMap *I = PkgList.begin(); I != PkgList.end(); )
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); )
{
c0out << I->BinCacheDB << endl;
CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
CommandLine::Dispatch Cmds[] = {{"packages",&SimpleGenPackages},
{"contents",&SimpleGenContents},
{"sources",&SimpleGenSources},
+ {"release",&SimpleGenRelease},
{"generate",&Generate},
{"clean",&Clean},
{"help",&ShowHelp},
}
// Setup the output streams
- c0out.rdbuf(cout.rdbuf());
- c1out.rdbuf(cout.rdbuf());
- c2out.rdbuf(cout.rdbuf());
+ c0out.rdbuf(clog.rdbuf());
+ c1out.rdbuf(clog.rdbuf());
+ c2out.rdbuf(clog.rdbuf());
Quiet = _config->FindI("quiet",0);
if (Quiet > 0)
c0out.rdbuf(devnull.rdbuf());