[apt.git] / ftparchive / apt-ftparchive.cc
1// -*- mode: cpp; mode: fold -*-
2// Description /*{{{*/
3// $Id: apt-ftparchive.cc,v 1.8.2.3 2004/01/02 22:01:48 mdz Exp $
4/* ######################################################################
5
 6   apt-ftparchive - Efficient work-alike for dpkg-scanpackages
7
8 Let contents be disabled from the conf
9
10 ##################################################################### */
11 /*}}}*/
12// Include Files /*{{{*/
13#ifdef __GNUG__
14#pragma implementation "apt-ftparchive.h"
15#endif
16
17#include "apt-ftparchive.h"
18
19#include <apt-pkg/error.h>
20#include <apt-pkg/configuration.h>
21#include <apt-pkg/cmndline.h>
22#include <apt-pkg/strutl.h>
23#include <config.h>
24#include <apti18n.h>
25#include <algorithm>
26
27#include <sys/time.h>
28#include <regex.h>
29
30#include "contents.h"
31#include "multicompress.h"
32#include "writer.h"
33 /*}}}*/
34
35using namespace std;
36ostream c0out(0);
37ostream c1out(0);
38ostream c2out(0);
39ofstream devnull("/dev/null");
40unsigned Quiet = 0;
41
42// struct PackageMap - List of all package files in the config file /*{{{*/
43// ---------------------------------------------------------------------
44/* */
45struct PackageMap
46{
47 // General Stuff
48 string BaseDir;
49 string InternalPrefix;
50 string FLFile;
51 string PkgExt;
52 string SrcExt;
53
54 // Stuff for the Package File
55 string PkgFile;
56 string BinCacheDB;
57 string BinOverride;
58 string ExtraOverride;
59
60 // We generate for this given arch
61 string Arch;
62
63 // Stuff for the Source File
64 string SrcFile;
65 string SrcOverride;
66 string SrcExtraOverride;
67
68 // Contents
69 string Contents;
70 string ContentsHead;
71
72 // Random things
73 string Tag;
74 string PkgCompress;
75 string CntCompress;
76 string SrcCompress;
77 string PathPrefix;
78 unsigned int DeLinkLimit;
79 mode_t Permissions;
80
81 bool ContentsDone;
82 bool PkgDone;
83 bool SrcDone;
84 time_t ContentsMTime;
85
86 struct ContentsCompare : public binary_function<PackageMap,PackageMap,bool>
87 {
88 inline bool operator() (const PackageMap &x,const PackageMap &y)
89 {return x.ContentsMTime < y.ContentsMTime;};
90 };
91
92 struct DBCompare : public binary_function<PackageMap,PackageMap,bool>
93 {
94 inline bool operator() (const PackageMap &x,const PackageMap &y)
95 {return x.BinCacheDB < y.BinCacheDB;};
96 };
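   // Both comparators are used with stable_sort() in Generate()/Clean():
   // DBCompare groups maps that share a binary cache DB to improve IO
   // locality, while ContentsCompare puts the oldest Contents files first so
   // they are regenerated before the MaxContentsChange byte budget runs out.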
97
98 void GetGeneral(Configuration &Setup,Configuration &Block);
99 bool GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats);
100 bool GenSources(Configuration &Setup,struct CacheDB::Stats &Stats);
101 bool GenContents(Configuration &Setup,
102 vector<PackageMap>::iterator Begin,
103 vector<PackageMap>::iterator End,
104 unsigned long &Left);
105
106 PackageMap() : DeLinkLimit(0), Permissions(1), ContentsDone(false),
107 PkgDone(false), SrcDone(false), ContentsMTime(0) {};
108};
109 /*}}}*/
110
111// PackageMap::GetGeneral - Common per-section definitions /*{{{*/
112// ---------------------------------------------------------------------
113/* */
114void PackageMap::GetGeneral(Configuration &Setup,Configuration &Block)
115{
116 PathPrefix = Block.Find("PathPrefix");
117
118 if (Block.FindB("External-Links",true) == false)
119 DeLinkLimit = Setup.FindI("Default::DeLinkLimit",UINT_MAX);
120 else
121 DeLinkLimit = 0;
122
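   // Each compression list is handed to MultiCompress later on; every word
   // names one output flavour, with "." standing for the uncompressed file
   // itself, so the default ". gzip" produces both Packages and Packages.gz.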
123 PkgCompress = Block.Find("Packages::Compress",
124 Setup.Find("Default::Packages::Compress",". gzip").c_str());
125 CntCompress = Block.Find("Contents::Compress",
126 Setup.Find("Default::Contents::Compress",". gzip").c_str());
127 SrcCompress = Block.Find("Sources::Compress",
128 Setup.Find("Default::Sources::Compress",". gzip").c_str());
129
130 SrcExt = Block.Find("Sources::Extensions",
131 Setup.Find("Default::Sources::Extensions",".dsc").c_str());
132 PkgExt = Block.Find("Packages::Extensions",
133 Setup.Find("Default::Packages::Extensions",".deb").c_str());
134
135 Permissions = Setup.FindI("Default::FileMode",0644);
136
137 if (FLFile.empty() == false)
138 FLFile = flCombine(Setup.Find("Dir::FileListDir"),FLFile);
139
140 if (Contents == " ")
141 Contents= string();
142}
143 /*}}}*/
144// PackageMap::GenPackages - Actually generate a Package file /*{{{*/
145// ---------------------------------------------------------------------
146/* This generates the Package File described by this object. */
147bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
148{
149 if (PkgFile.empty() == true)
150 return true;
151
152 string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
153 string OverrideDir = Setup.FindDir("Dir::OverrideDir");
154 string CacheDir = Setup.FindDir("Dir::CacheDir");
155
156 struct timeval StartTime;
157 gettimeofday(&StartTime,0);
158
159 PkgDone = true;
160
161 // Create a package writer object.
162 PackagesWriter Packages(flCombine(CacheDir,BinCacheDB),
163 flCombine(OverrideDir,BinOverride),
164 flCombine(OverrideDir,ExtraOverride),
165 Arch);
166 if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
167 return _error->Error(_("Package extension list is too long"));
168 if (_error->PendingError() == true)
169 return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
170
171 Packages.PathPrefix = PathPrefix;
172 Packages.DirStrip = ArchiveDir;
173 Packages.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
174
175 Packages.Stats.DeLinkBytes = Stats.DeLinkBytes;
176 Packages.DeLinkLimit = DeLinkLimit;
177
178 // Create a compressor object
179 MultiCompress Comp(flCombine(ArchiveDir,PkgFile),
180 PkgCompress,Permissions);
181 Packages.Output = Comp.Input;
182 if (_error->PendingError() == true)
183 return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
184
185 c0out << ' ' << BaseDir << ":" << flush;
186
187 // Do recursive directory searching
188 if (FLFile.empty() == true)
189 {
190 if (Packages.RecursiveScan(flCombine(ArchiveDir,BaseDir)) == false)
191 return false;
192 }
193 else
194 {
195 if (Packages.LoadFileList(ArchiveDir,FLFile) == false)
196 return false;
197 }
198
199 Packages.Output = 0; // Just in case
200
201 // Finish compressing
202 unsigned long Size;
203 if (Comp.Finalize(Size) == false)
204 {
205 c0out << endl;
206 return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
207 }
208
209 if (Size != 0)
210 c0out << " New "
211 << SizeToStr(Size) << "B ";
212 else
213 c0out << ' ';
214
215 struct timeval NewTime;
216 gettimeofday(&NewTime,0);
217 double Delta = NewTime.tv_sec - StartTime.tv_sec +
218 (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
219
220 c0out << Packages.Stats.Packages << " files " <<
221/* SizeToStr(Packages.Stats.MD5Bytes) << "B/" << */
222 SizeToStr(Packages.Stats.Bytes) << "B " <<
223 TimeToStr((long)Delta) << endl;
224
225 Stats.Add(Packages.Stats);
226 Stats.DeLinkBytes = Packages.Stats.DeLinkBytes;
227
228 return !_error->PendingError();
229}
230
231 /*}}}*/
232// PackageMap::GenSources - Actually generate a Source file /*{{{*/
233// ---------------------------------------------------------------------
234/* This generates the Sources File described by this object. */
235bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
236{
237 if (SrcFile.empty() == true)
238 return true;
239
240 string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
241 string OverrideDir = Setup.FindDir("Dir::OverrideDir");
242 string CacheDir = Setup.FindDir("Dir::CacheDir");
243
244 struct timeval StartTime;
245 gettimeofday(&StartTime,0);
246
247 SrcDone = true;
248
249 // Create a package writer object.
250 SourcesWriter Sources(flCombine(OverrideDir,BinOverride),
251 flCombine(OverrideDir,SrcOverride),
252 flCombine(OverrideDir,SrcExtraOverride));
253 if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
254 return _error->Error(_("Source extension list is too long"));
255 if (_error->PendingError() == true)
256 return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
257
258 Sources.PathPrefix = PathPrefix;
259 Sources.DirStrip = ArchiveDir;
260 Sources.InternalPrefix = flCombine(ArchiveDir,InternalPrefix);
261
262 Sources.DeLinkLimit = DeLinkLimit;
263 Sources.Stats.DeLinkBytes = Stats.DeLinkBytes;
264
265 // Create a compressor object
266 MultiCompress Comp(flCombine(ArchiveDir,SrcFile),
267 SrcCompress,Permissions);
268 Sources.Output = Comp.Input;
269 if (_error->PendingError() == true)
270 return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
271
272 c0out << ' ' << BaseDir << ":" << flush;
273
274 // Do recursive directory searching
275 if (FLFile.empty() == true)
276 {
277 if (Sources.RecursiveScan(flCombine(ArchiveDir,BaseDir))== false)
278 return false;
279 }
280 else
281 {
282 if (Sources.LoadFileList(ArchiveDir,FLFile) == false)
283 return false;
284 }
285 Sources.Output = 0; // Just in case
286
287 // Finish compressing
288 unsigned long Size;
289 if (Comp.Finalize(Size) == false)
290 {
291 c0out << endl;
292 return _error->Error(_("Error processing directory %s"),BaseDir.c_str());
293 }
294
295 if (Size != 0)
296 c0out << " New "
297 << SizeToStr(Size) << "B ";
298 else
299 c0out << ' ';
300
301 struct timeval NewTime;
302 gettimeofday(&NewTime,0);
303 double Delta = NewTime.tv_sec - StartTime.tv_sec +
304 (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
305
306 c0out << Sources.Stats.Packages << " pkgs in " <<
307 TimeToStr((long)Delta) << endl;
308
309 Stats.Add(Sources.Stats);
310 Stats.DeLinkBytes = Sources.Stats.DeLinkBytes;
311
312 return !_error->PendingError();
313}
314 /*}}}*/
315// PackageMap::GenContents - Actually generate a Contents file /*{{{*/
316// ---------------------------------------------------------------------
317/* This generates the contents file partially described by this object.
318 It searches the given iterator range for other package files that map
319 into this contents file and includes their data as well when building. */
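/* For example, with the TreeDefault "$(DIST)/Contents-$(ARCH)" every section
   of a distribution shares one Contents-<arch> file, so a single call here
   folds all of those package files together and marks the other maps as
   ContentsDone. */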
320bool PackageMap::GenContents(Configuration &Setup,
321 vector<PackageMap>::iterator Begin,
322 vector<PackageMap>::iterator End,
323 unsigned long &Left)
324{
325 if (Contents.empty() == true)
326 return true;
327
328 if (Left == 0)
329 return true;
330
331 string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
332 string CacheDir = Setup.FindDir("Dir::CacheDir");
333 string OverrideDir = Setup.FindDir("Dir::OverrideDir");
334
335 struct timeval StartTime;
336 gettimeofday(&StartTime,0);
337
338 // Create a package writer object.
339 ContentsWriter Contents("");
340 if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
341 return _error->Error(_("Package extension list is too long"));
342 if (_error->PendingError() == true)
343 return false;
344
345 MultiCompress Comp(flCombine(ArchiveDir,this->Contents),
346 CntCompress,Permissions);
347 Comp.UpdateMTime = Setup.FindI("Default::ContentsAge",10)*24*60*60;
348 Contents.Output = Comp.Input;
349 if (_error->PendingError() == true)
350 return false;
351
352 // Write the header out.
353 if (ContentsHead.empty() == false)
354 {
355 FileFd Head(flCombine(OverrideDir,ContentsHead),FileFd::ReadOnly);
356 if (_error->PendingError() == true)
357 return false;
358
359 unsigned long Size = Head.Size();
360 unsigned char Buf[4096];
361 while (Size != 0)
362 {
363 unsigned long ToRead = Size;
364 if (Size > sizeof(Buf))
365 ToRead = sizeof(Buf);
366
367 if (Head.Read(Buf,ToRead) == false)
368 return false;
369
370 if (fwrite(Buf,1,ToRead,Comp.Input) != ToRead)
371 return _error->Errno("fwrite",_("Error writing header to contents file"));
372
373 Size -= ToRead;
374 }
375 }
376
377 /* Go over all the package file records and parse all the package
378 files associated with this contents file into one great big honking
379 memory structure, then dump the sorted version */
380 c0out << ' ' << this->Contents << ":" << flush;
381 for (vector<PackageMap>::iterator I = Begin; I != End; I++)
382 {
383 if (I->Contents != this->Contents)
384 continue;
385
386 Contents.Prefix = ArchiveDir;
387 Contents.ReadyDB(flCombine(CacheDir,I->BinCacheDB));
388 Contents.ReadFromPkgs(flCombine(ArchiveDir,I->PkgFile),
389 I->PkgCompress);
390
391 I->ContentsDone = true;
392 }
393
394 Contents.Finish();
395
396 // Finish compressing
397 unsigned long Size;
398 if (Comp.Finalize(Size) == false || _error->PendingError() == true)
399 {
400 c0out << endl;
401 return _error->Error(_("Error processing contents %s"),
402 this->Contents.c_str());
403 }
404
405 if (Size != 0)
406 {
407 c0out << " New " << SizeToStr(Size) << "B ";
408 if (Left > Size)
409 Left -= Size;
410 else
411 Left = 0;
412 }
413 else
414 c0out << ' ';
415
416 struct timeval NewTime;
417 gettimeofday(&NewTime,0);
418 double Delta = NewTime.tv_sec - StartTime.tv_sec +
419 (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
420
421 c0out << Contents.Stats.Packages << " files " <<
422 SizeToStr(Contents.Stats.Bytes) << "B " <<
423 TimeToStr((long)Delta) << endl;
424
425 return true;
426}
427 /*}}}*/
428
429// LoadTree - Load a 'tree' section from the Generate Config /*{{{*/
430// ---------------------------------------------------------------------
431/* This populates the PkgList with all the possible permutations of the
432 section/arch lists. */
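/* An illustrative stanza of the kind this parses (a sketch only; the dist
   name and lists are made up):

      tree "dists/woody"
      {
         Sections "main contrib non-free";
         Architectures "i386 m68k source";
      };

   Each Section/Architecture pair becomes one PackageMap entry, with $(DIST),
   $(SECTION) and $(ARCH) substituted into the TreeDefault:: values; the
   special architecture "source" selects the Sources branch below. */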
433void LoadTree(vector<PackageMap> &PkgList,Configuration &Setup)
434{
435 // Load the defaults
436 string DDir = Setup.Find("TreeDefault::Directory",
437 "$(DIST)/$(SECTION)/binary-$(ARCH)/");
438 string DSDir = Setup.Find("TreeDefault::SrcDirectory",
439 "$(DIST)/$(SECTION)/source/");
440 string DPkg = Setup.Find("TreeDefault::Packages",
441 "$(DIST)/$(SECTION)/binary-$(ARCH)/Packages");
442 string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
443 "$(DIST)/$(SECTION)/");
444 string DContents = Setup.Find("TreeDefault::Contents",
445 "$(DIST)/Contents-$(ARCH)");
446 string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
447 string DBCache = Setup.Find("TreeDefault::BinCacheDB",
448 "packages-$(ARCH).db");
449 string DSources = Setup.Find("TreeDefault::Sources",
450 "$(DIST)/$(SECTION)/source/Sources");
451 string DFLFile = Setup.Find("TreeDefault::FileList", "");
452 string DSFLFile = Setup.Find("TreeDefault::SourceFileList", "");
453
454 // Process 'tree' type sections
455 const Configuration::Item *Top = Setup.Tree("tree");
456 for (Top = (Top == 0?0:Top->Child); Top != 0;)
457 {
458 Configuration Block(Top);
459 string Dist = Top->Tag;
460
461 // Parse the sections
462 string Tmp = Block.Find("Sections");
463 const char *Sections = Tmp.c_str();
464 string Section;
465 while (ParseQuoteWord(Sections,Section) == true)
466 {
467 string Tmp2 = Block.Find("Architectures");
468 string Arch;
469 const char *Archs = Tmp2.c_str();
470 while (ParseQuoteWord(Archs,Arch) == true)
471 {
472 struct SubstVar Vars[] = {{"$(DIST)",&Dist},
473 {"$(SECTION)",&Section},
474 {"$(ARCH)",&Arch},
475 {}};
476 PackageMap Itm;
477
478 Itm.BinOverride = SubstVar(Block.Find("BinOverride"),Vars);
479 Itm.InternalPrefix = SubstVar(Block.Find("InternalPrefix",DIPrfx.c_str()),Vars);
480
481 if (stringcasecmp(Arch,"source") == 0)
482 {
483 Itm.SrcOverride = SubstVar(Block.Find("SrcOverride"),Vars);
484 Itm.BaseDir = SubstVar(Block.Find("SrcDirectory",DSDir.c_str()),Vars);
485 Itm.SrcFile = SubstVar(Block.Find("Sources",DSources.c_str()),Vars);
486 Itm.Tag = SubstVar("$(DIST)/$(SECTION)/source",Vars);
487 Itm.FLFile = SubstVar(Block.Find("SourceFileList",DSFLFile.c_str()),Vars);
488 Itm.SrcExtraOverride = SubstVar(Block.Find("SrcExtraOverride"),Vars);
489 }
490 else
491 {
492 Itm.BinCacheDB = SubstVar(Block.Find("BinCacheDB",DBCache.c_str()),Vars);
493 Itm.BaseDir = SubstVar(Block.Find("Directory",DDir.c_str()),Vars);
494 Itm.PkgFile = SubstVar(Block.Find("Packages",DPkg.c_str()),Vars);
495 Itm.Tag = SubstVar("$(DIST)/$(SECTION)/$(ARCH)",Vars);
496 Itm.Arch = Arch;
497 Itm.Contents = SubstVar(Block.Find("Contents",DContents.c_str()),Vars);
498 Itm.ContentsHead = SubstVar(Block.Find("Contents::Header",DContentsH.c_str()),Vars);
499 Itm.FLFile = SubstVar(Block.Find("FileList",DFLFile.c_str()),Vars);
500 Itm.ExtraOverride = SubstVar(Block.Find("ExtraOverride"),Vars);
501 }
502
503 Itm.GetGeneral(Setup,Block);
504 PkgList.push_back(Itm);
505 }
506 }
507
508 Top = Top->Next;
509 }
510}
511 /*}}}*/
512// LoadBinDir - Load a 'bindirectory' section from the Generate Config /*{{{*/
513// ---------------------------------------------------------------------
514/* */
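/* A 'bindirectory' section is the flat, non-tree variant; its tag is the
   directory to scan. An illustrative sketch (path and file names made up):

      bindirectory "incoming"
      {
         Packages "Packages";
         Contents "Contents";
      };
*/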
515void LoadBinDir(vector<PackageMap> &PkgList,Configuration &Setup)
516{
517 // Process 'bindirectory' type sections
518 const Configuration::Item *Top = Setup.Tree("bindirectory");
519 for (Top = (Top == 0?0:Top->Child); Top != 0;)
520 {
521 Configuration Block(Top);
522
523 PackageMap Itm;
524 Itm.PkgFile = Block.Find("Packages");
525 Itm.SrcFile = Block.Find("Sources");
526 Itm.BinCacheDB = Block.Find("BinCacheDB");
527 Itm.BinOverride = Block.Find("BinOverride");
528 Itm.ExtraOverride = Block.Find("ExtraOverride");
529 Itm.SrcExtraOverride = Block.Find("SrcExtraOverride");
530 Itm.SrcOverride = Block.Find("SrcOverride");
531 Itm.BaseDir = Top->Tag;
532 Itm.FLFile = Block.Find("FileList");
533 Itm.InternalPrefix = Block.Find("InternalPrefix",Top->Tag.c_str());
534 Itm.Contents = Block.Find("Contents");
535 Itm.ContentsHead = Block.Find("Contents::Header");
536
537 Itm.GetGeneral(Setup,Block);
538 PkgList.push_back(Itm);
539
540 Top = Top->Next;
541 }
542}
543 /*}}}*/
544
545// ShowHelp - Show the help text /*{{{*/
546// ---------------------------------------------------------------------
547/* */
548bool ShowHelp(CommandLine &CmdL)
549{
550 ioprintf(cout,_("%s %s for %s %s compiled on %s %s\n"),PACKAGE,VERSION,
551 COMMON_OS,COMMON_CPU,__DATE__,__TIME__);
552 if (_config->FindB("version") == true)
553 return true;
554
555 cout <<
556 _("Usage: apt-ftparchive [options] command\n"
557 "Commands: packages binarypath [overridefile [pathprefix]]\n"
558 " sources srcpath [overridefile [pathprefix]]\n"
559 " contents path\n"
560 " release path\n"
561 " generate config [groups]\n"
562 " clean config\n"
563 "\n"
564 "apt-ftparchive generates index files for Debian archives. It supports\n"
565 "many styles of generation from fully automated to functional replacements\n"
566 "for dpkg-scanpackages and dpkg-scansources\n"
567 "\n"
568 "apt-ftparchive generates Package files from a tree of .debs. The\n"
569 "Package file contains the contents of all the control fields from\n"
570 "each package as well as the MD5 hash and filesize. An override file\n"
571 "is supported to force the value of Priority and Section.\n"
572 "\n"
573 "Similarly apt-ftparchive generates Sources files from a tree of .dscs.\n"
574 "The --source-override option can be used to specify a src override file\n"
575 "\n"
 576 "The 'packages' and 'sources' commands should be run in the root of the\n"
577 "tree. BinaryPath should point to the base of the recursive search and \n"
578 "override file should contain the override flags. Pathprefix is\n"
579 "appended to the filename fields if present. Example usage from the \n"
580 "Debian archive:\n"
581 " apt-ftparchive packages dists/potato/main/binary-i386/ > \\\n"
582 " dists/potato/main/binary-i386/Packages\n"
583 "\n"
584 "Options:\n"
585 " -h This help text\n"
586 " --md5 Control MD5 generation\n"
587 " -s=? Source override file\n"
588 " -q Quiet\n"
589 " -d=? Select the optional caching database\n"
590 " --no-delink Enable delinking debug mode\n"
591 " --contents Control contents file generation\n"
592 " -c=? Read this configuration file\n"
593 " -o=? Set an arbitrary configuration option") << endl;
594
595 return true;
596}
597 /*}}}*/
598// SimpleGenPackages - Generate a Packages file for a directory tree /*{{{*/
599// ---------------------------------------------------------------------
600/* This 'mostly' emulates the dpkg-scanpackages command line interface. */
601bool SimpleGenPackages(CommandLine &CmdL)
602{
603 if (CmdL.FileSize() < 2)
604 return ShowHelp(CmdL);
605
606 string Override;
607 if (CmdL.FileSize() >= 3)
608 Override = CmdL.FileList[2];
609
610 // Create a package writer object.
611 PackagesWriter Packages(_config->Find("APT::FTPArchive::DB"),
612 Override, "");
613 if (_error->PendingError() == true)
614 return false;
615
616 if (CmdL.FileSize() >= 4)
617 Packages.PathPrefix = CmdL.FileList[3];
618
619 // Do recursive directory searching
620 if (Packages.RecursiveScan(CmdL.FileList[1]) == false)
621 return false;
622
623 return true;
624}
625 /*}}}*/
626// SimpleGenContents - Generate a Contents listing /*{{{*/
627// ---------------------------------------------------------------------
628/* */
629bool SimpleGenContents(CommandLine &CmdL)
630{
631 if (CmdL.FileSize() < 2)
632 return ShowHelp(CmdL);
633
634 // Create a package writer object.
635 ContentsWriter Contents(_config->Find("APT::FTPArchive::DB"));
636 if (_error->PendingError() == true)
637 return false;
638
639 // Do recursive directory searching
640 if (Contents.RecursiveScan(CmdL.FileList[1]) == false)
641 return false;
642
643 Contents.Finish();
644
645 return true;
646}
647 /*}}}*/
648// SimpleGenSources - Generate a Sources file for a directory tree /*{{{*/
649// ---------------------------------------------------------------------
650/* This 'mostly' emulates the dpkg-scansources command line interface. */
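/* Illustrative invocation (path made up):
      apt-ftparchive sources pool/main/ > Sources
   If an override file is supplied, "<override>.src" is used as the source
   override unless APT::FTPArchive::SourceOverride is set. */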
651bool SimpleGenSources(CommandLine &CmdL)
652{
653 if (CmdL.FileSize() < 2)
654 return ShowHelp(CmdL);
655
656 string Override;
657 if (CmdL.FileSize() >= 3)
658 Override = CmdL.FileList[2];
659
660 string SOverride;
661 if (Override.empty() == false)
662 SOverride = Override + ".src";
663
664 SOverride = _config->Find("APT::FTPArchive::SourceOverride",
665 SOverride.c_str());
666
667 // Create a package writer object.
668 SourcesWriter Sources(Override,SOverride);
669 if (_error->PendingError() == true)
670 return false;
671
672 if (CmdL.FileSize() >= 4)
673 Sources.PathPrefix = CmdL.FileList[3];
674
675 // Do recursive directory searching
676 if (Sources.RecursiveScan(CmdL.FileList[1]) == false)
677 return false;
678
679 return true;
680}
681 /*}}}*/
682// SimpleGenRelease - Generate a Release file for a directory tree /*{{{*/
683// ---------------------------------------------------------------------
684bool SimpleGenRelease(CommandLine &CmdL)
685{
686 if (CmdL.FileSize() < 2)
687 return ShowHelp(CmdL);
688
689 string Dir = CmdL.FileList[1];
690
691 ReleaseWriter Release("");
692 Release.DirStrip = Dir;
693
694 if (_error->PendingError() == true)
695 return false;
696
697 if (Release.RecursiveScan(Dir) == false)
698 return false;
699
700 Release.Finish();
701
702 return true;
703}
704
705 /*}}}*/
706// Generate - Full generate, using a config file /*{{{*/
707// ---------------------------------------------------------------------
708/* */
709bool Generate(CommandLine &CmdL)
710{
711 struct CacheDB::Stats SrcStats;
712 if (CmdL.FileSize() < 2)
713 return ShowHelp(CmdL);
714
715 struct timeval StartTime;
716 gettimeofday(&StartTime,0);
717 struct CacheDB::Stats Stats;
718
719 // Read the configuration file.
720 Configuration Setup;
721 if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
722 return false;
723
724 vector<PackageMap> PkgList;
725 LoadTree(PkgList,Setup);
726 LoadBinDir(PkgList,Setup);
727
728 // Sort by cache DB to improve IO locality.
729 stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
730
731 // Generate packages
732 if (CmdL.FileSize() <= 2)
733 {
734 for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
735 if (I->GenPackages(Setup,Stats) == false)
736 _error->DumpErrors();
737 for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
738 if (I->GenSources(Setup,SrcStats) == false)
739 _error->DumpErrors();
740 }
741 else
742 {
 743      // Make a choice list out of the package list.
744 RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
745 RxChoiceList *End = List;
746 for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
747 {
748 End->UserData = &(*I);
749 End->Str = I->BaseDir.c_str();
750 End++;
751
752 End->UserData = &(*I);
753 End->Str = I->Tag.c_str();
754 End++;
755 }
756 End->Str = 0;
757
758 // Regex it
759 if (RegexChoice(List,CmdL.FileList + 2,CmdL.FileList + CmdL.FileSize()) == 0)
760 {
761 delete [] List;
762 return _error->Error(_("No selections matched"));
763 }
764 _error->DumpErrors();
765
766 // Do the generation for Packages
767 for (End = List; End->Str != 0; End++)
768 {
769 if (End->Hit == false)
770 continue;
771
772 PackageMap *I = (PackageMap *)End->UserData;
773 if (I->PkgDone == true)
774 continue;
775 if (I->GenPackages(Setup,Stats) == false)
776 _error->DumpErrors();
777 }
778
779 // Do the generation for Sources
780 for (End = List; End->Str != 0; End++)
781 {
782 if (End->Hit == false)
783 continue;
784
785 PackageMap *I = (PackageMap *)End->UserData;
786 if (I->SrcDone == true)
787 continue;
788 if (I->GenSources(Setup,SrcStats) == false)
789 _error->DumpErrors();
790 }
791
792 delete [] List;
793 }
794
795 if (_config->FindB("APT::FTPArchive::Contents",true) == false)
796 return true;
797
798 c1out << "Done Packages, Starting contents." << endl;
799
800 // Sort the contents file list by date
801 string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
802 for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
803 {
804 struct stat A;
805 if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
806 I->CntCompress,A) == false)
807 time(&I->ContentsMTime);
808 else
809 I->ContentsMTime = A.st_mtime;
810 }
811 stable_sort(PkgList.begin(),PkgList.end(),PackageMap::ContentsCompare());
812
 813   /* Now for Contents. The process here is to do a make-like dependency
 814      check. Each contents file is verified to be newer than the package files
 815      that describe the debs it indexes. Since the package files contain
 816      hashes of the .debs, an unchanged package file implies its .debs are
 817      unchanged as well, so the contents file must still be up to date. */
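   // MaxContentsChange is read as kilobytes and converted to bytes here;
   // GenContents subtracts each newly written Contents size from it and the
   // loop below stops early once the budget is exhausted.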
818 unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
819 for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
820 {
 821      // This record is not relevant
822 if (I->ContentsDone == true ||
823 I->Contents.empty() == true)
824 continue;
825
826 // Do not do everything if the user specified sections.
827 if (CmdL.FileSize() > 2 && I->PkgDone == false)
828 continue;
829
830 struct stat A,B;
831 if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),I->CntCompress,A) == true)
832 {
833 if (MultiCompress::GetStat(flCombine(ArchiveDir,I->PkgFile),I->PkgCompress,B) == false)
834 {
835 _error->Warning(_("Some files are missing in the package file group `%s'"),I->PkgFile.c_str());
836 continue;
837 }
838
839 if (A.st_mtime > B.st_mtime)
840 continue;
841 }
842
843 if (I->GenContents(Setup,PkgList.begin(),PkgList.end(),
844 MaxContentsChange) == false)
845 _error->DumpErrors();
846
847 // Hit the limit?
848 if (MaxContentsChange == 0)
849 {
850 c1out << "Hit contents update byte limit" << endl;
851 break;
852 }
853 }
854
855 struct timeval NewTime;
856 gettimeofday(&NewTime,0);
857 double Delta = NewTime.tv_sec - StartTime.tv_sec +
858 (NewTime.tv_usec - StartTime.tv_usec)/1000000.0;
859 c1out << "Done. " << SizeToStr(Stats.Bytes) << "B in " << Stats.Packages
860 << " archives. Took " << TimeToStr((long)Delta) << endl;
861
862 return true;
863}
864 /*}}}*/
865// Clean - Clean out the databases /*{{{*/
866// ---------------------------------------------------------------------
867/* */
868bool Clean(CommandLine &CmdL)
869{
870 if (CmdL.FileSize() != 2)
871 return ShowHelp(CmdL);
872
873 // Read the configuration file.
874 Configuration Setup;
875 if (ReadConfigFile(Setup,CmdL.FileList[1],true) == false)
876 return false;
877
878 vector<PackageMap> PkgList;
879 LoadTree(PkgList,Setup);
880 LoadBinDir(PkgList,Setup);
881
882 // Sort by cache DB to improve IO locality.
883 stable_sort(PkgList.begin(),PkgList.end(),PackageMap::DBCompare());
884
885 string CacheDir = Setup.FindDir("Dir::CacheDir");
886
887 for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); )
888 {
889 c0out << I->BinCacheDB << endl;
890 CacheDB DB(flCombine(CacheDir,I->BinCacheDB));
891 if (DB.Clean() == false)
892 _error->DumpErrors();
893
894 string CacheDB = I->BinCacheDB;
895 for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
896 }
897
898 return true;
899}
900 /*}}}*/
901
902int main(int argc, const char *argv[])
903{
904 CommandLine::Args Args[] = {
905 {'h',"help","help",0},
906 {0,"md5","APT::FTPArchive::MD5",0},
907 {'v',"version","version",0},
908 {'d',"db","APT::FTPArchive::DB",CommandLine::HasArg},
909 {'s',"source-override","APT::FTPArchive::SourceOverride",CommandLine::HasArg},
910 {'q',"quiet","quiet",CommandLine::IntLevel},
911 {'q',"silent","quiet",CommandLine::IntLevel},
912 {0,"delink","APT::FTPArchive::DeLinkAct",0},
913 {0,"readonly","APT::FTPArchive::ReadOnlyDB",0},
914 {0,"contents","APT::FTPArchive::Contents",0},
915 {'c',"config-file",0,CommandLine::ConfigFile},
916 {'o',"option",0,CommandLine::ArbItem},
917 {0,0,0,0}};
918 CommandLine::Dispatch Cmds[] = {{"packages",&SimpleGenPackages},
919 {"contents",&SimpleGenContents},
920 {"sources",&SimpleGenSources},
921 {"release",&SimpleGenRelease},
922 {"generate",&Generate},
923 {"clean",&Clean},
924 {"help",&ShowHelp},
925 {0,0}};
926
927 // Parse the command line and initialize the package library
928 CommandLine CmdL(Args,_config);
929 if (CmdL.Parse(argc,argv) == false)
930 {
931 _error->DumpErrors();
932 return 100;
933 }
934
935 // See if the help should be shown
936 if (_config->FindB("help") == true ||
937 _config->FindB("version") == true ||
938 CmdL.FileSize() == 0)
939 {
940 ShowHelp(CmdL);
941 return 0;
942 }
943
944 // Setup the output streams
945 c0out.rdbuf(clog.rdbuf());
946 c1out.rdbuf(clog.rdbuf());
947 c2out.rdbuf(clog.rdbuf());
948 Quiet = _config->FindI("quiet",0);
949 if (Quiet > 0)
950 c0out.rdbuf(devnull.rdbuf());
951 if (Quiet > 1)
952 c1out.rdbuf(devnull.rdbuf());
953
954 // Match the operation
955 CmdL.DispatchArg(Cmds);
956
957 if (_error->empty() == false)
958 {
959 bool Errors = _error->PendingError();
960 _error->DumpErrors();
961 return Errors == true?100:0;
962 }
963 return 0;
964}