1// -*- mode: cpp; mode: fold -*-
2// Description /*{{{*/
3// $Id: writer.cc,v 1.14 2004/03/24 01:40:43 mdz Exp $
4/* ######################################################################
5
6 Writer
7
8 The file writer classes. These write various types of output, sources,
9 packages and contents.
10
11 ##################################################################### */
12 /*}}}*/
13// Include Files /*{{{*/
14#include <config.h>
15
16#include <apt-pkg/configuration.h>
17#include <apt-pkg/deblistparser.h>
18#include <apt-pkg/error.h>
19#include <apt-pkg/fileutl.h>
20#include <apt-pkg/gpgv.h>
21#include <apt-pkg/hashes.h>
22#include <apt-pkg/md5.h>
23#include <apt-pkg/strutl.h>
24#include <apt-pkg/debfile.h>
25#include <apt-pkg/pkgcache.h>
26#include <apt-pkg/sha1.h>
27#include <apt-pkg/sha2.h>
28#include <apt-pkg/tagfile.h>
29
30#include <ctype.h>
31#include <fnmatch.h>
32#include <ftw.h>
33#include <locale.h>
34#include <string.h>
35#include <sys/stat.h>
36#include <sys/types.h>
37#include <unistd.h>
38#include <ctime>
39#include <iostream>
40#include <sstream>
41#include <memory>
42#include <utility>
43#include <algorithm>
44
45#include "apt-ftparchive.h"
46#include "writer.h"
47#include "cachedb.h"
48#include "multicompress.h"
49#include "byhash.h"
50
51#include <apti18n.h>
52 /*}}}*/
53using namespace std;
54FTWScanner *FTWScanner::Owner;
55
56// ConfigToDoHashes - which hashes to generate /*{{{*/
57static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
58{
59 if (_config->FindB(Conf, (DoHashes & Flag) == Flag) == true)
60 DoHashes |= Flag;
61 else
62 DoHashes &= ~Flag;
63}
64static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
65{
66 SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
67 SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
68 SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
69 SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
70}
71 /*}}}*/
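// A sketch of how the hash options compose (apt.conf syntax illustrative):
// the generic APT::FTPArchive::<Hash> switches seed DoHashes for a scanner,
// and the per-writer prefixes used further down ("::Packages", "::Sources",
// "::Release") can then widen or narrow that selection, e.g.
//
//   APT::FTPArchive::MD5 "false";            // drop MD5 everywhere ...
//   APT::FTPArchive::Packages::MD5 "true";   // ... except in Packages files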
72
73// FTWScanner::FTWScanner - Constructor /*{{{*/
74FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch, bool const IncludeArchAll)
75 : Arch(Arch), IncludeArchAll(IncludeArchAll), DoHashes(~0)
76{
77 if (GivenOutput == NULL)
78 {
79 Output = new FileFd;
80 OwnsOutput = true;
81 Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
82 }
83 else
84 {
85 Output = GivenOutput;
86 OwnsOutput = false;
87 }
88 ErrorPrinted = false;
89 NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
90 ConfigToDoHashes(DoHashes, "APT::FTPArchive");
91}
92 /*}}}*/
93FTWScanner::~FTWScanner()
94{
95 if (Output != NULL && OwnsOutput)
96 delete Output;
97}
98// FTWScanner::Scanner - FTW Scanner /*{{{*/
99// ---------------------------------------------------------------------
 100/* This is the FTW scanner; it processes each directory element in the
 101 directory tree. */
102int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
103{
104 if (Flag == FTW_DNR)
105 {
106 Owner->NewLine(1);
107 ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
108 }
109 if (Flag == FTW_NS)
110 {
111 Owner->NewLine(1);
112 ioprintf(c1out, _("W: Unable to stat %s\n"), File);
113 }
114 if (Flag != FTW_F)
115 return 0;
116
117 return ScannerFile(File, true);
118}
119 /*}}}*/
120// FTWScanner::ScannerFile - File Scanner /*{{{*/
121// ---------------------------------------------------------------------
122/* */
123int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
124{
125 const char *LastComponent = strrchr(File, '/');
126 char *RealPath = NULL;
127
128 if (LastComponent == NULL)
129 LastComponent = File;
130 else
131 LastComponent++;
132
133 vector<string>::const_iterator I;
134 for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
135 {
136 if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
137 break;
138 }
139 if (I == Owner->Patterns.end())
140 return 0;
141
 142 /* Process it. If the file is a link then resolve it into an absolute
 143 name. This works best if the directory components given to the
 144 scanner are not links themselves. */
145 char Jnk[2];
146 Owner->OriginalPath = File;
147 if (ReadLink &&
148 readlink(File,Jnk,sizeof(Jnk)) != -1 &&
149 (RealPath = realpath(File,NULL)) != 0)
150 {
151 Owner->DoPackage(RealPath);
152 free(RealPath);
153 }
154 else
155 Owner->DoPackage(File);
156
157 if (_error->empty() == false)
158 {
159 // Print any errors or warnings found
160 string Err;
161 bool SeenPath = false;
162 while (_error->empty() == false)
163 {
164 Owner->NewLine(1);
165
166 bool const Type = _error->PopMessage(Err);
167 if (Type == true)
168 cerr << _("E: ") << Err << endl;
169 else
170 cerr << _("W: ") << Err << endl;
171
172 if (Err.find(File) != string::npos)
173 SeenPath = true;
174 }
175
176 if (SeenPath == false)
177 cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
178 return 0;
179 }
180
181 return 0;
182}
183 /*}}}*/
184// FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
185// ---------------------------------------------------------------------
186/* */
187bool FTWScanner::RecursiveScan(string const &Dir)
188{
189 char *RealPath = NULL;
 190 /* If noprefix is set then jam the scan root in, so we don't generate
 191 out-of-control link-followed paths */
192 if (InternalPrefix.empty() == true)
193 {
194 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
195 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
196 InternalPrefix = RealPath;
197 free(RealPath);
198 }
199
200 // Do recursive directory searching
201 Owner = this;
202 int const Res = ftw(Dir.c_str(),ScannerFTW,30);
203
204 // Error treewalking?
205 if (Res != 0)
206 {
207 if (_error->PendingError() == false)
208 _error->Errno("ftw",_("Tree walking failed"));
209 return false;
210 }
211
212 return true;
213}
214 /*}}}*/
215// FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
216// ---------------------------------------------------------------------
217/* This is an alternative to using FTW to locate files, it reads the list
218 of files from another file. */
219bool FTWScanner::LoadFileList(string const &Dir, string const &File)
220{
221 char *RealPath = NULL;
 222 /* If noprefix is set then jam the scan root in, so we don't generate
 223 out-of-control link-followed paths */
224 if (InternalPrefix.empty() == true)
225 {
226 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
227 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
228 InternalPrefix = RealPath;
229 free(RealPath);
230 }
231
232 Owner = this;
233 FILE *List = fopen(File.c_str(),"r");
234 if (List == 0)
235 return _error->Errno("fopen",_("Failed to open %s"),File.c_str());
236
 237 /* We are a tad tricky here. We prefix the buffer with the directory
 238 name; that way, if we need a full path we just use Line. Sneaky and
 239 fully evil. */
240 char Line[1000];
241 char *FileStart;
242 if (Dir.empty() == true || Dir.end()[-1] != '/')
243 FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
244 else
245 FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
246 while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
247 {
248 char *FileName = _strstrip(FileStart);
249 if (FileName[0] == 0)
250 continue;
251
252 if (FileName[0] != '/')
253 {
254 if (FileName != FileStart)
255 memmove(FileStart,FileName,strlen(FileStart));
256 FileName = Line;
257 }
258
259#if 0
260 struct stat St;
261 int Flag = FTW_F;
262 if (stat(FileName,&St) != 0)
263 Flag = FTW_NS;
264#endif
265
266 if (ScannerFile(FileName, false) != 0)
267 break;
268 }
269
270 fclose(List);
271 return true;
272}
273 /*}}}*/
274// FTWScanner::Delink - Delink symlinks /*{{{*/
275// ---------------------------------------------------------------------
276/* */
277bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
278 unsigned long long &DeLinkBytes,
279 unsigned long long const &FileSize)
280{
 281 // See if this isn't an internally prefixed file name.
282 if (InternalPrefix.empty() == false &&
283 InternalPrefix.length() < FileName.length() &&
284 stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
285 InternalPrefix.begin(),InternalPrefix.end()) != 0)
286 {
287 if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
288 {
289 // Tidy up the display
290 if (DeLinkBytes == 0)
291 cout << endl;
292
293 NewLine(1);
294 ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
295 SizeToStr(FileSize).c_str());
296 c1out << flush;
297
298 if (NoLinkAct == false)
299 {
300 char OldLink[400];
301 if (readlink(OriginalPath,OldLink,sizeof(OldLink)) == -1)
302 _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
303 else
304 {
305 if (unlink(OriginalPath) != 0)
306 _error->Errno("unlink",_("Failed to unlink %s"),OriginalPath);
307 else
308 {
309 if (link(FileName.c_str(),OriginalPath) != 0)
310 {
311 // Panic! Restore the symlink
312 if (symlink(OldLink,OriginalPath) != 0)
313 _error->Errno("symlink", "failed to restore symlink");
314 return _error->Errno("link",_("*** Failed to link %s to %s"),
315 FileName.c_str(),
316 OriginalPath);
317 }
318 }
319 }
320 }
321
322 DeLinkBytes += FileSize;
323 if (DeLinkBytes/1024 >= DeLinkLimit)
324 ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
325 }
326
327 FileName = OriginalPath;
328 }
329
330 return true;
331}
332 /*}}}*/
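// Note on Delink() (intent inferred from the code above): when the scanned
// path is a symlink whose target lies outside InternalPrefix, the symlink is
// replaced by a hard link to the real file - which of course requires both to
// be on the same filesystem - and the index entry keeps the original in-tree
// path.  The accumulated delinked size is capped at DeLinkLimit KiB per run,
// and setting APT::FTPArchive::DeLinkAct to false (read in the constructor)
// turns the whole thing into a report-only dry run.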
333// FTWScanner::SetExts - Set extensions to support /*{{{*/
334// ---------------------------------------------------------------------
335/* */
336bool FTWScanner::SetExts(string const &Vals)
337{
338 ClearPatterns();
339 string::size_type Start = 0;
340 while (Start <= Vals.length()-1)
341 {
342 string::size_type const Space = Vals.find(' ',Start);
343 string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
344 if ( Arch.empty() == false )
345 {
346 AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
347 if (IncludeArchAll == true && Arch != "all")
348 AddPattern(string("*_all") + Vals.substr(Start, Length));
349 }
350 else
351 AddPattern(string("*") + Vals.substr(Start, Length));
352
353 Start += Length + 1;
354 }
355
356 return true;
357}
358 /*}}}*/
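// Worked example: with Arch = "amd64" and IncludeArchAll = true, the call
// SetExts(".deb .udeb") from the PackagesWriter constructor produces the
// patterns "*_amd64.deb", "*_all.deb", "*_amd64.udeb" and "*_all.udeb",
// which ScannerFile then matches against the last path component with
// fnmatch(3).  Vals is assumed to be a non-empty, space-separated list;
// both call sites in this file pass literal extension lists.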
359
360// PackagesWriter::PackagesWriter - Constructor /*{{{*/
361// ---------------------------------------------------------------------
362/* */
363PackagesWriter::PackagesWriter(FileFd * const GivenOutput, TranslationWriter * const transWriter,
364 string const &DB,string const &Overrides,string const &ExtOverrides,
365 string const &Arch, bool const IncludeArchAll) :
366 FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats), TransWriter(transWriter)
367{
368 SetExts(".deb .udeb");
369 DeLinkLimit = 0;
370
371 // Process the command line options
372 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
373 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
374 DoContents = _config->FindB("APT::FTPArchive::Contents",true);
375 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
376 LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);
377
378 if (Db.Loaded() == false)
379 DoContents = false;
380
381 // Read the override file
382 if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
383 return;
384 else
385 NoOverride = true;
386
387 if (ExtOverrides.empty() == false)
388 Over.ReadExtraOverride(ExtOverrides);
389
390 _error->DumpErrors();
391}
392 /*}}}*/
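// A minimal, hypothetical driver for this writer (not part of this file; the
// database and override paths are placeholders) would look roughly like:
//
//   FileFd out;
//   out.OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
//   PackagesWriter pw(&out, nullptr, "cache.db", "override", "", "amd64", true);
//   pw.RecursiveScan("pool/");
//
// apt-ftparchive itself wires these writers up from its command handlers and
// the generate configuration instead.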
393// PackagesWriter::DoPackage - Process a single package /*{{{*/
394// ---------------------------------------------------------------------
 395/* This method takes a package, gets its control information and the
 396 configured checksums (MD5, SHA1, SHA256, SHA512), then writes out a control
 397 record with the proper fields rewritten and the path/size/hashes appended. */
398bool PackagesWriter::DoPackage(string FileName)
399{
 400 // Pull all the data we need from the DB
401 if (Db.GetFileInfo(FileName,
402 true, /* DoControl */
403 DoContents,
404 true, /* GenContentsOnly */
405 false, /* DoSource */
406 DoHashes, DoAlwaysStat) == false)
407 {
408 return false;
409 }
410
411 unsigned long long FileSize = Db.GetFileSize();
412 if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
413 return false;
414
 415 // Look up the override information
416 pkgTagSection &Tags = Db.Control.Section;
417 string Package = Tags.FindS("Package");
418 string Architecture;
 419 // If we generate a Packages file for a given arch, we use it to
 420 // look for overrides. If we run in "simple" mode without the
 421 // "Architectures" variable in the config, we use the architecture value
 422 // from the deb file.
423 if(Arch != "")
424 Architecture = Arch;
425 else
426 Architecture = Tags.FindS("Architecture");
427 unique_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
428
429 if (Package.empty() == true)
430 return _error->Error(_("Archive had no package field"));
431
 432 // If we need to do any rewriting of the header, do it now.
433 if (OverItem.get() == 0)
434 {
435 if (NoOverride == false)
436 {
437 NewLine(1);
438 ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
439 }
440
441 OverItem = unique_ptr<Override::Item>(new Override::Item);
442 OverItem->FieldOverride["Section"] = Tags.FindS("Section");
443 OverItem->Priority = Tags.FindS("Priority");
444 }
445
446 // Strip the DirStrip prefix from the FileName and add the PathPrefix
447 string NewFileName;
448 if (DirStrip.empty() == false &&
449 FileName.length() > DirStrip.length() &&
450 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
451 DirStrip.begin(),DirStrip.end()) == 0)
452 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
453 else
454 NewFileName = FileName;
455 if (PathPrefix.empty() == false)
456 NewFileName = flCombine(PathPrefix,NewFileName);
457
 458 /* Configuration says we don't want to include the long Description
 459 in the package file - instead we want to ship a separate file */
460 string desc;
461 if (LongDescription == false) {
462 desc = Tags.FindS("Description").append("\n");
463 OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
464 }
465
466 // This lists all the changes to the fields we are going to make.
467 std::vector<pkgTagSection::Tag> Changes;
468
469 std::string Size;
470 strprintf(Size, "%llu", (unsigned long long) FileSize);
471 Changes.push_back(pkgTagSection::Tag::Rewrite("Size", Size));
472
473 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
474 {
475 if (hs->HashType() == "MD5Sum")
476 Changes.push_back(pkgTagSection::Tag::Rewrite("MD5sum", hs->HashValue()));
477 else if (hs->HashType() == "Checksum-FileSize")
478 continue;
479 else
480 Changes.push_back(pkgTagSection::Tag::Rewrite(hs->HashType(), hs->HashValue()));
481 }
482 Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", NewFileName));
483 Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", OverItem->Priority));
484 Changes.push_back(pkgTagSection::Tag::Remove("Status"));
485 Changes.push_back(pkgTagSection::Tag::Remove("Optional"));
486
487 string DescriptionMd5;
488 if (LongDescription == false) {
489 MD5Summation descmd5;
490 descmd5.Add(desc.c_str());
491 DescriptionMd5 = descmd5.Result().Value();
492 Changes.push_back(pkgTagSection::Tag::Rewrite("Description-md5", DescriptionMd5));
493 if (TransWriter != NULL)
494 TransWriter->DoPackage(Package, desc, DescriptionMd5);
495 }
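   // (Aside: the hash is taken over the complete Description field with a
   // trailing newline appended - the same string handed to TransWriter - so
   // the Description-md5 written here matches the key of the record emitted
   // into the Translation master file below.)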
496
497 // Rewrite the maintainer field if necessary
498 bool MaintFailed;
499 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
500 if (MaintFailed == true)
501 {
502 if (NoOverride == false)
503 {
504 NewLine(1);
505 ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
506 Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
507 }
508 }
509
510 if (NewMaint.empty() == false)
511 Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint));
512
513 /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
514 dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
515 but dpkg does this append bit. So we do the append bit, at least that way the
516 status file and package file will remain similar. There are other transforms
517 but optional is the only legacy one still in use for some lazy reason. */
518 string OptionalStr = Tags.FindS("Optional");
519 if (OptionalStr.empty() == false)
520 {
521 if (Tags.FindS("Suggests").empty() == false)
522 OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
523 Changes.push_back(pkgTagSection::Tag::Rewrite("Suggests", OptionalStr));
524 }
525
526 for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
527 I != OverItem->FieldOverride.end(); ++I)
528 Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
529
530 // Rewrite and store the fields.
531 if (Tags.Write(*Output, TFRewritePackageOrder, Changes) == false ||
532 Output->Write("\n", 1) == false)
533 return false;
534
535 return Db.Finish();
536}
537 /*}}}*/
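// For illustration only (hypothetical values), the rewrite above leaves most
// of the control data untouched and refreshes the indexing fields, roughly:
//
//   Package: foo
//   Priority: optional                        <- override or .deb value
//   ...
//   Filename: pool/main/f/foo/foo_1.0_amd64.deb
//   Size: 12345
//   SHA256: <hash of the .deb>
//
// with Status and Optional dropped, and Maintainer swapped in from the
// override file when one applies.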
538PackagesWriter::~PackagesWriter() /*{{{*/
539{
540}
541 /*}}}*/
542
543// TranslationWriter::TranslationWriter - Constructor /*{{{*/
544// ---------------------------------------------------------------------
545/* Create a Translation-Master file for this Packages file */
546TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
547 mode_t const &Permissions) : Comp(NULL), Output(NULL)
548{
549 if (File.empty() == true)
550 return;
551
552 Comp = new MultiCompress(File, TransCompress, Permissions);
553 Output = &Comp->Input;
554}
555 /*}}}*/
556// TranslationWriter::DoPackage - Process a single package /*{{{*/
557// ---------------------------------------------------------------------
 558 /* Write a single record to the Translation master file for this package */
559bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
560 string const &MD5)
561{
562 if (Output == NULL)
563 return true;
564
565 // Different archs can include different versions and therefore
566 // different descriptions - so we need to check for both name and md5.
567 string const Record = Pkg + ":" + MD5;
568
569 if (Included.find(Record) != Included.end())
570 return true;
571
572 std::string out;
573 strprintf(out, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
574 Pkg.c_str(), MD5.c_str(), Desc.c_str());
575 Output->Write(out.c_str(), out.length());
576
577 Included.insert(Record);
578 return true;
579}
580 /*}}}*/
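// Each call appends at most one record per (package, description-md5) pair,
// of the form (values hypothetical):
//
//   Package: foo
//   Description-md5: 9fdcab1a2cd938d38e9e5ebd7a7b3a1e
//   Description-en: <the package's full Description field>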
581// TranslationWriter::~TranslationWriter - Destructor /*{{{*/
582// ---------------------------------------------------------------------
583/* */
584TranslationWriter::~TranslationWriter()
585{
586 if (Comp != NULL)
587 delete Comp;
588}
589 /*}}}*/
590
591// SourcesWriter::SourcesWriter - Constructor /*{{{*/
592// ---------------------------------------------------------------------
593/* */
594SourcesWriter::SourcesWriter(FileFd * const GivenOutput, string const &DB, string const &BOverrides,string const &SOverrides,
595 string const &ExtOverrides) :
596 FTWScanner(GivenOutput), Db(DB), Stats(Db.Stats)
597{
598 AddPattern("*.dsc");
599 DeLinkLimit = 0;
600 Buffer = 0;
601 BufSize = 0;
602
603 // Process the command line options
604 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
605 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
606 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
607
608 // Read the override file
609 if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
610 return;
611 else
612 NoOverride = true;
613
614 // WTF?? The logic above: if we can't read binary overrides, don't even try
615 // reading source overrides. if we can read binary overrides, then say there
616 // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28
617
618 if (ExtOverrides.empty() == false)
619 SOver.ReadExtraOverride(ExtOverrides);
620
621 if (SOverrides.empty() == false && FileExists(SOverrides) == true)
622 SOver.ReadOverride(SOverrides,true);
623}
624 /*}}}*/
625// SourcesWriter::DoPackage - Process a single package /*{{{*/
626static std::string getDscHash(unsigned int const DoHashes,
627 Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
628 HashString const * const Hash, unsigned long long Size, std::string FileName)
629{
630 if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
631 return "";
632 std::ostringstream out;
633 out << "\n " << Hash->HashValue() << " " << Size << " " << FileName
634 << "\n " << Tags.FindS(FieldName);
635 return out.str();
636}
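// The string built above becomes the body of a Files/Checksums-* field: a
// leading line for the .dsc itself ("<hash> <size> <basename>") followed by
// the corresponding field copied verbatim from the .dsc, which already lists
// the tarballs and diffs.  If the hash was not requested, not computed, or
// the .dsc lacks the field, an empty string is returned and the field is
// omitted from the Sources record.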
637bool SourcesWriter::DoPackage(string FileName)
638{
 639 // Pull all the data we need from the DB
640 if (Db.GetFileInfo(FileName,
641 false, /* DoControl */
642 false, /* DoContents */
643 false, /* GenContentsOnly */
644 true, /* DoSource */
645 DoHashes, DoAlwaysStat) == false)
646 {
647 return false;
648 }
649
650 // we need to perform a "write" here (this is what finish is doing)
651 // because the call to Db.GetFileInfo() in the loop will change
652 // the "db cursor"
653 Db.Finish();
654
655 pkgTagSection Tags;
656 if (Tags.Scan(Db.Dsc.Data.c_str(), Db.Dsc.Data.length()) == false)
657 return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
658
659 if (Tags.Exists("Source") == false)
660 return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
661 Tags.Trim();
662
 663 // Look up the override information, finding the best priority first.
664 string BestPrio;
665 string Bins = Tags.FindS("Binary");
666 char Buffer[Bins.length() + 1];
667 unique_ptr<Override::Item> OverItem(nullptr);
668 if (Bins.empty() == false)
669 {
670 strcpy(Buffer,Bins.c_str());
671
672 // Ignore too-long errors.
673 char *BinList[400];
674 TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));
675
676 // Look at all the binaries
677 unsigned char BestPrioV = pkgCache::State::Extra;
678 for (unsigned I = 0; BinList[I] != 0; I++)
679 {
680 unique_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
681 if (Itm.get() == 0)
682 continue;
683
684 unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
685 if (NewPrioV < BestPrioV || BestPrio.empty() == true)
686 {
687 BestPrioV = NewPrioV;
688 BestPrio = Itm->Priority;
689 }
690
691 if (OverItem.get() == 0)
692 OverItem = std::move(Itm);
693 }
694 }
695
 696 // If we need to do any rewriting of the header, do it now.
697 if (OverItem.get() == 0)
698 {
699 if (NoOverride == false)
700 {
701 NewLine(1);
702 ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());
703 }
704
705 OverItem.reset(new Override::Item);
706 }
707
708 struct stat St;
709 if (stat(FileName.c_str(), &St) != 0)
710 return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
711
712 unique_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
713 // const unique_ptr<Override::Item> autoSOverItem(SOverItem);
714 if (SOverItem.get() == 0)
715 {
716 ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
717 SOverItem = unique_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
718 if (SOverItem.get() == 0)
719 {
720 ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
721 SOverItem = unique_ptr<Override::Item>(new Override::Item);
722 *SOverItem = *OverItem;
723 }
724 }
725
726 // Add the dsc to the files hash list
727 string const strippedName = flNotDir(FileName);
728 std::string const Files = getDscHash(DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
729 std::string ChecksumsSha1 = getDscHash(DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
730 std::string ChecksumsSha256 = getDscHash(DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
731 std::string ChecksumsSha512 = getDscHash(DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);
732
733 // Strip the DirStrip prefix from the FileName and add the PathPrefix
734 string NewFileName;
735 if (DirStrip.empty() == false &&
736 FileName.length() > DirStrip.length() &&
737 stringcmp(DirStrip,OriginalPath,OriginalPath + DirStrip.length()) == 0)
738 NewFileName = string(OriginalPath + DirStrip.length());
739 else
740 NewFileName = OriginalPath;
741 if (PathPrefix.empty() == false)
742 NewFileName = flCombine(PathPrefix,NewFileName);
743
744 string Directory = flNotFile(OriginalPath);
745 string Package = Tags.FindS("Source");
746
747 // Perform operation over all of the files
748 string ParseJnk;
749 const char *C = Files.c_str();
750 char *RealPath = NULL;
751 for (;isspace(*C); C++);
752 while (*C != 0)
753 {
754 // Parse each of the elements
755 if (ParseQuoteWord(C,ParseJnk) == false ||
756 ParseQuoteWord(C,ParseJnk) == false ||
757 ParseQuoteWord(C,ParseJnk) == false)
758 return _error->Error("Error parsing file record");
759
760 string OriginalPath = Directory + ParseJnk;
761
762 // Add missing hashes to source files
763 if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
764 ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
765 ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
766 {
767 if (Db.GetFileInfo(OriginalPath,
768 false, /* DoControl */
769 false, /* DoContents */
770 false, /* GenContentsOnly */
771 false, /* DoSource */
772 DoHashes,
773 DoAlwaysStat) == false)
774 {
775 return _error->Error("Error getting file info");
776 }
777
778 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
779 {
780 if (hs->HashType() == "MD5Sum" || hs->HashType() == "Checksum-FileSize")
781 continue;
782 char const * fieldname;
783 std::string * out;
784 if (hs->HashType() == "SHA1")
785 {
786 fieldname = "Checksums-Sha1";
787 out = &ChecksumsSha1;
788 }
789 else if (hs->HashType() == "SHA256")
790 {
791 fieldname = "Checksums-Sha256";
792 out = &ChecksumsSha256;
793 }
794 else if (hs->HashType() == "SHA512")
795 {
796 fieldname = "Checksums-Sha512";
797 out = &ChecksumsSha512;
798 }
799 else
800 {
 801 _error->Warning("Ignoring unknown checksum type %s in SourcesWriter::DoPackage", hs->HashType().c_str());
802 continue;
803 }
804 if (Tags.Exists(fieldname) == true)
805 continue;
806 std::ostringstream streamout;
807 streamout << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk;
808 out->append(streamout.str());
809 }
810
811 // write back the GetFileInfo() stats data
812 Db.Finish();
813 }
814
815 // Perform the delinking operation
816 char Jnk[2];
817
818 if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
819 (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
820 {
821 string RP = RealPath;
822 free(RealPath);
823 if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
824 return false;
825 }
826 }
827
828 Directory = flNotFile(NewFileName);
829 if (Directory.length() > 2)
830 Directory.erase(Directory.end()-1);
831
832 // This lists all the changes to the fields we are going to make.
 833 // (a handful of hardcoded fields plus checksums and maintainer overrides)
834 std::vector<pkgTagSection::Tag> Changes;
835
836 Changes.push_back(pkgTagSection::Tag::Remove("Source"));
837 Changes.push_back(pkgTagSection::Tag::Rewrite("Package", Package));
838 if (Files.empty() == false)
839 Changes.push_back(pkgTagSection::Tag::Rewrite("Files", Files));
840 if (ChecksumsSha1.empty() == false)
841 Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha1", ChecksumsSha1));
842 if (ChecksumsSha256.empty() == false)
843 Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha256", ChecksumsSha256));
844 if (ChecksumsSha512.empty() == false)
845 Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha512", ChecksumsSha512));
846 if (Directory != "./")
847 Changes.push_back(pkgTagSection::Tag::Rewrite("Directory", Directory));
848 Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", BestPrio));
849 Changes.push_back(pkgTagSection::Tag::Remove("Status"));
850
851 // Rewrite the maintainer field if necessary
852 bool MaintFailed;
853 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"), MaintFailed);
854 if (MaintFailed == true)
855 {
856 if (NoOverride == false)
857 {
858 NewLine(1);
859 ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
860 Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
861 }
862 }
863 if (NewMaint.empty() == false)
864 Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint.c_str()));
865
866 for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
867 I != SOverItem->FieldOverride.end(); ++I)
868 Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
869
870 // Rewrite and store the fields.
871 if (Tags.Write(*Output, TFRewriteSourceOrder, Changes) == false ||
872 Output->Write("\n", 1) == false)
873 return false;
874
875 Stats.Packages++;
876
877 return true;
878}
879 /*}}}*/
880
881// ContentsWriter::ContentsWriter - Constructor /*{{{*/
882// ---------------------------------------------------------------------
883/* */
884ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB,
885 string const &Arch, bool const IncludeArchAll) :
886 FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats)
887
888{
889 SetExts(".deb");
890}
891 /*}}}*/
892// ContentsWriter::DoPackage - Process a single package /*{{{*/
893// ---------------------------------------------------------------------
894/* If Package is the empty string the control record will be parsed to
895 determine what the package name is. */
896bool ContentsWriter::DoPackage(string FileName, string Package)
897{
898 if (!Db.GetFileInfo(FileName,
899 Package.empty(), /* DoControl */
900 true, /* DoContents */
901 false, /* GenContentsOnly */
902 false, /* DoSource */
903 0, /* DoHashes */
904 false /* checkMtime */))
905 {
906 return false;
907 }
908
909 // Parse the package name
910 if (Package.empty() == true)
911 {
912 Package = Db.Control.Section.FindS("Package");
913 }
914
915 Db.Contents.Add(Gen,Package);
916
917 return Db.Finish();
918}
919 /*}}}*/
920// ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
921// ---------------------------------------------------------------------
922/* */
923bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
924{
925 MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
926 if (_error->PendingError() == true)
927 return false;
928
929 // Open the package file
930 FileFd Fd;
931 if (Pkgs.OpenOld(Fd) == false)
932 return false;
933
934 pkgTagFile Tags(&Fd);
935 if (_error->PendingError() == true)
936 return false;
937
938 // Parse.
939 pkgTagSection Section;
940 while (Tags.Step(Section) == true)
941 {
942 string File = flCombine(Prefix,Section.FindS("FileName"));
943 string Package = Section.FindS("Section");
944 if (Package.empty() == false && Package.end()[-1] != '/')
945 {
946 Package += '/';
947 Package += Section.FindS("Package");
948 }
949 else
950 Package += Section.FindS("Package");
951
952 DoPackage(File,Package);
953 if (_error->empty() == false)
954 {
955 _error->Error("Errors apply to file '%s'",File.c_str());
956 _error->DumpErrors();
957 }
958 }
959
960 // Tidy the compressor
961 Fd.Close();
962
963 return true;
964}
965
966 /*}}}*/
967
968// ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
969// ---------------------------------------------------------------------
970/* */
971ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput)
972{
973 if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
974 {
975 AddPattern("Packages");
976 AddPattern("Packages.gz");
977 AddPattern("Packages.bz2");
978 AddPattern("Packages.lzma");
979 AddPattern("Packages.xz");
980 AddPattern("Translation-*");
981 AddPattern("Sources");
982 AddPattern("Sources.gz");
983 AddPattern("Sources.bz2");
984 AddPattern("Sources.lzma");
985 AddPattern("Sources.xz");
986 AddPattern("Release");
987 AddPattern("Contents-*");
988 AddPattern("Index");
989 AddPattern("md5sum.txt");
990 }
991 AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
992
993 time_t const now = time(NULL);
994
995 setlocale(LC_TIME, "C");
996
997 char datestr[128];
998 if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
999 gmtime(&now)) == 0)
1000 {
1001 datestr[0] = '\0';
1002 }
1003
1004 time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
1005 char validstr[128];
1006 if (now == validuntil ||
1007 strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
1008 gmtime(&validuntil)) == 0)
1009 {
1010 validstr[0] = '\0';
1011 }
1012
1013 setlocale(LC_TIME, "");
1014
1015 map<string,string> Fields;
1016 Fields["Origin"] = "";
1017 Fields["Label"] = "";
1018 Fields["Suite"] = "";
1019 Fields["Version"] = "";
1020 Fields["Codename"] = "";
1021 Fields["Date"] = datestr;
1022 Fields["Valid-Until"] = validstr;
1023 Fields["Architectures"] = "";
1024 Fields["Components"] = "";
1025 Fields["Description"] = "";
1026 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1027 Fields["Acquire-By-Hash"] = "true";
1028
1029 for(map<string,string>::const_iterator I = Fields.begin();
1030 I != Fields.end();
1031 ++I)
1032 {
1033 string Config = string("APT::FTPArchive::Release::") + (*I).first;
1034 string Value = _config->Find(Config, (*I).second.c_str());
1035 if (Value == "")
1036 continue;
1037
1038 std::string const out = I->first + ": " + Value + "\n";
1039 Output->Write(out.c_str(), out.length());
1040 }
1041
1042 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
1043}
1044 /*}}}*/
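// The Release header written above comes entirely from configuration; an
// illustrative apt.conf snippet (values are placeholders) might be:
//
//   APT::FTPArchive::Release::Origin "Example";
//   APT::FTPArchive::Release::Suite "stable";
//   APT::FTPArchive::Release::Architectures "amd64 all";
//   APT::FTPArchive::Release::ValidTime "864000";  // seconds -> Valid-Until
//
// Unset fields are skipped, and Date/Valid-Until are rendered in the C locale
// as "%a, %d %b %Y %H:%M:%S UTC" timestamps.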
1045// ReleaseWriter::DoPackage - Process a single package /*{{{*/
1046// ---------------------------------------------------------------------
1047bool ReleaseWriter::DoPackage(string FileName)
1048{
1049 // Strip the DirStrip prefix from the FileName and add the PathPrefix
1050 string NewFileName;
1051 if (DirStrip.empty() == false &&
1052 FileName.length() > DirStrip.length() &&
1053 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
1054 DirStrip.begin(),DirStrip.end()) == 0)
1055 {
1056 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
1057 while (NewFileName[0] == '/')
1058 NewFileName = string(NewFileName.begin() + 1,NewFileName.end());
1059 }
1060 else
1061 NewFileName = FileName;
1062
1063 if (PathPrefix.empty() == false)
1064 NewFileName = flCombine(PathPrefix,NewFileName);
1065
1066 FileFd fd(FileName, FileFd::ReadOnly);
1067
1068 if (!fd.IsOpen())
1069 {
1070 return false;
1071 }
1072
1073 CheckSums[NewFileName].size = fd.Size();
1074
1075 Hashes hs(DoHashes);
1076 hs.AddFD(fd);
1077 CheckSums[NewFileName].Hashes = hs.GetHashStringList();
1078 fd.Close();
1079
1080 // FIXME: wrong layer in the code(?)
1081 // FIXME2: symlink instead of create a copy
1082 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1083 {
1084 std::string Input = FileName;
1085 HashStringList hsl = hs.GetHashStringList();
1086 for(HashStringList::const_iterator h = hsl.begin();
1087 h != hsl.end(); ++h)
1088 {
1089 if (!h->usable())
1090 continue;
1091 if (flNotDir(FileName) == "Release" || flNotDir(FileName) == "InRelease")
1092 continue;
1093
1094 std::string ByHashOutputFile = GenByHashFilename(Input, *h);
1095 std::string ByHashOutputDir = flNotFile(ByHashOutputFile);
1096 if(!CreateDirectory(flNotFile(Input), ByHashOutputDir))
1097 return _error->Warning("can not create dir %s", flNotFile(ByHashOutputFile).c_str());
1098
1099 // write new hashes
1100 FileFd In(Input, FileFd::ReadOnly);
1101 FileFd Out(ByHashOutputFile, FileFd::WriteEmpty);
1102 if(!CopyFile(In, Out))
1103 return _error->Warning("failed to copy %s %s", Input.c_str(), ByHashOutputFile.c_str());
1104 }
1105 }
1106
1107 return true;
1108}
1109
1110 /*}}}*/
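// Note on the by-hash copies above: GenByHashFilename() (from byhash.h) is
// assumed to map an index like .../binary-amd64/Packages.gz to
// .../binary-amd64/by-hash/<HashType>/<HashValue>, the layout clients use
// when the Release file advertises Acquire-By-Hash.  Release and InRelease
// themselves are skipped because clients must fetch those by name first.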
1111// ReleaseWriter::Finish - Output the checksums /*{{{*/
1112// ---------------------------------------------------------------------
1113static void printChecksumTypeRecord(FileFd &Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
1114{
1115 {
1116 std::string out;
1117 strprintf(out, "%s:\n", Type);
1118 Output.Write(out.c_str(), out.length());
1119 }
1120 for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
1121 I != CheckSums.end(); ++I)
1122 {
1123 HashString const * const hs = I->second.Hashes.find(Type);
1124 if (hs == NULL)
1125 continue;
1126 std::string out;
1127 strprintf(out, " %s %16llu %s\n",
1128 hs->HashValue().c_str(),
1129 (*I).second.size,
1130 (*I).first.c_str());
1131 Output.Write(out.c_str(), out.length());
1132 }
1133}
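// The resulting block in the Release file, per enabled hash type, looks like
// (values hypothetical; the width comes from the %16llu size field):
//
//   SHA256:
//    0123abcd...        12345678 main/binary-amd64/Packages.gz
//    89ef0123...             321 main/binary-amd64/Release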
1134void ReleaseWriter::Finish()
1135{
1136 if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
1137 printChecksumTypeRecord(*Output, "MD5Sum", CheckSums);
1138 if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
1139 printChecksumTypeRecord(*Output, "SHA1", CheckSums);
1140 if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
1141 printChecksumTypeRecord(*Output, "SHA256", CheckSums);
1142 if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
1143 printChecksumTypeRecord(*Output, "SHA512", CheckSums);
1144
1145 // go by-hash cleanup
1146 map<string,ReleaseWriter::CheckSum>::const_iterator prev = CheckSums.begin();
1147 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1148 {
1149 for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
1150 I != CheckSums.end(); ++I)
1151 {
1152 if (I->first == "Release" || I->first == "InRelease")
1153 continue;
1154
1155 // keep iterating until we find a new subdir
1156 if(flNotFile(I->first) == flNotFile(prev->first))
1157 continue;
1158
1159 // clean that subdir up
1160 int keepFiles = _config->FindI("APT::FTPArchive::By-Hash-Keep", 3);
1161 // calculate how many compressors are used (the amount of files
1162 // in that subdir generated for this run)
1163 keepFiles *= std::distance(prev, I);
1164 prev = I;
1165
1166 HashStringList hsl = prev->second.Hashes;
1167 for(HashStringList::const_iterator h = hsl.begin();
1168 h != hsl.end(); ++h)
1169 {
1170
1171 if (!h->usable())
1172 continue;
1173
1174 std::string RealFilename = DirStrip+"/"+prev->first;
1175 std::string ByHashOutputFile = GenByHashFilename(RealFilename, *h);
1176 DeleteAllButMostRecent(flNotFile(ByHashOutputFile), keepFiles);
1177 }
1178 }
1179 }
1180}