apt.git: ftparchive/writer.cc
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: writer.cc,v 1.14 2004/03/24 01:40:43 mdz Exp $
4 /* ######################################################################
5
6 Writer
7
8 The file writer classes. These write various types of output, sources,
9 packages and contents.
10
11 ##################################################################### */
12 /*}}}*/
13 // Include Files /*{{{*/
14 #include <config.h>
15
16 #include <apt-pkg/configuration.h>
17 #include <apt-pkg/deblistparser.h>
18 #include <apt-pkg/error.h>
19 #include <apt-pkg/fileutl.h>
20 #include <apt-pkg/gpgv.h>
21 #include <apt-pkg/hashes.h>
22 #include <apt-pkg/md5.h>
23 #include <apt-pkg/strutl.h>
24 #include <apt-pkg/debfile.h>
25 #include <apt-pkg/pkgcache.h>
26 #include <apt-pkg/sha1.h>
27 #include <apt-pkg/sha2.h>
28 #include <apt-pkg/tagfile.h>
29
30 #include <ctype.h>
31 #include <fnmatch.h>
32 #include <ftw.h>
33 #include <locale.h>
34 #include <string.h>
35 #include <sys/stat.h>
36 #include <sys/types.h>
37 #include <unistd.h>
38 #include <ctime>
39 #include <iostream>
40 #include <sstream>
41 #include <memory>
42 #include <utility>
43 #include <algorithm>
44
45 #include "apt-ftparchive.h"
46 #include "writer.h"
47 #include "cachedb.h"
48 #include "multicompress.h"
49 #include "byhash.h"
50
51 #include <apti18n.h>
52 /*}}}*/
53 using namespace std;
54 FTWScanner *FTWScanner::Owner;
55
56 // ConfigToDoHashes - which hashes to generate /*{{{*/
57 static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
58 {
59 if (_config->FindB(Conf, (DoHashes & Flag) == Flag) == true)
60 DoHashes |= Flag;
61 else
62 DoHashes &= ~Flag;
63 }
64 static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
65 {
66 SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
67 SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
68 SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
69 SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
70 }
71 /*}}}*/
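// Illustrative note (not part of the original source): ConfigToDoHashes reads
// boolean toggles from whatever configuration namespace it is handed, so a
// caller passing "APT::FTPArchive" is driven by an apt.conf fragment such as
//
//   APT::FTPArchive {
//     MD5 "true";
//     SHA1 "true";
//     SHA256 "true";
//     SHA512 "false";   // drop SHA512 from the generated indexes
//   };
//
// When an option is absent, the flag keeps its current state in DoHashes.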
72
73 // FTWScanner::FTWScanner - Constructor /*{{{*/
74 FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch, bool const IncludeArchAll)
75 : Arch(Arch), IncludeArchAll(IncludeArchAll), DoHashes(~0)
76 {
77 if (GivenOutput == NULL)
78 {
79 Output = new FileFd;
80 OwnsOutput = true;
81 Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
82 }
83 else
84 {
85 Output = GivenOutput;
86 OwnsOutput = false;
87 }
88 ErrorPrinted = false;
89 NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
90 ConfigToDoHashes(DoHashes, "APT::FTPArchive");
91 }
92 /*}}}*/
93 FTWScanner::~FTWScanner()
94 {
95 if (Output != NULL && OwnsOutput)
96 delete Output;
97 }
98 // FTWScanner::Scanner - FTW Scanner /*{{{*/
99 // ---------------------------------------------------------------------
100 /* This is the FTW scanner; it processes each directory element in the
101 directory tree. */
102 int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
103 {
104 if (Flag == FTW_DNR)
105 {
106 Owner->NewLine(1);
107 ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
108 }
109 if (Flag == FTW_NS)
110 {
111 Owner->NewLine(1);
112 ioprintf(c1out, _("W: Unable to stat %s\n"), File);
113 }
114 if (Flag != FTW_F)
115 return 0;
116
117 return ScannerFile(File, true);
118 }
119 /*}}}*/
120 // FTWScanner::ScannerFile - File Scanner /*{{{*/
121 // ---------------------------------------------------------------------
122 /* Match the file name against the registered patterns, resolve symlinks if requested, hand the file to DoPackage and print any errors collected while processing it. */
123 int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
124 {
125 const char *LastComponent = strrchr(File, '/');
126 char *RealPath = NULL;
127
128 if (LastComponent == NULL)
129 LastComponent = File;
130 else
131 LastComponent++;
132
133 vector<string>::const_iterator I;
134 for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
135 {
136 if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
137 break;
138 }
139 if (I == Owner->Patterns.end())
140 return 0;
141
142 /* Process it. If the file is a link then resolve it into an absolute
143 name. This works best if the directory components given to the
144 scanner are not themselves links. */
145 char Jnk[2];
146 Owner->OriginalPath = File;
147 if (ReadLink &&
148 readlink(File,Jnk,sizeof(Jnk)) != -1 &&
149 (RealPath = realpath(File,NULL)) != 0)
150 {
151 Owner->DoPackage(RealPath);
152 free(RealPath);
153 }
154 else
155 Owner->DoPackage(File);
156
157 if (_error->empty() == false)
158 {
159 // Print any errors or warnings found
160 string Err;
161 bool SeenPath = false;
162 while (_error->empty() == false)
163 {
164 Owner->NewLine(1);
165
166 bool const Type = _error->PopMessage(Err);
167 if (Type == true)
168 cerr << _("E: ") << Err << endl;
169 else
170 cerr << _("W: ") << Err << endl;
171
172 if (Err.find(File) != string::npos)
173 SeenPath = true;
174 }
175
176 if (SeenPath == false)
177 cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
178 return 0;
179 }
180
181 return 0;
182 }
183 /*}}}*/
184 // FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
185 // ---------------------------------------------------------------------
186 /* Walk the directory tree with ftw(), feeding every entry to ScannerFTW. */
187 bool FTWScanner::RecursiveScan(string const &Dir)
188 {
189 char *RealPath = NULL;
190 /* If no prefix is set then use the scan root as the prefix, so we don't
191 generate out-of-control link-followed paths */
192 if (InternalPrefix.empty() == true)
193 {
194 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
195 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
196 InternalPrefix = RealPath;
197 free(RealPath);
198 }
199
200 // Do recursive directory searching
201 Owner = this;
202 int const Res = ftw(Dir.c_str(),ScannerFTW,30);
203
204 // Error treewalking?
205 if (Res != 0)
206 {
207 if (_error->PendingError() == false)
208 _error->Errno("ftw",_("Tree walking failed"));
209 return false;
210 }
211
212 return true;
213 }
214 /*}}}*/
215 // FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
216 // ---------------------------------------------------------------------
217 /* This is an alternative to using FTW to locate files; it reads the list
218 of files from another file. */
219 bool FTWScanner::LoadFileList(string const &Dir, string const &File)
220 {
221 char *RealPath = NULL;
222 /* If no prefix is set then use the scan root as the prefix, so we don't
223 generate out-of-control link-followed paths */
224 if (InternalPrefix.empty() == true)
225 {
226 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
227 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
228 InternalPrefix = RealPath;
229 free(RealPath);
230 }
231
232 Owner = this;
233 FILE *List = fopen(File.c_str(),"r");
234 if (List == 0)
235 return _error->Errno("fopen",_("Failed to open %s"),File.c_str());
236
237 /* We are a tad tricky here. We prefix the buffer with the directory
238 name; that way, if we need a full path, we just use Line. Sneaky and
239 fully evil. */
240 char Line[1000];
241 char *FileStart;
242 if (Dir.empty() == true || Dir.end()[-1] != '/')
243 FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
244 else
245 FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
246 while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
247 {
248 char *FileName = _strstrip(FileStart);
249 if (FileName[0] == 0)
250 continue;
251
252 if (FileName[0] != '/')
253 {
254 if (FileName != FileStart)
255 memmove(FileStart,FileName,strlen(FileStart));
256 FileName = Line;
257 }
258
259 #if 0
260 struct stat St;
261 int Flag = FTW_F;
262 if (stat(FileName,&St) != 0)
263 Flag = FTW_NS;
264 #endif
265
266 if (ScannerFile(FileName, false) != 0)
267 break;
268 }
269
270 fclose(List);
271 return true;
272 }
273 /*}}}*/
274 // FTWScanner::Delink - Delink symlinks /*{{{*/
275 // ---------------------------------------------------------------------
276 /* If the resolved target lies outside the internal prefix, optionally replace the on-disk symlink with a hard link to it (subject to DeLinkLimit) and report the original path instead. */
277 bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
278 unsigned long long &DeLinkBytes,
279 unsigned long long const &FileSize)
280 {
281 // See if this isn't an internally prefixed file name.
282 if (InternalPrefix.empty() == false &&
283 InternalPrefix.length() < FileName.length() &&
284 stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
285 InternalPrefix.begin(),InternalPrefix.end()) != 0)
286 {
287 if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
288 {
289 // Tidy up the display
290 if (DeLinkBytes == 0)
291 cout << endl;
292
293 NewLine(1);
294 ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
295 SizeToStr(FileSize).c_str());
296 c1out << flush;
297
298 if (NoLinkAct == false)
299 {
300 char OldLink[400];
301 if (readlink(OriginalPath,OldLink,sizeof(OldLink)) == -1)
302 _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
303 else
304 {
305 if (RemoveFile("FTWScanner::Delink", OriginalPath))
306 {
307 if (link(FileName.c_str(),OriginalPath) != 0)
308 {
309 // Panic! Restore the symlink
310 if (symlink(OldLink,OriginalPath) != 0)
311 _error->Errno("symlink", "failed to restore symlink");
312 return _error->Errno("link",_("*** Failed to link %s to %s"),
313 FileName.c_str(),
314 OriginalPath);
315 }
316 }
317 }
318 }
319
320 DeLinkBytes += FileSize;
321 if (DeLinkBytes/1024 >= DeLinkLimit)
322 ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
323 }
324
325 FileName = OriginalPath;
326 }
327
328 return true;
329 }
330 /*}}}*/
331 // FTWScanner::SetExts - Set extensions to support /*{{{*/
332 // ---------------------------------------------------------------------
333 /* Turn a space-separated extension list into file name patterns, qualified with the architecture when one is set. */
334 bool FTWScanner::SetExts(string const &Vals)
335 {
336 ClearPatterns();
337 string::size_type Start = 0;
338 while (Start < Vals.length())
339 {
340 string::size_type const Space = Vals.find(' ',Start);
341 string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
342 if ( Arch.empty() == false )
343 {
344 AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
345 if (IncludeArchAll == true && Arch != "all")
346 AddPattern(string("*_all") + Vals.substr(Start, Length));
347 }
348 else
349 AddPattern(string("*") + Vals.substr(Start, Length));
350
351 Start += Length + 1;
352 }
353
354 return true;
355 }
356 /*}}}*/
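// Illustrative example (not part of the original source): with Arch set to
// "amd64" and IncludeArchAll enabled, SetExts(".deb .udeb") registers the
// patterns "*_amd64.deb", "*_all.deb", "*_amd64.udeb" and "*_all.udeb";
// with an empty Arch it simply registers "*.deb" and "*.udeb".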
357
358 // PackagesWriter::PackagesWriter - Constructor /*{{{*/
359 // ---------------------------------------------------------------------
360 /* Set up the .deb/.udeb patterns, read the Packages-related configuration and load the override files. */
361 PackagesWriter::PackagesWriter(FileFd * const GivenOutput, TranslationWriter * const transWriter,
362 string const &DB,string const &Overrides,string const &ExtOverrides,
363 string const &Arch, bool const IncludeArchAll) :
364 FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats), TransWriter(transWriter)
365 {
366 SetExts(".deb .udeb");
367 DeLinkLimit = 0;
368
369 // Process the command line options
370 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
371 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
372 DoContents = _config->FindB("APT::FTPArchive::Contents",true);
373 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
374 LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);
375
376 if (Db.Loaded() == false)
377 DoContents = false;
378
379 // Read the override file
380 if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
381 return;
382 else
383 NoOverride = true;
384
385 if (ExtOverrides.empty() == false)
386 Over.ReadExtraOverride(ExtOverrides);
387
388 _error->DumpErrors();
389 }
390 /*}}}*/
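// Illustrative apt.conf fragment (an assumption for documentation purposes,
// not taken from this file) covering the options the constructor above reads:
//
//   APT::FTPArchive::Packages::SHA512 "false"; // per-index hash selection
//   APT::FTPArchive::AlwaysStat "false";       // trust the cache database
//   APT::FTPArchive::Contents "true";          // gather Contents data too
//   APT::FTPArchive::NoOverrideMsg "false";    // keep the override warnings
//   APT::FTPArchive::LongDescription "true";   // ship full Description fields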
391 // PackagesWriter::DoPackage - Process a single package /*{{{*/
392 // ---------------------------------------------------------------------
393 /* This method takes a package, gets its control information and the
394 configured hashes (MD5, SHA1, SHA256, SHA512), then writes out a control
395 record with the proper fields rewritten and the path/size/hashes appended. */
396 bool PackagesWriter::DoPackage(string FileName)
397 {
398 // Pull all the data we need from the DB
399 if (Db.GetFileInfo(FileName,
400 true, /* DoControl */
401 DoContents,
402 true, /* GenContentsOnly */
403 false, /* DoSource */
404 DoHashes, DoAlwaysStat) == false)
405 {
406 return false;
407 }
408
409 unsigned long long FileSize = Db.GetFileSize();
410 if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
411 return false;
412
413 // Look up the override information
414 pkgTagSection &Tags = Db.Control.Section;
415 string Package = Tags.FindS("Package");
416 string Architecture;
417 // If we generate a Packages file for a given arch, we use it to
418 // look for overrides. If we run in "simple" mode without the
419 // "Architectures" variable in the config, we use the architecture
420 // value from the deb file.
421 if(Arch != "")
422 Architecture = Arch;
423 else
424 Architecture = Tags.FindS("Architecture");
425 unique_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
426
427 if (Package.empty() == true)
428 return _error->Error(_("Archive had no package field"));
429
430 // If we need to do any rewriting of the header do it now..
431 if (OverItem.get() == 0)
432 {
433 if (NoOverride == false)
434 {
435 NewLine(1);
436 ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
437 }
438
439 OverItem = unique_ptr<Override::Item>(new Override::Item);
440 OverItem->FieldOverride["Section"] = Tags.FindS("Section");
441 OverItem->Priority = Tags.FindS("Priority");
442 }
443
444 // Strip the DirStrip prefix from the FileName and add the PathPrefix
445 string NewFileName;
446 if (DirStrip.empty() == false &&
447 FileName.length() > DirStrip.length() &&
448 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
449 DirStrip.begin(),DirStrip.end()) == 0)
450 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
451 else
452 NewFileName = FileName;
453 if (PathPrefix.empty() == false)
454 NewFileName = flCombine(PathPrefix,NewFileName);
455
456 /* Configuration says we don't want to include the long Description
457 in the package file - instead we want to ship a separate file */
458 string desc;
459 if (LongDescription == false) {
460 desc = Tags.FindS("Description").append("\n");
461 OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
462 }
463
464 // This lists all the changes to the fields we are going to make.
465 std::vector<pkgTagSection::Tag> Changes;
466
467 std::string Size;
468 strprintf(Size, "%llu", (unsigned long long) FileSize);
469 Changes.push_back(pkgTagSection::Tag::Rewrite("Size", Size));
470
471 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
472 {
473 if (hs->HashType() == "MD5Sum")
474 Changes.push_back(pkgTagSection::Tag::Rewrite("MD5sum", hs->HashValue()));
475 else if (hs->HashType() == "Checksum-FileSize")
476 continue;
477 else
478 Changes.push_back(pkgTagSection::Tag::Rewrite(hs->HashType(), hs->HashValue()));
479 }
480 Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", NewFileName));
481 Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", OverItem->Priority));
482 Changes.push_back(pkgTagSection::Tag::Remove("Status"));
483 Changes.push_back(pkgTagSection::Tag::Remove("Optional"));
484
485 string DescriptionMd5;
486 if (LongDescription == false) {
487 MD5Summation descmd5;
488 descmd5.Add(desc.c_str());
489 DescriptionMd5 = descmd5.Result().Value();
490 Changes.push_back(pkgTagSection::Tag::Rewrite("Description-md5", DescriptionMd5));
491 if (TransWriter != NULL)
492 TransWriter->DoPackage(Package, desc, DescriptionMd5);
493 }
494
495 // Rewrite the maintainer field if necessary
496 bool MaintFailed;
497 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
498 if (MaintFailed == true)
499 {
500 if (NoOverride == false)
501 {
502 NewLine(1);
503 ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
504 Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
505 }
506 }
507
508 if (NewMaint.empty() == false)
509 Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint));
510
511 /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
512 dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
513 but dpkg does this append bit. So we do the append bit, at least that way the
514 status file and package file will remain similar. There are other transforms
515 but optional is the only legacy one still in use for some lazy reason. */
516 string OptionalStr = Tags.FindS("Optional");
517 if (OptionalStr.empty() == false)
518 {
519 if (Tags.FindS("Suggests").empty() == false)
520 OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
521 Changes.push_back(pkgTagSection::Tag::Rewrite("Suggests", OptionalStr));
522 }
523
524 for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
525 I != OverItem->FieldOverride.end(); ++I)
526 Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
527
528 // Rewrite and store the fields.
529 if (Tags.Write(*Output, TFRewritePackageOrder, Changes) == false ||
530 Output->Write("\n", 1) == false)
531 return false;
532
533 return Db.Finish();
534 }
535 /*}}}*/
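// Sketch of the effect of DoPackage above (illustrative, field values made
// up): a stanza from the .deb control data is re-emitted with, among others,
//
//   Size: 123456
//   MD5sum: <hash>        (one line per configured hash type)
//   SHA256: <hash>
//   Filename: pool/main/f/foo/foo_1.0_amd64.deb
//   Priority: optional
//
// while any Status and Optional fields are dropped and the override values
// are applied on top.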
536 PackagesWriter::~PackagesWriter() /*{{{*/
537 {
538 }
539 /*}}}*/
540
541 // TranslationWriter::TranslationWriter - Constructor /*{{{*/
542 // ---------------------------------------------------------------------
543 /* Create a Translation-Master file for this Packages file */
544 TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
545 mode_t const &Permissions) : Comp(NULL), Output(NULL)
546 {
547 if (File.empty() == true)
548 return;
549
550 Comp = new MultiCompress(File, TransCompress, Permissions);
551 Output = &Comp->Input;
552 }
553 /*}}}*/
554 // TranslationWriter::DoPackage - Process a single package /*{{{*/
555 // ---------------------------------------------------------------------
556 /* Append a single description record to the Translation master file */
557 bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
558 string const &MD5)
559 {
560 if (Output == NULL)
561 return true;
562
563 // Different archs can include different versions and therefore
564 // different descriptions - so we need to check for both name and md5.
565 string const Record = Pkg + ":" + MD5;
566
567 if (Included.find(Record) != Included.end())
568 return true;
569
570 std::string out;
571 strprintf(out, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
572 Pkg.c_str(), MD5.c_str(), Desc.c_str());
573 Output->Write(out.c_str(), out.length());
574
575 Included.insert(Record);
576 return true;
577 }
578 /*}}}*/
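// Illustrative record as written by TranslationWriter::DoPackage above
// (values invented for the example):
//
//   Package: foo
//   Description-md5: 9fe97a...
//   Description-en: An example description
//
// Duplicate Package/md5 pairs are skipped via the Included set.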
579 // TranslationWriter::~TranslationWriter - Destructor /*{{{*/
580 // ---------------------------------------------------------------------
581 /* Tear down the compressor */
582 TranslationWriter::~TranslationWriter()
583 {
584 if (Comp != NULL)
585 delete Comp;
586 }
587 /*}}}*/
588
589 // SourcesWriter::SourcesWriter - Constructor /*{{{*/
590 // ---------------------------------------------------------------------
591 /* Register the *.dsc pattern, read the Sources-related configuration and load the binary and source override files. */
592 SourcesWriter::SourcesWriter(FileFd * const GivenOutput, string const &DB, string const &BOverrides,string const &SOverrides,
593 string const &ExtOverrides) :
594 FTWScanner(GivenOutput), Db(DB), Stats(Db.Stats)
595 {
596 AddPattern("*.dsc");
597 DeLinkLimit = 0;
598 Buffer = 0;
599 BufSize = 0;
600
601 // Process the command line options
602 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
603 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
604 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
605
606 // Read the override file
607 if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
608 return;
609 else
610 NoOverride = true;
611
612 // WTF?? The logic above: if we can't read binary overrides, don't even try
613 // reading source overrides. if we can read binary overrides, then say there
614 // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28
615
616 if (ExtOverrides.empty() == false)
617 SOver.ReadExtraOverride(ExtOverrides);
618
619 if (SOverrides.empty() == false && FileExists(SOverrides) == true)
620 SOver.ReadOverride(SOverrides,true);
621 }
622 /*}}}*/
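// As with the Packages writer, the Sources writer has its own hash toggles
// and shares the global switches; an illustrative fragment (assumed, not
// taken from this file):
//
//   APT::FTPArchive::Sources::SHA1 "true";
//   APT::FTPArchive::NoOverrideMsg "false";
//   APT::FTPArchive::AlwaysStat "false";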
623 // SourcesWriter::DoPackage - Process a single package /*{{{*/
624 static std::string getDscHash(unsigned int const DoHashes,
625 Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
626 HashString const * const Hash, unsigned long long Size, std::string FileName)
627 {
628 if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
629 return "";
630 std::ostringstream out;
631 out << "\n " << Hash->HashValue() << " " << Size << " " << FileName
632 << "\n " << Tags.FindS(FieldName);
633 return out.str();
634 }
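// getDscHash returns the body of a checksum field as a continuation block,
// e.g. (illustrative values):
//
//   "\n <hash> <size> foo_1.0.dsc\n <existing Files/Checksums-* body>"
//
// so the .dsc itself is listed ahead of the entries copied from the tag
// section.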
635 bool SourcesWriter::DoPackage(string FileName)
636 {
637 // Pull all the data we need from the DB
638 if (Db.GetFileInfo(FileName,
639 false, /* DoControl */
640 false, /* DoContents */
641 false, /* GenContentsOnly */
642 true, /* DoSource */
643 DoHashes, DoAlwaysStat) == false)
644 {
645 return false;
646 }
647
648 // we need to perform a "write" here (this is what finish is doing)
649 // because the call to Db.GetFileInfo() in the loop will change
650 // the "db cursor"
651 Db.Finish();
652
653 pkgTagSection Tags;
654 if (Tags.Scan(Db.Dsc.Data.c_str(), Db.Dsc.Data.length()) == false)
655 return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
656
657 if (Tags.Exists("Source") == false)
658 return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
659 Tags.Trim();
660
661 // Look up the override information, finding the best priority first.
662 string BestPrio;
663 string Bins = Tags.FindS("Binary");
664 char Buffer[Bins.length() + 1];
665 unique_ptr<Override::Item> OverItem(nullptr);
666 if (Bins.empty() == false)
667 {
668 strcpy(Buffer,Bins.c_str());
669
670 // Ignore too-long errors.
671 char *BinList[400];
672 TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));
673
674 // Look at all the binaries
675 unsigned char BestPrioV = pkgCache::State::Extra;
676 for (unsigned I = 0; BinList[I] != 0; I++)
677 {
678 unique_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
679 if (Itm.get() == 0)
680 continue;
681
682 unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
683 if (NewPrioV < BestPrioV || BestPrio.empty() == true)
684 {
685 BestPrioV = NewPrioV;
686 BestPrio = Itm->Priority;
687 }
688
689 if (OverItem.get() == 0)
690 OverItem = std::move(Itm);
691 }
692 }
693
694 // If we need to do any rewriting of the header do it now..
695 if (OverItem.get() == 0)
696 {
697 if (NoOverride == false)
698 {
699 NewLine(1);
700 ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());
701 }
702
703 OverItem.reset(new Override::Item);
704 }
705
706 struct stat St;
707 if (stat(FileName.c_str(), &St) != 0)
708 return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
709
710 unique_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
711 // const unique_ptr<Override::Item> autoSOverItem(SOverItem);
712 if (SOverItem.get() == 0)
713 {
714 ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
715 SOverItem = unique_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
716 if (SOverItem.get() == 0)
717 {
718 ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
719 SOverItem = unique_ptr<Override::Item>(new Override::Item);
720 *SOverItem = *OverItem;
721 }
722 }
723
724 // Add the dsc to the files hash list
725 string const strippedName = flNotDir(FileName);
726 std::string const Files = getDscHash(DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
727 std::string ChecksumsSha1 = getDscHash(DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
728 std::string ChecksumsSha256 = getDscHash(DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
729 std::string ChecksumsSha512 = getDscHash(DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);
730
731 // Strip the DirStrip prefix from the FileName and add the PathPrefix
732 string NewFileName;
733 if (DirStrip.empty() == false &&
734 FileName.length() > DirStrip.length() &&
735 stringcmp(DirStrip,OriginalPath,OriginalPath + DirStrip.length()) == 0)
736 NewFileName = string(OriginalPath + DirStrip.length());
737 else
738 NewFileName = OriginalPath;
739 if (PathPrefix.empty() == false)
740 NewFileName = flCombine(PathPrefix,NewFileName);
741
742 string Directory = flNotFile(OriginalPath);
743 string Package = Tags.FindS("Source");
744
745 // Perform operation over all of the files
746 string ParseJnk;
747 const char *C = Files.c_str();
748 char *RealPath = NULL;
749 for (;isspace(*C); C++);
750 while (*C != 0)
751 {
752 // Parse each of the elements
753 if (ParseQuoteWord(C,ParseJnk) == false ||
754 ParseQuoteWord(C,ParseJnk) == false ||
755 ParseQuoteWord(C,ParseJnk) == false)
756 return _error->Error("Error parsing file record");
757
758 string OriginalPath = Directory + ParseJnk;
759
760 // Add missing hashes to source files
761 if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
762 ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
763 ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
764 {
765 if (Db.GetFileInfo(OriginalPath,
766 false, /* DoControl */
767 false, /* DoContents */
768 false, /* GenContentsOnly */
769 false, /* DoSource */
770 DoHashes,
771 DoAlwaysStat) == false)
772 {
773 return _error->Error("Error getting file info");
774 }
775
776 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
777 {
778 if (hs->HashType() == "MD5Sum" || hs->HashType() == "Checksum-FileSize")
779 continue;
780 char const * fieldname;
781 std::string * out;
782 if (hs->HashType() == "SHA1")
783 {
784 fieldname = "Checksums-Sha1";
785 out = &ChecksumsSha1;
786 }
787 else if (hs->HashType() == "SHA256")
788 {
789 fieldname = "Checksums-Sha256";
790 out = &ChecksumsSha256;
791 }
792 else if (hs->HashType() == "SHA512")
793 {
794 fieldname = "Checksums-Sha512";
795 out = &ChecksumsSha512;
796 }
797 else
798 {
799 _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
800 continue;
801 }
802 if (Tags.Exists(fieldname) == true)
803 continue;
804 std::ostringstream streamout;
805 streamout << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk;
806 out->append(streamout.str());
807 }
808
809 // write back the GetFileInfo() stats data
810 Db.Finish();
811 }
812
813 // Perform the delinking operation
814 char Jnk[2];
815
816 if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
817 (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
818 {
819 string RP = RealPath;
820 free(RealPath);
821 if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
822 return false;
823 }
824 }
825
826 Directory = flNotFile(NewFileName);
827 if (Directory.length() > 2)
828 Directory.erase(Directory.end()-1);
829
830 // This lists all the changes to the fields we are going to make
831 // (removals, checksum, Directory, Priority, maintainer and override rewrites).
832 std::vector<pkgTagSection::Tag> Changes;
833
834 Changes.push_back(pkgTagSection::Tag::Remove("Source"));
835 Changes.push_back(pkgTagSection::Tag::Rewrite("Package", Package));
836 if (Files.empty() == false)
837 Changes.push_back(pkgTagSection::Tag::Rewrite("Files", Files));
838 if (ChecksumsSha1.empty() == false)
839 Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha1", ChecksumsSha1));
840 if (ChecksumsSha256.empty() == false)
841 Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha256", ChecksumsSha256));
842 if (ChecksumsSha512.empty() == false)
843 Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha512", ChecksumsSha512));
844 if (Directory != "./")
845 Changes.push_back(pkgTagSection::Tag::Rewrite("Directory", Directory));
846 Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", BestPrio));
847 Changes.push_back(pkgTagSection::Tag::Remove("Status"));
848
849 // Rewrite the maintainer field if necessary
850 bool MaintFailed;
851 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"), MaintFailed);
852 if (MaintFailed == true)
853 {
854 if (NoOverride == false)
855 {
856 NewLine(1);
857 ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
858 Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
859 }
860 }
861 if (NewMaint.empty() == false)
862 Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint.c_str()));
863
864 for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
865 I != SOverItem->FieldOverride.end(); ++I)
866 Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
867
868 // Rewrite and store the fields.
869 if (Tags.Write(*Output, TFRewriteSourceOrder, Changes) == false ||
870 Output->Write("\n", 1) == false)
871 return false;
872
873 Stats.Packages++;
874
875 return true;
876 }
877 /*}}}*/
878
879 // ContentsWriter::ContentsWriter - Constructor /*{{{*/
880 // ---------------------------------------------------------------------
881 /* Only .deb files contribute to Contents. */
882 ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB,
883 string const &Arch, bool const IncludeArchAll) :
884 FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats)
885
886 {
887 SetExts(".deb");
888 }
889 /*}}}*/
890 // ContentsWriter::DoPackage - Process a single package /*{{{*/
891 // ---------------------------------------------------------------------
892 /* If Package is the empty string the control record will be parsed to
893 determine what the package name is. */
894 bool ContentsWriter::DoPackage(string FileName, string Package)
895 {
896 if (!Db.GetFileInfo(FileName,
897 Package.empty(), /* DoControl */
898 true, /* DoContents */
899 false, /* GenContentsOnly */
900 false, /* DoSource */
901 0, /* DoHashes */
902 false /* checkMtime */))
903 {
904 return false;
905 }
906
907 // Parse the package name
908 if (Package.empty() == true)
909 {
910 Package = Db.Control.Section.FindS("Package");
911 }
912
913 Db.Contents.Add(Gen,Package);
914
915 return Db.Finish();
916 }
917 /*}}}*/
918 // ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
919 // ---------------------------------------------------------------------
920 /* Walk a (possibly compressed) Packages file and feed each Filename entry to DoPackage, prefixing the package name with its Section. */
921 bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
922 {
923 MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
924 if (_error->PendingError() == true)
925 return false;
926
927 // Open the package file
928 FileFd Fd;
929 if (Pkgs.OpenOld(Fd) == false)
930 return false;
931
932 pkgTagFile Tags(&Fd);
933 if (_error->PendingError() == true)
934 return false;
935
936 // Parse.
937 pkgTagSection Section;
938 while (Tags.Step(Section) == true)
939 {
940 string File = flCombine(Prefix,Section.FindS("FileName"));
941 string Package = Section.FindS("Section");
942 if (Package.empty() == false && Package.end()[-1] != '/')
943 {
944 Package += '/';
945 Package += Section.FindS("Package");
946 }
947 else
948 Package += Section.FindS("Package");
949
950 DoPackage(File,Package);
951 if (_error->empty() == false)
952 {
953 _error->Error("Errors apply to file '%s'",File.c_str());
954 _error->DumpErrors();
955 }
956 }
957
958 // Tidy the compressor
959 Fd.Close();
960
961 return true;
962 }
963
964 /*}}}*/
965
966 // ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
967 // ---------------------------------------------------------------------
968 /* Register the index file patterns and write the Release header fields from the configuration. */
969 ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput)
970 {
971 if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
972 {
973 AddPattern("Packages");
974 AddPattern("Packages.*");
975 AddPattern("Translation-*");
976 AddPattern("Sources");
977 AddPattern("Sources.*");
978 AddPattern("Release");
979 AddPattern("Contents-*");
980 AddPattern("Index");
981 AddPattern("icons-*.tar");
982 AddPattern("icons-*.tar.*");
983 AddPattern("Components-*.yml");
984 AddPattern("Components-*.yml.*");
985 AddPattern("md5sum.txt");
986 }
987 AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
988
989 time_t const now = time(NULL);
990
991 setlocale(LC_TIME, "C");
992
993 char datestr[128];
994 if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
995 gmtime(&now)) == 0)
996 {
997 datestr[0] = '\0';
998 }
999
1000 time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
1001 char validstr[128];
1002 if (now == validuntil ||
1003 strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
1004 gmtime(&validuntil)) == 0)
1005 {
1006 validstr[0] = '\0';
1007 }
1008
1009 setlocale(LC_TIME, "");
1010
1011 map<string,string> Fields;
1012 Fields["Origin"] = "";
1013 Fields["Label"] = "";
1014 Fields["Suite"] = "";
1015 Fields["Version"] = "";
1016 Fields["Codename"] = "";
1017 Fields["Date"] = datestr;
1018 Fields["Valid-Until"] = validstr;
1019 Fields["Architectures"] = "";
1020 Fields["Components"] = "";
1021 Fields["Description"] = "";
1022 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1023 Fields["Acquire-By-Hash"] = "true";
1024
1025 for(map<string,string>::const_iterator I = Fields.begin();
1026 I != Fields.end();
1027 ++I)
1028 {
1029 string Config = string("APT::FTPArchive::Release::") + (*I).first;
1030 string Value = _config->Find(Config, (*I).second.c_str());
1031 if (Value == "")
1032 continue;
1033
1034 std::string const out = I->first + ": " + Value + "\n";
1035 Output->Write(out.c_str(), out.length());
1036 }
1037
1038 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
1039 }
1040 /*}}}*/
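// Illustrative apt.conf fragment feeding the Release header fields written
// above (an assumption for documentation purposes, not taken from this file):
//
//   APT::FTPArchive::Release {
//     Origin "Example";
//     Suite "stable";
//     Codename "example";
//     Architectures "amd64 all";
//     Components "main";
//     ValidTime "864000";    // seconds added to Date to form Valid-Until
//   };
//   APT::FTPArchive::DoByHash "true";  // also emits Acquire-By-Hash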
1041 // ReleaseWriter::DoPackage - Process a single package /*{{{*/
1042 // ---------------------------------------------------------------------
1043 bool ReleaseWriter::DoPackage(string FileName)
1044 {
1045 // Strip the DirStrip prefix from the FileName and add the PathPrefix
1046 string NewFileName;
1047 if (DirStrip.empty() == false &&
1048 FileName.length() > DirStrip.length() &&
1049 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
1050 DirStrip.begin(),DirStrip.end()) == 0)
1051 {
1052 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
1053 while (NewFileName[0] == '/')
1054 NewFileName = string(NewFileName.begin() + 1,NewFileName.end());
1055 }
1056 else
1057 NewFileName = FileName;
1058
1059 if (PathPrefix.empty() == false)
1060 NewFileName = flCombine(PathPrefix,NewFileName);
1061
1062 FileFd fd(FileName, FileFd::ReadOnly);
1063
1064 if (!fd.IsOpen())
1065 {
1066 return false;
1067 }
1068
1069 CheckSums[NewFileName].size = fd.Size();
1070
1071 Hashes hs(DoHashes);
1072 hs.AddFD(fd);
1073 CheckSums[NewFileName].Hashes = hs.GetHashStringList();
1074 fd.Close();
1075
1076 // FIXME: wrong layer in the code(?)
1077 // FIXME2: symlink instead of create a copy
1078 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1079 {
1080 std::string Input = FileName;
1081 HashStringList hsl = hs.GetHashStringList();
1082 for(HashStringList::const_iterator h = hsl.begin();
1083 h != hsl.end(); ++h)
1084 {
1085 if (!h->usable())
1086 continue;
1087 if (flNotDir(FileName) == "Release" || flNotDir(FileName) == "InRelease")
1088 continue;
1089
1090 std::string ByHashOutputFile = GenByHashFilename(Input, *h);
1091 std::string ByHashOutputDir = flNotFile(ByHashOutputFile);
1092 if(!CreateDirectory(flNotFile(Input), ByHashOutputDir))
1093 return _error->Warning("can not create dir %s", flNotFile(ByHashOutputFile).c_str());
1094
1095 // write new hashes
1096 FileFd In(Input, FileFd::ReadOnly);
1097 FileFd Out(ByHashOutputFile, FileFd::WriteEmpty);
1098 if(!CopyFile(In, Out))
1099 return _error->Warning("failed to copy %s %s", Input.c_str(), ByHashOutputFile.c_str());
1100 }
1101 }
1102
1103 return true;
1104 }
1105
1106 /*}}}*/
1107 // ReleaseWriter::Finish - Output the checksums /*{{{*/
1108 // ---------------------------------------------------------------------
1109 static void printChecksumTypeRecord(FileFd &Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
1110 {
1111 {
1112 std::string out;
1113 strprintf(out, "%s:\n", Type);
1114 Output.Write(out.c_str(), out.length());
1115 }
1116 for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
1117 I != CheckSums.end(); ++I)
1118 {
1119 HashString const * const hs = I->second.Hashes.find(Type);
1120 if (hs == NULL)
1121 continue;
1122 std::string out;
1123 strprintf(out, " %s %16llu %s\n",
1124 hs->HashValue().c_str(),
1125 (*I).second.size,
1126 (*I).first.c_str());
1127 Output.Write(out.c_str(), out.length());
1128 }
1129 }
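// The per-hash blocks printed above take the familiar Release file shape
// (illustrative paths and sizes):
//
//   SHA256:
//    <hash> 16384 main/binary-amd64/Packages.gz
//    <hash>  4096 main/source/Sources.gz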
1130 void ReleaseWriter::Finish()
1131 {
1132 if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
1133 printChecksumTypeRecord(*Output, "MD5Sum", CheckSums);
1134 if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
1135 printChecksumTypeRecord(*Output, "SHA1", CheckSums);
1136 if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
1137 printChecksumTypeRecord(*Output, "SHA256", CheckSums);
1138 if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
1139 printChecksumTypeRecord(*Output, "SHA512", CheckSums);
1140
1141 // now do the by-hash cleanup
1142 map<string,ReleaseWriter::CheckSum>::const_iterator prev = CheckSums.begin();
1143 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1144 {
1145 for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
1146 I != CheckSums.end(); ++I)
1147 {
1148 if (I->first == "Release" || I->first == "InRelease")
1149 continue;
1150
1151 // keep iterating until we find a new subdir
1152 if(flNotFile(I->first) == flNotFile(prev->first))
1153 continue;
1154
1155 // clean that subdir up
1156 int keepFiles = _config->FindI("APT::FTPArchive::By-Hash-Keep", 3);
1157 // calculate how many compressors are used (the number of files
1158 // generated in that subdir during this run)
1159 keepFiles *= std::distance(prev, I);
1160 prev = I;
1161
1162 HashStringList hsl = prev->second.Hashes;
1163 for(HashStringList::const_iterator h = hsl.begin();
1164 h != hsl.end(); ++h)
1165 {
1166
1167 if (!h->usable())
1168 continue;
1169
1170 std::string RealFilename = DirStrip+"/"+prev->first;
1171 std::string ByHashOutputFile = GenByHashFilename(RealFilename, *h);
1172 DeleteAllButMostRecent(flNotFile(ByHashOutputFile), keepFiles);
1173 }
1174 }
1175 }
1176 }