1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: writer.cc,v 1.14 2004/03/24 01:40:43 mdz Exp $
4 /* ######################################################################
5
6 Writer
7
8 The file writer classes. These write various types of output, sources,
9 packages and contents.
10
11 ##################################################################### */
12 /*}}}*/
13 // Include Files /*{{{*/
14 #include <config.h>
15
16 #include <apt-pkg/configuration.h>
17 #include <apt-pkg/deblistparser.h>
18 #include <apt-pkg/error.h>
19 #include <apt-pkg/fileutl.h>
20 #include <apt-pkg/gpgv.h>
21 #include <apt-pkg/hashes.h>
22 #include <apt-pkg/md5.h>
23 #include <apt-pkg/strutl.h>
24 #include <apt-pkg/debfile.h>
25 #include <apt-pkg/pkgcache.h>
26 #include <apt-pkg/sha1.h>
27 #include <apt-pkg/sha2.h>
28 #include <apt-pkg/tagfile.h>
29
30 #include <ctype.h>
31 #include <fnmatch.h>
32 #include <ftw.h>
33 #include <locale.h>
34 #include <string.h>
35 #include <sys/stat.h>
36 #include <sys/types.h>
37 #include <unistd.h>
38 #include <ctime>
39 #include <iostream>
40 #include <iomanip>
41 #include <sstream>
42 #include <memory>
43 #include <utility>
44 #include <algorithm>
45
46 #include "apt-ftparchive.h"
47 #include "writer.h"
48 #include "cachedb.h"
49 #include "multicompress.h"
50 #include "byhash.h"
51
52 #include <apti18n.h>
53 /*}}}*/
54 using namespace std;
55 FTWScanner *FTWScanner::Owner;
56
57 // ConfigToDoHashes - which hashes to generate /*{{{*/
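// Each writer decides per hash type whether to emit it; the lookup uses the
// current DoHashes bit as its default, so a per-writer key can override the
// global one in either direction. Example (apt.conf syntax):
//   APT::FTPArchive::SHA512 "false";           // drop SHA512 everywhere ...
//   APT::FTPArchive::Packages::SHA512 "true";  // ... except in Packages files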
58 static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
59 {
60 if (_config->FindB(Conf, (DoHashes & Flag) == Flag) == true)
61 DoHashes |= Flag;
62 else
63 DoHashes &= ~Flag;
64 }
65 static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
66 {
67 SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
68 SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
69 SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
70 SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
71 }
72 /*}}}*/
73
74 // FTWScanner::FTWScanner - Constructor /*{{{*/
75 FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch, bool const IncludeArchAll)
76 : Arch(Arch), IncludeArchAll(IncludeArchAll), DoHashes(~0)
77 {
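   // Without an explicitly given output we wrap stdout in a FileFd of our own
   // (OwnsOutput) and delete it again in the destructor.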
78 if (GivenOutput == NULL)
79 {
80 Output = new FileFd;
81 OwnsOutput = true;
82 Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
83 }
84 else
85 {
86 Output = GivenOutput;
87 OwnsOutput = false;
88 }
89 ErrorPrinted = false;
90 NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
91 ConfigToDoHashes(DoHashes, "APT::FTPArchive");
92 }
93 /*}}}*/
94 FTWScanner::~FTWScanner()
95 {
96 if (Output != NULL && OwnsOutput)
97 delete Output;
98 }
99 // FTWScanner::Scanner - FTW Scanner /*{{{*/
100 // ---------------------------------------------------------------------
101 /* This is the FTW scanner; it processes each directory element in the
102 directory tree. */
103 int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
104 {
105 if (Flag == FTW_DNR)
106 {
107 Owner->NewLine(1);
108 ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
109 }
110 if (Flag == FTW_NS)
111 {
112 Owner->NewLine(1);
113 ioprintf(c1out, _("W: Unable to stat %s\n"), File);
114 }
115 if (Flag != FTW_F)
116 return 0;
117
118 return ScannerFile(File, true);
119 }
120 /*}}}*/
121 // FTWScanner::ScannerFile - File Scanner /*{{{*/
122 // ---------------------------------------------------------------------
123 /* Check a single file against the registered patterns, resolve it if it is
    a symlink and hand it to the owning writer's DoPackage(), printing any
    errors or warnings raised while processing it. */
124 int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
125 {
126 const char *LastComponent = strrchr(File, '/');
127 char *RealPath = NULL;
128
129 if (LastComponent == NULL)
130 LastComponent = File;
131 else
132 LastComponent++;
133
134 vector<string>::const_iterator I;
135 for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
136 {
137 if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
138 break;
139 }
140 if (I == Owner->Patterns.end())
141 return 0;
142
143 /* Process it. If the file is a link then resolve it into an absolute
144 name. This works best if the directory components given to the scanner
145 are not themselves links. */
146 char Jnk[2];
147 Owner->OriginalPath = File;
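   // readlink() into the tiny Jnk buffer merely tests whether File is a
   // symlink; the actual target is obtained with realpath().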
148 if (ReadLink &&
149 readlink(File,Jnk,sizeof(Jnk)) != -1 &&
150 (RealPath = realpath(File,NULL)) != 0)
151 {
152 Owner->DoPackage(RealPath);
153 free(RealPath);
154 }
155 else
156 Owner->DoPackage(File);
157
158 if (_error->empty() == false)
159 {
160 // Print any errors or warnings found
161 string Err;
162 bool SeenPath = false;
163 while (_error->empty() == false)
164 {
165 Owner->NewLine(1);
166
167 bool const Type = _error->PopMessage(Err);
168 if (Type == true)
169 cerr << _("E: ") << Err << endl;
170 else
171 cerr << _("W: ") << Err << endl;
172
173 if (Err.find(File) != string::npos)
174 SeenPath = true;
175 }
176
177 if (SeenPath == false)
178 cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
179 return 0;
180 }
181
182 return 0;
183 }
184 /*}}}*/
185 // FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
186 // ---------------------------------------------------------------------
187 /* Walk the directory tree below Dir with ftw() and feed every regular file
    found to the scanner. */
188 bool FTWScanner::RecursiveScan(string const &Dir)
189 {
190 char *RealPath = NULL;
191 /* If no prefix has been set yet, record the resolved scan root so we don't
192 generate link-followed paths out of control */
193 if (InternalPrefix.empty() == true)
194 {
195 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
196 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
197 InternalPrefix = RealPath;
198 free(RealPath);
199 }
200
201 // Do recursive directory searching
202 Owner = this;
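   // 30 is ftw()'s nopenfd argument: the maximum number of directory streams
   // it may keep open at once while walking the tree.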
203 int const Res = ftw(Dir.c_str(),ScannerFTW,30);
204
205 // Error treewalking?
206 if (Res != 0)
207 {
208 if (_error->PendingError() == false)
209 _error->Errno("ftw",_("Tree walking failed"));
210 return false;
211 }
212
213 return true;
214 }
215 /*}}}*/
216 // FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
217 // ---------------------------------------------------------------------
218 /* This is an alternative to using FTW to locate files, it reads the list
219 of files from another file. */
220 bool FTWScanner::LoadFileList(string const &Dir, string const &File)
221 {
222 char *RealPath = NULL;
223 /* If no prefix has been set yet, record the resolved scan root so we don't
224 generate link-followed paths out of control */
225 if (InternalPrefix.empty() == true)
226 {
227 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
228 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
229 InternalPrefix = RealPath;
230 free(RealPath);
231 }
232
233 Owner = this;
234 FILE *List = fopen(File.c_str(),"r");
235 if (List == 0)
236 return _error->Errno("fopen",_("Failed to open %s"),File.c_str());
237
238 /* We are a tad tricky here: we prefix the buffer with the directory
239 name, that way if we need a full path we can just use Line. Sneaky and
240 fully evil. */
241 char Line[1000];
242 char *FileStart;
243 if (Dir.empty() == true || Dir.end()[-1] != '/')
244 FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
245 else
246 FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
247 while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
248 {
249 char *FileName = _strstrip(FileStart);
250 if (FileName[0] == 0)
251 continue;
252
253 if (FileName[0] != '/')
254 {
255 if (FileName != FileStart)
256 memmove(FileStart,FileName,strlen(FileStart));
257 FileName = Line;
258 }
259
260 #if 0
261 struct stat St;
262 int Flag = FTW_F;
263 if (stat(FileName,&St) != 0)
264 Flag = FTW_NS;
265 #endif
266
267 if (ScannerFile(FileName, false) != 0)
268 break;
269 }
270
271 fclose(List);
272 return true;
273 }
274 /*}}}*/
275 // FTWScanner::Delink - Delink symlinks /*{{{*/
276 // ---------------------------------------------------------------------
277 /* If FileName resolved to something outside the internal prefix (i.e. a
    symlink pointing out of the tree), optionally replace the symlink with a
    hard link to the target (up to DeLinkLimit KiB) and record the in-tree
    path instead. */
278 bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
279 unsigned long long &DeLinkBytes,
280 unsigned long long const &FileSize)
281 {
282 // See if this isn't an internally prefixed file name.
283 if (InternalPrefix.empty() == false &&
284 InternalPrefix.length() < FileName.length() &&
285 stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
286 InternalPrefix.begin(),InternalPrefix.end()) != 0)
287 {
288 if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
289 {
290 // Tidy up the display
291 if (DeLinkBytes == 0)
292 cout << endl;
293
294 NewLine(1);
295 ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
296 SizeToStr(FileSize).c_str());
297 c1out << flush;
298
299 if (NoLinkAct == false)
300 {
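   // Replace the symlink with a hard link to the real file, remembering the
   // old target so the symlink can be restored if linking fails.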
301 char OldLink[400];
302 if (readlink(OriginalPath,OldLink,sizeof(OldLink)) == -1)
303 _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
304 else
305 {
306 if (RemoveFile("FTWScanner::Delink", OriginalPath))
307 {
308 if (link(FileName.c_str(),OriginalPath) != 0)
309 {
310 // Panic! Restore the symlink
311 if (symlink(OldLink,OriginalPath) != 0)
312 _error->Errno("symlink", "failed to restore symlink");
313 return _error->Errno("link",_("*** Failed to link %s to %s"),
314 FileName.c_str(),
315 OriginalPath);
316 }
317 }
318 }
319 }
320
321 DeLinkBytes += FileSize;
322 if (DeLinkBytes/1024 >= DeLinkLimit)
323 ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
324 }
325
326 FileName = OriginalPath;
327 }
328
329 return true;
330 }
331 /*}}}*/
332 // FTWScanner::SetExts - Set extensions to support /*{{{*/
333 // ---------------------------------------------------------------------
334 /* Build filename patterns from a space separated list of extensions,
    qualified with the configured architecture (see the example below). */
335 bool FTWScanner::SetExts(string const &Vals)
336 {
337 ClearPatterns();
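   // e.g. Vals == ".deb .udeb" with Arch == "amd64" (and IncludeArchAll set)
   // yields "*_amd64.deb", "*_all.deb", "*_amd64.udeb" and "*_all.udeb".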
338 string::size_type Start = 0;
339 while (Start <= Vals.length()-1)
340 {
341 string::size_type const Space = Vals.find(' ',Start);
342 string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
343 if ( Arch.empty() == false )
344 {
345 AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
346 if (IncludeArchAll == true && Arch != "all")
347 AddPattern(string("*_all") + Vals.substr(Start, Length));
348 }
349 else
350 AddPattern(string("*") + Vals.substr(Start, Length));
351
352 Start += Length + 1;
353 }
354
355 return true;
356 }
357 /*}}}*/
358
359 // PackagesWriter::PackagesWriter - Constructor /*{{{*/
360 // ---------------------------------------------------------------------
361 /* Register the *.deb/*.udeb patterns, read the configuration and load the
    override files. */
362 PackagesWriter::PackagesWriter(FileFd * const GivenOutput, TranslationWriter * const transWriter,
363 string const &DB,string const &Overrides,string const &ExtOverrides,
364 string const &Arch, bool const IncludeArchAll) :
365 FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats), TransWriter(transWriter)
366 {
367 SetExts(".deb .udeb");
368 DeLinkLimit = 0;
369
370 // Process the command line options
371 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
372 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
373 DoContents = _config->FindB("APT::FTPArchive::Contents",true);
374 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
375 LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);
376
377 if (Db.Loaded() == false)
378 DoContents = false;
379
380 // Read the override file
381 if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
382 return;
383 else
384 NoOverride = true;
385
386 if (ExtOverrides.empty() == false)
387 Over.ReadExtraOverride(ExtOverrides);
388
389 _error->DumpErrors();
390 }
391 /*}}}*/
392 // PackagesWriter::DoPackage - Process a single package /*{{{*/
393 // ---------------------------------------------------------------------
394 /* This method takes a package, gets its control information and the
395 configured checksums, then writes out a control record with the proper
396 fields rewritten and the path/size/hashes appended. */
397 bool PackagesWriter::DoPackage(string FileName)
398 {
399 // Pull all the data we need from the DB
400 if (Db.GetFileInfo(FileName,
401 true, /* DoControl */
402 DoContents,
403 true, /* GenContentsOnly */
404 false, /* DoSource */
405 DoHashes, DoAlwaysStat) == false)
406 {
407 return false;
408 }
409
410 unsigned long long FileSize = Db.GetFileSize();
411 if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
412 return false;
413
414 // Look up the override information
415 pkgTagSection &Tags = Db.Control.Section;
416 string Package = Tags.FindS("Package");
417 string Architecture;
418 // If we generate a Packages file for a given arch, we use it to
419 // look for overrides. If we run in "simple" mode without the
420 // "Architectures" variable in the config we use the architecture value
421 // from the deb file
422 if(Arch != "")
423 Architecture = Arch;
424 else
425 Architecture = Tags.FindS("Architecture");
426 unique_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
427
428 if (Package.empty() == true)
429 return _error->Error(_("Archive had no package field"));
430
431 // If we need to do any rewriting of the header do it now.
432 if (OverItem.get() == 0)
433 {
434 if (NoOverride == false)
435 {
436 NewLine(1);
437 ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
438 }
439
440 OverItem = unique_ptr<Override::Item>(new Override::Item);
441 OverItem->FieldOverride["Section"] = Tags.FindS("Section");
442 OverItem->Priority = Tags.FindS("Priority");
443 }
444
445 // Strip the DirStrip prefix from the FileName and add the PathPrefix
446 string NewFileName;
447 if (DirStrip.empty() == false &&
448 FileName.length() > DirStrip.length() &&
449 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
450 DirStrip.begin(),DirStrip.end()) == 0)
451 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
452 else
453 NewFileName = FileName;
454 if (PathPrefix.empty() == false)
455 NewFileName = flCombine(PathPrefix,NewFileName);
456
457 /* Configuration says we don't want to include the long Description
458 in the package file - instead we want to ship it in a separate file */
459 string desc;
460 if (LongDescription == false) {
461 desc = Tags.FindS("Description").append("\n");
462 OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
463 }
464
465 // This lists all the changes to the fields we are going to make.
466 std::vector<pkgTagSection::Tag> Changes;
467
468 std::string Size;
469 strprintf(Size, "%llu", (unsigned long long) FileSize);
470 Changes.push_back(pkgTagSection::Tag::Rewrite("Size", Size));
471
472 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
473 {
474 if (hs->HashType() == "MD5Sum")
475 Changes.push_back(pkgTagSection::Tag::Rewrite("MD5sum", hs->HashValue()));
476 else if (hs->HashType() == "Checksum-FileSize")
477 continue;
478 else
479 Changes.push_back(pkgTagSection::Tag::Rewrite(hs->HashType(), hs->HashValue()));
480 }
481 Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", NewFileName));
482 Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", OverItem->Priority));
483 Changes.push_back(pkgTagSection::Tag::Remove("Status"));
484 Changes.push_back(pkgTagSection::Tag::Remove("Optional"));
485
486 string DescriptionMd5;
487 if (LongDescription == false) {
488 MD5Summation descmd5;
489 descmd5.Add(desc.c_str());
490 DescriptionMd5 = descmd5.Result().Value();
491 Changes.push_back(pkgTagSection::Tag::Rewrite("Description-md5", DescriptionMd5));
492 if (TransWriter != NULL)
493 TransWriter->DoPackage(Package, desc, DescriptionMd5);
494 }
495
496 // Rewrite the maintainer field if necessary
497 bool MaintFailed;
498 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
499 if (MaintFailed == true)
500 {
501 if (NoOverride == false)
502 {
503 NewLine(1);
504 ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
505 Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
506 }
507 }
508
509 if (NewMaint.empty() == false)
510 Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint));
511
512 /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
513 dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
514 but dpkg does this append bit. So we do the append bit, at least that way the
515 status file and package file will remain similar. There are other transforms
516 but optional is the only legacy one still in use for some lazy reason. */
517 string OptionalStr = Tags.FindS("Optional");
518 if (OptionalStr.empty() == false)
519 {
520 if (Tags.FindS("Suggests").empty() == false)
521 OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
522 Changes.push_back(pkgTagSection::Tag::Rewrite("Suggests", OptionalStr));
523 }
524
525 for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
526 I != OverItem->FieldOverride.end(); ++I)
527 Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
528
529 // Rewrite and store the fields.
530 if (Tags.Write(*Output, TFRewritePackageOrder, Changes) == false ||
531 Output->Write("\n", 1) == false)
532 return false;
533
534 return Db.Finish();
535 }
536 /*}}}*/
537 PackagesWriter::~PackagesWriter() /*{{{*/
538 {
539 }
540 /*}}}*/
541
542 // TranslationWriter::TranslationWriter - Constructor /*{{{*/
543 // ---------------------------------------------------------------------
544 /* Create a Translation-Master file for this Packages file */
545 TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
546 mode_t const &Permissions) : Comp(NULL), Output(NULL)
547 {
548 if (File.empty() == true)
549 return;
550
551 Comp = new MultiCompress(File, TransCompress, Permissions);
552 Output = &Comp->Input;
553 }
554 /*}}}*/
555 // TranslationWriter::DoPackage - Process a single package /*{{{*/
556 // ---------------------------------------------------------------------
557 /* Write a single Description record to the Translation master file, skipping duplicates. */
558 bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
559 string const &MD5)
560 {
561 if (Output == NULL)
562 return true;
563
564 // Different archs can include different versions and therefore
565 // different descriptions - so we need to check for both name and md5.
566 string const Record = Pkg + ":" + MD5;
567
568 if (Included.find(Record) != Included.end())
569 return true;
570
571 std::string out;
572 strprintf(out, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
573 Pkg.c_str(), MD5.c_str(), Desc.c_str());
574 Output->Write(out.c_str(), out.length());
575
576 Included.insert(Record);
577 return true;
578 }
579 /*}}}*/
580 // TranslationWriter::~TranslationWriter - Destructor /*{{{*/
581 // ---------------------------------------------------------------------
582 /* */
583 TranslationWriter::~TranslationWriter()
584 {
585 if (Comp != NULL)
586 delete Comp;
587 }
588 /*}}}*/
589
590 // SourcesWriter::SourcesWriter - Constructor /*{{{*/
591 // ---------------------------------------------------------------------
592 /* Register the *.dsc pattern, read the configuration and load the binary,
    source and extra override files. */
593 SourcesWriter::SourcesWriter(FileFd * const GivenOutput, string const &DB, string const &BOverrides,string const &SOverrides,
594 string const &ExtOverrides) :
595 FTWScanner(GivenOutput), Db(DB), Stats(Db.Stats)
596 {
597 AddPattern("*.dsc");
598 DeLinkLimit = 0;
599 Buffer = 0;
600 BufSize = 0;
601
602 // Process the command line options
603 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
604 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
605 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
606
607 // Read the override file
608 if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
609 return;
610 else
611 NoOverride = true;
612
613 // WTF?? The logic above: if we can't read binary overrides, don't even try
614 // reading source overrides. if we can read binary overrides, then say there
615 // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28
616
617 if (ExtOverrides.empty() == false)
618 SOver.ReadExtraOverride(ExtOverrides);
619
620 if (SOverrides.empty() == false && FileExists(SOverrides) == true)
621 SOver.ReadOverride(SOverrides,true);
622 }
623 /*}}}*/
624 // SourcesWriter::DoPackage - Process a single package /*{{{*/
625 static std::string getDscHash(unsigned int const DoHashes,
626 Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
627 HashString const * const Hash, unsigned long long Size, std::string const &FileName)
628 {
629 if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
630 return "";
631 std::ostringstream out;
632 out << "\n " << Hash->HashValue() << " " << std::to_string(Size) << " " << FileName
633 << "\n " << Tags.FindS(FieldName);
634 return out.str();
635 }
636 bool SourcesWriter::DoPackage(string FileName)
637 {
638 // Pull all the data we need from the DB
639 if (Db.GetFileInfo(FileName,
640 false, /* DoControl */
641 false, /* DoContents */
642 false, /* GenContentsOnly */
643 true, /* DoSource */
644 DoHashes, DoAlwaysStat) == false)
645 {
646 return false;
647 }
648
649 // We need to perform a "write" here (this is what Finish() does),
650 // because the call to Db.GetFileInfo() in the loop below will change
651 // the "db cursor"
652 Db.Finish();
653
654 pkgTagSection Tags;
655 if (Tags.Scan(Db.Dsc.Data.c_str(), Db.Dsc.Data.length()) == false)
656 return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
657
658 if (Tags.Exists("Source") == false)
659 return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
660 Tags.Trim();
661
662 // Look up the override information, determining the best priority first.
663 string BestPrio;
664 string Bins = Tags.FindS("Binary");
665 char Buffer[Bins.length() + 1];
666 unique_ptr<Override::Item> OverItem(nullptr);
667 if (Bins.empty() == false)
668 {
669 strcpy(Buffer,Bins.c_str());
670
671 // Split the Binary field on commas; errors from an overly long list are ignored.
672 char *BinList[400];
673 TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));
674
675 // Look at all the binaries
676 unsigned char BestPrioV = pkgCache::State::Extra;
677 for (unsigned I = 0; BinList[I] != 0; I++)
678 {
679 unique_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
680 if (Itm.get() == 0)
681 continue;
682
683 unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
684 if (NewPrioV < BestPrioV || BestPrio.empty() == true)
685 {
686 BestPrioV = NewPrioV;
687 BestPrio = Itm->Priority;
688 }
689
690 if (OverItem.get() == 0)
691 OverItem = std::move(Itm);
692 }
693 }
694
695 // If we need to do any rewriting of the header do it now.
696 if (OverItem.get() == 0)
697 {
698 if (NoOverride == false)
699 {
700 NewLine(1);
701 ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());
702 }
703
704 OverItem.reset(new Override::Item);
705 }
706
707 struct stat St;
708 if (stat(FileName.c_str(), &St) != 0)
709 return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
710
711 unique_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
712 // const unique_ptr<Override::Item> autoSOverItem(SOverItem);
713 if (SOverItem.get() == 0)
714 {
715 ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
716 SOverItem = unique_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
717 if (SOverItem.get() == 0)
718 {
719 ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
720 SOverItem = unique_ptr<Override::Item>(new Override::Item);
721 *SOverItem = *OverItem;
722 }
723 }
724
725 // Add the dsc to the files hash list
726 string const strippedName = flNotDir(FileName);
727 std::string const Files = getDscHash(DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
728 std::string ChecksumsSha1 = getDscHash(DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
729 std::string ChecksumsSha256 = getDscHash(DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
730 std::string ChecksumsSha512 = getDscHash(DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);
731
732 // Strip the DirStrip prefix from the FileName and add the PathPrefix
733 string NewFileName;
734 if (DirStrip.empty() == false &&
735 FileName.length() > DirStrip.length() &&
736 stringcmp(DirStrip,OriginalPath,OriginalPath + DirStrip.length()) == 0)
737 NewFileName = string(OriginalPath + DirStrip.length());
738 else
739 NewFileName = OriginalPath;
740 if (PathPrefix.empty() == false)
741 NewFileName = flCombine(PathPrefix,NewFileName);
742
743 string Directory = flNotFile(OriginalPath);
744 string Package = Tags.FindS("Source");
745
746 // Perform operation over all of the files
747 string ParseJnk;
748 const char *C = Files.c_str();
749 char *RealPath = NULL;
750 for (;isspace(*C); C++);
751 while (*C != 0)
752 {
753 // Parse each of the elements
754 if (ParseQuoteWord(C,ParseJnk) == false ||
755 ParseQuoteWord(C,ParseJnk) == false ||
756 ParseQuoteWord(C,ParseJnk) == false)
757 return _error->Error("Error parsing file record");
758
759 string OriginalPath = Directory + ParseJnk;
760
761 // Add missing hashes to source files
762 if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
763 ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
764 ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
765 {
766 if (Db.GetFileInfo(OriginalPath,
767 false, /* DoControl */
768 false, /* DoContents */
769 false, /* GenContentsOnly */
770 false, /* DoSource */
771 DoHashes,
772 DoAlwaysStat) == false)
773 {
774 return _error->Error("Error getting file info");
775 }
776
777 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
778 {
779 if (hs->HashType() == "MD5Sum" || hs->HashType() == "Checksum-FileSize")
780 continue;
781 char const * fieldname;
782 std::string * out;
783 if (hs->HashType() == "SHA1")
784 {
785 fieldname = "Checksums-Sha1";
786 out = &ChecksumsSha1;
787 }
788 else if (hs->HashType() == "SHA256")
789 {
790 fieldname = "Checksums-Sha256";
791 out = &ChecksumsSha256;
792 }
793 else if (hs->HashType() == "SHA512")
794 {
795 fieldname = "Checksums-Sha512";
796 out = &ChecksumsSha512;
797 }
798 else
799 {
800 _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
801 continue;
802 }
803 if (Tags.Exists(fieldname) == true)
804 continue;
805 std::ostringstream streamout;
806 streamout << "\n " << hs->HashValue() << " " << std::to_string(Db.GetFileSize()) << " " << ParseJnk;
807 out->append(streamout.str());
808 }
809
810 // write back the GetFileInfo() stats data
811 Db.Finish();
812 }
813
814 // Perform the delinking operation
815 char Jnk[2];
816
817 if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
818 (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
819 {
820 string RP = RealPath;
821 free(RealPath);
822 if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
823 return false;
824 }
825 }
826
827 Directory = flNotFile(NewFileName);
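   // flNotFile() leaves the trailing slash on; strip it unless the directory
   // is just "./" (which is omitted from the record below).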
828 if (Directory.length() > 2)
829 Directory.erase(Directory.end()-1);
830
831 // This lists all the changes to the fields we are going to make.
832 // (5 hardcoded + checksums + maintainer + end marker)
833 std::vector<pkgTagSection::Tag> Changes;
834
835 Changes.push_back(pkgTagSection::Tag::Remove("Source"));
836 Changes.push_back(pkgTagSection::Tag::Rewrite("Package", Package));
837 if (Files.empty() == false)
838 Changes.push_back(pkgTagSection::Tag::Rewrite("Files", Files));
839 if (ChecksumsSha1.empty() == false)
840 Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha1", ChecksumsSha1));
841 if (ChecksumsSha256.empty() == false)
842 Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha256", ChecksumsSha256));
843 if (ChecksumsSha512.empty() == false)
844 Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha512", ChecksumsSha512));
845 if (Directory != "./")
846 Changes.push_back(pkgTagSection::Tag::Rewrite("Directory", Directory));
847 Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", BestPrio));
848 Changes.push_back(pkgTagSection::Tag::Remove("Status"));
849
850 // Rewrite the maintainer field if necessary
851 bool MaintFailed;
852 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"), MaintFailed);
853 if (MaintFailed == true)
854 {
855 if (NoOverride == false)
856 {
857 NewLine(1);
858 ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
859 Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
860 }
861 }
862 if (NewMaint.empty() == false)
863 Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint.c_str()));
864
865 for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
866 I != SOverItem->FieldOverride.end(); ++I)
867 Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));
868
869 // Rewrite and store the fields.
870 if (Tags.Write(*Output, TFRewriteSourceOrder, Changes) == false ||
871 Output->Write("\n", 1) == false)
872 return false;
873
874 Stats.Packages++;
875
876 return true;
877 }
878 /*}}}*/
879
880 // ContentsWriter::ContentsWriter - Constructor /*{{{*/
881 // ---------------------------------------------------------------------
882 /* */
883 ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB,
884 string const &Arch, bool const IncludeArchAll) :
885 FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats)
886
887 {
888 SetExts(".deb");
889 }
890 /*}}}*/
891 // ContentsWriter::DoPackage - Process a single package /*{{{*/
892 // ---------------------------------------------------------------------
893 /* If Package is the empty string the control record will be parsed to
894 determine what the package name is. */
895 bool ContentsWriter::DoPackage(string FileName, string Package)
896 {
897 if (!Db.GetFileInfo(FileName,
898 Package.empty(), /* DoControl */
899 true, /* DoContents */
900 false, /* GenContentsOnly */
901 false, /* DoSource */
902 0, /* DoHashes */
903 false /* checkMtime */))
904 {
905 return false;
906 }
907
908 // Parse the package name
909 if (Package.empty() == true)
910 {
911 Package = Db.Control.Section.FindS("Package");
912 }
913
914 Db.Contents.Add(Gen,Package);
915
916 return Db.Finish();
917 }
918 /*}}}*/
919 // ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
920 // ---------------------------------------------------------------------
921 /* Re-read an existing (possibly compressed) Packages file and feed every
    listed deb to DoPackage() so its contents end up in the index. */
922 bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
923 {
924 MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
925 if (_error->PendingError() == true)
926 return false;
927
928 // Open the package file
929 FileFd Fd;
930 if (Pkgs.OpenOld(Fd) == false)
931 return false;
932
933 pkgTagFile Tags(&Fd);
934 if (_error->PendingError() == true)
935 return false;
936
937 // Parse.
938 pkgTagSection Section;
939 while (Tags.Step(Section) == true)
940 {
941 string File = flCombine(Prefix,Section.FindS("FileName"));
942 string Package = Section.FindS("Section");
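      // The Contents index lists packages as "section/package"; fall back to
      // the bare package name if no Section field is present.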
943 if (Package.empty() == false && Package.end()[-1] != '/')
944 {
945 Package += '/';
946 Package += Section.FindS("Package");
947 }
948 else
949 Package += Section.FindS("Package");
950
951 DoPackage(File,Package);
952 if (_error->empty() == false)
953 {
954 _error->Error("Errors apply to file '%s'",File.c_str());
955 _error->DumpErrors();
956 }
957 }
958
959 // Tidy the compressor
960 Fd.Close();
961
962 return true;
963 }
964
965 /*}}}*/
966
967 // ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
968 // ---------------------------------------------------------------------
969 /* */
970 static std::string formatUTCDateTime(time_t const now)
971 {
972 bool const NumericTimezone = _config->FindB("APT::FTPArchive::Release::NumericTimezone", true);
973 // TimeRFC1123 uses GMT to satisfy HTTP/1.1
974 std::string datetime = TimeRFC1123(now, NumericTimezone);
975 if (NumericTimezone == false)
976 {
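      // TimeRFC1123 spelled the zone as "GMT" (see above); swap in the
      // traditional "UTC" spelling used by Release files.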
977 auto const lastspace = datetime.rfind(' ');
978 if (likely(lastspace != std::string::npos))
979 datetime.replace(lastspace + 1, 3, "UTC");
980 }
981 return datetime;
982 }
983 ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput)
984 {
985 if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
986 {
987 AddPattern("Packages");
988 AddPattern("Packages.*");
989 AddPattern("Translation-*");
990 AddPattern("Sources");
991 AddPattern("Sources.*");
992 AddPattern("Release");
993 AddPattern("Contents-*");
994 AddPattern("Index");
995 AddPattern("Index.*");
996 AddPattern("icons-*.tar");
997 AddPattern("icons-*.tar.*");
998 AddPattern("Components-*.yml");
999 AddPattern("Components-*.yml.*");
1000 AddPattern("md5sum.txt");
1001 }
1002 AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
1003
1004 time_t const now = time(NULL);
1005 time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
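   // With the default ValidTime of 0, validuntil equals now and no
   // Valid-Until field is emitted below.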
1006
1007 map<string,string> Fields;
1008 Fields["Origin"] = "";
1009 Fields["Label"] = "";
1010 Fields["Suite"] = "";
1011 Fields["Version"] = "";
1012 Fields["Codename"] = "";
1013 Fields["Date"] = formatUTCDateTime(now);
1014 if (validuntil != now)
1015 Fields["Valid-Until"] = formatUTCDateTime(validuntil);
1016 Fields["Architectures"] = "";
1017 Fields["Components"] = "";
1018 Fields["Description"] = "";
1019 Fields["Signed-By"] = "";
1020 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1021 Fields["Acquire-By-Hash"] = "true";
1022
1023 for(map<string,string>::const_iterator I = Fields.begin();
1024 I != Fields.end();
1025 ++I)
1026 {
1027 string Config = string("APT::FTPArchive::Release::") + (*I).first;
1028 string Value = _config->Find(Config, (*I).second.c_str());
1029 if (Value == "")
1030 continue;
1031
1032 std::string const out = I->first + ": " + Value + "\n";
1033 Output->Write(out.c_str(), out.length());
1034 }
1035
1036 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
1037 }
1038 /*}}}*/
1039 // ReleaseWriter::DoPackage - Process a single package /*{{{*/
1040 // ---------------------------------------------------------------------
1041 bool ReleaseWriter::DoPackage(string FileName)
1042 {
1043 // Strip the DirStrip prefix from the FileName and add the PathPrefix
1044 string NewFileName;
1045 if (DirStrip.empty() == false &&
1046 FileName.length() > DirStrip.length() &&
1047 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
1048 DirStrip.begin(),DirStrip.end()) == 0)
1049 {
1050 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
1051 while (NewFileName[0] == '/')
1052 NewFileName = string(NewFileName.begin() + 1,NewFileName.end());
1053 }
1054 else
1055 NewFileName = FileName;
1056
1057 if (PathPrefix.empty() == false)
1058 NewFileName = flCombine(PathPrefix,NewFileName);
1059
1060 FileFd fd(FileName, FileFd::ReadOnly);
1061
1062 if (!fd.IsOpen())
1063 {
1064 return false;
1065 }
1066
1067 CheckSums[NewFileName].size = fd.Size();
1068
1069 Hashes hs(DoHashes);
1070 hs.AddFD(fd);
1071 CheckSums[NewFileName].Hashes = hs.GetHashStringList();
1072 fd.Close();
1073
1074 // FIXME: wrong layer in the code(?)
1075 // FIXME2: symlink instead of creating a copy
1076 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1077 {
1078 std::string Input = FileName;
1079 HashStringList hsl = hs.GetHashStringList();
1080 for(HashStringList::const_iterator h = hsl.begin();
1081 h != hsl.end(); ++h)
1082 {
1083 if (!h->usable())
1084 continue;
1085 if (flNotDir(FileName) == "Release" || flNotDir(FileName) == "InRelease")
1086 continue;
1087
1088 std::string ByHashOutputFile = GenByHashFilename(Input, *h);
1089 std::string ByHashOutputDir = flNotFile(ByHashOutputFile);
1090 if(!CreateDirectory(flNotFile(Input), ByHashOutputDir))
1091 return _error->Warning("can not create dir %s", flNotFile(ByHashOutputFile).c_str());
1092
1093 // store the file under its by-hash name as well
1094 FileFd In(Input, FileFd::ReadOnly);
1095 FileFd Out(ByHashOutputFile, FileFd::WriteEmpty);
1096 if(!CopyFile(In, Out))
1097 return _error->Warning("failed to copy %s %s", Input.c_str(), ByHashOutputFile.c_str());
1098 }
1099 }
1100
1101 return true;
1102 }
1103
1104 /*}}}*/
1105 // ReleaseWriter::Finish - Output the checksums /*{{{*/
1106 // ---------------------------------------------------------------------
1107 static void printChecksumTypeRecord(FileFd &Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
1108 {
1109 {
1110 std::string out;
1111 strprintf(out, "%s:\n", Type);
1112 Output.Write(out.c_str(), out.length());
1113 }
1114 for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
1115 I != CheckSums.end(); ++I)
1116 {
1117 HashString const * const hs = I->second.Hashes.find(Type);
1118 if (hs == NULL)
1119 continue;
1120 std::string out;
1121 strprintf(out, " %s %16llu %s\n",
1122 hs->HashValue().c_str(),
1123 (*I).second.size,
1124 (*I).first.c_str());
1125 Output.Write(out.c_str(), out.length());
1126 }
1127 }
1128 void ReleaseWriter::Finish()
1129 {
1130 if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
1131 printChecksumTypeRecord(*Output, "MD5Sum", CheckSums);
1132 if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
1133 printChecksumTypeRecord(*Output, "SHA1", CheckSums);
1134 if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
1135 printChecksumTypeRecord(*Output, "SHA256", CheckSums);
1136 if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
1137 printChecksumTypeRecord(*Output, "SHA512", CheckSums);
1138
1139 // do the by-hash cleanup
1140 map<string,ReleaseWriter::CheckSum>::const_iterator prev = CheckSums.begin();
1141 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1142 {
1143 for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
1144 I != CheckSums.end(); ++I)
1145 {
1146 if (I->first == "Release" || I->first == "InRelease")
1147 continue;
1148
1149 // keep iterating until we find a new subdir
1150 if(flNotFile(I->first) == flNotFile(prev->first))
1151 continue;
1152
1153 // clean that subdir up
1154 int keepFiles = _config->FindI("APT::FTPArchive::By-Hash-Keep", 3);
1155 // calculate how many compressors are used (the number of files
1156 // generated in that subdir for this run)
1157 keepFiles *= std::distance(prev, I);
1158 prev = I;
1159
1160 HashStringList hsl = prev->second.Hashes;
1161 for(HashStringList::const_iterator h = hsl.begin();
1162 h != hsl.end(); ++h)
1163 {
1164
1165 if (!h->usable())
1166 continue;
1167
1168 std::string RealFilename = DirStrip+"/"+prev->first;
1169 std::string ByHashOutputFile = GenByHashFilename(RealFilename, *h);
1170 DeleteAllButMostRecent(flNotFile(ByHashOutputFile), keepFiles);
1171 }
1172 }
1173 }
1174 }