// apt.git / ftparchive / writer.cc
// (gitweb snapshot; commit subject: "implement CopyFile without using FileFd::Size()")
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: writer.cc,v 1.14 2004/03/24 01:40:43 mdz Exp $
4 /* ######################################################################
5
6 Writer
7
8 The file writer classes. These write various types of output, sources,
9 packages and contents.
10
11 ##################################################################### */
12 /*}}}*/
13 // Include Files /*{{{*/
14 #include <config.h>
15
16 #include <apt-pkg/configuration.h>
17 #include <apt-pkg/deblistparser.h>
18 #include <apt-pkg/error.h>
19 #include <apt-pkg/fileutl.h>
20 #include <apt-pkg/gpgv.h>
21 #include <apt-pkg/hashes.h>
22 #include <apt-pkg/md5.h>
23 #include <apt-pkg/strutl.h>
24 #include <apt-pkg/debfile.h>
25 #include <apt-pkg/pkgcache.h>
26 #include <apt-pkg/sha1.h>
27 #include <apt-pkg/sha2.h>
28 #include <apt-pkg/tagfile.h>
29
30 #include <ctype.h>
31 #include <fnmatch.h>
32 #include <ftw.h>
33 #include <locale.h>
34 #include <string.h>
35 #include <sys/stat.h>
36 #include <sys/types.h>
37 #include <unistd.h>
38 #include <ctime>
39 #include <iostream>
40 #include <sstream>
41 #include <memory>
42 #include <utility>
43 #include <algorithm>
44
45 #include "apt-ftparchive.h"
46 #include "writer.h"
47 #include "cachedb.h"
48 #include "multicompress.h"
49 #include "byhash.h"
50
51 #include <apti18n.h>
52 /*}}}*/
using namespace std;
// Static back-pointer to the scanner currently driving a tree walk.
// ftw() takes a plain function pointer with no user-data argument, so
// ScannerFTW() reaches the active instance through this global (set by
// RecursiveScan/LoadFileList before dispatching).
FTWScanner *FTWScanner::Owner;
55
56 // ConfigToDoHashes - which hashes to generate /*{{{*/
57 static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
58 {
59 if (_config->FindB(Conf, true) == true)
60 DoHashes |= Flag;
61 else
62 DoHashes &= ~Flag;
63 }
64 static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
65 {
66 SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
67 SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
68 SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
69 SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
70 }
71 /*}}}*/
72
73 // FTWScanner::FTWScanner - Constructor /*{{{*/
74 FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch): Arch(Arch), DoHashes(~0)
75 {
76 if (GivenOutput == NULL)
77 {
78 Output = new FileFd;
79 OwnsOutput = true;
80 Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
81 }
82 else
83 {
84 Output = GivenOutput;
85 OwnsOutput = false;
86 }
87 ErrorPrinted = false;
88 NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
89 ConfigToDoHashes(DoHashes, "APT::FTPArchive");
90 }
91 /*}}}*/
92 FTWScanner::~FTWScanner()
93 {
94 if (Output != NULL && OwnsOutput)
95 delete Output;
96 }
97 // FTWScanner::Scanner - FTW Scanner /*{{{*/
98 // ---------------------------------------------------------------------
99 /* This is the FTW scanner, it processes each directory element in the
100 directory tree. */
101 int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
102 {
103 if (Flag == FTW_DNR)
104 {
105 Owner->NewLine(1);
106 ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
107 }
108 if (Flag == FTW_NS)
109 {
110 Owner->NewLine(1);
111 ioprintf(c1out, _("W: Unable to stat %s\n"), File);
112 }
113 if (Flag != FTW_F)
114 return 0;
115
116 return ScannerFile(File, true);
117 }
118 /*}}}*/
119 // FTWScanner::ScannerFile - File Scanner /*{{{*/
120 // ---------------------------------------------------------------------
121 /* */
/* Filter a single file against the glob Patterns and, on a match, hand it
   to the owning writer's DoPackage(). Errors raised by DoPackage are
   drained and printed here; the return value is always 0 so the caller
   (ftw or LoadFileList) keeps walking. */
int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
{
   // Only the basename is matched against the patterns.
   const char *LastComponent = strrchr(File, '/');
   char *RealPath = NULL;

   if (LastComponent == NULL)
      LastComponent = File;
   else
      LastComponent++;

   vector<string>::const_iterator I;
   for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
   {
      if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
	 break;
   }
   // No pattern matched: silently skip this file.
   if (I == Owner->Patterns.end())
      return 0;

   /* Process it. If the file is a link then resolve it into an absolute
      name.. This works best if the directory components the scanner are
      given are not links themselves. */
   // The 2-byte Jnk buffer is only a cheap "is this a symlink?" probe:
   // the result is tested against -1, never read.
   char Jnk[2];
   Owner->OriginalPath = File;
   if (ReadLink &&
       readlink(File,Jnk,sizeof(Jnk)) != -1 &&
       (RealPath = realpath(File,NULL)) != 0)
   {
      Owner->DoPackage(RealPath);
      free(RealPath);
   }
   else
      Owner->DoPackage(File);

   if (_error->empty() == false)
   {
      // Print any errors or warnings found
      string Err;
      bool SeenPath = false;
      while (_error->empty() == false)
      {
	 Owner->NewLine(1);

	 bool const Type = _error->PopMessage(Err);
	 if (Type == true)
	    cerr << _("E: ") << Err << endl;
	 else
	    cerr << _("W: ") << Err << endl;

	 // Track whether any message already named the file, so we only
	 // add the attribution line below when it is missing.
	 if (Err.find(File) != string::npos)
	    SeenPath = true;
      }

      if (SeenPath == false)
	 cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
      return 0;
   }

   return 0;
}
182 /*}}}*/
183 // FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
184 // ---------------------------------------------------------------------
185 /* */
186 bool FTWScanner::RecursiveScan(string const &Dir)
187 {
188 char *RealPath = NULL;
189 /* If noprefix is set then jam the scan root in, so we don't generate
190 link followed paths out of control */
191 if (InternalPrefix.empty() == true)
192 {
193 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
194 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
195 InternalPrefix = RealPath;
196 free(RealPath);
197 }
198
199 // Do recursive directory searching
200 Owner = this;
201 int const Res = ftw(Dir.c_str(),ScannerFTW,30);
202
203 // Error treewalking?
204 if (Res != 0)
205 {
206 if (_error->PendingError() == false)
207 _error->Errno("ftw",_("Tree walking failed"));
208 return false;
209 }
210
211 return true;
212 }
213 /*}}}*/
214 // FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
215 // ---------------------------------------------------------------------
216 /* This is an alternative to using FTW to locate files, it reads the list
217 of files from another file. */
218 bool FTWScanner::LoadFileList(string const &Dir, string const &File)
219 {
220 char *RealPath = NULL;
221 /* If noprefix is set then jam the scan root in, so we don't generate
222 link followed paths out of control */
223 if (InternalPrefix.empty() == true)
224 {
225 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
226 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
227 InternalPrefix = RealPath;
228 free(RealPath);
229 }
230
231 Owner = this;
232 FILE *List = fopen(File.c_str(),"r");
233 if (List == 0)
234 return _error->Errno("fopen",_("Failed to open %s"),File.c_str());
235
236 /* We are a tad tricky here.. We prefix the buffer with the directory
237 name, that way if we need a full path with just use line.. Sneaky and
238 fully evil. */
239 char Line[1000];
240 char *FileStart;
241 if (Dir.empty() == true || Dir.end()[-1] != '/')
242 FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
243 else
244 FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
245 while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
246 {
247 char *FileName = _strstrip(FileStart);
248 if (FileName[0] == 0)
249 continue;
250
251 if (FileName[0] != '/')
252 {
253 if (FileName != FileStart)
254 memmove(FileStart,FileName,strlen(FileStart));
255 FileName = Line;
256 }
257
258 #if 0
259 struct stat St;
260 int Flag = FTW_F;
261 if (stat(FileName,&St) != 0)
262 Flag = FTW_NS;
263 #endif
264
265 if (ScannerFile(FileName, false) != 0)
266 break;
267 }
268
269 fclose(List);
270 return true;
271 }
272 /*}}}*/
273 // FTWScanner::Delink - Delink symlinks /*{{{*/
274 // ---------------------------------------------------------------------
275 /* */
/* Replace the symlink at OriginalPath with a hard link to the file it
   resolves to (FileName), crediting the file's size against the DeLink
   budget. On any failure the symlink is restored and an error returned.
   In all cases where the path lies outside InternalPrefix, FileName is
   rewritten to OriginalPath so the caller records the original location. */
bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
			unsigned long long &DeLinkBytes,
			unsigned long long const &FileSize)
{
   // See if this isn't an internaly prefix'd file name.
   if (InternalPrefix.empty() == false &&
       InternalPrefix.length() < FileName.length() &&
       stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
		 InternalPrefix.begin(),InternalPrefix.end()) != 0)
   {
      // Only act while the running byte total stays under the KiB limit.
      if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
      {
	 // Tidy up the display
	 if (DeLinkBytes == 0)
	    cout << endl;

	 NewLine(1);
	 ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
		  SizeToStr(FileSize).c_str());
	 c1out << flush;

	 // NoLinkAct == true means dry-run: report but leave links alone.
	 if (NoLinkAct == false)
	 {
	    // Remember the link target so we can roll back on failure.
	    // NOTE(review): readlink() does not NUL-terminate and the
	    // result length is discarded; a target >= 400 bytes would
	    // leave OldLink unterminated for the symlink() below -- confirm.
	    char OldLink[400];
	    if (readlink(OriginalPath,OldLink,sizeof(OldLink)) == -1)
	       _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
	    else
	    {
	       if (unlink(OriginalPath) != 0)
		  _error->Errno("unlink",_("Failed to unlink %s"),OriginalPath);
	       else
	       {
		  if (link(FileName.c_str(),OriginalPath) != 0)
		  {
		     // Panic! Restore the symlink
		     if (symlink(OldLink,OriginalPath) != 0)
			_error->Errno("symlink", "failed to restore symlink");
		     return _error->Errno("link",_("*** Failed to link %s to %s"),
					  FileName.c_str(),
					  OriginalPath);
		  }
	       }
	    }
	 }

	 DeLinkBytes += FileSize;
	 if (DeLinkBytes/1024 >= DeLinkLimit)
	    ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
      }

      // Report the original (pre-resolution) path to the caller.
      FileName = OriginalPath;
   }

   return true;
}
331 /*}}}*/
332
333 // PackagesWriter::PackagesWriter - Constructor /*{{{*/
334 // ---------------------------------------------------------------------
335 /* */
/* Set up a Packages-file writer: registers *.deb/*.udeb patterns for the
   configured architecture, reads the relevant config switches and loads
   the (extra) override files. Construction errors are left in _error and
   dumped at the end rather than thrown. */
PackagesWriter::PackagesWriter(FileFd * const GivenOutput, TranslationWriter * const transWriter,
			       string const &DB,string const &Overrides,string const &ExtOverrides,
			       string const &Arch) :
   FTWScanner(GivenOutput, Arch), Db(DB), Stats(Db.Stats), TransWriter(transWriter)
{
   SetExts(".deb .udeb");
   DeLinkLimit = 0;

   // Process the command line options
   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
   DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
   DoContents = _config->FindB("APT::FTPArchive::Contents",true);
   NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
   LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);

   // Contents generation needs the cache database.
   if (Db.Loaded() == false)
      DoContents = false;

   // Read the override file
   // NOTE(review): a failed read aborts construction, while a successful
   // read (or no override file at all) sets NoOverride -- this mirrors
   // the questioned logic in SourcesWriter; preserved as-is.
   if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
      return;
   else
      NoOverride = true;

   if (ExtOverrides.empty() == false)
      Over.ReadExtraOverride(ExtOverrides);

   _error->DumpErrors();
}
365 /*}}}*/
366 // FTWScanner::SetExts - Set extensions to support /*{{{*/
367 // ---------------------------------------------------------------------
368 /* */
369 bool FTWScanner::SetExts(string const &Vals)
370 {
371 ClearPatterns();
372 string::size_type Start = 0;
373 while (Start <= Vals.length()-1)
374 {
375 string::size_type const Space = Vals.find(' ',Start);
376 string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
377 if ( Arch.empty() == false )
378 {
379 AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
380 AddPattern(string("*_all") + Vals.substr(Start, Length));
381 }
382 else
383 AddPattern(string("*") + Vals.substr(Start, Length));
384
385 Start += Length + 1;
386 }
387
388 return true;
389 }
390 /*}}}*/
391 // PackagesWriter::DoPackage - Process a single package /*{{{*/
392 // ---------------------------------------------------------------------
393 /* This method takes a package and gets its control information and
394 MD5, SHA1 and SHA256 then writes out a control record with the proper fields
395 rewritten and the path/size/hash appended. */
/* Process one .deb: fetch control data and hashes from the cache DB,
   apply override information, and emit the rewritten stanza (with
   Filename/Size/hash fields appended) to Output. Returns false on any
   DB or write failure. */
bool PackagesWriter::DoPackage(string FileName)
{
   // Pull all the data we need form the DB
   if (Db.GetFileInfo(FileName,
		      true, /* DoControl */
		      DoContents,
		      true, /* GenContentsOnly */
		      false, /* DoSource */
		      DoHashes, DoAlwaysStat) == false)
   {
      return false;
   }

   // Possibly convert a delink-prefixed symlink back to its original path.
   unsigned long long FileSize = Db.GetFileSize();
   if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
      return false;

   // Lookup the overide information
   pkgTagSection &Tags = Db.Control.Section;
   string Package = Tags.FindS("Package");
   string Architecture;
   // if we generate a Packages file for a given arch, we use it to
   // look for overrides. if we run in "simple" mode without the
   // "Architecures" variable in the config we use the architecure value
   // from the deb file
   if(Arch != "")
      Architecture = Arch;
   else
      Architecture = Tags.FindS("Architecture");
   unique_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));

   if (Package.empty() == true)
      return _error->Error(_("Archive had no package field"));

   // If we need to do any rewriting of the header do it now..
   if (OverItem.get() == 0)
   {
      if (NoOverride == false)
      {
	 NewLine(1);
	 ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
      }

      // Synthesise an override entry from the package's own fields.
      OverItem = unique_ptr<Override::Item>(new Override::Item);
      OverItem->FieldOverride["Section"] = Tags.FindS("Section");
      OverItem->Priority = Tags.FindS("Priority");
   }

   // Strip the DirStrip prefix from the FileName and add the PathPrefix
   string NewFileName;
   if (DirStrip.empty() == false &&
       FileName.length() > DirStrip.length() &&
       stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
		 DirStrip.begin(),DirStrip.end()) == 0)
      NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
   else
      NewFileName = FileName;
   if (PathPrefix.empty() == false)
      NewFileName = flCombine(PathPrefix,NewFileName);

   /* Configuration says we don't want to include the long Description
      in the package file - instead we want to ship a separated file */
   string desc;
   if (LongDescription == false) {
      // Keep only the first (summary) line in the Packages stanza.
      desc = Tags.FindS("Description").append("\n");
      OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
   }

   // This lists all the changes to the fields we are going to make.
   std::vector<pkgTagSection::Tag> Changes;

   std::string Size;
   strprintf(Size, "%llu", (unsigned long long) FileSize);
   Changes.push_back(pkgTagSection::Tag::Rewrite("Size", Size));

   // Emit each configured hash field; the file-size pseudo-hash is
   // skipped because Size is written explicitly above.
   for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
   {
      if (hs->HashType() == "MD5Sum")
	 Changes.push_back(pkgTagSection::Tag::Rewrite("MD5sum", hs->HashValue()));
      else if (hs->HashType() == "Checksum-FileSize")
	 continue;
      else
	 Changes.push_back(pkgTagSection::Tag::Rewrite(hs->HashType(), hs->HashValue()));
   }
   Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", NewFileName));
   Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", OverItem->Priority));
   Changes.push_back(pkgTagSection::Tag::Remove("Status"));
   Changes.push_back(pkgTagSection::Tag::Remove("Optional"));

   // Short-description mode: record the md5 of the full description and
   // ship the long text through the translation writer instead.
   string DescriptionMd5;
   if (LongDescription == false) {
      MD5Summation descmd5;
      descmd5.Add(desc.c_str());
      DescriptionMd5 = descmd5.Result().Value();
      Changes.push_back(pkgTagSection::Tag::Rewrite("Description-md5", DescriptionMd5));
      if (TransWriter != NULL)
	 TransWriter->DoPackage(Package, desc, DescriptionMd5);
   }

   // Rewrite the maintainer field if necessary
   bool MaintFailed;
   string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
   if (MaintFailed == true)
   {
      if (NoOverride == false)
      {
	 NewLine(1);
	 ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
		  Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
      }
   }

   if (NewMaint.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint));

   /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
      dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
      but dpkg does this append bit. So we do the append bit, at least that way the
      status file and package file will remain similar. There are other transforms
      but optional is the only legacy one still in use for some lazy reason. */
   string OptionalStr = Tags.FindS("Optional");
   if (OptionalStr.empty() == false)
   {
      if (Tags.FindS("Suggests").empty() == false)
	 OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
      Changes.push_back(pkgTagSection::Tag::Rewrite("Suggests", OptionalStr));
   }

   // Apply the per-field overrides last so they win over our rewrites.
   for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
	I != OverItem->FieldOverride.end(); ++I)
      Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));

   // Rewrite and store the fields.
   if (Tags.Write(*Output, TFRewritePackageOrder, Changes) == false ||
       Output->Write("\n", 1) == false)
      return false;

   return Db.Finish();
}
535 /*}}}*/
// Destructor: nothing to release -- members clean up via their own
// destructors and the base class handles the output stream.
PackagesWriter::~PackagesWriter()					/*{{{*/
{
}
539 /*}}}*/
540
541 // TranslationWriter::TranslationWriter - Constructor /*{{{*/
542 // ---------------------------------------------------------------------
543 /* Create a Translation-Master file for this Packages file */
544 TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
545 mode_t const &Permissions) : Comp(NULL), Output(NULL)
546 {
547 if (File.empty() == true)
548 return;
549
550 Comp = new MultiCompress(File, TransCompress, Permissions);
551 Output = &Comp->Input;
552 }
553 /*}}}*/
554 // TranslationWriter::DoPackage - Process a single package /*{{{*/
555 // ---------------------------------------------------------------------
556 /* Create a Translation-Master file for this Packages file */
557 bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
558 string const &MD5)
559 {
560 if (Output == NULL)
561 return true;
562
563 // Different archs can include different versions and therefore
564 // different descriptions - so we need to check for both name and md5.
565 string const Record = Pkg + ":" + MD5;
566
567 if (Included.find(Record) != Included.end())
568 return true;
569
570 std::string out;
571 strprintf(out, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
572 Pkg.c_str(), MD5.c_str(), Desc.c_str());
573 Output->Write(out.c_str(), out.length());
574
575 Included.insert(Record);
576 return true;
577 }
578 /*}}}*/
579 // TranslationWriter::~TranslationWriter - Destructor /*{{{*/
580 // ---------------------------------------------------------------------
581 /* */
582 TranslationWriter::~TranslationWriter()
583 {
584 if (Comp != NULL)
585 delete Comp;
586 }
587 /*}}}*/
588
589 // SourcesWriter::SourcesWriter - Constructor /*{{{*/
590 // ---------------------------------------------------------------------
591 /* */
592 SourcesWriter::SourcesWriter(FileFd * const GivenOutput, string const &DB, string const &BOverrides,string const &SOverrides,
593 string const &ExtOverrides) :
594 FTWScanner(GivenOutput), Db(DB), Stats(Db.Stats)
595 {
596 AddPattern("*.dsc");
597 DeLinkLimit = 0;
598 Buffer = 0;
599 BufSize = 0;
600
601 // Process the command line options
602 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
603 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
604 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
605
606 // Read the override file
607 if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
608 return;
609 else
610 NoOverride = true;
611
612 // WTF?? The logic above: if we can't read binary overrides, don't even try
613 // reading source overrides. if we can read binary overrides, then say there
614 // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28
615
616 if (ExtOverrides.empty() == false)
617 SOver.ReadExtraOverride(ExtOverrides);
618
619 if (SOverrides.empty() == false && FileExists(SOverrides) == true)
620 SOver.ReadOverride(SOverrides,true);
621 }
622 /*}}}*/
623 // SourcesWriter::DoPackage - Process a single package /*{{{*/
624 static std::string getDscHash(unsigned int const DoHashes,
625 Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
626 HashString const * const Hash, unsigned long long Size, std::string FileName)
627 {
628 if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
629 return "";
630 std::ostringstream out;
631 out << "\n " << Hash->HashValue() << " " << Size << " " << FileName
632 << "\n " << Tags.FindS(FieldName);
633 return out.str();
634 }
/* Process one .dsc: fetch its data from the cache DB, fold in binary and
   source override information, compute/complete the per-file checksum
   lists, optionally delink the referenced files, and emit the rewritten
   stanza to Output. */
bool SourcesWriter::DoPackage(string FileName)
{
   // Pull all the data we need form the DB
   if (Db.GetFileInfo(FileName,
		      false, /* DoControl */
		      false, /* DoContents */
		      false, /* GenContentsOnly */
		      true, /* DoSource */
		      DoHashes, DoAlwaysStat) == false)
   {
      return false;
   }

   // we need to perform a "write" here (this is what finish is doing)
   // because the call to Db.GetFileInfo() in the loop will change
   // the "db cursor"
   Db.Finish();

   pkgTagSection Tags;
   if (Tags.Scan(Db.Dsc.Data.c_str(), Db.Dsc.Data.length()) == false)
      return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());

   if (Tags.Exists("Source") == false)
      return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
   Tags.Trim();

   // Lookup the overide information, finding first the best priority.
   string BestPrio;
   string Bins = Tags.FindS("Binary");
   // NOTE(review): variable-length array -- a GNU extension, not standard C++.
   char Buffer[Bins.length() + 1];
   unique_ptr<Override::Item> OverItem(nullptr);
   if (Bins.empty() == false)
   {
      strcpy(Buffer,Bins.c_str());

      // Ignore too-long errors.
      char *BinList[400];
      TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));

      // Look at all the binaries
      unsigned char BestPrioV = pkgCache::State::Extra;
      for (unsigned I = 0; BinList[I] != 0; I++)
      {
	 unique_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
	 if (Itm.get() == 0)
	    continue;

	 // Numerically lower priority value wins (presumably "required"
	 // < "important" < ... per debListParser::GetPrio -- confirm).
	 unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
	 if (NewPrioV < BestPrioV || BestPrio.empty() == true)
	 {
	    BestPrioV = NewPrioV;
	    BestPrio = Itm->Priority;
	 }

	 // The first binary with an override supplies the base item.
	 if (OverItem.get() == 0)
	    OverItem = std::move(Itm);
      }
   }

   // If we need to do any rewriting of the header do it now..
   if (OverItem.get() == 0)
   {
      if (NoOverride == false)
      {
	 NewLine(1);
	 ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());
      }

      OverItem.reset(new Override::Item);
   }

   struct stat St;
   // NOTE(review): the message names "fstat" but the call is stat().
   if (stat(FileName.c_str(), &St) != 0)
      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());

   // Source override, falling back to the binary override table, and
   // finally to a copy of the synthesised/first binary item.
   unique_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
   // const unique_ptr<Override::Item> autoSOverItem(SOverItem);
   if (SOverItem.get() == 0)
   {
      ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
      SOverItem = unique_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
      if (SOverItem.get() == 0)
      {
	 ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
	 SOverItem = unique_ptr<Override::Item>(new Override::Item);
	 *SOverItem = *OverItem;
      }
   }

   // Add the dsc to the files hash list
   string const strippedName = flNotDir(FileName);
   std::string const Files = getDscHash(DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
   std::string ChecksumsSha1 = getDscHash(DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
   std::string ChecksumsSha256 = getDscHash(DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
   std::string ChecksumsSha512 = getDscHash(DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);

   // Strip the DirStrip prefix from the FileName and add the PathPrefix
   // (note: this branch works on the member OriginalPath, not FileName).
   string NewFileName;
   if (DirStrip.empty() == false &&
       FileName.length() > DirStrip.length() &&
       stringcmp(DirStrip,OriginalPath,OriginalPath + DirStrip.length()) == 0)
      NewFileName = string(OriginalPath + DirStrip.length());
   else
      NewFileName = OriginalPath;
   if (PathPrefix.empty() == false)
      NewFileName = flCombine(PathPrefix,NewFileName);

   string Directory = flNotFile(OriginalPath);
   string Package = Tags.FindS("Source");

   // Perform operation over all of the files
   string ParseJnk;
   const char *C = Files.c_str();
   char *RealPath = NULL;
   for (;isspace(*C); C++);
   while (*C != 0)
   {
      // Parse each of the elements: hash, size, then the file name
      // (kept in ParseJnk).
      if (ParseQuoteWord(C,ParseJnk) == false ||
	  ParseQuoteWord(C,ParseJnk) == false ||
	  ParseQuoteWord(C,ParseJnk) == false)
	 return _error->Error("Error parsing file record");

      // Local OriginalPath deliberately shadows the member of the same
      // name for the remainder of this loop body.
      string OriginalPath = Directory + ParseJnk;

      // Add missing hashes to source files
      if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
	  ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
	  ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
      {
	 if (Db.GetFileInfo(OriginalPath,
			    false, /* DoControl */
			    false, /* DoContents */
			    false, /* GenContentsOnly */
			    false, /* DoSource */
			    DoHashes,
			    DoAlwaysStat) == false)
	 {
	    return _error->Error("Error getting file info");
	 }

	 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
	 {
	    if (hs->HashType() == "MD5Sum" || hs->HashType() == "Checksum-FileSize")
	       continue;
	    // Route each digest into its matching Checksums-* buffer.
	    char const * fieldname;
	    std::string * out;
	    if (hs->HashType() == "SHA1")
	    {
	       fieldname = "Checksums-Sha1";
	       out = &ChecksumsSha1;
	    }
	    else if (hs->HashType() == "SHA256")
	    {
	       fieldname = "Checksums-Sha256";
	       out = &ChecksumsSha256;
	    }
	    else if (hs->HashType() == "SHA512")
	    {
	       fieldname = "Checksums-Sha512";
	       out = &ChecksumsSha512;
	    }
	    else
	    {
	       _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
	       continue;
	    }
	    // Only fill in fields the dsc did not provide itself.
	    if (Tags.Exists(fieldname) == true)
	       continue;
	    std::ostringstream streamout;
	    streamout << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk;
	    out->append(streamout.str());
	 }

	 // write back the GetFileInfo() stats data
	 Db.Finish();
      }

      // Perform the delinking operation
      // (2-byte probe: readlink's result is only tested, never read)
      char Jnk[2];

      if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
	  (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
      {
	 string RP = RealPath;
	 free(RealPath);
	 if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
	    return false;
      }
   }

   // Trim the trailing slash off the Directory field.
   Directory = flNotFile(NewFileName);
   if (Directory.length() > 2)
      Directory.erase(Directory.end()-1);

   // This lists all the changes to the fields we are going to make.
   // (5 hardcoded + checksums + maintainer + end marker)
   std::vector<pkgTagSection::Tag> Changes;

   Changes.push_back(pkgTagSection::Tag::Remove("Source"));
   Changes.push_back(pkgTagSection::Tag::Rewrite("Package", Package));
   if (Files.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Files", Files));
   if (ChecksumsSha1.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha1", ChecksumsSha1));
   if (ChecksumsSha256.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha256", ChecksumsSha256));
   if (ChecksumsSha512.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha512", ChecksumsSha512));
   if (Directory != "./")
      Changes.push_back(pkgTagSection::Tag::Rewrite("Directory", Directory));
   Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", BestPrio));
   Changes.push_back(pkgTagSection::Tag::Remove("Status"));

   // Rewrite the maintainer field if necessary
   bool MaintFailed;
   string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"), MaintFailed);
   if (MaintFailed == true)
   {
      if (NoOverride == false)
      {
	 NewLine(1);
	 ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
		  Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
      }
   }
   if (NewMaint.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint.c_str()));

   // Apply the per-field overrides last so they win over our rewrites.
   for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
	I != SOverItem->FieldOverride.end(); ++I)
      Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));

   // Rewrite and store the fields.
   if (Tags.Write(*Output, TFRewriteSourceOrder, Changes) == false ||
       Output->Write("\n", 1) == false)
      return false;

   Stats.Packages++;

   return true;
}
877 /*}}}*/
878
879 // ContentsWriter::ContentsWriter - Constructor /*{{{*/
880 // ---------------------------------------------------------------------
881 /* */
882 ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB, string const &Arch) :
883 FTWScanner(GivenOutput, Arch), Db(DB), Stats(Db.Stats)
884
885 {
886 SetExts(".deb");
887 }
888 /*}}}*/
889 // ContentsWriter::DoPackage - Process a single package /*{{{*/
890 // ---------------------------------------------------------------------
891 /* If Package is the empty string the control record will be parsed to
892 determine what the package name is. */
893 bool ContentsWriter::DoPackage(string FileName, string Package)
894 {
895 if (!Db.GetFileInfo(FileName,
896 Package.empty(), /* DoControl */
897 true, /* DoContents */
898 false, /* GenContentsOnly */
899 false, /* DoSource */
900 0, /* DoHashes */
901 false /* checkMtime */))
902 {
903 return false;
904 }
905
906 // Parse the package name
907 if (Package.empty() == true)
908 {
909 Package = Db.Control.Section.FindS("Package");
910 }
911
912 Db.Contents.Add(Gen,Package);
913
914 return Db.Finish();
915 }
916 /*}}}*/
917 // ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
918 // ---------------------------------------------------------------------
919 /* */
920 bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
921 {
922 MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
923 if (_error->PendingError() == true)
924 return false;
925
926 // Open the package file
927 FileFd Fd;
928 if (Pkgs.OpenOld(Fd) == false)
929 return false;
930
931 pkgTagFile Tags(&Fd);
932 if (_error->PendingError() == true)
933 return false;
934
935 // Parse.
936 pkgTagSection Section;
937 while (Tags.Step(Section) == true)
938 {
939 string File = flCombine(Prefix,Section.FindS("FileName"));
940 string Package = Section.FindS("Section");
941 if (Package.empty() == false && Package.end()[-1] != '/')
942 {
943 Package += '/';
944 Package += Section.FindS("Package");
945 }
946 else
947 Package += Section.FindS("Package");
948
949 DoPackage(File,Package);
950 if (_error->empty() == false)
951 {
952 _error->Error("Errors apply to file '%s'",File.c_str());
953 _error->DumpErrors();
954 }
955 }
956
957 // Tidy the compressor
958 Fd.Close();
959
960 return true;
961 }
962
963 /*}}}*/
964
965 // ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
966 // ---------------------------------------------------------------------
967 /* */
968 ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput)
969 {
970 if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
971 {
972 AddPattern("Packages");
973 AddPattern("Packages.gz");
974 AddPattern("Packages.bz2");
975 AddPattern("Packages.lzma");
976 AddPattern("Packages.xz");
977 AddPattern("Translation-*");
978 AddPattern("Sources");
979 AddPattern("Sources.gz");
980 AddPattern("Sources.bz2");
981 AddPattern("Sources.lzma");
982 AddPattern("Sources.xz");
983 AddPattern("Release");
984 AddPattern("Contents-*");
985 AddPattern("Index");
986 AddPattern("md5sum.txt");
987 }
988 AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
989
990 time_t const now = time(NULL);
991
992 setlocale(LC_TIME, "C");
993
994 char datestr[128];
995 if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
996 gmtime(&now)) == 0)
997 {
998 datestr[0] = '\0';
999 }
1000
1001 time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
1002 char validstr[128];
1003 if (now == validuntil ||
1004 strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
1005 gmtime(&validuntil)) == 0)
1006 {
1007 validstr[0] = '\0';
1008 }
1009
1010 setlocale(LC_TIME, "");
1011
1012 map<string,string> Fields;
1013 Fields["Origin"] = "";
1014 Fields["Label"] = "";
1015 Fields["Suite"] = "";
1016 Fields["Version"] = "";
1017 Fields["Codename"] = "";
1018 Fields["Date"] = datestr;
1019 Fields["Valid-Until"] = validstr;
1020 Fields["Architectures"] = "";
1021 Fields["Components"] = "";
1022 Fields["Description"] = "";
1023 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1024 Fields["Acquire-By-Hash"] = "true";
1025
1026 for(map<string,string>::const_iterator I = Fields.begin();
1027 I != Fields.end();
1028 ++I)
1029 {
1030 string Config = string("APT::FTPArchive::Release::") + (*I).first;
1031 string Value = _config->Find(Config, (*I).second.c_str());
1032 if (Value == "")
1033 continue;
1034
1035 std::string const out = I->first + ": " + Value + "\n";
1036 Output->Write(out.c_str(), out.length());
1037 }
1038
1039 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
1040 }
1041 /*}}}*/
1042 // ReleaseWriter::DoPackage - Process a single package /*{{{*/
1043 // ---------------------------------------------------------------------
1044 bool ReleaseWriter::DoPackage(string FileName)
1045 {
1046 // Strip the DirStrip prefix from the FileName and add the PathPrefix
1047 string NewFileName;
1048 if (DirStrip.empty() == false &&
1049 FileName.length() > DirStrip.length() &&
1050 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
1051 DirStrip.begin(),DirStrip.end()) == 0)
1052 {
1053 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
1054 while (NewFileName[0] == '/')
1055 NewFileName = string(NewFileName.begin() + 1,NewFileName.end());
1056 }
1057 else
1058 NewFileName = FileName;
1059
1060 if (PathPrefix.empty() == false)
1061 NewFileName = flCombine(PathPrefix,NewFileName);
1062
1063 FileFd fd(FileName, FileFd::ReadOnly);
1064
1065 if (!fd.IsOpen())
1066 {
1067 return false;
1068 }
1069
1070 CheckSums[NewFileName].size = fd.Size();
1071
1072 Hashes hs(DoHashes);
1073 hs.AddFD(fd);
1074 CheckSums[NewFileName].Hashes = hs.GetHashStringList();
1075 fd.Close();
1076
1077 // FIXME: wrong layer in the code(?)
1078 // FIXME2: symlink instead of create a copy
1079 if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
1080 {
1081 std::string Input = FileName;
1082 HashStringList hsl = hs.GetHashStringList();
1083 for(HashStringList::const_iterator h = hsl.begin();
1084 h != hsl.end(); ++h)
1085 {
1086 if (!h->usable())
1087 continue;
1088 if (flNotDir(FileName) == "Release" || flNotDir(FileName) == "InRelease")
1089 continue;
1090
1091 std::string ByHashOutputFile = GenByHashFilename(Input, *h);
1092 std::string ByHashOutputDir = flNotFile(ByHashOutputFile);
1093 if(!CreateDirectory(flNotFile(Input), ByHashOutputDir))
1094 return _error->Warning("can not create dir %s", flNotFile(ByHashOutputFile).c_str());
1095
1096 // write new hashes
1097 FileFd In(Input, FileFd::ReadOnly);
1098 FileFd Out(ByHashOutputFile, FileFd::WriteEmpty);
1099 if(!CopyFile(In, Out))
1100 return _error->Warning("failed to copy %s %s", Input.c_str(), ByHashOutputFile.c_str());
1101 }
1102 }
1103
1104 return true;
1105 }
1106
1107 /*}}}*/
1108 // ReleaseWriter::Finish - Output the checksums /*{{{*/
1109 // ---------------------------------------------------------------------
1110 static void printChecksumTypeRecord(FileFd &Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
1111 {
1112 {
1113 std::string out;
1114 strprintf(out, "%s:\n", Type);
1115 Output.Write(out.c_str(), out.length());
1116 }
1117 for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
1118 I != CheckSums.end(); ++I)
1119 {
1120 HashString const * const hs = I->second.Hashes.find(Type);
1121 if (hs == NULL)
1122 continue;
1123 std::string out;
1124 strprintf(out, " %s %16llu %s\n",
1125 hs->HashValue().c_str(),
1126 (*I).second.size,
1127 (*I).first.c_str());
1128 Output.Write(out.c_str(), out.length());
1129 }
1130 }
// Emit the checksum sections for every enabled hash algorithm, then
// prune stale by-hash files if by-hash support is active.
void ReleaseWriter::Finish()
{
   if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
      printChecksumTypeRecord(*Output, "MD5Sum", CheckSums);
   if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
      printChecksumTypeRecord(*Output, "SHA1", CheckSums);
   if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
      printChecksumTypeRecord(*Output, "SHA256", CheckSums);
   if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
      printChecksumTypeRecord(*Output, "SHA512", CheckSums);

   // go by-hash cleanup
   // CheckSums is sorted by path, so all files of one subdir are
   // adjacent; 'prev' marks the start of the current subdir group.
   map<string,ReleaseWriter::CheckSum>::const_iterator prev = CheckSums.begin();
   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
   {
      for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
	  I != CheckSums.end(); ++I)
      {
	 // Release/InRelease have no by-hash copies to clean up.
	 if (I->first == "Release" || I->first == "InRelease")
	    continue;

	 // keep iterating until we find a new subdir
	 // NOTE(review): because cleanup only fires on the transition to a
	 // new subdir, the final subdir group appears never to be cleaned
	 // after the loop ends -- confirm whether that is intentional.
	 if(flNotFile(I->first) == flNotFile(prev->first))
	    continue;

	 // clean that subdir up
	 int keepFiles = _config->FindI("APT::FTPArchive::By-Hash-Keep", 3);
	 // calculate how many compressors are used (the amount of files
	 // in that subdir generated for this run)
	 keepFiles *= std::distance(prev, I);
	 prev = I;

	 // Delete older generations in each hash-type directory, keeping
	 // only the most recent keepFiles entries.
	 HashStringList hsl = prev->second.Hashes;
	 for(HashStringList::const_iterator h = hsl.begin();
	     h != hsl.end(); ++h)
	 {

	    if (!h->usable())
	       continue;

	    // Re-derive the on-disk by-hash path from the stripped name.
	    std::string RealFilename = DirStrip+"/"+prev->first;
	    std::string ByHashOutputFile = GenByHashFilename(RealFilename, *h);
	    DeleteAllButMostRecent(flNotFile(ByHashOutputFile), keepFiles);
	 }
      }
   }
}