1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: writer.cc,v 1.14 2004/03/24 01:40:43 mdz Exp $
4 /* ######################################################################
5
6 Writer
7
8 The file writer classes. These write various types of output, sources,
9 packages and contents.
10
11 ##################################################################### */
12 /*}}}*/
13 // Include Files /*{{{*/
14 #include <config.h>
15
16 #include <apt-pkg/configuration.h>
17 #include <apt-pkg/deblistparser.h>
18 #include <apt-pkg/error.h>
19 #include <apt-pkg/fileutl.h>
20 #include <apt-pkg/gpgv.h>
21 #include <apt-pkg/hashes.h>
22 #include <apt-pkg/md5.h>
23 #include <apt-pkg/strutl.h>
24 #include <apt-pkg/debfile.h>
25 #include <apt-pkg/pkgcache.h>
26 #include <apt-pkg/sha1.h>
27 #include <apt-pkg/sha2.h>
28 #include <apt-pkg/tagfile.h>
29
30 #include <ctype.h>
31 #include <fnmatch.h>
32 #include <ftw.h>
33 #include <locale.h>
34 #include <string.h>
35 #include <sys/stat.h>
36 #include <sys/types.h>
37 #include <unistd.h>
38 #include <ctime>
39 #include <iostream>
40 #include <sstream>
41 #include <memory>
42 #include <utility>
43
44 #include "apt-ftparchive.h"
45 #include "writer.h"
46 #include "cachedb.h"
47 #include "multicompress.h"
48
49 #include <apti18n.h>
50 /*}}}*/
51 using namespace std;
52 FTWScanner *FTWScanner::Owner;
53
54 // SetTFRewriteData - Helper for setting rewrite lists /*{{{*/
55 // ---------------------------------------------------------------------
56 /* */
57 static inline TFRewriteData SetTFRewriteData(const char *tag,
58 const char *rewrite,
59 const char *newtag = 0)
60 {
61 TFRewriteData tfrd;
62 tfrd.Tag = tag;
63 tfrd.Rewrite = rewrite;
64 tfrd.NewTag = newtag;
65 return tfrd;
66 }
67 /*}}}*/
68 // ConfigToDoHashes - which hashes to generate /*{{{*/
69 static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
70 {
71 if (_config->FindB(Conf, true) == true)
72 DoHashes |= Flag;
73 else
74 DoHashes &= ~Flag;
75 }
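// For example, a configuration entry such as
//    APT::FTPArchive::Release::SHA512 "false";
// (value shown for illustration) handled by ConfigToDoHashes below would clear
// the SHA512 flag while leaving the remaining hash flags enabled.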
76 static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
77 {
78 SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
79 SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
80 SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
81 SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
82 }
83 /*}}}*/
84
85 // FTWScanner::FTWScanner - Constructor /*{{{*/
86 // ---------------------------------------------------------------------
87 /* */
88 FTWScanner::FTWScanner(string const &Arch): Arch(Arch), DoHashes(~0)
89 {
90 ErrorPrinted = false;
91 NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
92 ConfigToDoHashes(DoHashes, "APT::FTPArchive");
93 }
94 /*}}}*/
95 // FTWScanner::Scanner - FTW Scanner /*{{{*/
96 // ---------------------------------------------------------------------
97 /* This is the FTW scanner; it processes each directory element in the
98 directory tree. */
99 int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
100 {
101 if (Flag == FTW_DNR)
102 {
103 Owner->NewLine(1);
104 ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
105 }
106 if (Flag == FTW_NS)
107 {
108 Owner->NewLine(1);
109 ioprintf(c1out, _("W: Unable to stat %s\n"), File);
110 }
111 if (Flag != FTW_F)
112 return 0;
113
114 return ScannerFile(File, true);
115 }
116 /*}}}*/
117 // FTWScanner::ScannerFile - File Scanner /*{{{*/
118 // ---------------------------------------------------------------------
119 /* */
120 int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
121 {
122 const char *LastComponent = strrchr(File, '/');
123 char *RealPath = NULL;
124
125 if (LastComponent == NULL)
126 LastComponent = File;
127 else
128 LastComponent++;
129
130 vector<string>::const_iterator I;
131 for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
132 {
133 if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
134 break;
135 }
136 if (I == Owner->Patterns.end())
137 return 0;
138
139 /* Process it. If the file is a link then resolve it into an absolute
140 name. This works best if the directory components given to the
141 scanner are not links themselves. */
142 char Jnk[2];
143 Owner->OriginalPath = File;
144 if (ReadLink &&
145 readlink(File,Jnk,sizeof(Jnk)) != -1 &&
146 (RealPath = realpath(File,NULL)) != 0)
147 {
148 Owner->DoPackage(RealPath);
149 free(RealPath);
150 }
151 else
152 Owner->DoPackage(File);
153
154 if (_error->empty() == false)
155 {
156 // Print any errors or warnings found
157 string Err;
158 bool SeenPath = false;
159 while (_error->empty() == false)
160 {
161 Owner->NewLine(1);
162
163 bool const Type = _error->PopMessage(Err);
164 if (Type == true)
165 cerr << _("E: ") << Err << endl;
166 else
167 cerr << _("W: ") << Err << endl;
168
169 if (Err.find(File) != string::npos)
170 SeenPath = true;
171 }
172
173 if (SeenPath == false)
174 cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
175 return 0;
176 }
177
178 return 0;
179 }
180 /*}}}*/
181 // FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
182 // ---------------------------------------------------------------------
183 /* */
184 bool FTWScanner::RecursiveScan(string const &Dir)
185 {
186 char *RealPath = NULL;
187 /* If no prefix is set then record the scan root, so we don't generate
188 link-followed paths that spiral out of control */
189 if (InternalPrefix.empty() == true)
190 {
191 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
192 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
193 InternalPrefix = RealPath;
194 free(RealPath);
195 }
196
197 // Do recursive directory searching
198 Owner = this;
199 int const Res = ftw(Dir.c_str(),ScannerFTW,30);
200
201 // Error treewalking?
202 if (Res != 0)
203 {
204 if (_error->PendingError() == false)
205 _error->Errno("ftw",_("Tree walking failed"));
206 return false;
207 }
208
209 return true;
210 }
211 /*}}}*/
212 // FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
213 // ---------------------------------------------------------------------
214 /* This is an alternative to using FTW to locate files; it reads the list
215 of files from another file. */
216 bool FTWScanner::LoadFileList(string const &Dir, string const &File)
217 {
218 char *RealPath = NULL;
219 /* If no prefix is set then record the scan root, so we don't generate
220 link-followed paths that spiral out of control */
221 if (InternalPrefix.empty() == true)
222 {
223 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
224 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
225 InternalPrefix = RealPath;
226 free(RealPath);
227 }
228
229 Owner = this;
230 FILE *List = fopen(File.c_str(),"r");
231 if (List == 0)
232 return _error->Errno("fopen",_("Failed to open %s"),File.c_str());
233
234 /* We are a tad tricky here.. We prefix the buffer with the directory
235 name; that way, if we need a full path, we just use Line. Sneaky and
236 fully evil. */
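/* Illustrative values only: with Dir "/srv/repo" the buffer is primed with
   "/srv/repo/" and FileStart points just past that prefix, so reading a
   relative entry like "pool/main/a/apt_1.0_amd64.deb" into FileStart leaves
   the full path "/srv/repo/pool/main/a/apt_1.0_amd64.deb" in Line. */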
237 char Line[1000];
238 char *FileStart;
239 if (Dir.empty() == true || Dir.end()[-1] != '/')
240 FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
241 else
242 FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
243 while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
244 {
245 char *FileName = _strstrip(FileStart);
246 if (FileName[0] == 0)
247 continue;
248
249 if (FileName[0] != '/')
250 {
251 if (FileName != FileStart)
252 memmove(FileStart,FileName,strlen(FileStart));
253 FileName = Line;
254 }
255
256 #if 0
257 struct stat St;
258 int Flag = FTW_F;
259 if (stat(FileName,&St) != 0)
260 Flag = FTW_NS;
261 #endif
262
263 if (ScannerFile(FileName, false) != 0)
264 break;
265 }
266
267 fclose(List);
268 return true;
269 }
270 /*}}}*/
271 // FTWScanner::Delink - Delink symlinks /*{{{*/
272 // ---------------------------------------------------------------------
273 /* Replace the symlink at OriginalPath with a hard link to its resolved target, tracking the delinked bytes against the configured limit. */
274 bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
275 unsigned long long &DeLinkBytes,
276 unsigned long long const &FileSize)
277 {
278 // See if this isn't an internally prefixed file name.
279 if (InternalPrefix.empty() == false &&
280 InternalPrefix.length() < FileName.length() &&
281 stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
282 InternalPrefix.begin(),InternalPrefix.end()) != 0)
283 {
284 if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
285 {
286 // Tidy up the display
287 if (DeLinkBytes == 0)
288 cout << endl;
289
290 NewLine(1);
291 ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
292 SizeToStr(FileSize).c_str());
293 c1out << flush;
294
295 if (NoLinkAct == false)
296 {
297 char OldLink[400] = {0}; // readlink() does not null-terminate the buffer
298 if (readlink(OriginalPath,OldLink,sizeof(OldLink)-1) == -1)
299 _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
300 else
301 {
302 if (unlink(OriginalPath) != 0)
303 _error->Errno("unlink",_("Failed to unlink %s"),OriginalPath);
304 else
305 {
306 if (link(FileName.c_str(),OriginalPath) != 0)
307 {
308 // Panic! Restore the symlink
309 if (symlink(OldLink,OriginalPath) != 0)
310 _error->Errno("symlink", "failed to restore symlink");
311 return _error->Errno("link",_("*** Failed to link %s to %s"),
312 FileName.c_str(),
313 OriginalPath);
314 }
315 }
316 }
317 }
318
319 DeLinkBytes += FileSize;
320 if (DeLinkBytes/1024 >= DeLinkLimit)
321 ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
322 }
323
324 FileName = OriginalPath;
325 }
326
327 return true;
328 }
329 /*}}}*/
330
331 // PackagesWriter::PackagesWriter - Constructor /*{{{*/
332 // ---------------------------------------------------------------------
333 /* */
334 PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides,
335 string const &Arch) :
336 FTWScanner(Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL)
337 {
338 Output = stdout;
339 SetExts(".deb .udeb");
340 DeLinkLimit = 0;
341
342 // Process the command line options
343 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
344 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
345 DoContents = _config->FindB("APT::FTPArchive::Contents",true);
346 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
347 LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);
348
349 if (Db.Loaded() == false)
350 DoContents = false;
351
352 // Read the override file
353 if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
354 return;
355 else
356 NoOverride = true;
357
358 if (ExtOverrides.empty() == false)
359 Over.ReadExtraOverride(ExtOverrides);
360
361 _error->DumpErrors();
362 }
363 /*}}}*/
364 // FTWScanner::SetExts - Set extensions to support /*{{{*/
365 // ---------------------------------------------------------------------
366 /* Turn a space-separated list of file extensions into the glob patterns the scanner matches against. */
367 bool FTWScanner::SetExts(string const &Vals)
368 {
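// For illustration: SetExts(".deb .udeb") with Arch "amd64" registers the
// patterns "*_amd64.deb", "*_all.deb", "*_amd64.udeb" and "*_all.udeb";
// with an empty Arch it registers "*.deb" and "*.udeb" instead.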
369 ClearPatterns();
370 string::size_type Start = 0;
371 while (Start < Vals.length())
372 {
373 string::size_type const Space = Vals.find(' ',Start);
374 string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
375 if ( Arch.empty() == false )
376 {
377 AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
378 AddPattern(string("*_all") + Vals.substr(Start, Length));
379 }
380 else
381 AddPattern(string("*") + Vals.substr(Start, Length));
382
383 Start += Length + 1;
384 }
385
386 return true;
387 }
388
389 /*}}}*/
390 // PackagesWriter::DoPackage - Process a single package /*{{{*/
391 // ---------------------------------------------------------------------
392 /* This method takes a package, gets its control information and its
393 configured hashes (MD5, SHA1, SHA256, SHA512), then writes out a control
394 record with the proper fields rewritten and the path/size/hash appended. */
395 bool PackagesWriter::DoPackage(string FileName)
396 {
397 // Pull all the data we need from the DB
398 if (Db.GetFileInfo(FileName,
399 true, /* DoControl */
400 DoContents,
401 true, /* GenContentsOnly */
402 false, /* DoSource */
403 DoHashes, DoAlwaysStat) == false)
404 {
405 return false;
406 }
407
408 unsigned long long FileSize = Db.GetFileSize();
409 if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
410 return false;
411
412 // Look up the override information
413 pkgTagSection &Tags = Db.Control.Section;
414 string Package = Tags.FindS("Package");
415 string Architecture;
416 // If we generate a Packages file for a given arch, we use it to
417 // look for overrides. If we run in "simple" mode without the
418 // "Architectures" variable in the config, we use the architecture value
419 // from the deb file.
420 if(Arch != "")
421 Architecture = Arch;
422 else
423 Architecture = Tags.FindS("Architecture");
424 auto_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
425
426 if (Package.empty() == true)
427 return _error->Error(_("Archive had no package field"));
428
429 // If we need to do any rewriting of the header do it now..
430 if (OverItem.get() == 0)
431 {
432 if (NoOverride == false)
433 {
434 NewLine(1);
435 ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
436 }
437
438 OverItem = auto_ptr<Override::Item>(new Override::Item);
439 OverItem->FieldOverride["Section"] = Tags.FindS("Section");
440 OverItem->Priority = Tags.FindS("Priority");
441 }
442
443 char Size[40];
444 sprintf(Size,"%llu", (unsigned long long) FileSize);
445
446 // Strip the DirStrip prefix from the FileName and add the PathPrefix
447 string NewFileName;
448 if (DirStrip.empty() == false &&
449 FileName.length() > DirStrip.length() &&
450 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
451 DirStrip.begin(),DirStrip.end()) == 0)
452 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
453 else
454 NewFileName = FileName;
455 if (PathPrefix.empty() == false)
456 NewFileName = flCombine(PathPrefix,NewFileName);
457
458 /* Configuration says we don't want to include the long Description
459 in the package file - instead we want to ship a separate file */
460 string desc;
461 if (LongDescription == false) {
462 desc = Tags.FindS("Description").append("\n");
463 OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
464 }
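// Only the synopsis (first line) of the Description is kept in the Packages
// record in this case; the full text is handed to the TranslationWriter
// further down, keyed by the Description-md5 computed below.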
465
466 // This lists all the changes to the fields we are going to make.
467 std::vector<TFRewriteData> Changes;
468
469 Changes.push_back(SetTFRewriteData("Size", Size));
470 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
471 {
472 if (hs->HashType() == "MD5Sum")
473 Changes.push_back(SetTFRewriteData("MD5sum", hs->HashValue().c_str()));
474 else if (hs->HashType() == "Checksum-FileSize")
475 continue;
476 else
477 Changes.push_back(SetTFRewriteData(hs->HashType().c_str(), hs->HashValue().c_str()));
478 }
479 Changes.push_back(SetTFRewriteData("Filename", NewFileName.c_str()));
480 Changes.push_back(SetTFRewriteData("Priority", OverItem->Priority.c_str()));
481 Changes.push_back(SetTFRewriteData("Status", 0));
482 Changes.push_back(SetTFRewriteData("Optional", 0));
483
484 string DescriptionMd5;
485 if (LongDescription == false) {
486 MD5Summation descmd5;
487 descmd5.Add(desc.c_str());
488 DescriptionMd5 = descmd5.Result().Value();
489 Changes.push_back(SetTFRewriteData("Description-md5", DescriptionMd5.c_str()));
490 if (TransWriter != NULL)
491 TransWriter->DoPackage(Package, desc, DescriptionMd5);
492 }
493
494 // Rewrite the maintainer field if necessary
495 bool MaintFailed;
496 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
497 if (MaintFailed == true)
498 {
499 if (NoOverride == false)
500 {
501 NewLine(1);
502 ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
503 Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
504 }
505 }
506
507 if (NewMaint.empty() == false)
508 Changes.push_back(SetTFRewriteData("Maintainer", NewMaint.c_str()));
509
510 /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
511 dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
512 but dpkg does this append bit. So we do the append bit, at least that way the
513 status file and package file will remain similar. There are other transforms
514 but optional is the only legacy one still in use for some lazy reason. */
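// For example, a record carrying "Suggests: bar" and "Optional: foo" ends up
// with "Suggests: bar, foo" in the output (field values illustrative only).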
515 string OptionalStr = Tags.FindS("Optional");
516 if (OptionalStr.empty() == false)
517 {
518 if (Tags.FindS("Suggests").empty() == false)
519 OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
520 Changes.push_back(SetTFRewriteData("Suggests", OptionalStr.c_str()));
521 }
522
523 for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
524 I != OverItem->FieldOverride.end(); ++I)
525 Changes.push_back(SetTFRewriteData(I->first.c_str(),I->second.c_str()));
526
527 Changes.push_back(SetTFRewriteData( 0, 0));
528
529 // Rewrite and store the fields.
530 if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes.data()) == false)
531 return false;
532 fprintf(Output,"\n");
533
534 return Db.Finish();
535 }
536 /*}}}*/
537
538 // TranslationWriter::TranslationWriter - Constructor /*{{{*/
539 // ---------------------------------------------------------------------
540 /* Create a Translation-Master file for this Packages file */
541 TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
542 mode_t const &Permissions) : Output(NULL),
543 RefCounter(0)
544 {
545 if (File.empty() == true)
546 return;
547
548 Comp = new MultiCompress(File, TransCompress, Permissions);
549 Output = Comp->Input;
550 }
551 /*}}}*/
552 // TranslationWriter::DoPackage - Process a single package /*{{{*/
553 // ---------------------------------------------------------------------
554 /* Create a Translation-Master file for this Packages file */
555 bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
556 string const &MD5)
557 {
558 if (Output == NULL)
559 return true;
560
561 // Different archs can include different versions and therefore
562 // different descriptions - so we need to check for both name and md5.
563 string const Record = Pkg + ":" + MD5;
564
565 if (Included.find(Record) != Included.end())
566 return true;
567
568 fprintf(Output, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
569 Pkg.c_str(), MD5.c_str(), Desc.c_str());
570
571 Included.insert(Record);
572 return true;
573 }
574 /*}}}*/
575 // TranslationWriter::~TranslationWriter - Destructor /*{{{*/
576 // ---------------------------------------------------------------------
577 /* */
578 TranslationWriter::~TranslationWriter()
579 {
580 if (Comp == NULL)
581 return;
582
583 delete Comp;
584 }
585 /*}}}*/
586
587 // SourcesWriter::SourcesWriter - Constructor /*{{{*/
588 // ---------------------------------------------------------------------
589 /* */
590 SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string const &SOverrides,
591 string const &ExtOverrides) :
592 Db(DB), Stats(Db.Stats)
593 {
594 Output = stdout;
595 AddPattern("*.dsc");
596 DeLinkLimit = 0;
597 Buffer = 0;
598 BufSize = 0;
599
600 // Process the command line options
601 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
602 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
603 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
604
605 // Read the override file
606 if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
607 return;
608 else
609 NoOverride = true;
610
611 // WTF?? The logic above: if we can't read binary overrides, don't even try
612 // reading source overrides. if we can read binary overrides, then say there
613 // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28
614
615 if (ExtOverrides.empty() == false)
616 SOver.ReadExtraOverride(ExtOverrides);
617
618 if (SOverrides.empty() == false && FileExists(SOverrides) == true)
619 SOver.ReadOverride(SOverrides,true);
620 }
621 /*}}}*/
622 // SourcesWriter::DoPackage - Process a single package /*{{{*/
623 static std::ostream& addDscHash(std::ostream &out, unsigned int const DoHashes,
624 Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
625 HashString const * const Hash, unsigned long long Size, std::string FileName)
626 {
627 if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
628 return out;
629 out << "\n " << Hash->HashValue() << " " << Size << " " << FileName
630 << "\n " << Tags.FindS(FieldName);
631 return out;
632 }
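// The helper above extends a dsc checksum field; for instance a call for
// "Checksums-Sha256" appends a line of the form
//    " <sha256> <size> <file>.dsc"
// followed by the existing field body from the tag section (values illustrative).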
633 bool SourcesWriter::DoPackage(string FileName)
634 {
635 // Pull all the data we need from the DB
636 if (Db.GetFileInfo(FileName,
637 false, /* DoControl */
638 false, /* DoContents */
639 false, /* GenContentsOnly */
640 true, /* DoSource */
641 DoHashes, DoAlwaysStat) == false)
642 {
643 return false;
644 }
645
646 // we need to perform a "write" here (this is what finish is doing)
647 // because the call to Db.GetFileInfo() in the loop will change
648 // the "db cursor"
649 Db.Finish();
650
651 // read stuff
652 char *Start = Db.Dsc.Data;
653 char *BlkEnd = Db.Dsc.Data + Db.Dsc.Length;
654
655 // Add extra \n to the end, just in case (clearsigned files may be missing them)
656 *BlkEnd++ = '\n';
657 *BlkEnd++ = '\n';
658
659 pkgTagSection Tags;
660 if (Tags.Scan(Start,BlkEnd - Start) == false)
661 return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
662
663 if (Tags.Exists("Source") == false)
664 return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
665 Tags.Trim();
666
667 // Look up the override information, finding the best priority first.
668 string BestPrio;
669 string Bins = Tags.FindS("Binary");
670 char Buffer[Bins.length() + 1];
671 auto_ptr<Override::Item> OverItem(0);
672 if (Bins.empty() == false)
673 {
674 strcpy(Buffer,Bins.c_str());
675
676 // Ignore too-long errors.
677 char *BinList[400];
678 TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));
679
680 // Look at all the binaries
681 unsigned char BestPrioV = pkgCache::State::Extra;
682 for (unsigned I = 0; BinList[I] != 0; I++)
683 {
684 auto_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
685 if (Itm.get() == 0)
686 continue;
687
688 unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
689 if (NewPrioV < BestPrioV || BestPrio.empty() == true)
690 {
691 BestPrioV = NewPrioV;
692 BestPrio = Itm->Priority;
693 }
694
695 if (OverItem.get() == 0)
696 OverItem = Itm;
697 }
698 }
699
700 // If we need to do any rewriting of the header do it now..
701 if (OverItem.get() == 0)
702 {
703 if (NoOverride == false)
704 {
705 NewLine(1);
706 ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());
707 }
708
709 OverItem = auto_ptr<Override::Item>(new Override::Item);
710 }
711
712 struct stat St;
713 if (stat(FileName.c_str(), &St) != 0)
714 return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
715
716 auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
717 // const auto_ptr<Override::Item> autoSOverItem(SOverItem);
718 if (SOverItem.get() == 0)
719 {
720 ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
721 SOverItem = auto_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
722 if (SOverItem.get() == 0)
723 {
724 ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
725 SOverItem = auto_ptr<Override::Item>(new Override::Item);
726 *SOverItem = *OverItem;
727 }
728 }
729
730 // Add the dsc to the files hash list
731 string const strippedName = flNotDir(FileName);
732 std::ostringstream ostreamFiles;
733 addDscHash(ostreamFiles, DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
734 string const Files = ostreamFiles.str();
735
736 std::ostringstream ostreamSha1;
737 addDscHash(ostreamSha1, DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
738 std::ostringstream ostreamSha256;
739 addDscHash(ostreamSha256, DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
740 std::ostringstream ostreamSha512;
741 addDscHash(ostreamSha512, DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);
742
743 // Strip the DirStrip prefix from the FileName and add the PathPrefix
744 string NewFileName;
745 if (DirStrip.empty() == false &&
746 FileName.length() > DirStrip.length() &&
747 stringcmp(DirStrip,OriginalPath,OriginalPath + DirStrip.length()) == 0)
748 NewFileName = string(OriginalPath + DirStrip.length());
749 else
750 NewFileName = OriginalPath;
751 if (PathPrefix.empty() == false)
752 NewFileName = flCombine(PathPrefix,NewFileName);
753
754 string Directory = flNotFile(OriginalPath);
755 string Package = Tags.FindS("Source");
756
757 // Perform operation over all of the files
758 string ParseJnk;
759 const char *C = Files.c_str();
760 char *RealPath = NULL;
761 for (;isspace(*C); C++);
762 while (*C != 0)
763 {
764 // Parse each of the elements
765 if (ParseQuoteWord(C,ParseJnk) == false ||
766 ParseQuoteWord(C,ParseJnk) == false ||
767 ParseQuoteWord(C,ParseJnk) == false)
768 return _error->Error("Error parsing file record");
769
770 string OriginalPath = Directory + ParseJnk;
771
772 // Add missing hashes to source files
773 if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
774 ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
775 ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
776 {
777 if (Db.GetFileInfo(OriginalPath,
778 false, /* DoControl */
779 false, /* DoContents */
780 false, /* GenContentsOnly */
781 false, /* DoSource */
782 DoHashes,
783 DoAlwaysStat) == false)
784 {
785 return _error->Error("Error getting file info");
786 }
787
788 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
789 {
790 if (hs->HashType() == "MD5Sum" || hs->HashType() == "Checksum-FileSize")
791 continue;
792 char const * fieldname;
793 std::ostream * out;
794 if (hs->HashType() == "SHA1")
795 {
796 fieldname = "Checksums-Sha1";
797 out = &ostreamSha1;
798 }
799 else if (hs->HashType() == "SHA256")
800 {
801 fieldname = "Checksums-Sha256";
802 out = &ostreamSha256;
803 }
804 else if (hs->HashType() == "SHA512")
805 {
806 fieldname = "Checksums-Sha512";
807 out = &ostreamSha512;
808 }
809 else
810 {
811 _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
812 continue;
813 }
814 if (Tags.Exists(fieldname) == true)
815 continue;
816 (*out) << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk;
817 }
818
819 // write back the GetFileInfo() stats data
820 Db.Finish();
821 }
822
823 // Perform the delinking operation
824 char Jnk[2];
825
826 if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
827 (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
828 {
829 string RP = RealPath;
830 free(RealPath);
831 if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
832 return false;
833 }
834 }
835
836 Directory = flNotFile(NewFileName);
837 if (Directory.length() > 2)
838 Directory.erase(Directory.end()-1);
839
840 string const ChecksumsSha1 = ostreamSha1.str();
841 string const ChecksumsSha256 = ostreamSha256.str();
842 string const ChecksumsSha512 = ostreamSha512.str();
843
844 // This lists all the changes to the fields we are going to make.
845 // (5 hardcoded + checksums + maintainer + end marker)
846 std::vector<TFRewriteData> Changes;
847
848 Changes.push_back(SetTFRewriteData("Source",Package.c_str(),"Package"));
849 if (Files.empty() == false)
850 Changes.push_back(SetTFRewriteData("Files",Files.c_str()));
851 if (ChecksumsSha1.empty() == false)
852 Changes.push_back(SetTFRewriteData("Checksums-Sha1",ChecksumsSha1.c_str()));
853 if (ChecksumsSha256.empty() == false)
854 Changes.push_back(SetTFRewriteData("Checksums-Sha256",ChecksumsSha256.c_str()));
855 if (ChecksumsSha512.empty() == false)
856 Changes.push_back(SetTFRewriteData("Checksums-Sha512",ChecksumsSha512.c_str()));
857 if (Directory != "./")
858 Changes.push_back(SetTFRewriteData("Directory",Directory.c_str()));
859 Changes.push_back(SetTFRewriteData("Priority",BestPrio.c_str()));
860 Changes.push_back(SetTFRewriteData("Status",0));
861
862 // Rewrite the maintainer field if necessary
863 bool MaintFailed;
864 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
865 if (MaintFailed == true)
866 {
867 if (NoOverride == false)
868 {
869 NewLine(1);
870 ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
871 Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
872 }
873 }
874 if (NewMaint.empty() == false)
875 Changes.push_back(SetTFRewriteData("Maintainer", NewMaint.c_str()));
876
877 for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
878 I != SOverItem->FieldOverride.end(); ++I)
879 Changes.push_back(SetTFRewriteData(I->first.c_str(),I->second.c_str()));
880
881 Changes.push_back(SetTFRewriteData(0, 0));
882
883 // Rewrite and store the fields.
884 if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes.data()) == false)
885 return false;
886 fprintf(Output,"\n");
887
888 Stats.Packages++;
889
890 return true;
891 }
892 /*}}}*/
893
894 // ContentsWriter::ContentsWriter - Constructor /*{{{*/
895 // ---------------------------------------------------------------------
896 /* */
897 ContentsWriter::ContentsWriter(string const &DB, string const &Arch) :
898 FTWScanner(Arch), Db(DB), Stats(Db.Stats)
899
900 {
901 SetExts(".deb");
902 Output = stdout;
903 }
904 /*}}}*/
905 // ContentsWriter::DoPackage - Process a single package /*{{{*/
906 // ---------------------------------------------------------------------
907 /* If Package is the empty string the control record will be parsed to
908 determine what the package name is. */
909 bool ContentsWriter::DoPackage(string FileName, string Package)
910 {
911 if (!Db.GetFileInfo(FileName,
912 Package.empty(), /* DoControl */
913 true, /* DoContents */
914 false, /* GenContentsOnly */
915 false, /* DoSource */
916 0, /* DoHashes */
917 false /* checkMtime */))
918 {
919 return false;
920 }
921
922 // Parse the package name
923 if (Package.empty() == true)
924 {
925 Package = Db.Control.Section.FindS("Package");
926 }
927
928 Db.Contents.Add(Gen,Package);
929
930 return Db.Finish();
931 }
932 /*}}}*/
933 // ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
934 // ---------------------------------------------------------------------
935 /* */
936 bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
937 {
938 MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
939 if (_error->PendingError() == true)
940 return false;
941
942 // Open the package file
943 FileFd Fd;
944 if (Pkgs.OpenOld(Fd) == false)
945 return false;
946
947 pkgTagFile Tags(&Fd);
948 if (_error->PendingError() == true)
949 return false;
950
951 // Parse.
952 pkgTagSection Section;
953 while (Tags.Step(Section) == true)
954 {
955 string File = flCombine(Prefix,Section.FindS("FileName"));
956 string Package = Section.FindS("Section");
957 if (Package.empty() == false && Package.end()[-1] != '/')
958 {
959 Package += '/';
960 Package += Section.FindS("Package");
961 }
962 else
963 Package += Section.FindS("Package");
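// The resulting key has the "section/package" shape used in Contents files,
// e.g. "utils/apt" (name purely illustrative).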
964
965 DoPackage(File,Package);
966 if (_error->empty() == false)
967 {
968 _error->Error("Errors apply to file '%s'",File.c_str());
969 _error->DumpErrors();
970 }
971 }
972
973 // Tidy the compressor
974 Fd.Close();
975
976 return true;
977 }
978
979 /*}}}*/
980
981 // ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
982 // ---------------------------------------------------------------------
983 /* */
984 ReleaseWriter::ReleaseWriter(string const &/*DB*/)
985 {
986 if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
987 {
988 AddPattern("Packages");
989 AddPattern("Packages.gz");
990 AddPattern("Packages.bz2");
991 AddPattern("Packages.lzma");
992 AddPattern("Packages.xz");
993 AddPattern("Translation-*");
994 AddPattern("Sources");
995 AddPattern("Sources.gz");
996 AddPattern("Sources.bz2");
997 AddPattern("Sources.lzma");
998 AddPattern("Sources.xz");
999 AddPattern("Release");
1000 AddPattern("Contents-*");
1001 AddPattern("Index");
1002 AddPattern("md5sum.txt");
1003 }
1004 AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
1005
1006 Output = stdout;
1007 time_t const now = time(NULL);
1008
1009 setlocale(LC_TIME, "C");
1010
1011 char datestr[128];
1012 if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
1013 gmtime(&now)) == 0)
1014 {
1015 datestr[0] = '\0';
1016 }
1017
1018 time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
1019 char validstr[128];
1020 if (now == validuntil ||
1021 strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
1022 gmtime(&validuntil)) == 0)
1023 {
1024 validstr[0] = '\0';
1025 }
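// With ValidTime left at its default of 0 validuntil equals now, so validstr
// stays empty and no Valid-Until field is emitted below (empty fields are
// skipped) unless the configuration supplies one explicitly.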
1026
1027 setlocale(LC_TIME, "");
1028
1029 map<string,string> Fields;
1030 Fields["Origin"] = "";
1031 Fields["Label"] = "";
1032 Fields["Suite"] = "";
1033 Fields["Version"] = "";
1034 Fields["Codename"] = "";
1035 Fields["Date"] = datestr;
1036 Fields["Valid-Until"] = validstr;
1037 Fields["Architectures"] = "";
1038 Fields["Components"] = "";
1039 Fields["Description"] = "";
1040
1041 for(map<string,string>::const_iterator I = Fields.begin();
1042 I != Fields.end();
1043 ++I)
1044 {
1045 string Config = string("APT::FTPArchive::Release::") + (*I).first;
1046 string Value = _config->Find(Config, (*I).second.c_str());
1047 if (Value == "")
1048 continue;
1049
1050 fprintf(Output, "%s: %s\n", (*I).first.c_str(), Value.c_str());
1051 }
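// For example, a (hypothetical) configuration carrying
//    APT::FTPArchive::Release::Origin "Debian";
//    APT::FTPArchive::Release::Suite "unstable";
// would emit "Origin: Debian" and "Suite: unstable" header lines here.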
1052
1053 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
1054 }
1055 /*}}}*/
1056 // ReleaseWriter::DoPackage - Process a single package /*{{{*/
1057 // ---------------------------------------------------------------------
1058 bool ReleaseWriter::DoPackage(string FileName)
1059 {
1060 // Strip the DirStrip prefix from the FileName and add the PathPrefix
1061 string NewFileName;
1062 if (DirStrip.empty() == false &&
1063 FileName.length() > DirStrip.length() &&
1064 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
1065 DirStrip.begin(),DirStrip.end()) == 0)
1066 {
1067 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
1068 while (NewFileName[0] == '/')
1069 NewFileName = string(NewFileName.begin() + 1,NewFileName.end());
1070 }
1071 else
1072 NewFileName = FileName;
1073
1074 if (PathPrefix.empty() == false)
1075 NewFileName = flCombine(PathPrefix,NewFileName);
1076
1077 FileFd fd(FileName, FileFd::ReadOnly);
1078
1079 if (!fd.IsOpen())
1080 {
1081 return false;
1082 }
1083
1084 CheckSums[NewFileName].size = fd.Size();
1085
1086 Hashes hs;
1087 hs.AddFD(fd, 0, DoHashes);
1088 CheckSums[NewFileName].Hashes = hs.GetHashStringList();
1089 fd.Close();
1090
1091 return true;
1092 }
1093
1094 /*}}}*/
1095 // ReleaseWriter::Finish - Output the checksums /*{{{*/
1096 // ---------------------------------------------------------------------
1097 static void printChecksumTypeRecord(FILE * const Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
1098 {
1099 fprintf(Output, "%s:\n", Type);
1100 for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
1101 I != CheckSums.end(); ++I)
1102 {
1103 HashString const * const hs = I->second.Hashes.find(Type);
1104 if (hs == NULL)
1105 continue;
1106 fprintf(Output, " %s %16llu %s\n",
1107 hs->HashValue().c_str(),
1108 (*I).second.size,
1109 (*I).first.c_str());
1110 }
1111 }
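// Each block printed by the helper above follows the usual Release layout, e.g.
//    MD5Sum:
//     9df463a5a78b0e8a7bce71e17a35e7c3            24530 main/binary-amd64/Packages
// (hash, size and path purely illustrative).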
1112 void ReleaseWriter::Finish()
1113 {
1114 if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
1115 printChecksumTypeRecord(Output, "MD5Sum", CheckSums);
1116 if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
1117 printChecksumTypeRecord(Output, "SHA1", CheckSums);
1118 if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
1119 printChecksumTypeRecord(Output, "SHA256", CheckSums);
1120 if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
1121 printChecksumTypeRecord(Output, "SHA512", CheckSums);
1122 }