1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: writer.cc,v 1.14 2004/03/24 01:40:43 mdz Exp $
4 /* ######################################################################
5
6 Writer
7
8 The file writer classes. These write various types of output, sources,
9 packages and contents.
10
11 ##################################################################### */
12 /*}}}*/
13 // Include Files /*{{{*/
14 #include <config.h>
15
16 #include <apt-pkg/configuration.h>
17 #include <apt-pkg/deblistparser.h>
18 #include <apt-pkg/error.h>
19 #include <apt-pkg/fileutl.h>
20 #include <apt-pkg/gpgv.h>
21 #include <apt-pkg/hashes.h>
22 #include <apt-pkg/md5.h>
23 #include <apt-pkg/strutl.h>
24 #include <apt-pkg/debfile.h>
25 #include <apt-pkg/pkgcache.h>
26 #include <apt-pkg/sha1.h>
27 #include <apt-pkg/sha2.h>
28 #include <apt-pkg/tagfile.h>
29
30 #include <ctype.h>
31 #include <fnmatch.h>
32 #include <ftw.h>
33 #include <locale.h>
34 #include <string.h>
35 #include <sys/stat.h>
36 #include <sys/types.h>
37 #include <unistd.h>
38 #include <ctime>
39 #include <iostream>
40 #include <sstream>
41 #include <memory>
42 #include <utility>
43
44 #include "apt-ftparchive.h"
45 #include "writer.h"
46 #include "cachedb.h"
47 #include "multicompress.h"
48
49 #include <apti18n.h>
50 /*}}}*/
51 using namespace std;
52 FTWScanner *FTWScanner::Owner;
53
54 // SetTFRewriteData - Helper for setting rewrite lists /*{{{*/
55 // ---------------------------------------------------------------------
56 /* */
57 static inline TFRewriteData SetTFRewriteData(const char *tag,
58 const char *rewrite,
59 const char *newtag = 0)
60 {
61 TFRewriteData tfrd;
62 tfrd.Tag = tag;
63 tfrd.Rewrite = rewrite;
64 tfrd.NewTag = newtag;
65 return tfrd;
66 }
67 /*}}}*/
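// Usage sketch (illustrative; it mirrors what the writers below do): rewrite
// entries are collected in a std::vector<TFRewriteData>, terminated with a
// (0, 0) sentinel and handed to TFRewrite() together with a field order, e.g.
//
//    std::vector<TFRewriteData> Changes;
//    Changes.push_back(SetTFRewriteData("Size", "1234"));
//    Changes.push_back(SetTFRewriteData("Filename", "pool/main/a/apt_1.0_amd64.deb"));
//    Changes.push_back(SetTFRewriteData(0, 0));
//    TFRewrite(Output, Tags, TFRewritePackageOrder, Changes.data());
//
// The optional third argument renames a tag, as in
// SetTFRewriteData("Source", Package.c_str(), "Package") used by SourcesWriter.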
68 // ConfigToDoHashes - which hashes to generate /*{{{*/
69 static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
70 {
71 if (_config->FindB(Conf, true) == true)
72 DoHashes |= Flag;
73 else
74 DoHashes &= ~Flag;
75 }
76 static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
77 {
78 SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
79 SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
80 SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
81 SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
82 }
83 /*}}}*/
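// For illustration: with a configuration fragment such as
//
//    APT::FTPArchive::MD5 "false";
//    APT::FTPArchive::SHA512 "false";
//
// ConfigToDoHashes(DoHashes, "APT::FTPArchive") clears the MD5SUM and SHA512SUM
// bits and leaves SHA1SUM and SHA256SUM set; every hash type defaults to enabled.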
84
85 // FTWScanner::FTWScanner - Constructor /*{{{*/
86 // ---------------------------------------------------------------------
87 /* */
88 FTWScanner::FTWScanner(string const &Arch): Arch(Arch), DoHashes(~0)
89 {
90 ErrorPrinted = false;
91 NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
92 ConfigToDoHashes(DoHashes, "APT::FTPArchive");
93 }
94 /*}}}*/
95 // FTWScanner::Scanner - FTW Scanner /*{{{*/
96 // ---------------------------------------------------------------------
97 /* This is the FTW scanner; it processes each directory element in the
98    directory tree. */
99 int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
100 {
101 if (Flag == FTW_DNR)
102 {
103 Owner->NewLine(1);
104 ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
105 }
106 if (Flag == FTW_NS)
107 {
108 Owner->NewLine(1);
109 ioprintf(c1out, _("W: Unable to stat %s\n"), File);
110 }
111 if (Flag != FTW_F)
112 return 0;
113
114 return ScannerFile(File, true);
115 }
116 /*}}}*/
117 // FTWScanner::ScannerFile - File Scanner /*{{{*/
118 // ---------------------------------------------------------------------
119 /* */
120 int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
121 {
122 const char *LastComponent = strrchr(File, '/');
123 char *RealPath = NULL;
124
125 if (LastComponent == NULL)
126 LastComponent = File;
127 else
128 LastComponent++;
129
130 vector<string>::const_iterator I;
131 for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
132 {
133 if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
134 break;
135 }
136 if (I == Owner->Patterns.end())
137 return 0;
138
139 /* Process it. If the file is a link then resolve it into an absolute
140    name. This works best if the directory components given to the scanner
141    are not links themselves. */
142 char Jnk[2];
143 Owner->OriginalPath = File;
144 if (ReadLink &&
145 readlink(File,Jnk,sizeof(Jnk)) != -1 &&
146 (RealPath = realpath(File,NULL)) != 0)
147 {
148 Owner->DoPackage(RealPath);
149 free(RealPath);
150 }
151 else
152 Owner->DoPackage(File);
153
154 if (_error->empty() == false)
155 {
156 // Print any errors or warnings found
157 string Err;
158 bool SeenPath = false;
159 while (_error->empty() == false)
160 {
161 Owner->NewLine(1);
162
163 bool const Type = _error->PopMessage(Err);
164 if (Type == true)
165 cerr << _("E: ") << Err << endl;
166 else
167 cerr << _("W: ") << Err << endl;
168
169 if (Err.find(File) != string::npos)
170 SeenPath = true;
171 }
172
173 if (SeenPath == false)
174 cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
175 return 0;
176 }
177
178 return 0;
179 }
180 /*}}}*/
181 // FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
182 // ---------------------------------------------------------------------
183 /* */
184 bool FTWScanner::RecursiveScan(string const &Dir)
185 {
186 char *RealPath = NULL;
187 /* If no prefix is set then jam the scan root in, so we don't generate
188    link-followed paths out of control */
189 if (InternalPrefix.empty() == true)
190 {
191 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
192 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
193 InternalPrefix = RealPath;
194 free(RealPath);
195 }
196
197 // Do recursive directory searching
198 Owner = this;
199 int const Res = ftw(Dir.c_str(),ScannerFTW,30);
200
201 // Error treewalking?
202 if (Res != 0)
203 {
204 if (_error->PendingError() == false)
205 _error->Errno("ftw",_("Tree walking failed"));
206 return false;
207 }
208
209 return true;
210 }
211 /*}}}*/
212 // FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
213 // ---------------------------------------------------------------------
214 /* This is an alternative to using FTW to locate files; it reads the list
215    of files from another file. */
216 bool FTWScanner::LoadFileList(string const &Dir, string const &File)
217 {
218 char *RealPath = NULL;
219 /* If no prefix is set then jam the scan root in, so we don't generate
220    link-followed paths out of control */
221 if (InternalPrefix.empty() == true)
222 {
223 if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
224 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
225 InternalPrefix = RealPath;
226 free(RealPath);
227 }
228
229 Owner = this;
230 FILE *List = fopen(File.c_str(),"r");
231 if (List == 0)
232 return _error->Errno("fopen",_("Failed to open %s"),File.c_str());
233
234 /* We are a tad tricky here: we prefix the buffer with the directory
235    name, that way if we need a full path we can just use Line. Sneaky and
236    fully evil. */
237 char Line[1000];
238 char *FileStart;
239 if (Dir.empty() == true || Dir.end()[-1] != '/')
240 FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
241 else
242 FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
243 while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
244 {
245 char *FileName = _strstrip(FileStart);
246 if (FileName[0] == 0)
247 continue;
248
249 if (FileName[0] != '/')
250 {
251 if (FileName != FileStart)
252 memmove(FileStart,FileName,strlen(FileStart));
253 FileName = Line;
254 }
255
256 #if 0
257 struct stat St;
258 int Flag = FTW_F;
259 if (stat(FileName,&St) != 0)
260 Flag = FTW_NS;
261 #endif
262
263 if (ScannerFile(FileName, false) != 0)
264 break;
265 }
266
267 fclose(List);
268 return true;
269 }
270 /*}}}*/
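// For illustration: the file list is plain text with one path per line; absolute
// paths are used as-is, everything else is taken relative to Dir, so a list like
//
//    pool/main/a/apt_1.0_amd64.deb
//    /srv/archive/pool/main/b/bash_5.0_amd64.deb
//
// feeds both entries to ScannerFile() (the paths above are hypothetical examples).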
271 // FTWScanner::Delink - Delink symlinks /*{{{*/
272 // ---------------------------------------------------------------------
273 /* Replace a symlink (OriginalPath) whose target lies outside InternalPrefix with a hard link to the real file (FileName), then point FileName back at the in-tree path so that is what ends up in the index. */
274 bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
275 unsigned long long &DeLinkBytes,
276 unsigned long long const &FileSize)
277 {
278    // See if this isn't an internally prefixed file name.
279 if (InternalPrefix.empty() == false &&
280 InternalPrefix.length() < FileName.length() &&
281 stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
282 InternalPrefix.begin(),InternalPrefix.end()) != 0)
283 {
284 if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
285 {
286 // Tidy up the display
287 if (DeLinkBytes == 0)
288 cout << endl;
289
290 NewLine(1);
291 ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
292 SizeToStr(FileSize).c_str());
293 c1out << flush;
294
295 if (NoLinkAct == false)
296 {
297 char OldLink[400];
298 if (readlink(OriginalPath,OldLink,sizeof(OldLink)) == -1)
299 _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
300 else
301 {
302 if (unlink(OriginalPath) != 0)
303 _error->Errno("unlink",_("Failed to unlink %s"),OriginalPath);
304 else
305 {
306 if (link(FileName.c_str(),OriginalPath) != 0)
307 {
308 // Panic! Restore the symlink
309 if (symlink(OldLink,OriginalPath) != 0)
310 _error->Errno("symlink", "failed to restore symlink");
311 return _error->Errno("link",_("*** Failed to link %s to %s"),
312 FileName.c_str(),
313 OriginalPath);
314 }
315 }
316 }
317 }
318
319 DeLinkBytes += FileSize;
320 if (DeLinkBytes/1024 >= DeLinkLimit)
321 ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
322 }
323
324 FileName = OriginalPath;
325 }
326
327 return true;
328 }
329 /*}}}*/
330
331 // PackagesWriter::PackagesWriter - Constructor /*{{{*/
332 // ---------------------------------------------------------------------
333 /* */
334 PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides,
335 string const &Arch) :
336 FTWScanner(Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL)
337 {
338 Output = stdout;
339 SetExts(".deb .udeb");
340 DeLinkLimit = 0;
341
342 // Process the command line options
343 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
344 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
345 DoContents = _config->FindB("APT::FTPArchive::Contents",true);
346 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
347 LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);
348
349 if (Db.Loaded() == false)
350 DoContents = false;
351
352 // Read the override file
353 if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
354 return;
355 else
356 NoOverride = true;
357
358 if (ExtOverrides.empty() == false)
359 Over.ReadExtraOverride(ExtOverrides);
360
361 _error->DumpErrors();
362 }
363 /*}}}*/
364 // FTWScanner::SetExts - Set extensions to support /*{{{*/
365 // ---------------------------------------------------------------------
366 /* */
367 bool FTWScanner::SetExts(string const &Vals)
368 {
369 ClearPatterns();
370 string::size_type Start = 0;
371 while (Start <= Vals.length()-1)
372 {
373 string::size_type const Space = Vals.find(' ',Start);
374 string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
375 if ( Arch.empty() == false )
376 {
377 AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
378 AddPattern(string("*_all") + Vals.substr(Start, Length));
379 }
380 else
381 AddPattern(string("*") + Vals.substr(Start, Length));
382
383 Start += Length + 1;
384 }
385
386 return true;
387 }
388
389 /*}}}*/
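// For illustration: with Arch == "amd64", SetExts(".deb .udeb") installs the
// patterns "*_amd64.deb", "*_all.deb", "*_amd64.udeb" and "*_all.udeb"; with an
// empty Arch it installs "*.deb" and "*.udeb" instead.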
390 // PackagesWriter::DoPackage - Process a single package /*{{{*/
391 // ---------------------------------------------------------------------
392 /* This method takes a package, gets its control information and the
393    configured hashes (MD5, SHA1, SHA256, SHA512), then writes out a control
394    record with the proper fields rewritten and the path/size/hashes appended. */
395 bool PackagesWriter::DoPackage(string FileName)
396 {
397    // Pull all the data we need from the DB
398 if (Db.GetFileInfo(FileName,
399 true, /* DoControl */
400 DoContents,
401 true, /* GenContentsOnly */
402 false, /* DoSource */
403 DoHashes, DoAlwaysStat) == false)
404 {
405 return false;
406 }
407
408 unsigned long long FileSize = Db.GetFileSize();
409 if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
410 return false;
411
412    // Look up the override information
413 pkgTagSection &Tags = Db.Control.Section;
414 string Package = Tags.FindS("Package");
415 string Architecture;
416    // If we generate a Packages file for a given arch, we use it to
417    // look for overrides. If we run in "simple" mode without the
418    // "Architectures" variable in the config, we use the architecture value
419    // from the deb file.
420 if(Arch != "")
421 Architecture = Arch;
422 else
423 Architecture = Tags.FindS("Architecture");
424 auto_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));
425
426 if (Package.empty() == true)
427 return _error->Error(_("Archive had no package field"));
428
429 // If we need to do any rewriting of the header do it now..
430 if (OverItem.get() == 0)
431 {
432 if (NoOverride == false)
433 {
434 NewLine(1);
435 ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
436 }
437
438 OverItem = auto_ptr<Override::Item>(new Override::Item);
439 OverItem->FieldOverride["Section"] = Tags.FindS("Section");
440 OverItem->Priority = Tags.FindS("Priority");
441 }
442
443 char Size[40];
444 sprintf(Size,"%llu", (unsigned long long) FileSize);
445
446 // Strip the DirStrip prefix from the FileName and add the PathPrefix
447 string NewFileName;
448 if (DirStrip.empty() == false &&
449 FileName.length() > DirStrip.length() &&
450 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
451 DirStrip.begin(),DirStrip.end()) == 0)
452 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
453 else
454 NewFileName = FileName;
455 if (PathPrefix.empty() == false)
456 NewFileName = flCombine(PathPrefix,NewFileName);
457
458    /* Configuration says we don't want to include the long Description
459       in the package file - instead we want to ship a separate file */
460 string desc;
461 if (LongDescription == false) {
462 desc = Tags.FindS("Description").append("\n");
463 OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
464 }
465
466 // This lists all the changes to the fields we are going to make.
467 std::vector<TFRewriteData> Changes;
468
469 Changes.push_back(SetTFRewriteData("Size", Size));
470 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
471 {
472 if (hs->HashType() == "MD5Sum")
473 Changes.push_back(SetTFRewriteData("MD5sum", hs->HashValue().c_str()));
474 else
475 Changes.push_back(SetTFRewriteData(hs->HashType().c_str(), hs->HashValue().c_str()));
476 }
477 Changes.push_back(SetTFRewriteData("Filename", NewFileName.c_str()));
478 Changes.push_back(SetTFRewriteData("Priority", OverItem->Priority.c_str()));
479 Changes.push_back(SetTFRewriteData("Status", 0));
480 Changes.push_back(SetTFRewriteData("Optional", 0));
481
482 string DescriptionMd5;
483 if (LongDescription == false) {
484 MD5Summation descmd5;
485 descmd5.Add(desc.c_str());
486 DescriptionMd5 = descmd5.Result().Value();
487 Changes.push_back(SetTFRewriteData("Description-md5", DescriptionMd5.c_str()));
488 if (TransWriter != NULL)
489 TransWriter->DoPackage(Package, desc, DescriptionMd5);
490 }
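   // Note: Description-md5 above is the MD5 of the full Description field value
   // with one trailing newline appended (the same string handed to the
   // TranslationWriter), so it matches the checksum recorded in Translation-* files.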
491
492 // Rewrite the maintainer field if necessary
493 bool MaintFailed;
494 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
495 if (MaintFailed == true)
496 {
497 if (NoOverride == false)
498 {
499 NewLine(1);
500 ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
501 Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
502 }
503 }
504
505 if (NewMaint.empty() == false)
506 Changes.push_back(SetTFRewriteData("Maintainer", NewMaint.c_str()));
507
508    /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
509       dpkg-scanpackages does. Well, sort of: dpkg-scanpackages just does the renaming,
510       but dpkg does this append bit, so we do the append bit too; at least that way the
511       status file and package file will remain similar. There are other transforms,
512       but Optional is the only legacy one still in use, for some lazy reason. */
513 string OptionalStr = Tags.FindS("Optional");
514 if (OptionalStr.empty() == false)
515 {
516 if (Tags.FindS("Suggests").empty() == false)
517 OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
518 Changes.push_back(SetTFRewriteData("Suggests", OptionalStr.c_str()));
519 }
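   // For illustration: a stanza carrying "Suggests: bar" and "Optional: foo"
   // comes out with "Suggests: bar, foo" in the generated Packages file
   // (hypothetical field values, shown only to make the append concrete).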
520
521 for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
522 I != OverItem->FieldOverride.end(); ++I)
523 Changes.push_back(SetTFRewriteData(I->first.c_str(),I->second.c_str()));
524
525 Changes.push_back(SetTFRewriteData( 0, 0));
526
527 // Rewrite and store the fields.
528 if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes.data()) == false)
529 return false;
530 fprintf(Output,"\n");
531
532 return Db.Finish();
533 }
534 /*}}}*/
535
536 // TranslationWriter::TranslationWriter - Constructor /*{{{*/
537 // ---------------------------------------------------------------------
538 /* Create a Translation-Master file for this Packages file */
539 TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
540 mode_t const &Permissions) : Output(NULL),
541 RefCounter(0)
542 {
543 if (File.empty() == true)
544 return;
545
546 Comp = new MultiCompress(File, TransCompress, Permissions);
547 Output = Comp->Input;
548 }
549 /*}}}*/
550 // TranslationWriter::DoPackage - Process a single package /*{{{*/
551 // ---------------------------------------------------------------------
552 /* Write a single Package/Description-md5/Description-en record to the Translation master file */
553 bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
554 string const &MD5)
555 {
556 if (Output == NULL)
557 return true;
558
559 // Different archs can include different versions and therefore
560 // different descriptions - so we need to check for both name and md5.
561 string const Record = Pkg + ":" + MD5;
562
563 if (Included.find(Record) != Included.end())
564 return true;
565
566 fprintf(Output, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
567 Pkg.c_str(), MD5.c_str(), Desc.c_str());
568
569 Included.insert(Record);
570 return true;
571 }
572 /*}}}*/
573 // TranslationWriter::~TranslationWriter - Destructor /*{{{*/
574 // ---------------------------------------------------------------------
575 /* */
576 TranslationWriter::~TranslationWriter()
577 {
578 if (Comp == NULL)
579 return;
580
581 delete Comp;
582 }
583 /*}}}*/
584
585 // SourcesWriter::SourcesWriter - Constructor /*{{{*/
586 // ---------------------------------------------------------------------
587 /* */
588 SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string const &SOverrides,
589 string const &ExtOverrides) :
590 Db(DB), Stats(Db.Stats)
591 {
592 Output = stdout;
593 AddPattern("*.dsc");
594 DeLinkLimit = 0;
595 Buffer = 0;
596 BufSize = 0;
597
598 // Process the command line options
599 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
600 NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
601 DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
602
603 // Read the override file
604 if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
605 return;
606 else
607 NoOverride = true;
608
609 // WTF?? The logic above: if we can't read binary overrides, don't even try
610 // reading source overrides. if we can read binary overrides, then say there
611 // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28
612
613 if (ExtOverrides.empty() == false)
614 SOver.ReadExtraOverride(ExtOverrides);
615
616 if (SOverrides.empty() == false && FileExists(SOverrides) == true)
617 SOver.ReadOverride(SOverrides,true);
618 }
619 /*}}}*/
620 // SourcesWriter::DoPackage - Process a single package /*{{{*/
621 static std::ostream& addDscHash(std::ostream &out, unsigned int const DoHashes,
622 Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
623 HashString const * const Hash, unsigned long long Size, std::string FileName)
624 {
625 if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
626 return out;
627 out << "\n " << Hash->HashValue() << " " << Size << " " << FileName
628 << "\n " << Tags.FindS(FieldName);
629 return out;
630 }
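// For illustration, assuming the .dsc already carries a Files field, the helper
// above emits roughly
//
//    "\n <md5 of the .dsc> <size> foo_1.0-1.dsc\n <content of the Files field>"
//
// i.e. a hash line for the .dsc itself followed by the file list copied from the
// tag section ("foo_1.0-1.dsc" is a made-up name).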
631 bool SourcesWriter::DoPackage(string FileName)
632 {
633    // Pull all the data we need from the DB
634 if (Db.GetFileInfo(FileName,
635 false, /* DoControl */
636 false, /* DoContents */
637 false, /* GenContentsOnly */
638 true, /* DoSource */
639 DoHashes, DoAlwaysStat) == false)
640 {
641 return false;
642 }
643
644    // We need to perform a "write" here (this is what Finish() does),
645    // because the call to Db.GetFileInfo() in the loop below will change
646    // the "db cursor".
647 Db.Finish();
648
649 // read stuff
650 char *Start = Db.Dsc.Data;
651 char *BlkEnd = Db.Dsc.Data + Db.Dsc.Length;
652
653 // Add extra \n to the end, just in case (as in clearsigned they are missing)
654 *BlkEnd++ = '\n';
655 *BlkEnd++ = '\n';
656
657 pkgTagSection Tags;
658 if (Tags.Scan(Start,BlkEnd - Start) == false)
659 return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
660
661 if (Tags.Exists("Source") == false)
662 return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
663 Tags.Trim();
664
665    // Look up the override information, first finding the best priority.
666 string BestPrio;
667 string Bins = Tags.FindS("Binary");
668 char Buffer[Bins.length() + 1];
669 auto_ptr<Override::Item> OverItem(0);
670 if (Bins.empty() == false)
671 {
672 strcpy(Buffer,Bins.c_str());
673
674 // Ignore too-long errors.
675 char *BinList[400];
676 TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));
677
678 // Look at all the binaries
679 unsigned char BestPrioV = pkgCache::State::Extra;
680 for (unsigned I = 0; BinList[I] != 0; I++)
681 {
682 auto_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
683 if (Itm.get() == 0)
684 continue;
685
686 unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
687 if (NewPrioV < BestPrioV || BestPrio.empty() == true)
688 {
689 BestPrioV = NewPrioV;
690 BestPrio = Itm->Priority;
691 }
692
693 if (OverItem.get() == 0)
694 OverItem = Itm;
695 }
696 }
697
698 // If we need to do any rewriting of the header do it now..
699 if (OverItem.get() == 0)
700 {
701 if (NoOverride == false)
702 {
703 NewLine(1);
704 ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());
705 }
706
707 OverItem = auto_ptr<Override::Item>(new Override::Item);
708 }
709
710 struct stat St;
711 if (stat(FileName.c_str(), &St) != 0)
712 return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
713
714 auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
715 // const auto_ptr<Override::Item> autoSOverItem(SOverItem);
716 if (SOverItem.get() == 0)
717 {
718 ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
719 SOverItem = auto_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
720 if (SOverItem.get() == 0)
721 {
722 ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
723 SOverItem = auto_ptr<Override::Item>(new Override::Item);
724 *SOverItem = *OverItem;
725 }
726 }
727
728 // Add the dsc to the files hash list
729 string const strippedName = flNotDir(FileName);
730 std::ostringstream ostreamFiles;
731 addDscHash(ostreamFiles, DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
732 string const Files = ostreamFiles.str();
733
734 std::ostringstream ostreamSha1;
735 addDscHash(ostreamSha1, DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
736 std::ostringstream ostreamSha256;
737 addDscHash(ostreamSha256, DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
738 std::ostringstream ostreamSha512;
739 addDscHash(ostreamSha512, DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);
740
741 // Strip the DirStrip prefix from the FileName and add the PathPrefix
742 string NewFileName;
743 if (DirStrip.empty() == false &&
744 FileName.length() > DirStrip.length() &&
745 stringcmp(DirStrip,OriginalPath,OriginalPath + DirStrip.length()) == 0)
746 NewFileName = string(OriginalPath + DirStrip.length());
747 else
748 NewFileName = OriginalPath;
749 if (PathPrefix.empty() == false)
750 NewFileName = flCombine(PathPrefix,NewFileName);
751
752 string Directory = flNotFile(OriginalPath);
753 string Package = Tags.FindS("Source");
754
755 // Perform operation over all of the files
756 string ParseJnk;
757 const char *C = Files.c_str();
758 char *RealPath = NULL;
759 for (;isspace(*C); C++);
760 while (*C != 0)
761 {
762 // Parse each of the elements
763 if (ParseQuoteWord(C,ParseJnk) == false ||
764 ParseQuoteWord(C,ParseJnk) == false ||
765 ParseQuoteWord(C,ParseJnk) == false)
766 return _error->Error("Error parsing file record");
767
768 string OriginalPath = Directory + ParseJnk;
769
770 // Add missing hashes to source files
771 if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
772 ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
773 ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
774 {
775 if (Db.GetFileInfo(OriginalPath,
776 false, /* DoControl */
777 false, /* DoContents */
778 false, /* GenContentsOnly */
779 false, /* DoSource */
780 DoHashes,
781 DoAlwaysStat) == false)
782 {
783 return _error->Error("Error getting file info");
784 }
785
786 for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
787 {
788 if (hs->HashType() == "MD5Sum")
789 continue;
790 char const * fieldname;
791 std::ostream * out;
792 if (hs->HashType() == "SHA1")
793 {
794 fieldname = "Checksums-Sha1";
795 out = &ostreamSha1;
796 }
797 else if (hs->HashType() == "SHA256")
798 {
799 fieldname = "Checksums-Sha256";
800 out = &ostreamSha256;
801 }
802 else if (hs->HashType() == "SHA512")
803 {
804 fieldname = "Checksums-Sha512";
805 out = &ostreamSha512;
806 }
807 else
808 {
809 _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
810 continue;
811 }
812 if (Tags.Exists(fieldname) == true)
813 continue;
814 (*out) << "\n " << hs->HashValue() << " " << Db.GetFileSize() << " " << ParseJnk;
815 }
816
817 // write back the GetFileInfo() stats data
818 Db.Finish();
819 }
820
821 // Perform the delinking operation
822 char Jnk[2];
823
824 if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
825 (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
826 {
827 string RP = RealPath;
828 free(RealPath);
829 if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
830 return false;
831 }
832 }
833
834 Directory = flNotFile(NewFileName);
835 if (Directory.length() > 2)
836 Directory.erase(Directory.end()-1);
837
838 string const ChecksumsSha1 = ostreamSha1.str();
839 string const ChecksumsSha256 = ostreamSha256.str();
840 string const ChecksumsSha512 = ostreamSha512.str();
841
842 // This lists all the changes to the fields we are going to make.
843 // (5 hardcoded + checksums + maintainer + end marker)
844 std::vector<TFRewriteData> Changes;
845
846 Changes.push_back(SetTFRewriteData("Source",Package.c_str(),"Package"));
847 if (Files.empty() == false)
848 Changes.push_back(SetTFRewriteData("Files",Files.c_str()));
849 if (ChecksumsSha1.empty() == false)
850 Changes.push_back(SetTFRewriteData("Checksums-Sha1",ChecksumsSha1.c_str()));
851 if (ChecksumsSha256.empty() == false)
852 Changes.push_back(SetTFRewriteData("Checksums-Sha256",ChecksumsSha256.c_str()));
853 if (ChecksumsSha512.empty() == false)
854 Changes.push_back(SetTFRewriteData("Checksums-Sha512",ChecksumsSha512.c_str()));
855 if (Directory != "./")
856 Changes.push_back(SetTFRewriteData("Directory",Directory.c_str()));
857 Changes.push_back(SetTFRewriteData("Priority",BestPrio.c_str()));
858 Changes.push_back(SetTFRewriteData("Status",0));
859
860 // Rewrite the maintainer field if necessary
861 bool MaintFailed;
862 string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
863 if (MaintFailed == true)
864 {
865 if (NoOverride == false)
866 {
867 NewLine(1);
868 ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
869 Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
870 }
871 }
872 if (NewMaint.empty() == false)
873 Changes.push_back(SetTFRewriteData("Maintainer", NewMaint.c_str()));
874
875 for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
876 I != SOverItem->FieldOverride.end(); ++I)
877 Changes.push_back(SetTFRewriteData(I->first.c_str(),I->second.c_str()));
878
879 Changes.push_back(SetTFRewriteData(0, 0));
880
881 // Rewrite and store the fields.
882 if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes.data()) == false)
883 return false;
884 fprintf(Output,"\n");
885
886 Stats.Packages++;
887
888 return true;
889 }
890 /*}}}*/
891
892 // ContentsWriter::ContentsWriter - Constructor /*{{{*/
893 // ---------------------------------------------------------------------
894 /* */
895 ContentsWriter::ContentsWriter(string const &DB, string const &Arch) :
896 FTWScanner(Arch), Db(DB), Stats(Db.Stats)
897
898 {
899 SetExts(".deb");
900 Output = stdout;
901 }
902 /*}}}*/
903 // ContentsWriter::DoPackage - Process a single package /*{{{*/
904 // ---------------------------------------------------------------------
905 /* If Package is the empty string the control record will be parsed to
906 determine what the package name is. */
907 bool ContentsWriter::DoPackage(string FileName, string Package)
908 {
909 if (!Db.GetFileInfo(FileName,
910 Package.empty(), /* DoControl */
911 true, /* DoContents */
912 false, /* GenContentsOnly */
913 false, /* DoSource */
914 0, /* DoHashes */
915 false /* checkMtime */))
916 {
917 return false;
918 }
919
920 // Parse the package name
921 if (Package.empty() == true)
922 {
923 Package = Db.Control.Section.FindS("Package");
924 }
925
926 Db.Contents.Add(Gen,Package);
927
928 return Db.Finish();
929 }
930 /*}}}*/
931 // ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
932 // ---------------------------------------------------------------------
933 /* */
934 bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
935 {
936 MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
937 if (_error->PendingError() == true)
938 return false;
939
940 // Open the package file
941 FileFd Fd;
942 if (Pkgs.OpenOld(Fd) == false)
943 return false;
944
945 pkgTagFile Tags(&Fd);
946 if (_error->PendingError() == true)
947 return false;
948
949 // Parse.
950 pkgTagSection Section;
951 while (Tags.Step(Section) == true)
952 {
953 string File = flCombine(Prefix,Section.FindS("FileName"));
954 string Package = Section.FindS("Section");
955 if (Package.empty() == false && Package.end()[-1] != '/')
956 {
957 Package += '/';
958 Package += Section.FindS("Package");
959 }
960 else
961 Package += Section.FindS("Package");
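      // The result is the "section/package" form used in Contents files
      // (e.g. "admin/apt"); stanzas without a Section keep the bare name.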
962
963 DoPackage(File,Package);
964 if (_error->empty() == false)
965 {
966 _error->Error("Errors apply to file '%s'",File.c_str());
967 _error->DumpErrors();
968 }
969 }
970
971 // Tidy the compressor
972 Fd.Close();
973
974 return true;
975 }
976
977 /*}}}*/
978
979 // ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
980 // ---------------------------------------------------------------------
981 /* */
982 ReleaseWriter::ReleaseWriter(string const &/*DB*/)
983 {
984 if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
985 {
986 AddPattern("Packages");
987 AddPattern("Packages.gz");
988 AddPattern("Packages.bz2");
989 AddPattern("Packages.lzma");
990 AddPattern("Packages.xz");
991 AddPattern("Translation-*");
992 AddPattern("Sources");
993 AddPattern("Sources.gz");
994 AddPattern("Sources.bz2");
995 AddPattern("Sources.lzma");
996 AddPattern("Sources.xz");
997 AddPattern("Release");
998 AddPattern("Contents-*");
999 AddPattern("Index");
1000 AddPattern("md5sum.txt");
1001 }
1002 AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));
1003
1004 Output = stdout;
1005 time_t const now = time(NULL);
1006
1007 setlocale(LC_TIME, "C");
1008
1009 char datestr[128];
1010 if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
1011 gmtime(&now)) == 0)
1012 {
1013 datestr[0] = '\0';
1014 }
1015
1016 time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
1017 char validstr[128];
1018 if (now == validuntil ||
1019 strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
1020 gmtime(&validuntil)) == 0)
1021 {
1022 validstr[0] = '\0';
1023 }
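   // For illustration: setting APT::FTPArchive::Release::ValidTime to 864000
   // (ten days, in seconds) puts Valid-Until ten days after Date; with the
   // default of 0 the field stays empty and is therefore omitted below.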
1024
1025 setlocale(LC_TIME, "");
1026
1027 map<string,string> Fields;
1028 Fields["Origin"] = "";
1029 Fields["Label"] = "";
1030 Fields["Suite"] = "";
1031 Fields["Version"] = "";
1032 Fields["Codename"] = "";
1033 Fields["Date"] = datestr;
1034 Fields["Valid-Until"] = validstr;
1035 Fields["Architectures"] = "";
1036 Fields["Components"] = "";
1037 Fields["Description"] = "";
1038
1039 for(map<string,string>::const_iterator I = Fields.begin();
1040 I != Fields.end();
1041 ++I)
1042 {
1043 string Config = string("APT::FTPArchive::Release::") + (*I).first;
1044 string Value = _config->Find(Config, (*I).second.c_str());
1045 if (Value == "")
1046 continue;
1047
1048 fprintf(Output, "%s: %s\n", (*I).first.c_str(), Value.c_str());
1049 }
1050
1051 ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
1052 }
1053 /*}}}*/
1054 // ReleaseWriter::DoPackage - Process a single package /*{{{*/
1055 // ---------------------------------------------------------------------
1056 bool ReleaseWriter::DoPackage(string FileName)
1057 {
1058 // Strip the DirStrip prefix from the FileName and add the PathPrefix
1059 string NewFileName;
1060 if (DirStrip.empty() == false &&
1061 FileName.length() > DirStrip.length() &&
1062 stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
1063 DirStrip.begin(),DirStrip.end()) == 0)
1064 {
1065 NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
1066 while (NewFileName[0] == '/')
1067 NewFileName = string(NewFileName.begin() + 1,NewFileName.end());
1068 }
1069 else
1070 NewFileName = FileName;
1071
1072 if (PathPrefix.empty() == false)
1073 NewFileName = flCombine(PathPrefix,NewFileName);
1074
1075 FileFd fd(FileName, FileFd::ReadOnly);
1076
1077 if (!fd.IsOpen())
1078 {
1079 return false;
1080 }
1081
1082 CheckSums[NewFileName].size = fd.Size();
1083
1084 Hashes hs;
1085 hs.AddFD(fd, 0, DoHashes);
1086 CheckSums[NewFileName].Hashes = hs.GetHashStringList();
1087 fd.Close();
1088
1089 return true;
1090 }
1091
1092 /*}}}*/
1093 // ReleaseWriter::Finish - Output the checksums /*{{{*/
1094 // ---------------------------------------------------------------------
1095 static void printChecksumTypeRecord(FILE * const Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
1096 {
1097 fprintf(Output, "%s:\n", Type);
1098 for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
1099 I != CheckSums.end(); ++I)
1100 {
1101 HashString const * const hs = I->second.Hashes.find(Type);
1102 if (hs == NULL)
1103 continue;
1104 fprintf(Output, " %s %16llu %s\n",
1105 hs->HashValue().c_str(),
1106 (*I).second.size,
1107 (*I).first.c_str());
1108 }
1109 }
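// For illustration: each enabled hash yields one block in the Release file,
// roughly
//
//    MD5Sum:
//     9df4...          20471330 main/binary-amd64/Packages
//    SHA256:
//     3ac6...          20471330 main/binary-amd64/Packages
//
// (hash values, sizes and paths above are made-up examples).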
1110 void ReleaseWriter::Finish()
1111 {
1112 if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
1113 printChecksumTypeRecord(Output, "MD5Sum", CheckSums);
1114 if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
1115 printChecksumTypeRecord(Output, "SHA1", CheckSums);
1116 if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
1117 printChecksumTypeRecord(Output, "SHA256", CheckSums);
1118 if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
1119 printChecksumTypeRecord(Output, "SHA512", CheckSums);
1120 }