// apt.git: ftparchive/writer.cc
// -*- mode: cpp; mode: fold -*-
// $Id: writer.cc,v 1.14 2004/03/24 01:40:43 mdz Exp $
/* ######################################################################

   The file writer classes. These write various types of output, sources,
   packages and contents.

   ##################################################################### */
// Include Files							/*{{{*/
#include <config.h>

#include <apt-pkg/configuration.h>
#include <apt-pkg/deblistparser.h>
#include <apt-pkg/error.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/gpgv.h>
#include <apt-pkg/hashes.h>
#include <apt-pkg/md5.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/debfile.h>
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/sha2.h>
#include <apt-pkg/tagfile.h>

#include <sys/types.h>
#include <sys/stat.h>
#include <fnmatch.h>
#include <ftw.h>
#include <unistd.h>
#include <iostream>
#include <sstream>
#include <memory>
#include <map>
#include <vector>

#include "apt-ftparchive.h"
#include "cachedb.h"
#include "override.h"
#include "writer.h"
#include "multicompress.h"

#include <apti18n.h>

using namespace std;
FTWScanner *FTWScanner::Owner;

// ConfigToDoHashes - which hashes to generate				/*{{{*/
static void SingleConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf, unsigned int const Flag)
{
   if (_config->FindB(Conf, (DoHashes & Flag) == Flag) == true)
      DoHashes |= Flag;
   else
      DoHashes &= ~Flag;
}
static void ConfigToDoHashes(unsigned int &DoHashes, std::string const &Conf)
{
   SingleConfigToDoHashes(DoHashes, Conf + "::MD5", Hashes::MD5SUM);
   SingleConfigToDoHashes(DoHashes, Conf + "::SHA1", Hashes::SHA1SUM);
   SingleConfigToDoHashes(DoHashes, Conf + "::SHA256", Hashes::SHA256SUM);
   SingleConfigToDoHashes(DoHashes, Conf + "::SHA512", Hashes::SHA512SUM);
}
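
// Illustrative sketch (not part of the upstream file): the keys read above
// correspond to apt.conf settings such as
//
//    APT::FTPArchive::SHA512 "false";        // affects every writer
//    APT::FTPArchive::Packages::MD5 "true";  // per-writer override
//
// Writers start with DoHashes = ~0 (all hashes enabled) and each boolean
// simply switches the matching Hashes::*SUM flag bit on or off.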
// FTWScanner::FTWScanner - Constructor					/*{{{*/
FTWScanner::FTWScanner(FileFd * const GivenOutput, string const &Arch, bool const IncludeArchAll)
   : Arch(Arch), IncludeArchAll(IncludeArchAll), DoHashes(~0)
{
   if (GivenOutput == NULL)
   {
      Output = new FileFd;
      OwnsOutput = true;
      Output->OpenDescriptor(STDOUT_FILENO, FileFd::WriteOnly, false);
   }
   else
   {
      Output = GivenOutput;
      OwnsOutput = false;
   }
   NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);
   ConfigToDoHashes(DoHashes, "APT::FTPArchive");
}
FTWScanner::~FTWScanner()
{
   if (Output != NULL && OwnsOutput)
      delete Output;
}
									/*}}}*/
// FTWScanner::Scanner - FTW Scanner					/*{{{*/
// ---------------------------------------------------------------------
/* This is the FTW scanner, it processes each directory element in the
   directory tree. */
int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
{
   if (Flag == FTW_DNR)
      ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
   if (Flag == FTW_NS)
      ioprintf(c1out, _("W: Unable to stat %s\n"), File);
   if (Flag != FTW_F)
      return 0;

   return ScannerFile(File, true);
}
// FTWScanner::ScannerFile - File Scanner				/*{{{*/
// ---------------------------------------------------------------------
int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
{
   const char *LastComponent = strrchr(File, '/');
   char *RealPath = NULL;

   if (LastComponent == NULL)
      LastComponent = File;
   else
      LastComponent++;

   vector<string>::const_iterator I;
   for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
   {
      if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
         break;
   }
   if (I == Owner->Patterns.end())
      return 0;

   /* Process it. If the file is a link then resolve it into an absolute
      name.. This works best if the directory components the scanner are
      given are not links themselves. */
   char Jnk[2];
   Owner->OriginalPath = File;
   if (ReadLink &&
       readlink(File,Jnk,sizeof(Jnk)) != -1 &&
       (RealPath = realpath(File,NULL)) != 0)
   {
      Owner->DoPackage(RealPath);
      free(RealPath);
   }
   else
      Owner->DoPackage(File);

   if (_error->empty() == false)
   {
      // Print any errors or warnings found
      string Err;
      bool SeenPath = false;
      while (_error->empty() == false)
      {
         bool const Type = _error->PopMessage(Err);
         if (Type == true)
            cerr << _("E: ") << Err << endl;
         else
            cerr << _("W: ") << Err << endl;

         if (Err.find(File) != string::npos)
            SeenPath = true;
      }

      if (SeenPath == false)
         cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
   }

   return 0;
}
// FTWScanner::RecursiveScan - Just scan a directory tree		/*{{{*/
// ---------------------------------------------------------------------
bool FTWScanner::RecursiveScan(string const &Dir)
{
   char *RealPath = NULL;
   /* If noprefix is set then jam the scan root in, so we don't generate
      link followed paths out of control */
   if (InternalPrefix.empty() == true)
   {
      if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
         return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
      InternalPrefix = RealPath;
      free(RealPath);
   }

   // Do recursive directory searching
   Owner = this;
   int const Res = ftw(Dir.c_str(),ScannerFTW,30);

   // Error treewalking?
   if (Res != 0)
   {
      if (_error->PendingError() == false)
         _error->Errno("ftw",_("Tree walking failed"));
      return false;
   }

   return true;
}
// FTWScanner::LoadFileList - Load the file list from a file		/*{{{*/
// ---------------------------------------------------------------------
/* This is an alternative to using FTW to locate files, it reads the list
   of files from another file. */
bool FTWScanner::LoadFileList(string const &Dir, string const &File)
{
   char *RealPath = NULL;
   /* If noprefix is set then jam the scan root in, so we don't generate
      link followed paths out of control */
   if (InternalPrefix.empty() == true)
   {
      if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
         return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
      InternalPrefix = RealPath;
      free(RealPath);
   }

   Owner = this;
   FILE *List = fopen(File.c_str(),"r");
   if (List == 0)
      return _error->Errno("fopen",_("Failed to open %s"),File.c_str());

   /* We are a tad tricky here.. We prefix the buffer with the directory
      name, that way if we need a full path we just use the line.. Sneaky and
      fully evil. */
   char Line[1000];
   char *FileStart;
   if (Dir.empty() == true || Dir.end()[-1] != '/')
      FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
   else
      FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
   while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
   {
      char *FileName = _strstrip(FileStart);
      if (FileName[0] == 0)
         continue;

      if (FileName[0] != '/')
      {
         if (FileName != FileStart)
            memmove(FileStart,FileName,strlen(FileStart));
         FileName = Line;
      }

#if 0
      struct stat St;
      int Flag = FTW_F;
      if (stat(FileName,&St) != 0)
         Flag = FTW_NS;
#endif

      if (ScannerFile(FileName, false) != 0)
         break;
   }

   fclose(List);
   return true;
}
// FTWScanner::Delink - Delink symlinks					/*{{{*/
// ---------------------------------------------------------------------
bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
                        unsigned long long &DeLinkBytes,
                        unsigned long long const &FileSize)
{
   // See if this isn't an internally prefixed file name.
   if (InternalPrefix.empty() == false &&
       InternalPrefix.length() < FileName.length() &&
       stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
                 InternalPrefix.begin(),InternalPrefix.end()) != 0)
   {
      if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
      {
         // Tidy up the display
         if (DeLinkBytes == 0)
            cout << endl;

         ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
                  SizeToStr(FileSize).c_str());

         if (NoLinkAct == false)
         {
            char OldLink[400];
            if (readlink(OriginalPath,OldLink,sizeof(OldLink)) == -1)
               _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
            else
            {
               if (RemoveFile("FTWScanner::Delink", OriginalPath))
               {
                  if (link(FileName.c_str(),OriginalPath) != 0)
                  {
                     // Panic! Restore the symlink
                     if (symlink(OldLink,OriginalPath) != 0)
                        _error->Errno("symlink", "failed to restore symlink");
                     return _error->Errno("link",_("*** Failed to link %s to %s"),
                                          FileName.c_str(), OriginalPath);
                  }
               }
            }
         }

         DeLinkBytes += FileSize;
         if (DeLinkBytes/1024 >= DeLinkLimit)
            ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
      }

      FileName = OriginalPath;
   }

   return true;
}
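
/* Illustrative example (paths hypothetical, not from this file): when the
   scanned entry is only a symlink into the pool, e.g.

      dists/unstable/main/binary-amd64/hello_2.10_amd64.deb
         -> ../../../../pool/main/h/hello/hello_2.10_amd64.deb

   Delink() replaces the symlink with a hard link to the resolved target and
   adds the file size to DeLinkBytes until DeLinkLimit is exhausted; the
   caller then continues with the original (symlink) path as FileName. */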
// FTWScanner::SetExts - Set extensions to support			/*{{{*/
// ---------------------------------------------------------------------
bool FTWScanner::SetExts(string const &Vals)
{
   ClearPatterns();
   string::size_type Start = 0;
   while (Start <= Vals.length()-1)
   {
      string::size_type const Space = Vals.find(' ',Start);
      string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
      if ( Arch.empty() == false )
      {
         AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
         if (IncludeArchAll == true && Arch != "all")
            AddPattern(string("*_all") + Vals.substr(Start, Length));
      }
      else
         AddPattern(string("*") + Vals.substr(Start, Length));

      Start += Length + 1;
   }

   return true;
}
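
// Example (illustrative, not from the source): for a writer created with
// Arch "amd64" and IncludeArchAll true, SetExts(".deb .udeb") registers
//    *_amd64.deb  *_all.deb  *_amd64.udeb  *_all.udeb
// while an empty Arch simply yields *.deb and *.udeb.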
// PackagesWriter::PackagesWriter - Constructor				/*{{{*/
// ---------------------------------------------------------------------
PackagesWriter::PackagesWriter(FileFd * const GivenOutput, TranslationWriter * const transWriter,
                               string const &DB,string const &Overrides,string const &ExtOverrides,
                               string const &Arch, bool const IncludeArchAll) :
   FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats), TransWriter(transWriter)
{
   SetExts(".deb .udeb");

   // Process the command line options
   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Packages");
   DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
   DoContents = _config->FindB("APT::FTPArchive::Contents",true);
   NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
   LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);

   if (Db.Loaded() == false)
      DoContents = false;

   // Read the override file
   if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
      return;

   if (ExtOverrides.empty() == false)
      Over.ReadExtraOverride(ExtOverrides);

   _error->DumpErrors();
}
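
// Hedged sketch (override format recalled from dpkg-scanpackages, not shown
// in this file): each line of the file read by Over.ReadOverride() names a
// package, its forced priority and section, and optionally a maintainer
// override, e.g.
//
//    hello   optional   devel   Some Maintainer <maint@example.org>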
// PackagesWriter::DoPackage - Process a single package		/*{{{*/
// ---------------------------------------------------------------------
/* This method takes a package and gets its control information and
   MD5, SHA1 and SHA256 then writes out a control record with the proper fields
   rewritten and the path/size/hash appended. */
bool PackagesWriter::DoPackage(string FileName)
{
   // Pull all the data we need from the DB
   if (Db.GetFileInfo(FileName,
                      true,        /* DoControl */
                      DoContents,  /* DoContents */
                      true,        /* GenContentsOnly */
                      false,       /* DoSource */
                      DoHashes, DoAlwaysStat) == false)
   {
      return false;
   }

   unsigned long long FileSize = Db.GetFileSize();
   if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
      return false;

   // Lookup the override information
   pkgTagSection &Tags = Db.Control.Section;
   string Package = Tags.FindS("Package");
   string Architecture;
   // if we generate a Packages file for a given arch, we use it to
   // look for overrides. if we run in "simple" mode without the
   // "Architectures" variable in the config we use the architecture value
   // from the deb file
   if (Arch.empty() == false)
      Architecture = Arch;
   else
      Architecture = Tags.FindS("Architecture");
   unique_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));

   if (Package.empty() == true)
      return _error->Error(_("Archive had no package field"));

   // If we need to do any rewriting of the header do it now..
   if (OverItem.get() == 0)
   {
      if (NoOverride == false)
         ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());

      OverItem = unique_ptr<Override::Item>(new Override::Item);
      OverItem->FieldOverride["Section"] = Tags.FindS("Section");
      OverItem->Priority = Tags.FindS("Priority");
   }

   // Strip the DirStrip prefix from the FileName and add the PathPrefix
   string NewFileName;
   if (DirStrip.empty() == false &&
       FileName.length() > DirStrip.length() &&
       stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
                 DirStrip.begin(),DirStrip.end()) == 0)
      NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
   else
      NewFileName = FileName;
   if (PathPrefix.empty() == false)
      NewFileName = flCombine(PathPrefix,NewFileName);

   /* Configuration says we don't want to include the long Description
      in the Packages file - instead we want to ship a separate file */
   string desc;
   if (LongDescription == false) {
      desc = Tags.FindS("Description").append("\n");
      OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
   }

   // This lists all the changes to the fields we are going to make.
   std::vector<pkgTagSection::Tag> Changes;

   std::string Size;
   strprintf(Size, "%llu", (unsigned long long) FileSize);
   Changes.push_back(pkgTagSection::Tag::Rewrite("Size", Size));

   for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
   {
      if (hs->HashType() == "MD5Sum")
         Changes.push_back(pkgTagSection::Tag::Rewrite("MD5sum", hs->HashValue()));
      else if (hs->HashType() == "Checksum-FileSize")
         continue;
      else
         Changes.push_back(pkgTagSection::Tag::Rewrite(hs->HashType(), hs->HashValue()));
   }
   Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", NewFileName));
   Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", OverItem->Priority));
   Changes.push_back(pkgTagSection::Tag::Remove("Status"));
   Changes.push_back(pkgTagSection::Tag::Remove("Optional"));

   string DescriptionMd5;
   if (LongDescription == false) {
      MD5Summation descmd5;
      descmd5.Add(desc.c_str());
      DescriptionMd5 = descmd5.Result().Value();
      Changes.push_back(pkgTagSection::Tag::Rewrite("Description-md5", DescriptionMd5));
      if (TransWriter != NULL)
         TransWriter->DoPackage(Package, desc, DescriptionMd5);
   }

   // Rewrite the maintainer field if necessary
   bool MaintFailed;
   string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
   if (MaintFailed == true)
   {
      if (NoOverride == false)
         ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
                  Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
   }

   if (NewMaint.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint));

   /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
      dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
      but dpkg does this append bit. So we do the append bit, at least that way the
      status file and package file will remain similar. There are other transforms
      but optional is the only legacy one still in use for some lazy reason. */
   string OptionalStr = Tags.FindS("Optional");
   if (OptionalStr.empty() == false)
   {
      if (Tags.FindS("Suggests").empty() == false)
         OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
      Changes.push_back(pkgTagSection::Tag::Rewrite("Suggests", OptionalStr));
   }

   for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
        I != OverItem->FieldOverride.end(); ++I)
      Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));

   // Rewrite and store the fields.
   if (Tags.Write(*Output, TFRewritePackageOrder, Changes) == false ||
       Output->Write("\n", 1) == false)
      return false;

   return true;
}
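
/* Sketch of the effect of the Changes list above on one stanza (values are
   made up for illustration):

      Package: hello
      ...
      Filename: pool/main/h/hello/hello_2.10_amd64.deb
      Size: 56132
      MD5sum: 9c1a...
      SHA256: 4f0c...

   i.e. Filename, Size and one field per configured hash are rewritten, while
   Status and Optional are dropped from the record. */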
PackagesWriter::~PackagesWriter()					/*{{{*/
{
}
// TranslationWriter::TranslationWriter - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* Create a Translation-Master file for this Packages file */
TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
                                     mode_t const &Permissions) : Comp(NULL), Output(NULL)
{
   if (File.empty() == true)
      return;

   Comp = new MultiCompress(File, TransCompress, Permissions);
   Output = &Comp->Input;
}
									/*}}}*/
// TranslationWriter::DoPackage - Process a single package		/*{{{*/
// ---------------------------------------------------------------------
/* Create a Translation-Master file for this Packages file */
bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
                                  string const &MD5)
{
   if (Output == NULL)
      return true;

   // Different archs can include different versions and therefore
   // different descriptions - so we need to check for both name and md5.
   string const Record = Pkg + ":" + MD5;

   if (Included.find(Record) != Included.end())
      return true;

   string out;
   strprintf(out, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
             Pkg.c_str(), MD5.c_str(), Desc.c_str());
   Output->Write(out.c_str(), out.length());

   Included.insert(Record);
   return true;
}
									/*}}}*/
// TranslationWriter::~TranslationWriter - Destructor			/*{{{*/
// ---------------------------------------------------------------------
TranslationWriter::~TranslationWriter()
{
   if (Comp != NULL)
      delete Comp;
}
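
/* Example record as emitted by TranslationWriter::DoPackage above (values are
   illustrative only):

      Package: hello
      Description-md5: 35a...
      Description-en: example package
       the long description continues on indented lines

   Duplicate Package:md5 pairs are suppressed via the Included set. */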
// SourcesWriter::SourcesWriter - Constructor				/*{{{*/
// ---------------------------------------------------------------------
SourcesWriter::SourcesWriter(FileFd * const GivenOutput, string const &DB, string const &BOverrides,string const &SOverrides,
                             string const &ExtOverrides) :
   FTWScanner(GivenOutput), Db(DB), Stats(Db.Stats)
{
   AddPattern("*.dsc");

   // Process the command line options
   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Sources");
   NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
   DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);

   // Read the override file
   if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
      return;

   // WTF?? The logic above: if we can't read binary overrides, don't even try
   // reading source overrides. if we can read binary overrides, then say there
   // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28

   if (ExtOverrides.empty() == false)
      SOver.ReadExtraOverride(ExtOverrides);

   if (SOverrides.empty() == false && FileExists(SOverrides) == true)
      SOver.ReadOverride(SOverrides,true);
}
// SourcesWriter::DoPackage - Process a single package			/*{{{*/
static std::string getDscHash(unsigned int const DoHashes,
      Hashes::SupportedHashes const DoIt, pkgTagSection &Tags, char const * const FieldName,
      HashString const * const Hash, unsigned long long Size, std::string const &FileName)
{
   if ((DoHashes & DoIt) != DoIt || Tags.Exists(FieldName) == false || Hash == NULL)
      return "";
   std::ostringstream out;
   out << "\n " << Hash->HashValue() << " " << std::to_string(Size) << " " << FileName
       << "\n " << Tags.FindS(FieldName);
   return out.str();
}
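
/* getDscHash builds the replacement value for one checksum field: it prepends
   an entry for the .dsc itself and keeps whatever the field already listed.
   Illustrative result (hashes and sizes made up):

      Checksums-Sha256:
       4f0c...    1912 hello_2.10-3.dsc
       9ab1...  725837 hello_2.10.orig.tar.gz
       ...                                                              */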
bool SourcesWriter::DoPackage(string FileName)
{
   // Pull all the data we need from the DB
   if (Db.GetFileInfo(FileName,
                      false, /* DoControl */
                      false, /* DoContents */
                      false, /* GenContentsOnly */
                      true,  /* DoSource */
                      DoHashes, DoAlwaysStat) == false)
   {
      return false;
   }

   // we need to perform a "write" here (this is what finish is doing)
   // because the call to Db.GetFileInfo() in the loop will change
   // the "db cursor"
   Db.Finish();

   pkgTagSection Tags;
   if (Tags.Scan(Db.Dsc.Data.c_str(), Db.Dsc.Data.length()) == false)
      return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());

   if (Tags.Exists("Source") == false)
      return _error->Error("Could not find a Source entry in the DSC '%s'",FileName.c_str());
   Tags.Trim();

   // Lookup the override information, finding first the best priority.
   string BestPrio;
   string Bins = Tags.FindS("Binary");
   char Buffer[Bins.length() + 1];
   unique_ptr<Override::Item> OverItem(nullptr);
   if (Bins.empty() == false)
   {
      strcpy(Buffer,Bins.c_str());

      // Ignore too-long errors.
      char *BinList[400];
      TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));

      // Look at all the binaries
      unsigned char BestPrioV = pkgCache::State::Extra;
      for (unsigned I = 0; BinList[I] != 0; I++)
      {
         unique_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
         if (Itm.get() == 0)
            continue;

         unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
         if (NewPrioV < BestPrioV || BestPrio.empty() == true)
         {
            BestPrioV = NewPrioV;
            BestPrio = Itm->Priority;
         }

         if (OverItem.get() == 0)
            OverItem = std::move(Itm);
      }
   }
   // If we need to do any rewriting of the header do it now..
   if (OverItem.get() == 0)
   {
      if (NoOverride == false)
         ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());

      OverItem.reset(new Override::Item);
   }

   struct stat St;
   if (stat(FileName.c_str(), &St) != 0)
      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());

   unique_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
   // const unique_ptr<Override::Item> autoSOverItem(SOverItem);
   if (SOverItem.get() == 0)
   {
      ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
      SOverItem = unique_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
      if (SOverItem.get() == 0)
      {
         ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
         SOverItem = unique_ptr<Override::Item>(new Override::Item);
         *SOverItem = *OverItem;
      }
   }

   // Add the dsc to the files hash list
   string const strippedName = flNotDir(FileName);
   std::string const Files = getDscHash(DoHashes, Hashes::MD5SUM, Tags, "Files", Db.HashesList.find("MD5Sum"), St.st_size, strippedName);
   std::string ChecksumsSha1 = getDscHash(DoHashes, Hashes::SHA1SUM, Tags, "Checksums-Sha1", Db.HashesList.find("SHA1"), St.st_size, strippedName);
   std::string ChecksumsSha256 = getDscHash(DoHashes, Hashes::SHA256SUM, Tags, "Checksums-Sha256", Db.HashesList.find("SHA256"), St.st_size, strippedName);
   std::string ChecksumsSha512 = getDscHash(DoHashes, Hashes::SHA512SUM, Tags, "Checksums-Sha512", Db.HashesList.find("SHA512"), St.st_size, strippedName);
   // Strip the DirStrip prefix from the FileName and add the PathPrefix
   string NewFileName;
   if (DirStrip.empty() == false &&
       FileName.length() > DirStrip.length() &&
       stringcmp(DirStrip,OriginalPath,OriginalPath + DirStrip.length()) == 0)
      NewFileName = string(OriginalPath + DirStrip.length());
   else
      NewFileName = OriginalPath;
   if (PathPrefix.empty() == false)
      NewFileName = flCombine(PathPrefix,NewFileName);

   string Directory = flNotFile(OriginalPath);
   string Package = Tags.FindS("Source");
   // Perform operation over all of the files
   string ParseJnk;
   const char *C = Files.c_str();
   char *RealPath = NULL;
   for (;isspace(*C); C++);
   while (*C != 0)
   {
      // Parse each of the elements
      if (ParseQuoteWord(C,ParseJnk) == false ||
          ParseQuoteWord(C,ParseJnk) == false ||
          ParseQuoteWord(C,ParseJnk) == false)
         return _error->Error("Error parsing file record");

      string OriginalPath = Directory + ParseJnk;

      // Add missing hashes to source files
      if (((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM && !Tags.Exists("Checksums-Sha1")) ||
          ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM && !Tags.Exists("Checksums-Sha256")) ||
          ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM && !Tags.Exists("Checksums-Sha512")))
      {
         if (Db.GetFileInfo(OriginalPath,
                            false, /* DoControl */
                            false, /* DoContents */
                            false, /* GenContentsOnly */
                            false, /* DoSource */
                            DoHashes,
                            DoAlwaysStat) == false)
         {
            return _error->Error("Error getting file info");
         }

         for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs)
         {
            if (hs->HashType() == "MD5Sum" || hs->HashType() == "Checksum-FileSize")
               continue;
            char const * fieldname;
            std::string * out;
            if (hs->HashType() == "SHA1")
            {
               fieldname = "Checksums-Sha1";
               out = &ChecksumsSha1;
            }
            else if (hs->HashType() == "SHA256")
            {
               fieldname = "Checksums-Sha256";
               out = &ChecksumsSha256;
            }
            else if (hs->HashType() == "SHA512")
            {
               fieldname = "Checksums-Sha512";
               out = &ChecksumsSha512;
            }
            else
            {
               _error->Warning("Ignoring unknown Checksumtype %s in SourcesWriter::DoPackages", hs->HashType().c_str());
               continue;
            }
            if (Tags.Exists(fieldname) == true)
               continue;
            std::ostringstream streamout;
            streamout << "\n " << hs->HashValue() << " " << std::to_string(Db.GetFileSize()) << " " << ParseJnk;
            out->append(streamout.str());
         }

         // write back the GetFileInfo() stats data
         Db.Finish();
      }
      // Perform the delinking operation
      char Jnk[2];

      if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
          (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
      {
         string RP = RealPath;
         free(RealPath);
         if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
            return false;
      }
   }

   Directory = flNotFile(NewFileName);
   if (Directory.length() > 2)
      Directory.erase(Directory.end()-1);
   // This lists all the changes to the fields we are going to make.
   // (5 hardcoded + checksums + maintainer + end marker)
   std::vector<pkgTagSection::Tag> Changes;

   Changes.push_back(pkgTagSection::Tag::Remove("Source"));
   Changes.push_back(pkgTagSection::Tag::Rewrite("Package", Package));
   if (Files.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Files", Files));
   if (ChecksumsSha1.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha1", ChecksumsSha1));
   if (ChecksumsSha256.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha256", ChecksumsSha256));
   if (ChecksumsSha512.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Checksums-Sha512", ChecksumsSha512));
   if (Directory != "./")
      Changes.push_back(pkgTagSection::Tag::Rewrite("Directory", Directory));
   Changes.push_back(pkgTagSection::Tag::Rewrite("Priority", BestPrio));
   Changes.push_back(pkgTagSection::Tag::Remove("Status"));

   // Rewrite the maintainer field if necessary
   bool MaintFailed;
   string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"), MaintFailed);
   if (MaintFailed == true)
   {
      if (NoOverride == false)
         ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
                  Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
   }
   if (NewMaint.empty() == false)
      Changes.push_back(pkgTagSection::Tag::Rewrite("Maintainer", NewMaint.c_str()));

   for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
        I != SOverItem->FieldOverride.end(); ++I)
      Changes.push_back(pkgTagSection::Tag::Rewrite(I->first, I->second));

   // Rewrite and store the fields.
   if (Tags.Write(*Output, TFRewriteSourceOrder, Changes) == false ||
       Output->Write("\n", 1) == false)
      return false;

   return true;
}
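
/* Example of the rewrite performed above (values illustrative): a source
   package whose binaries carry the override priorities "extra" and "optional"
   ends up with

      Package: hello
      Priority: optional
      Directory: pool/main/h/hello
      Files:
       <md5> <size> hello_2.10-3.dsc
       ...

   since BestPrio tracks the numerically best debListParser::GetPrio() value
   seen while walking the Binary list. */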
// ContentsWriter::ContentsWriter - Constructor				/*{{{*/
// ---------------------------------------------------------------------
ContentsWriter::ContentsWriter(FileFd * const GivenOutput, string const &DB,
                               string const &Arch, bool const IncludeArchAll) :
   FTWScanner(GivenOutput, Arch, IncludeArchAll), Db(DB), Stats(Db.Stats)
{
   SetExts(".deb");
}
									/*}}}*/
// ContentsWriter::DoPackage - Process a single package		/*{{{*/
// ---------------------------------------------------------------------
/* If Package is the empty string the control record will be parsed to
   determine what the package name is. */
bool ContentsWriter::DoPackage(string FileName, string Package)
{
   if (!Db.GetFileInfo(FileName,
                       Package.empty(), /* DoControl */
                       true,            /* DoContents */
                       false,           /* GenContentsOnly */
                       false,           /* DoSource */
                       0,               /* DoHashes */
                       false            /* checkMtime */))
   {
      return false;
   }

   // Parse the package name
   if (Package.empty() == true)
   {
      Package = Db.Control.Section.FindS("Package");
   }

   Db.Contents.Add(Gen,Package);

   return true;
}
// ContentsWriter::ReadFromPkgs - Read from a packages file		/*{{{*/
// ---------------------------------------------------------------------
bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
{
   MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
   if (_error->PendingError() == true)
      return false;

   // Open the package file
   FileFd Fd;
   if (Pkgs.OpenOld(Fd) == false)
      return false;

   pkgTagFile Tags(&Fd);
   if (_error->PendingError() == true)
      return false;

   pkgTagSection Section;
   while (Tags.Step(Section) == true)
   {
      string File = flCombine(Prefix,Section.FindS("FileName"));
      string Package = Section.FindS("Section");
      if (Package.empty() == false && Package.end()[-1] != '/')
      {
         Package += '/';
         Package += Section.FindS("Package");
      }
      else
         Package += Section.FindS("Package");

      DoPackage(File,Package);
      if (_error->empty() == false)
      {
         _error->Error("Errors apply to file '%s'",File.c_str());
         _error->DumpErrors();
      }
   }

   // Tidy the compressor
   Fd.Close();

   return true;
}
// ReleaseWriter::ReleaseWriter - Constructor				/*{{{*/
// ---------------------------------------------------------------------
static std::string formatUTCDateTime(time_t const now)
{
   bool const NumericTimezone = _config->FindB("APT::FTPArchive::Release::NumericTimezone", true);
   // TimeRFC1123 uses GMT to satisfy HTTP/1.1
   std::string datetime = TimeRFC1123(now, NumericTimezone);
   if (NumericTimezone == false)
   {
      auto const lastspace = datetime.rfind(' ');
      if (likely(lastspace != std::string::npos))
         datetime.replace(lastspace + 1, 3, "UTC");
   }
   return datetime;
}
ReleaseWriter::ReleaseWriter(FileFd * const GivenOutput, string const &/*DB*/) : FTWScanner(GivenOutput)
{
   if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
   {
      AddPattern("Packages");
      AddPattern("Packages.*");
      AddPattern("Translation-*");
      AddPattern("Sources");
      AddPattern("Sources.*");
      AddPattern("Release");
      AddPattern("Contents-*");
      AddPattern("Index.*");
      AddPattern("icons-*.tar");
      AddPattern("icons-*.tar.*");
      AddPattern("Components-*.yml");
      AddPattern("Components-*.yml.*");
      AddPattern("md5sum.txt");
   }
   AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));

   time_t const now = time(NULL);
   time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);

   map<string,bool> BoolFields;
   map<string,string> Fields;
   Fields["Origin"] = "";
   Fields["Label"] = "";
   Fields["Suite"] = "";
   Fields["Version"] = "";
   Fields["Codename"] = "";
   Fields["Date"] = formatUTCDateTime(now);
   if (validuntil != now)
      Fields["Valid-Until"] = formatUTCDateTime(validuntil);
   Fields["Architectures"] = "";
   Fields["Components"] = "";
   Fields["Description"] = "";
   Fields["Signed-By"] = "";
   BoolFields["Acquire-By-Hash"] = _config->FindB("APT::FTPArchive::DoByHash", false);
   BoolFields["NotAutomatic"] = false;
   BoolFields["ButAutomaticUpgrades"] = false;

   // Read configuration for string fields, but don't output them
   for (auto &&I : Fields)
   {
      string Config = string("APT::FTPArchive::Release::") + I.first;
      I.second = _config->Find(Config, I.second);
   }

   // Read configuration for bool fields, and add them to Fields if true
   for (auto &&I : BoolFields)
   {
      string Config = string("APT::FTPArchive::Release::") + I.first;
      I.second = _config->FindB(Config, I.second);
      if (I.second)
         Fields[I.first] = "yes";
   }

   // All configuration read and stored in Fields; output
   for (auto &&I : Fields)
   {
      if (I.second.empty())
         continue;
      std::string const out = I.first + ": " + I.second + "\n";
      Output->Write(out.c_str(), out.length());
   }

   ConfigToDoHashes(DoHashes, "APT::FTPArchive::Release");
}
// ReleaseWriter::DoPackage - Process a single package			/*{{{*/
// ---------------------------------------------------------------------
bool ReleaseWriter::DoPackage(string FileName)
{
   // Strip the DirStrip prefix from the FileName and add the PathPrefix
   string NewFileName;
   if (DirStrip.empty() == false &&
       FileName.length() > DirStrip.length() &&
       stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
                 DirStrip.begin(),DirStrip.end()) == 0)
   {
      NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
      while (NewFileName[0] == '/')
         NewFileName = string(NewFileName.begin() + 1,NewFileName.end());
   }
   else
      NewFileName = FileName;

   if (PathPrefix.empty() == false)
      NewFileName = flCombine(PathPrefix,NewFileName);

   FileFd fd(FileName, FileFd::ReadOnly);
   if (fd.IsOpen() == false)
      return false;

   CheckSums[NewFileName].size = fd.Size();

   Hashes hs(DoHashes);
   hs.AddFD(fd);
   CheckSums[NewFileName].Hashes = hs.GetHashStringList();
   fd.Close();

   // FIXME: wrong layer in the code(?)
   // FIXME2: symlink instead of create a copy
   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
   {
      std::string Input = FileName;
      HashStringList hsl = hs.GetHashStringList();
      for(HashStringList::const_iterator h = hsl.begin();
          h != hsl.end(); ++h)
      {
         if (!h->usable())
            continue;
         if (flNotDir(FileName) == "Release" || flNotDir(FileName) == "InRelease")
            continue;

         std::string ByHashOutputFile = GenByHashFilename(Input, *h);
         std::string ByHashOutputDir = flNotFile(ByHashOutputFile);
         if(!CreateDirectory(flNotFile(Input), ByHashOutputDir))
            return _error->Warning("can not create dir %s", flNotFile(ByHashOutputFile).c_str());

         FileFd In(Input, FileFd::ReadOnly);
         FileFd Out(ByHashOutputFile, FileFd::WriteEmpty);
         if(!CopyFile(In, Out))
            return _error->Warning("failed to copy %s %s", Input.c_str(), ByHashOutputFile.c_str());
      }
   }

   return true;
}
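
/* Assumed layout produced by GenByHashFilename() (the helper itself is not
   part of this listing): each index gets a copy under a by-hash directory
   next to it, e.g.

      dists/unstable/main/binary-amd64/Packages.xz
      dists/unstable/main/binary-amd64/by-hash/SHA256/<hex digest>

   Release and InRelease themselves are skipped above. */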
// ReleaseWriter::Finish - Output the checksums				/*{{{*/
// ---------------------------------------------------------------------
static void printChecksumTypeRecord(FileFd &Output, char const * const Type, map<string, ReleaseWriter::CheckSum> const &CheckSums)
{
   {
      std::string out;
      strprintf(out, "%s:\n", Type);
      Output.Write(out.c_str(), out.length());
   }
   for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
       I != CheckSums.end(); ++I)
   {
      HashString const * const hs = I->second.Hashes.find(Type);
      if (hs == NULL)
         continue;
      std::string out;
      strprintf(out, " %s %16llu %s\n",
                hs->HashValue().c_str(),
                (*I).second.size,
                (*I).first.c_str());
      Output.Write(out.c_str(), out.length());
   }
}
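
/* printChecksumTypeRecord produces one block per hash type, e.g. (digests
   shortened, values illustrative):

      SHA256:
       93a5...            25088 main/binary-amd64/Packages
       4f0c...             7980 main/binary-amd64/Packages.xz

   with the "%16llu"-formatted size column between digest and relative path. */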
void ReleaseWriter::Finish()
{
   if ((DoHashes & Hashes::MD5SUM) == Hashes::MD5SUM)
      printChecksumTypeRecord(*Output, "MD5Sum", CheckSums);
   if ((DoHashes & Hashes::SHA1SUM) == Hashes::SHA1SUM)
      printChecksumTypeRecord(*Output, "SHA1", CheckSums);
   if ((DoHashes & Hashes::SHA256SUM) == Hashes::SHA256SUM)
      printChecksumTypeRecord(*Output, "SHA256", CheckSums);
   if ((DoHashes & Hashes::SHA512SUM) == Hashes::SHA512SUM)
      printChecksumTypeRecord(*Output, "SHA512", CheckSums);

   // go by-hash cleanup
   map<string,ReleaseWriter::CheckSum>::const_iterator prev = CheckSums.begin();
   if (_config->FindB("APT::FTPArchive::DoByHash", false) == true)
   {
      for(map<string,ReleaseWriter::CheckSum>::const_iterator I = CheckSums.begin();
          I != CheckSums.end(); ++I)
      {
         if (I->first == "Release" || I->first == "InRelease")
            continue;

         // keep iterating until we find a new subdir
         if(flNotFile(I->first) == flNotFile(prev->first))
            continue;

         // clean that subdir up
         int keepFiles = _config->FindI("APT::FTPArchive::By-Hash-Keep", 3);
         // calculate how many compressors are used (the amount of files
         // in that subdir generated for this run)
         keepFiles *= std::distance(prev, I);

         HashStringList hsl = prev->second.Hashes;
         for(HashStringList::const_iterator h = hsl.begin();
             h != hsl.end(); ++h)
         {
            if (!h->usable())
               continue;

            std::string RealFilename = DirStrip+"/"+prev->first;
            std::string ByHashOutputFile = GenByHashFilename(RealFilename, *h);
            DeleteAllButMostRecent(flNotFile(ByHashOutputFile), keepFiles);
         }
         prev = I;
      }
   }
}
);