// -*- mode: cpp; mode: fold -*-
// Description /*{{{*/
// $Id: writer.cc,v 1.14 2004/03/24 01:40:43 mdz Exp $
/* ######################################################################

   Writer

   The file writer classes. These write various types of output, sources,
   packages and contents.

   ##################################################################### */
/*}}}*/
// Include Files /*{{{*/
#include <config.h>

#include <apt-pkg/configuration.h>
#include <apt-pkg/deblistparser.h>
#include <apt-pkg/error.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/gpgv.h>
#include <apt-pkg/hashes.h>
#include <apt-pkg/md5.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/debfile.h>
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/sha2.h>
#include <apt-pkg/tagfile.h>

#include <ctype.h>
#include <fnmatch.h>
#include <ftw.h>
#include <locale.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <ctime>
#include <iostream>
#include <sstream>
#include <memory>
#include <utility>

#include "apt-ftparchive.h"
#include "writer.h"
#include "cachedb.h"
#include "multicompress.h"

#include <apti18n.h>
/*}}}*/
using namespace std;
FTWScanner *FTWScanner::Owner;

// SetTFRewriteData - Helper for setting rewrite lists /*{{{*/
// ---------------------------------------------------------------------
/* */
inline void SetTFRewriteData(struct TFRewriteData &tfrd,
                             const char *tag,
                             const char *rewrite,
                             const char *newtag = 0)
{
   tfrd.Tag = tag;
   tfrd.Rewrite = rewrite;
   tfrd.NewTag = newtag;
}
/*}}}*/

// FTWScanner::FTWScanner - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Pull the global hash defaults and the delink behaviour from the
   configuration space. */
FTWScanner::FTWScanner(string const &Arch): Arch(Arch)
{
   ErrorPrinted = false;
   NoLinkAct = !_config->FindB("APT::FTPArchive::DeLinkAct",true);

   DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
   DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true);
   DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true);
   DoSHA512 = _config->FindB("APT::FTPArchive::SHA512",true);
}
/*}}}*/
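
// Note: the hash algorithms selected above can be toggled globally from the
// configuration, e.g. (illustrative snippet, not a complete config file):
//
//   APT::FTPArchive::SHA512 "false";
//
// The individual writers below additionally honour more specific switches
// such as APT::FTPArchive::Packages::SHA512 and
// APT::FTPArchive::Sources::SHA512, which default to the global values.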
// FTWScanner::Scanner - FTW Scanner /*{{{*/
// ---------------------------------------------------------------------
/* This is the FTW scanner; it processes each entry in the
   directory tree. */
int FTWScanner::ScannerFTW(const char *File,const struct stat * /*sb*/,int Flag)
{
   if (Flag == FTW_DNR)
   {
      Owner->NewLine(1);
      ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
   }
   if (Flag == FTW_NS)
   {
      Owner->NewLine(1);
      ioprintf(c1out, _("W: Unable to stat %s\n"), File);
   }
   if (Flag != FTW_F)
      return 0;

   return ScannerFile(File, true);
}
/*}}}*/
// FTWScanner::ScannerFile - File Scanner /*{{{*/
// ---------------------------------------------------------------------
/* Process a single file: match it against the filename patterns, resolve
   symlinks if requested, hand it to DoPackage and print any errors or
   warnings that were generated along the way. */
int FTWScanner::ScannerFile(const char *File, bool const &ReadLink)
{
   const char *LastComponent = strrchr(File, '/');
   char *RealPath = NULL;

   if (LastComponent == NULL)
      LastComponent = File;
   else
      LastComponent++;

   vector<string>::const_iterator I;
   for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I)
   {
      if (fnmatch((*I).c_str(), LastComponent, 0) == 0)
         break;
   }
   if (I == Owner->Patterns.end())
      return 0;

   /* Process it. If the file is a link then resolve it into an absolute
      name.. This works best if the directory components given to the
      scanner are not links themselves. */
   char Jnk[2];
   Owner->OriginalPath = File;
   // readlink only succeeds on symlinks, so a tiny junk buffer is enough
   // to test whether the file is a link at all.
   if (ReadLink &&
       readlink(File,Jnk,sizeof(Jnk)) != -1 &&
       (RealPath = realpath(File,NULL)) != 0)
   {
      Owner->DoPackage(RealPath);
      free(RealPath);
   }
   else
      Owner->DoPackage(File);

   if (_error->empty() == false)
   {
      // Print any errors or warnings found
      string Err;
      bool SeenPath = false;
      while (_error->empty() == false)
      {
         Owner->NewLine(1);

         bool const Type = _error->PopMessage(Err);
         if (Type == true)
            cerr << _("E: ") << Err << endl;
         else
            cerr << _("W: ") << Err << endl;

         if (Err.find(File) != string::npos)
            SeenPath = true;
      }

      if (SeenPath == false)
         cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
      return 0;
   }

   return 0;
}
/*}}}*/
// FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/
// ---------------------------------------------------------------------
/* Walk the given directory tree with ftw() and feed every regular file
   that matches the patterns to the scanner. */
bool FTWScanner::RecursiveScan(string const &Dir)
{
   char *RealPath = NULL;
   /* If noprefix is set then jam the scan root in, so we don't generate
      link followed paths out of control */
   if (InternalPrefix.empty() == true)
   {
      if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
         return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
      InternalPrefix = RealPath;
      free(RealPath);
   }

   // Do recursive directory searching
   Owner = this;
   int const Res = ftw(Dir.c_str(),ScannerFTW,30);

   // Error treewalking?
   if (Res != 0)
   {
      if (_error->PendingError() == false)
         _error->Errno("ftw",_("Tree walking failed"));
      return false;
   }

   return true;
}
/*}}}*/
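
// ftw() has no user data pointer, so the scanner instance is passed to the
// ScannerFTW callback through the static Owner member set above; as a
// consequence only one recursive scan (or file list load) can be active at a
// time. The third argument (30) is the number of file descriptors ftw() may
// keep open while descending.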
// FTWScanner::LoadFileList - Load the file list from a file /*{{{*/
// ---------------------------------------------------------------------
/* This is an alternative to using FTW to locate files, it reads the list
   of files from another file. */
bool FTWScanner::LoadFileList(string const &Dir, string const &File)
{
   char *RealPath = NULL;
   /* If noprefix is set then jam the scan root in, so we don't generate
      link followed paths out of control */
   if (InternalPrefix.empty() == true)
   {
      if ((RealPath = realpath(Dir.c_str(),NULL)) == 0)
         return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
      InternalPrefix = RealPath;
      free(RealPath);
   }

   Owner = this;
   FILE *List = fopen(File.c_str(),"r");
   if (List == 0)
      return _error->Errno("fopen",_("Failed to open %s"),File.c_str());

   /* We are a tad tricky here.. We prefix the buffer with the directory
      name, that way if we need a full path we can just use Line.. Sneaky and
      fully evil. */
   char Line[1000];
   char *FileStart;
   if (Dir.empty() == true || Dir.end()[-1] != '/')
      FileStart = Line + snprintf(Line,sizeof(Line),"%s/",Dir.c_str());
   else
      FileStart = Line + snprintf(Line,sizeof(Line),"%s",Dir.c_str());
   while (fgets(FileStart,sizeof(Line) - (FileStart - Line),List) != 0)
   {
      char *FileName = _strstrip(FileStart);
      if (FileName[0] == 0)
         continue;

      if (FileName[0] != '/')
      {
         if (FileName != FileStart)
            memmove(FileStart,FileName,strlen(FileStart));
         FileName = Line;
      }

#if 0
      struct stat St;
      int Flag = FTW_F;
      if (stat(FileName,&St) != 0)
         Flag = FTW_NS;
#endif

      if (ScannerFile(FileName, false) != 0)
         break;
   }

   fclose(List);
   return true;
}
/*}}}*/
// FTWScanner::Delink - Delink symlinks /*{{{*/
// ---------------------------------------------------------------------
/* If the resolved file lies outside of the internal prefix, replace the
   symlink in the archive with a hard link to the real file (subject to the
   delink size limit) and report the original path instead. */
bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
                        unsigned long long &DeLinkBytes,
                        unsigned long long const &FileSize)
{
   // See if this isn't an internally prefixed file name.
   if (InternalPrefix.empty() == false &&
       InternalPrefix.length() < FileName.length() &&
       stringcmp(FileName.begin(),FileName.begin() + InternalPrefix.length(),
                 InternalPrefix.begin(),InternalPrefix.end()) != 0)
   {
      if (DeLinkLimit != 0 && DeLinkBytes/1024 < DeLinkLimit)
      {
         // Tidy up the display
         if (DeLinkBytes == 0)
            cout << endl;

         NewLine(1);
         ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
                  SizeToStr(FileSize).c_str());
         c1out << flush;

         if (NoLinkAct == false)
         {
            char OldLink[400];
            ssize_t const Len = readlink(OriginalPath,OldLink,sizeof(OldLink)-1);
            if (Len == -1)
               _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
            else
            {
               // readlink does not NUL-terminate; do it before the link is reused
               OldLink[Len] = '\0';
               if (unlink(OriginalPath) != 0)
                  _error->Errno("unlink",_("Failed to unlink %s"),OriginalPath);
               else
               {
                  if (link(FileName.c_str(),OriginalPath) != 0)
                  {
                     // Panic! Restore the symlink
                     if (symlink(OldLink,OriginalPath) != 0)
                        _error->Errno("symlink", "failed to restore symlink");
                     return _error->Errno("link",_("*** Failed to link %s to %s"),
                                          FileName.c_str(),
                                          OriginalPath);
                  }
               }
            }
         }

         DeLinkBytes += FileSize;
         if (DeLinkBytes/1024 >= DeLinkLimit)
            ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
      }

      FileName = OriginalPath;
   }

   return true;
}
/*}}}*/
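
// Delinking is controlled by two knobs: APT::FTPArchive::DeLinkAct (read in
// the constructor above; setting it to false turns the unlink/link action
// into a report-only dry run) and the DeLinkLimit member, a per-run budget in
// KiB which callers may set; the writers below default it to 0, which
// disables delinking entirely.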

// PackagesWriter::PackagesWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Set up the package scanner: select the hashes to emit, decide whether
   Contents data is collected, and read the (extra) override files. */
PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides,
                               string const &Arch) :
   FTWScanner(Arch), Db(DB), Stats(Db.Stats), TransWriter(NULL)
{
   Output = stdout;
   SetExts(".deb .udeb");
   DeLinkLimit = 0;

   // Process the command line options
   DoMD5 = _config->FindB("APT::FTPArchive::Packages::MD5",DoMD5);
   DoSHA1 = _config->FindB("APT::FTPArchive::Packages::SHA1",DoSHA1);
   DoSHA256 = _config->FindB("APT::FTPArchive::Packages::SHA256",DoSHA256);
   DoSHA512 = _config->FindB("APT::FTPArchive::Packages::SHA512",DoSHA512);
   DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
   DoContents = _config->FindB("APT::FTPArchive::Contents",true);
   NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
   LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true);

   if (Db.Loaded() == false)
      DoContents = false;

   // Read the override file
   if (Overrides.empty() == false && Over.ReadOverride(Overrides) == false)
      return;
   else
      NoOverride = true;

   if (ExtOverrides.empty() == false)
      Over.ReadExtraOverride(ExtOverrides);

   _error->DumpErrors();
}
/*}}}*/
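
// When APT::FTPArchive::LongDescription is turned off, DoPackage below keeps
// only the first line of the Description in the Packages record, adds a
// Description-md5 field and, if a TranslationWriter has been attached via the
// TransWriter member, routes the full text into the Translation master file
// instead.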
// FTWScanner::SetExts - Set extensions to support /*{{{*/
// ---------------------------------------------------------------------
/* Turn a space separated list of file extensions into the fnmatch patterns
   used by the scanner, taking the architecture into account. */
bool FTWScanner::SetExts(string const &Vals)
{
   ClearPatterns();
   string::size_type Start = 0;
   while (Vals.empty() == false && Start <= Vals.length()-1)
   {
      string::size_type const Space = Vals.find(' ',Start);
      string::size_type const Length = ((Space == string::npos) ? Vals.length() : Space) - Start;
      if ( Arch.empty() == false )
      {
         AddPattern(string("*_") + Arch + Vals.substr(Start, Length));
         AddPattern(string("*_all") + Vals.substr(Start, Length));
      }
      else
         AddPattern(string("*") + Vals.substr(Start, Length));

      Start += Length + 1;
   }

   return true;
}

/*}}}*/
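
// For example, SetExts(".deb .udeb") with Arch set to "amd64" installs the
// patterns "*_amd64.deb", "*_all.deb", "*_amd64.udeb" and "*_all.udeb";
// without an architecture it would simply be "*.deb" and "*.udeb".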
// PackagesWriter::DoPackage - Process a single package /*{{{*/
// ---------------------------------------------------------------------
/* This method takes a package and gets its control information and
   MD5, SHA1 and SHA256 then writes out a control record with the proper fields
   rewritten and the path/size/hash appended. */
bool PackagesWriter::DoPackage(string FileName)
{
   // Pull all the data we need from the DB
   if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
       == false)
   {
      return false;
   }

   unsigned long long FileSize = Db.GetFileSize();
   if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
      return false;

   // Lookup the override information
   pkgTagSection &Tags = Db.Control.Section;
   string Package = Tags.FindS("Package");
   string Architecture;
   // if we generate a Packages file for a given arch, we use it to
   // look for overrides. if we run in "simple" mode without the
   // "Architectures" variable in the config we use the architecture value
   // from the deb file
   if(Arch != "")
      Architecture = Arch;
   else
      Architecture = Tags.FindS("Architecture");
   auto_ptr<Override::Item> OverItem(Over.GetItem(Package,Architecture));

   if (Package.empty() == true)
      return _error->Error(_("Archive had no package field"));

   // If we need to do any rewriting of the header do it now..
   if (OverItem.get() == 0)
   {
      if (NoOverride == false)
      {
         NewLine(1);
         ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
      }

      OverItem = auto_ptr<Override::Item>(new Override::Item);
      OverItem->FieldOverride["Section"] = Tags.FindS("Section");
      OverItem->Priority = Tags.FindS("Priority");
   }

   char Size[40];
   sprintf(Size,"%llu", (unsigned long long) FileSize);

   // Strip the DirStrip prefix from the FileName and add the PathPrefix
   string NewFileName;
   if (DirStrip.empty() == false &&
       FileName.length() > DirStrip.length() &&
       stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
                 DirStrip.begin(),DirStrip.end()) == 0)
      NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
   else
      NewFileName = FileName;
   if (PathPrefix.empty() == false)
      NewFileName = flCombine(PathPrefix,NewFileName);

   /* Configuration says we don't want to include the long Description
      in the package file - instead we want to ship a separate file */
   string desc;
   if (LongDescription == false) {
      desc = Tags.FindS("Description").append("\n");
      OverItem->FieldOverride["Description"] = desc.substr(0, desc.find('\n')).c_str();
   }

   // This lists all the changes to the fields we are going to make.
   // (up to 12 hardcoded fields + the field overrides + end marker)
   TFRewriteData Changes[12+OverItem->FieldOverride.size()+1];

   unsigned int End = 0;
   SetTFRewriteData(Changes[End++], "Size", Size);
   if (DoMD5 == true)
      SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str());
   if (DoSHA1 == true)
      SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str());
   if (DoSHA256 == true)
      SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str());
   if (DoSHA512 == true)
      SetTFRewriteData(Changes[End++], "SHA512", Db.SHA512Res.c_str());
   SetTFRewriteData(Changes[End++], "Filename", NewFileName.c_str());
   SetTFRewriteData(Changes[End++], "Priority", OverItem->Priority.c_str());
   SetTFRewriteData(Changes[End++], "Status", 0);
   SetTFRewriteData(Changes[End++], "Optional", 0);

   string DescriptionMd5;
   if (LongDescription == false) {
      MD5Summation descmd5;
      descmd5.Add(desc.c_str());
      DescriptionMd5 = descmd5.Result().Value();
      SetTFRewriteData(Changes[End++], "Description-md5", DescriptionMd5.c_str());
      if (TransWriter != NULL)
         TransWriter->DoPackage(Package, desc, DescriptionMd5);
   }

   // Rewrite the maintainer field if necessary
   bool MaintFailed;
   string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
   if (MaintFailed == true)
   {
      if (NoOverride == false)
      {
         NewLine(1);
         ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
                  Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
      }
   }

   if (NewMaint.empty() == false)
      SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());

   /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that
      dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming
      but dpkg does this append bit. So we do the append bit, at least that way the
      status file and package file will remain similar. There are other transforms
      but optional is the only legacy one still in use for some lazy reason. */
   string OptionalStr = Tags.FindS("Optional");
   if (OptionalStr.empty() == false)
   {
      if (Tags.FindS("Suggests").empty() == false)
         OptionalStr = Tags.FindS("Suggests") + ", " + OptionalStr;
      SetTFRewriteData(Changes[End++], "Suggests", OptionalStr.c_str());
   }

   for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
        I != OverItem->FieldOverride.end(); ++I)
      SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());

   SetTFRewriteData(Changes[End++], 0, 0);

   // Rewrite and store the fields.
   if (TFRewrite(Output,Tags,TFRewritePackageOrder,Changes) == false)
      return false;
   fprintf(Output,"\n");

   return Db.Finish();
}
/*}}}*/

// TranslationWriter::TranslationWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Create a Translation-Master file for this Packages file */
TranslationWriter::TranslationWriter(string const &File, string const &TransCompress,
                                     mode_t const &Permissions) : Output(NULL),
                                     RefCounter(0)
{
   // Make sure the destructor can tell whether a compressor was created
   Comp = NULL;
   if (File.empty() == true)
      return;

   Comp = new MultiCompress(File, TransCompress, Permissions);
   Output = Comp->Input;
}
/*}}}*/
// TranslationWriter::DoPackage - Process a single package /*{{{*/
// ---------------------------------------------------------------------
/* Append a single description record to the Translation-Master file,
   skipping duplicates. */
bool TranslationWriter::DoPackage(string const &Pkg, string const &Desc,
                                  string const &MD5)
{
   if (Output == NULL)
      return true;

   // Different archs can include different versions and therefore
   // different descriptions - so we need to check for both name and md5.
   string const Record = Pkg + ":" + MD5;

   if (Included.find(Record) != Included.end())
      return true;

   fprintf(Output, "Package: %s\nDescription-md5: %s\nDescription-en: %s\n",
           Pkg.c_str(), MD5.c_str(), Desc.c_str());

   Included.insert(Record);
   return true;
}
/*}}}*/
// TranslationWriter::~TranslationWriter - Destructor /*{{{*/
// ---------------------------------------------------------------------
/* */
TranslationWriter::~TranslationWriter()
{
   if (Comp == NULL)
      return;

   delete Comp;
}
/*}}}*/

// SourcesWriter::SourcesWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Set up the source scanner: select the hashes to emit and read the binary,
   source and extra override files. */
SourcesWriter::SourcesWriter(string const &DB, string const &BOverrides,string const &SOverrides,
                             string const &ExtOverrides) :
   Db(DB), Stats(Db.Stats)
{
   Output = stdout;
   AddPattern("*.dsc");
   DeLinkLimit = 0;
   Buffer = 0;
   BufSize = 0;

   // Process the command line options
   DoMD5 = _config->FindB("APT::FTPArchive::Sources::MD5",DoMD5);
   DoSHA1 = _config->FindB("APT::FTPArchive::Sources::SHA1",DoSHA1);
   DoSHA256 = _config->FindB("APT::FTPArchive::Sources::SHA256",DoSHA256);
   DoSHA512 = _config->FindB("APT::FTPArchive::Sources::SHA512",DoSHA512);
   NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
   DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);

   // Read the override file
   if (BOverrides.empty() == false && BOver.ReadOverride(BOverrides) == false)
      return;
   else
      NoOverride = true;

   // WTF?? The logic above: if we can't read binary overrides, don't even try
   // reading source overrides. if we can read binary overrides, then say there
   // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28

   if (ExtOverrides.empty() == false)
      SOver.ReadExtraOverride(ExtOverrides);

   if (SOverrides.empty() == false && FileExists(SOverrides) == true)
      SOver.ReadOverride(SOverrides,true);
}
/*}}}*/
// SourcesWriter::DoPackage - Process a single package /*{{{*/
// ---------------------------------------------------------------------
/* Parse a .dsc, compute any missing hashes, apply the overrides and write
   out the rewritten source record. */
bool SourcesWriter::DoPackage(string FileName)
{
   // Open the archive
   FileFd F;
   if (OpenMaybeClearSignedFile(FileName, F) == false)
      return false;

   unsigned long long const FSize = F.FileSize();
   //FIXME: do we really need to enforce a maximum size of the dsc file?
   if (FSize > 128*1024)
      return _error->Error("DSC file '%s' is too large!",FileName.c_str());

   if (BufSize < FSize + 2)
   {
      BufSize = FSize + 2;
      Buffer = (char *)realloc(Buffer , BufSize);
   }

   if (F.Read(Buffer, FSize) == false)
      return false;

   // Stat the file for later (F might be clearsigned, so not F.FileSize())
   struct stat St;
   if (stat(FileName.c_str(), &St) != 0)
      return _error->Errno("stat","Failed to stat %s",FileName.c_str());

   // Hash the file
   char *Start = Buffer;
   char *BlkEnd = Buffer + FSize;

   Hashes DscHashes;
   if (FSize == (unsigned long long) St.st_size)
   {
      if (DoMD5 == true)
         DscHashes.MD5.Add((unsigned char *)Start,BlkEnd - Start);
      if (DoSHA1 == true)
         DscHashes.SHA1.Add((unsigned char *)Start,BlkEnd - Start);
      if (DoSHA256 == true)
         DscHashes.SHA256.Add((unsigned char *)Start,BlkEnd - Start);
      if (DoSHA512 == true)
         DscHashes.SHA512.Add((unsigned char *)Start,BlkEnd - Start);
   }
   else
   {
      FileFd DscFile(FileName, FileFd::ReadOnly);
      DscHashes.AddFD(DscFile, St.st_size, DoMD5, DoSHA1, DoSHA256, DoSHA512);
   }

   // Add extra \n to the end, just in case (as in clearsigned they are missing)
   *BlkEnd++ = '\n';
   *BlkEnd++ = '\n';

   pkgTagSection Tags;
   if (Tags.Scan(Start,BlkEnd - Start) == false || Tags.Exists("Source") == false)
      return _error->Error("Could not find a record in the DSC '%s'",FileName.c_str());
   Tags.Trim();

   // Lookup the override information, finding first the best priority.
   string BestPrio;
   string Bins = Tags.FindS("Binary");
   char Buffer[Bins.length() + 1];
   auto_ptr<Override::Item> OverItem(0);
   if (Bins.empty() == false)
   {
      strcpy(Buffer,Bins.c_str());

      // Ignore too-long errors.
      char *BinList[400];
      TokSplitString(',',Buffer,BinList,sizeof(BinList)/sizeof(BinList[0]));

      // Look at all the binaries
      unsigned char BestPrioV = pkgCache::State::Extra;
      for (unsigned I = 0; BinList[I] != 0; I++)
      {
         auto_ptr<Override::Item> Itm(BOver.GetItem(BinList[I]));
         if (Itm.get() == 0)
            continue;

         unsigned char NewPrioV = debListParser::GetPrio(Itm->Priority);
         if (NewPrioV < BestPrioV || BestPrio.empty() == true)
         {
            BestPrioV = NewPrioV;
            BestPrio = Itm->Priority;
         }

         if (OverItem.get() == 0)
            OverItem = Itm;
      }
   }

   // If we need to do any rewriting of the header do it now..
   if (OverItem.get() == 0)
   {
      if (NoOverride == false)
      {
         NewLine(1);
         ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());
      }

      OverItem = auto_ptr<Override::Item>(new Override::Item);
   }

   auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
   // const auto_ptr<Override::Item> autoSOverItem(SOverItem);
   if (SOverItem.get() == 0)
   {
      ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
      SOverItem = auto_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
      if (SOverItem.get() == 0)
      {
         ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
         SOverItem = auto_ptr<Override::Item>(new Override::Item);
         *SOverItem = *OverItem;
      }
   }

   // Add the dsc to the files hash list
   string const strippedName = flNotDir(FileName);
   std::ostringstream ostreamFiles;
   if (DoMD5 == true && Tags.Exists("Files"))
      ostreamFiles << "\n " << string(DscHashes.MD5.Result()) << " " << St.st_size << " "
                   << strippedName << "\n " << Tags.FindS("Files");
   string const Files = ostreamFiles.str();

   std::ostringstream ostreamSha1;
   if (DoSHA1 == true && Tags.Exists("Checksums-Sha1"))
      ostreamSha1 << "\n " << string(DscHashes.SHA1.Result()) << " " << St.st_size << " "
                  << strippedName << "\n " << Tags.FindS("Checksums-Sha1");

   std::ostringstream ostreamSha256;
   if (DoSHA256 == true && Tags.Exists("Checksums-Sha256"))
      ostreamSha256 << "\n " << string(DscHashes.SHA256.Result()) << " " << St.st_size << " "
                    << strippedName << "\n " << Tags.FindS("Checksums-Sha256");

   std::ostringstream ostreamSha512;
   if (DoSHA512 == true && Tags.Exists("Checksums-Sha512"))
      ostreamSha512 << "\n " << string(DscHashes.SHA512.Result()) << " " << St.st_size << " "
                    << strippedName << "\n " << Tags.FindS("Checksums-Sha512");

   // Strip the DirStrip prefix from the FileName and add the PathPrefix
   string NewFileName;
   if (DirStrip.empty() == false &&
       FileName.length() > DirStrip.length() &&
       stringcmp(DirStrip,OriginalPath,OriginalPath + DirStrip.length()) == 0)
      NewFileName = string(OriginalPath + DirStrip.length());
   else
      NewFileName = OriginalPath;
   if (PathPrefix.empty() == false)
      NewFileName = flCombine(PathPrefix,NewFileName);

   string Directory = flNotFile(OriginalPath);
   string Package = Tags.FindS("Source");

   // Perform operation over all of the files
   string ParseJnk;
   const char *C = Files.c_str();
   char *RealPath = NULL;
   for (;isspace(*C); C++);
   while (*C != 0)
   {
      // Parse each of the elements
      if (ParseQuoteWord(C,ParseJnk) == false ||
          ParseQuoteWord(C,ParseJnk) == false ||
          ParseQuoteWord(C,ParseJnk) == false)
         return _error->Error("Error parsing file record");

      string OriginalPath = Directory + ParseJnk;

      // Add missing hashes to source files
      if ((DoSHA1 == true && !Tags.Exists("Checksums-Sha1")) ||
          (DoSHA256 == true && !Tags.Exists("Checksums-Sha256")) ||
          (DoSHA512 == true && !Tags.Exists("Checksums-Sha512")))
      {
         if (Db.GetFileInfo(OriginalPath, false, false, false, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
             == false)
         {
            return _error->Error("Error getting file info");
         }

         if (DoSHA1 == true && !Tags.Exists("Checksums-Sha1"))
            ostreamSha1 << "\n " << string(Db.SHA1Res) << " "
               << Db.GetFileSize() << " " << ParseJnk;

         if (DoSHA256 == true && !Tags.Exists("Checksums-Sha256"))
            ostreamSha256 << "\n " << string(Db.SHA256Res) << " "
               << Db.GetFileSize() << " " << ParseJnk;

         if (DoSHA512 == true && !Tags.Exists("Checksums-Sha512"))
            ostreamSha512 << "\n " << string(Db.SHA512Res) << " "
               << Db.GetFileSize() << " " << ParseJnk;
      }

      // Perform the delinking operation
      char Jnk[2];

      if (readlink(OriginalPath.c_str(),Jnk,sizeof(Jnk)) != -1 &&
          (RealPath = realpath(OriginalPath.c_str(),NULL)) != 0)
      {
         string RP = RealPath;
         free(RealPath);
         if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
            return false;
      }
   }

   Directory = flNotFile(NewFileName);
   if (Directory.length() > 2)
      Directory.erase(Directory.end()-1);

   string const ChecksumsSha1 = ostreamSha1.str();
   string const ChecksumsSha256 = ostreamSha256.str();
   string const ChecksumsSha512 = ostreamSha512.str();

   // This lists all the changes to the fields we are going to make.
   // (up to 9 hardcoded fields + the field overrides + end marker)
   TFRewriteData Changes[9+SOverItem->FieldOverride.size()+1];

   unsigned int End = 0;
   SetTFRewriteData(Changes[End++],"Source",Package.c_str(),"Package");
   if (Files.empty() == false)
      SetTFRewriteData(Changes[End++],"Files",Files.c_str());
   if (ChecksumsSha1.empty() == false)
      SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1.c_str());
   if (ChecksumsSha256.empty() == false)
      SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256.c_str());
   if (ChecksumsSha512.empty() == false)
      SetTFRewriteData(Changes[End++],"Checksums-Sha512",ChecksumsSha512.c_str());
   if (Directory != "./")
      SetTFRewriteData(Changes[End++],"Directory",Directory.c_str());
   SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str());
   SetTFRewriteData(Changes[End++],"Status",0);

   // Rewrite the maintainer field if necessary
   bool MaintFailed;
   string NewMaint = OverItem->SwapMaint(Tags.FindS("Maintainer"),MaintFailed);
   if (MaintFailed == true)
   {
      if (NoOverride == false)
      {
         NewLine(1);
         ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
                  Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
      }
   }
   if (NewMaint.empty() == false)
      SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());

   for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
        I != SOverItem->FieldOverride.end(); ++I)
      SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());

   SetTFRewriteData(Changes[End++], 0, 0);

   // Rewrite and store the fields.
   if (TFRewrite(Output,Tags,TFRewriteSourceOrder,Changes) == false)
      return false;
   fprintf(Output,"\n");

   Stats.Packages++;

   return Db.Finish();
}
/*}}}*/

// ContentsWriter::ContentsWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Set up the contents scanner for .deb files. */
ContentsWriter::ContentsWriter(string const &DB, string const &Arch) :
   FTWScanner(Arch), Db(DB), Stats(Db.Stats)
{
   SetExts(".deb");
   Output = stdout;
}
/*}}}*/
// ContentsWriter::DoPackage - Process a single package /*{{{*/
// ---------------------------------------------------------------------
/* If Package is the empty string the control record will be parsed to
   determine what the package name is. */
bool ContentsWriter::DoPackage(string FileName, string Package)
{
   if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false))
   {
      return false;
   }

   // Parse the package name
   if (Package.empty() == true)
   {
      Package = Db.Control.Section.FindS("Package");
   }

   Db.Contents.Add(Gen,Package);

   return Db.Finish();
}
/*}}}*/
// ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/
// ---------------------------------------------------------------------
/* Walk the records of an existing Packages file and feed every listed .deb
   to the contents generator, prefixing the package name with its section. */
bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress)
{
   MultiCompress Pkgs(PkgFile,PkgCompress,0,false);
   if (_error->PendingError() == true)
      return false;

   // Open the package file
   FileFd Fd;
   if (Pkgs.OpenOld(Fd) == false)
      return false;

   pkgTagFile Tags(&Fd);
   if (_error->PendingError() == true)
      return false;

   // Parse.
   pkgTagSection Section;
   while (Tags.Step(Section) == true)
   {
      string File = flCombine(Prefix,Section.FindS("FileName"));
      string Package = Section.FindS("Section");
      if (Package.empty() == false && Package.end()[-1] != '/')
      {
         Package += '/';
         Package += Section.FindS("Package");
      }
      else
         Package += Section.FindS("Package");

      DoPackage(File,Package);
      if (_error->empty() == false)
      {
         _error->Error("Errors apply to file '%s'",File.c_str());
         _error->DumpErrors();
      }
   }

   // Tidy the compressor
   Fd.Close();

   return true;
}

/*}}}*/

// ReleaseWriter::ReleaseWriter - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Set up the Release writer: the index file patterns to checksum, the
   standard Release header fields and the hashes to generate. */
ReleaseWriter::ReleaseWriter(string const &/*DB*/)
{
   if (_config->FindB("APT::FTPArchive::Release::Default-Patterns", true) == true)
   {
      AddPattern("Packages");
      AddPattern("Packages.gz");
      AddPattern("Packages.bz2");
      AddPattern("Packages.lzma");
      AddPattern("Packages.xz");
      AddPattern("Translation-*");
      AddPattern("Sources");
      AddPattern("Sources.gz");
      AddPattern("Sources.bz2");
      AddPattern("Sources.lzma");
      AddPattern("Sources.xz");
      AddPattern("Release");
      AddPattern("Contents-*");
      AddPattern("Index");
      AddPattern("md5sum.txt");
   }
   AddPatterns(_config->FindVector("APT::FTPArchive::Release::Patterns"));

   Output = stdout;
   time_t const now = time(NULL);

   setlocale(LC_TIME, "C");

   char datestr[128];
   if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC",
                gmtime(&now)) == 0)
   {
      datestr[0] = '\0';
   }

   time_t const validuntil = now + _config->FindI("APT::FTPArchive::Release::ValidTime", 0);
   char validstr[128];
   if (now == validuntil ||
       strftime(validstr, sizeof(validstr), "%a, %d %b %Y %H:%M:%S UTC",
                gmtime(&validuntil)) == 0)
   {
      validstr[0] = '\0';
   }

   setlocale(LC_TIME, "");

   map<string,string> Fields;
   Fields["Origin"] = "";
   Fields["Label"] = "";
   Fields["Suite"] = "";
   Fields["Version"] = "";
   Fields["Codename"] = "";
   Fields["Date"] = datestr;
   Fields["Valid-Until"] = validstr;
   Fields["Architectures"] = "";
   Fields["Components"] = "";
   Fields["Description"] = "";

   for(map<string,string>::const_iterator I = Fields.begin();
       I != Fields.end();
       ++I)
   {
      string Config = string("APT::FTPArchive::Release::") + (*I).first;
      string Value = _config->Find(Config, (*I).second.c_str());
      if (Value == "")
         continue;

      fprintf(Output, "%s: %s\n", (*I).first.c_str(), Value.c_str());
   }

   DoMD5 = _config->FindB("APT::FTPArchive::Release::MD5",DoMD5);
   DoSHA1 = _config->FindB("APT::FTPArchive::Release::SHA1",DoSHA1);
   DoSHA256 = _config->FindB("APT::FTPArchive::Release::SHA256",DoSHA256);
   DoSHA512 = _config->FindB("APT::FTPArchive::Release::SHA512",DoSHA512);
}
/*}}}*/
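
// The Release header fields above are filled from the configuration; an
// illustrative snippet (example values, not a complete config file):
//
//   APT::FTPArchive::Release {
//      Origin "MyRepo";
//      Label "MyRepo";
//      Suite "stable";
//      Codename "stable";
//      Architectures "amd64 i386";
//      Components "main";
//      Description "Example repository";
//   };
//
// Fields left empty are simply omitted from the generated Release file.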
// ReleaseWriter::DoPackage - Process a single package /*{{{*/
// ---------------------------------------------------------------------
bool ReleaseWriter::DoPackage(string FileName)
{
   // Strip the DirStrip prefix from the FileName and add the PathPrefix
   string NewFileName;
   if (DirStrip.empty() == false &&
       FileName.length() > DirStrip.length() &&
       stringcmp(FileName.begin(),FileName.begin() + DirStrip.length(),
                 DirStrip.begin(),DirStrip.end()) == 0)
   {
      NewFileName = string(FileName.begin() + DirStrip.length(),FileName.end());
      while (NewFileName[0] == '/')
         NewFileName = string(NewFileName.begin() + 1,NewFileName.end());
   }
   else
      NewFileName = FileName;

   if (PathPrefix.empty() == false)
      NewFileName = flCombine(PathPrefix,NewFileName);

   FileFd fd(FileName, FileFd::ReadOnly);

   if (!fd.IsOpen())
   {
      return false;
   }

   CheckSums[NewFileName].size = fd.Size();

   Hashes hs;
   hs.AddFD(fd, 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
   if (DoMD5 == true)
      CheckSums[NewFileName].MD5 = hs.MD5.Result();
   if (DoSHA1 == true)
      CheckSums[NewFileName].SHA1 = hs.SHA1.Result();
   if (DoSHA256 == true)
      CheckSums[NewFileName].SHA256 = hs.SHA256.Result();
   if (DoSHA512 == true)
      CheckSums[NewFileName].SHA512 = hs.SHA512.Result();
   fd.Close();

   return true;
}

/*}}}*/
// ReleaseWriter::Finish - Output the checksums /*{{{*/
// ---------------------------------------------------------------------
void ReleaseWriter::Finish()
{
   if (DoMD5 == true)
   {
      fprintf(Output, "MD5Sum:\n");
      for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
          I != CheckSums.end(); ++I)
      {
         fprintf(Output, " %s %16llu %s\n",
                 (*I).second.MD5.c_str(),
                 (*I).second.size,
                 (*I).first.c_str());
      }
   }
   if (DoSHA1 == true)
   {
      fprintf(Output, "SHA1:\n");
      for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
          I != CheckSums.end(); ++I)
      {
         fprintf(Output, " %s %16llu %s\n",
                 (*I).second.SHA1.c_str(),
                 (*I).second.size,
                 (*I).first.c_str());
      }
   }
   if (DoSHA256 == true)
   {
      fprintf(Output, "SHA256:\n");
      for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
          I != CheckSums.end(); ++I)
      {
         fprintf(Output, " %s %16llu %s\n",
                 (*I).second.SHA256.c_str(),
                 (*I).second.size,
                 (*I).first.c_str());
      }
   }

   if (DoSHA512 == true)
   {
      fprintf(Output, "SHA512:\n");
      for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
          I != CheckSums.end();
          ++I)
      {
         fprintf(Output, " %s %16llu %s\n",
                 (*I).second.SHA512.c_str(),
                 (*I).second.size,
                 (*I).first.c_str());
      }
   }
}
/*}}}*/
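
#if 0
// Illustrative usage sketch (not compiled): roughly how apt-ftparchive's
// driver code is expected to use the writers defined above. The paths, the
// cache-db name and the override file are made-up example values.
static bool ExampleRun()
{
   PackagesWriter Packages("./cache.db", "./override", "", "amd64");
   if (_error->PendingError() == true)
      return false;

   Packages.PathPrefix = "";   // prepended to the Filename: entries
   Packages.DirStrip = "./";   // stripped from the scanned paths
   Packages.Output = stdout;   // where the Packages records are written

   // Either walk a directory tree ...
   if (Packages.RecursiveScan("./pool/") == false)
      return false;
   // ... or use a pre-built file list instead:
   //   Packages.LoadFileList("./", "./filelist");

   return _error->PendingError() == false;
}
#endif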