apt-pkg/acquire-item.cc
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: acquire-item.cc,v 1.46.2.9 2004/01/16 18:51:11 mdz Exp $
4 /* ######################################################################
5
6 Acquire Item - Item to acquire
7
8 Each item can download to exactly one file at a time. This means you
9    cannot create an item that fetches two URIs to two files at the same
10 time. The pkgAcqIndex class creates a second class upon instantiation
11 to fetch the other index files because of this.
12
13 ##################################################################### */
14 /*}}}*/
15 // Include Files /*{{{*/
16 #include <config.h>
17
18 #include <apt-pkg/acquire-item.h>
19 #include <apt-pkg/configuration.h>
20 #include <apt-pkg/aptconfiguration.h>
21 #include <apt-pkg/sourcelist.h>
22 #include <apt-pkg/error.h>
23 #include <apt-pkg/strutl.h>
24 #include <apt-pkg/fileutl.h>
25 #include <apt-pkg/tagfile.h>
26 #include <apt-pkg/metaindex.h>
27 #include <apt-pkg/acquire.h>
28 #include <apt-pkg/hashes.h>
29 #include <apt-pkg/indexfile.h>
30 #include <apt-pkg/pkgcache.h>
31 #include <apt-pkg/cacheiterators.h>
32 #include <apt-pkg/pkgrecords.h>
33 #include <apt-pkg/gpgv.h>
34
35 #include <algorithm>
36 #include <stddef.h>
37 #include <stdlib.h>
38 #include <string.h>
39 #include <iostream>
40 #include <vector>
41 #include <sys/stat.h>
42 #include <unistd.h>
43 #include <errno.h>
44 #include <string>
45 #include <stdio.h>
46 #include <ctime>
47 #include <sstream>
48 #include <numeric>
49 #include <random>
50
51 #include <apti18n.h>
52 /*}}}*/
53
54 using namespace std;
55
56 static void printHashSumComparison(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/
57 {
58 if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false)
59 return;
60 std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl;
61 for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs)
62 std::cerr << "\t- " << hs->toStr() << std::endl;
63 std::cerr << " Actual Hash: " << std::endl;
64 for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs)
65 std::cerr << "\t- " << hs->toStr() << std::endl;
66 }
67 /*}}}*/
68 static std::string GetPartialFileName(std::string const &file) /*{{{*/
69 {
70 std::string DestFile = _config->FindDir("Dir::State::lists") + "partial/";
71 DestFile += file;
72 return DestFile;
73 }
74 /*}}}*/
75 static std::string GetPartialFileNameFromURI(std::string const &uri) /*{{{*/
76 {
77 return GetPartialFileName(URItoFileName(uri));
78 }
79 /*}}}*/
80 static std::string GetFinalFileNameFromURI(std::string const &uri) /*{{{*/
81 {
82 return _config->FindDir("Dir::State::lists") + URItoFileName(uri);
83 }
84 /*}}}*/
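// Illustrative sketch (kept disabled): how the helpers above map a source URI to
// the on-disk list filenames. The URI and the resulting paths are hypothetical
// examples assuming the default Dir::State::lists of /var/lib/apt/lists/;
// URItoFileName just flattens the URI into a single filename.
#if 0
static void ExampleListFilenames()
{
   std::string const uri = "http://deb.example.org/debian/dists/stable/InRelease";
   // roughly /var/lib/apt/lists/partial/deb.example.org_debian_dists_stable_InRelease
   std::cerr << GetPartialFileNameFromURI(uri) << std::endl;
   // roughly /var/lib/apt/lists/deb.example.org_debian_dists_stable_InRelease
   std::cerr << GetFinalFileNameFromURI(uri) << std::endl;
}
#endif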
85 static std::string GetKeepCompressedFileName(std::string file, IndexTarget const &Target)/*{{{*/
86 {
87 if (Target.KeepCompressed == false)
88 return file;
89
90 std::string const KeepCompressedAs = Target.Option(IndexTarget::KEEPCOMPRESSEDAS);
91 if (KeepCompressedAs.empty() == false)
92 {
93 std::string const ext = KeepCompressedAs.substr(0, KeepCompressedAs.find(' '));
94 if (ext != "uncompressed")
95 file.append(".").append(ext);
96 }
97 return file;
98 }
99 /*}}}*/
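// Illustrative sketch (kept disabled): GetKeepCompressedFileName only appends an
// extension if the target is to be kept compressed and the first entry of its
// space-separated KeepCompressedAs option isn't "uncompressed". The option
// values and the Contents filename below are hypothetical examples.
#if 0
static void ExampleKeepCompressedName(IndexTarget const &Target)
{
   // KeepCompressed == true, KeepCompressedAs == "xz gz uncompressed"
   //   -> "Contents-amd64" becomes "Contents-amd64.xz"
   // KeepCompressed == true, KeepCompressedAs == "uncompressed xz"
   //   -> "Contents-amd64" stays "Contents-amd64"
   std::cerr << GetKeepCompressedFileName("Contents-amd64", Target) << std::endl;
}
#endif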
100 static std::string GetMergeDiffsPatchFileName(std::string const &Final, std::string const &Patch)/*{{{*/
101 {
102 // rred expects the patch as $FinalFile.ed.$patchname.gz
103 return Final + ".ed." + Patch + ".gz";
104 }
105 /*}}}*/
106 static std::string GetDiffsPatchFileName(std::string const &Final) /*{{{*/
107 {
108 // rred expects the patch as $FinalFile.ed
109 return Final + ".ed";
110 }
111 /*}}}*/
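// Illustrative sketch (kept disabled): concrete names produced by the two
// helpers above for a hypothetical final file and patch name, matching what
// the rred method expects for merged and non-merged patching.
#if 0
static void ExamplePatchNames()
{
   std::string const Final = "/var/lib/apt/lists/deb.example.org_debian_dists_stable_main_binary-amd64_Packages";
   // -> <Final>.ed.2016-01-01-0000.00.gz
   std::cerr << GetMergeDiffsPatchFileName(Final, "2016-01-01-0000.00") << std::endl;
   // -> <Final>.ed
   std::cerr << GetDiffsPatchFileName(Final) << std::endl;
}
#endif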
112 static std::string GetExistingFilename(std::string const &File) /*{{{*/
113 {
114 if (RealFileExists(File))
115 return File;
116 for (auto const &type : APT::Configuration::getCompressorExtensions())
117 {
118 std::string const Final = File + type;
119 if (RealFileExists(Final))
120 return Final;
121 }
122 return "";
123 }
124 /*}}}*/
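// Illustrative sketch (kept disabled): GetExistingFilename probes the plain
// name first and then each configured compressor extension in order, returning
// the first existing file or an empty string. The path is a hypothetical example.
#if 0
static void ExampleExistingFilename()
{
   // with only <name>.xz on disk this prints "<name>.xz",
   // with nothing on disk it prints an empty line
   std::string const name = "/var/lib/apt/lists/deb.example.org_debian_dists_stable_main_binary-amd64_Packages";
   std::cerr << GetExistingFilename(name) << std::endl;
}
#endif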
125 static std::string GetDiffIndexFileName(std::string const &Name) /*{{{*/
126 {
127 return Name + ".diff/Index";
128 }
129 /*}}}*/
130 static std::string GetDiffIndexURI(IndexTarget const &Target) /*{{{*/
131 {
132 return Target.URI + ".diff/Index";
133 }
134 /*}}}*/
135
136 static void ReportMirrorFailureToCentral(pkgAcquire::Item const &I, std::string const &FailCode, std::string const &Details)/*{{{*/
137 {
138 // we only act if a mirror was used at all
139 if(I.UsedMirror.empty())
140 return;
141 #if 0
142 std::cerr << "\nReportMirrorFailure: "
143       << I.UsedMirror
144       << " Uri: " << I.DescURI()
145 << " FailCode: "
146 << FailCode << std::endl;
147 #endif
148 string const report = _config->Find("Methods::Mirror::ProblemReporting",
149 LIBEXEC_DIR "/apt-report-mirror-failure");
150 if(!FileExists(report))
151 return;
152
153 std::vector<char const*> const Args = {
154 report.c_str(),
155 I.UsedMirror.c_str(),
156 I.DescURI().c_str(),
157 FailCode.c_str(),
158 Details.c_str(),
159 NULL
160 };
161
162 pid_t pid = ExecFork();
163 if(pid < 0)
164 {
165 _error->Error("ReportMirrorFailure Fork failed");
166 return;
167 }
168 else if(pid == 0)
169 {
170 execvp(Args[0], (char**)Args.data());
171 std::cerr << "Could not exec " << Args[0] << std::endl;
172 _exit(100);
173 }
174 if(!ExecWait(pid, "report-mirror-failure"))
175 _error->Warning("Couldn't report problem to '%s'", report.c_str());
176 }
177 /*}}}*/
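// Illustrative sketch: the hook configured via Methods::Mirror::ProblemReporting
// (apt-report-mirror-failure by default) is executed with the argument order
// built above, i.e. roughly
//   apt-report-mirror-failure <used-mirror> <item-uri> <fail-code> <details>
// and a failing exit status only triggers the warning emitted above.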
178
179 static APT_NONNULL(2) bool MessageInsecureRepository(bool const isError, char const * const msg, std::string const &repo)/*{{{*/
180 {
181 std::string m;
182 strprintf(m, msg, repo.c_str());
183 if (isError)
184 {
185 _error->Error("%s", m.c_str());
186 _error->Notice("%s", _("Updating from such a repository can't be done securely, and is therefore disabled by default."));
187 }
188 else
189 {
190 _error->Warning("%s", m.c_str());
191 _error->Notice("%s", _("Data from such a repository can't be authenticated and is therefore potentially dangerous to use."));
192 }
193 _error->Notice("%s", _("See apt-secure(8) manpage for repository creation and user configuration details."));
194 return false;
195 }
196 /*}}}*/
197 // AllowInsecureRepositories /*{{{*/
198 enum class InsecureType { UNSIGNED, WEAK, NORELEASE };
199 static bool TargetIsAllowedToBe(IndexTarget const &Target, InsecureType const type)
200 {
201 if (_config->FindB("Acquire::AllowInsecureRepositories"))
202 return true;
203
204 if (Target.OptionBool(IndexTarget::ALLOW_INSECURE))
205 return true;
206
207 switch (type)
208 {
209 case InsecureType::UNSIGNED: break;
210 case InsecureType::NORELEASE: break;
211 case InsecureType::WEAK:
212 if (_config->FindB("Acquire::AllowWeakRepositories"))
213 return true;
214 if (Target.OptionBool(IndexTarget::ALLOW_WEAK))
215 return true;
216 break;
217 }
218 return false;
219 }
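// Illustrative sketch of the knobs consulted above (values are examples, not
// recommendations): globally via apt.conf
//   Acquire::AllowInsecureRepositories "true";
//   Acquire::AllowWeakRepositories "true";
// or per source entry, where the ALLOW_INSECURE/ALLOW_WEAK target options are
// usually spelled as sources.list options along the lines of
//   deb [allow-insecure=yes] http://deb.example.org/debian stable main
// see sources.list(5) for the authoritative option names.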
220 static bool APT_NONNULL(3, 4, 5) AllowInsecureRepositories(InsecureType const msg, std::string const &repo,
221 metaIndex const * const MetaIndexParser, pkgAcqMetaClearSig * const TransactionManager, pkgAcquire::Item * const I)
222 {
223    // we skip weak downgrades as it's unlikely that a repository gets really weaker –
224    // it's more realistic that apt got pickier in a newer version
225 if (msg != InsecureType::WEAK)
226 {
227 std::string const FinalInRelease = TransactionManager->GetFinalFilename();
228 std::string const FinalReleasegpg = FinalInRelease.substr(0, FinalInRelease.length() - strlen("InRelease")) + "Release.gpg";
229 if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease))
230 {
231 char const * msgstr = nullptr;
232 switch (msg)
233 {
234 case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is no longer signed."); break;
235          case InsecureType::NORELEASE: msgstr = _("The repository '%s' no longer has a Release file."); break;
236 case InsecureType::WEAK: /* unreachable */ break;
237 }
238 if (_config->FindB("Acquire::AllowDowngradeToInsecureRepositories") ||
239 TransactionManager->Target.OptionBool(IndexTarget::ALLOW_DOWNGRADE_TO_INSECURE))
240 {
241          // meh, the user wants to take risks (we still mark the packages
242 // from this repository as unauthenticated)
243 _error->Warning(msgstr, repo.c_str());
244 _error->Warning(_("This is normally not allowed, but the option "
245 "Acquire::AllowDowngradeToInsecureRepositories was "
246 "given to override it."));
247 } else {
248 MessageInsecureRepository(true, msgstr, repo);
249 TransactionManager->AbortTransaction();
250 I->Status = pkgAcquire::Item::StatError;
251 return false;
252 }
253 }
254 }
255
256 if(MetaIndexParser->GetTrusted() == metaIndex::TRI_YES)
257 return true;
258
259 char const * msgstr = nullptr;
260 switch (msg)
261 {
262 case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is not signed."); break;
263 case InsecureType::NORELEASE: msgstr = _("The repository '%s' does not have a Release file."); break;
264 case InsecureType::WEAK: msgstr = _("The repository '%s' provides only weak security information."); break;
265 }
266
267 if (TargetIsAllowedToBe(TransactionManager->Target, msg) == true)
268 {
269 MessageInsecureRepository(false, msgstr, repo);
270 return true;
271 }
272
273 MessageInsecureRepository(true, msgstr, repo);
274 TransactionManager->AbortTransaction();
275 I->Status = pkgAcquire::Item::StatError;
276 return false;
277 }
278 /*}}}*/
279 static HashStringList GetExpectedHashesFromFor(metaIndex * const Parser, std::string const &MetaKey)/*{{{*/
280 {
281 if (Parser == NULL)
282 return HashStringList();
283 metaIndex::checkSum * const R = Parser->Lookup(MetaKey);
284 if (R == NULL)
285 return HashStringList();
286 return R->Hashes;
287 }
288 /*}}}*/
289
290 // all ::HashesRequired and ::GetExpectedHashes implementations /*{{{*/
291 /* ::GetExpectedHashes is abstract and has to be implemented by all subclasses.
292 It is best to implement it as broadly as possible, while ::HashesRequired defaults
293 to true and should be as restrictive as possible for false cases. Note that if
294    a hash is returned by ::GetExpectedHashes it must match. Only if it doesn't return
295    one is ::HashesRequired called to evaluate if it's okay to have no hashes. */
296 APT_CONST bool pkgAcqTransactionItem::HashesRequired() const
297 {
298 /* signed repositories obviously have a parser and good hashes.
299 unsigned repositories, too, as even if we can't trust them for security,
300 we can at least trust them for integrity of the download itself.
301 Only repositories without a Release file can (obviously) not have
302 hashes – and they are very uncommon and strongly discouraged */
303 if (TransactionManager->MetaIndexParser->GetLoadedSuccessfully() != metaIndex::TRI_YES)
304 return false;
305 if (TargetIsAllowedToBe(Target, InsecureType::WEAK))
306 {
307 /* If we allow weak hashes, we check that we have some (weak) and then
308 declare hashes not needed. That will tip us in the right direction
309 as if hashes exist, they will be used, even if not required */
310 auto const hsl = GetExpectedHashes();
311 if (hsl.usable())
312 return true;
313 if (hsl.empty() == false)
314 return false;
315 }
316 return true;
317 }
318 HashStringList pkgAcqTransactionItem::GetExpectedHashes() const
319 {
320 return GetExpectedHashesFor(GetMetaKey());
321 }
322
323 APT_CONST bool pkgAcqMetaBase::HashesRequired() const
324 {
325 // Release and co have no hashes 'by design'.
326 return false;
327 }
328 HashStringList pkgAcqMetaBase::GetExpectedHashes() const
329 {
330 return HashStringList();
331 }
332
333 APT_CONST bool pkgAcqIndexDiffs::HashesRequired() const
334 {
335 /* We can't check hashes of rred result as we don't know what the
336 hash of the file will be. We just know the hash of the patch(es),
337       the hash of the file they will apply to and the hash of the resulting
338 file. */
339 if (State == StateFetchDiff)
340 return true;
341 return false;
342 }
343 HashStringList pkgAcqIndexDiffs::GetExpectedHashes() const
344 {
345 if (State == StateFetchDiff)
346 return available_patches[0].download_hashes;
347 return HashStringList();
348 }
349
350 APT_CONST bool pkgAcqIndexMergeDiffs::HashesRequired() const
351 {
352 /* @see #pkgAcqIndexDiffs::HashesRequired, with the difference that
353 we can check the rred result after all patches are applied as
354 we know the expected result rather than potentially apply more patches */
355 if (State == StateFetchDiff)
356 return true;
357 return State == StateApplyDiff;
358 }
359 HashStringList pkgAcqIndexMergeDiffs::GetExpectedHashes() const
360 {
361 if (State == StateFetchDiff)
362 return patch.download_hashes;
363 else if (State == StateApplyDiff)
364 return GetExpectedHashesFor(Target.MetaKey);
365 return HashStringList();
366 }
367
368 APT_CONST bool pkgAcqArchive::HashesRequired() const
369 {
370 return LocalSource == false;
371 }
372 HashStringList pkgAcqArchive::GetExpectedHashes() const
373 {
374 // figured out while parsing the records
375 return ExpectedHashes;
376 }
377
378 APT_CONST bool pkgAcqFile::HashesRequired() const
379 {
380 // supplied as parameter at creation time, so the caller decides
381 return ExpectedHashes.usable();
382 }
383 HashStringList pkgAcqFile::GetExpectedHashes() const
384 {
385 return ExpectedHashes;
386 }
387 /*}}}*/
388 // Acquire::Item::QueueURI and specialisations from child classes /*{{{*/
389 bool pkgAcquire::Item::QueueURI(pkgAcquire::ItemDesc &Item)
390 {
391 Owner->Enqueue(Item);
392 return true;
393 }
394 /* The idea here is that an item isn't queued if it exists on disk and the
395    transaction manager was an IMS hit, as this means that the files it contains
396 the checksums for can't be updated either (or they are and we are asking
397 for a hashsum mismatch to happen which helps nobody) */
398 bool pkgAcqTransactionItem::QueueURI(pkgAcquire::ItemDesc &Item)
399 {
400 if (TransactionManager->State != TransactionStarted)
401 {
402 if (_config->FindB("Debug::Acquire::Transaction", false))
403 std::clog << "Skip " << Target.URI << " as transaction was already dealt with!" << std::endl;
404 return false;
405 }
406 std::string const FinalFile = GetFinalFilename();
407 if (TransactionManager->IMSHit == true && FileExists(FinalFile) == true)
408 {
409 PartialFile = DestFile = FinalFile;
410 Status = StatDone;
411 return false;
412 }
413 // If we got the InRelease file via a mirror, pick all indexes directly from this mirror, too
414 if (TransactionManager->BaseURI.empty() == false && UsedMirror.empty() &&
415 URI::SiteOnly(Item.URI) != URI::SiteOnly(TransactionManager->BaseURI))
416 {
417       // this ensures we rewrite only once and only in the first step
418 auto const OldBaseURI = Target.Option(IndexTarget::BASE_URI);
419 if (OldBaseURI.empty() == false && APT::String::Startswith(Item.URI, OldBaseURI))
420 {
421 auto const ExtraPath = Item.URI.substr(OldBaseURI.length());
422 Item.URI = flCombine(TransactionManager->BaseURI, ExtraPath);
423 UsedMirror = TransactionManager->UsedMirror;
424 if (Item.Description.find(" ") != string::npos)
425 Item.Description.replace(0, Item.Description.find(" "), UsedMirror);
426 }
427 }
428 return pkgAcquire::Item::QueueURI(Item);
429 }
430 /* The transaction manager InRelease itself (or its older sisters-in-law
431 Release & Release.gpg) is always queued as this allows us to rerun gpgv
432 on it to verify that we aren't stalled with old files */
433 bool pkgAcqMetaBase::QueueURI(pkgAcquire::ItemDesc &Item)
434 {
435 return pkgAcquire::Item::QueueURI(Item);
436 }
437 /* the Diff/Index needs to queue also the up-to-date complete index file
438 to ensure that the list cleaner isn't eating it */
439 bool pkgAcqDiffIndex::QueueURI(pkgAcquire::ItemDesc &Item)
440 {
441 if (pkgAcqTransactionItem::QueueURI(Item) == true)
442 return true;
443 QueueOnIMSHit();
444 return false;
445 }
446 /*}}}*/
447 // Acquire::Item::GetFinalFilename and specialisations for child classes /*{{{*/
448 std::string pkgAcquire::Item::GetFinalFilename() const
449 {
450 // Beware: Desc.URI is modified by redirections
451 return GetFinalFileNameFromURI(Desc.URI);
452 }
453 std::string pkgAcqDiffIndex::GetFinalFilename() const
454 {
455 std::string const FinalFile = GetFinalFileNameFromURI(GetDiffIndexURI(Target));
456    // we don't want to recompress, so let's keep whatever we got
457 if (CurrentCompressionExtension == "uncompressed")
458 return FinalFile;
459 return FinalFile + "." + CurrentCompressionExtension;
460 }
461 std::string pkgAcqIndex::GetFinalFilename() const
462 {
463 std::string const FinalFile = GetFinalFileNameFromURI(Target.URI);
464 return GetKeepCompressedFileName(FinalFile, Target);
465 }
466 std::string pkgAcqMetaSig::GetFinalFilename() const
467 {
468 return GetFinalFileNameFromURI(Target.URI);
469 }
470 std::string pkgAcqBaseIndex::GetFinalFilename() const
471 {
472 return GetFinalFileNameFromURI(Target.URI);
473 }
474 std::string pkgAcqMetaBase::GetFinalFilename() const
475 {
476 return GetFinalFileNameFromURI(Target.URI);
477 }
478 std::string pkgAcqArchive::GetFinalFilename() const
479 {
480 return _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename);
481 }
482 /*}}}*/
483 // pkgAcqTransactionItem::GetMetaKey and specialisations for child classes /*{{{*/
484 std::string pkgAcqTransactionItem::GetMetaKey() const
485 {
486 return Target.MetaKey;
487 }
488 std::string pkgAcqIndex::GetMetaKey() const
489 {
490 if (Stage == STAGE_DECOMPRESS_AND_VERIFY || CurrentCompressionExtension == "uncompressed")
491 return Target.MetaKey;
492 return Target.MetaKey + "." + CurrentCompressionExtension;
493 }
494 std::string pkgAcqDiffIndex::GetMetaKey() const
495 {
496 auto const metakey = GetDiffIndexFileName(Target.MetaKey);
497 if (CurrentCompressionExtension == "uncompressed")
498 return metakey;
499 return metakey + "." + CurrentCompressionExtension;
500 }
501 /*}}}*/
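// Illustrative sketch: for a hypothetical Packages target with MetaKey
// "main/binary-amd64/Packages" the keys used for the hash lookup above would be
//   pkgAcqTransactionItem                       -> "main/binary-amd64/Packages"
//   pkgAcqIndex (while fetching the .xz)        -> "main/binary-amd64/Packages.xz"
//   pkgAcqDiffIndex (fetching a .gz diff index) -> "main/binary-amd64/Packages.diff/Index.gz"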
502 // pkgAcqTransactionItem::TransactionState and specialisations for child classes /*{{{*/
503 bool pkgAcqTransactionItem::TransactionState(TransactionStates const state)
504 {
505 bool const Debug = _config->FindB("Debug::Acquire::Transaction", false);
506 switch(state)
507 {
508 case TransactionStarted: _error->Fatal("Item %s changed to invalid transaction start state!", Target.URI.c_str()); break;
509 case TransactionAbort:
510 if(Debug == true)
511 std::clog << " Cancel: " << DestFile << std::endl;
512 if (Status == pkgAcquire::Item::StatIdle)
513 {
514 Status = pkgAcquire::Item::StatDone;
515 Dequeue();
516 }
517 break;
518 case TransactionCommit:
519 if(PartialFile.empty() == false)
520 {
521 bool sameFile = (PartialFile == DestFile);
522 // we use symlinks on IMS-Hit to avoid copies
523 if (RealFileExists(DestFile))
524 {
525 struct stat Buf;
526 if (lstat(PartialFile.c_str(), &Buf) != -1)
527 {
528 if (S_ISLNK(Buf.st_mode) && Buf.st_size > 0)
529 {
530 char partial[Buf.st_size + 1];
531 ssize_t const sp = readlink(PartialFile.c_str(), partial, Buf.st_size);
532 if (sp == -1)
533 _error->Errno("pkgAcqTransactionItem::TransactionState-sp", _("Failed to readlink %s"), PartialFile.c_str());
534 else
535 {
536 partial[sp] = '\0';
537 sameFile = (DestFile == partial);
538 }
539 }
540 }
541 else
542 _error->Errno("pkgAcqTransactionItem::TransactionState-stat", _("Failed to stat %s"), PartialFile.c_str());
543 }
544 if (sameFile == false)
545 {
546 // ensure that even without lists-cleanup all compressions are nuked
547 std::string FinalFile = GetFinalFileNameFromURI(Target.URI);
548 if (FileExists(FinalFile))
549 {
550 if(Debug == true)
551 std::clog << "rm " << FinalFile << " # " << DescURI() << std::endl;
552 if (RemoveFile("TransactionStates-Cleanup", FinalFile) == false)
553 return false;
554 }
555 for (auto const &ext: APT::Configuration::getCompressorExtensions())
556 {
557 auto const Final = FinalFile + ext;
558 if (FileExists(Final))
559 {
560 if(Debug == true)
561 std::clog << "rm " << Final << " # " << DescURI() << std::endl;
562 if (RemoveFile("TransactionStates-Cleanup", Final) == false)
563 return false;
564 }
565 }
566 if(Debug == true)
567 std::clog << "mv " << PartialFile << " -> "<< DestFile << " # " << DescURI() << std::endl;
568 if (Rename(PartialFile, DestFile) == false)
569 return false;
570 }
571 else if(Debug == true)
572 std::clog << "keep " << PartialFile << " # " << DescURI() << std::endl;
573
574 } else {
575 if(Debug == true)
576 std::clog << "rm " << DestFile << " # " << DescURI() << std::endl;
577 if (RemoveFile("TransItem::TransactionCommit", DestFile) == false)
578 return false;
579 }
580 break;
581 }
582 return true;
583 }
584 bool pkgAcqMetaBase::TransactionState(TransactionStates const state)
585 {
586 // Do not remove InRelease on IMSHit of Release.gpg [yes, this is very edgecasey]
587 if (TransactionManager->IMSHit == false)
588 return pkgAcqTransactionItem::TransactionState(state);
589 return true;
590 }
591 bool pkgAcqIndex::TransactionState(TransactionStates const state)
592 {
593 if (pkgAcqTransactionItem::TransactionState(state) == false)
594 return false;
595
596 switch (state)
597 {
598 case TransactionStarted: _error->Fatal("AcqIndex %s changed to invalid transaction start state!", Target.URI.c_str()); break;
599 case TransactionAbort:
600 if (Stage == STAGE_DECOMPRESS_AND_VERIFY)
601 {
602 // keep the compressed file, but drop the decompressed
603 EraseFileName.clear();
604 if (PartialFile.empty() == false && flExtension(PartialFile) != CurrentCompressionExtension)
605 RemoveFile("TransactionAbort", PartialFile);
606 }
607 break;
608 case TransactionCommit:
609 if (EraseFileName.empty() == false)
610 RemoveFile("AcqIndex::TransactionCommit", EraseFileName);
611 break;
612 }
613 return true;
614 }
615 bool pkgAcqDiffIndex::TransactionState(TransactionStates const state)
616 {
617 if (pkgAcqTransactionItem::TransactionState(state) == false)
618 return false;
619
620 switch (state)
621 {
622 case TransactionStarted: _error->Fatal("Item %s changed to invalid transaction start state!", Target.URI.c_str()); break;
623 case TransactionCommit:
624 break;
625 case TransactionAbort:
626 std::string const Partial = GetPartialFileNameFromURI(Target.URI);
627 RemoveFile("TransactionAbort", Partial);
628 break;
629 }
630
631 return true;
632 }
633 /*}}}*/
634
635 class APT_HIDDEN NoActionItem : public pkgAcquire::Item /*{{{*/
636 /* The sole purpose of this class is having an item which does nothing to
637 reach its done state to prevent cleanup deleting the mentioned file.
638 Handy in cases in which we know we have the file already, like IMS-Hits. */
639 {
640 IndexTarget const Target;
641 public:
642 virtual std::string DescURI() const APT_OVERRIDE {return Target.URI;};
643 virtual HashStringList GetExpectedHashes() const APT_OVERRIDE {return HashStringList();};
644
645 NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target) :
646 pkgAcquire::Item(Owner), Target(Target)
647 {
648 Status = StatDone;
649 DestFile = GetFinalFileNameFromURI(Target.URI);
650 }
651 NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target, std::string const &FinalFile) :
652 pkgAcquire::Item(Owner), Target(Target)
653 {
654 Status = StatDone;
655 DestFile = FinalFile;
656 }
657 };
658 /*}}}*/
659 class APT_HIDDEN CleanupItem : public pkgAcqTransactionItem /*{{{*/
660 /* This class ensures that a file which was configured but isn't downloaded
661 for various reasons isn't kept in an old version in the lists directory.
662    In a way it's the reverse of NoActionItem as it helps with removing files
663 even if the lists-cleanup is deactivated. */
664 {
665 public:
666 virtual std::string DescURI() const APT_OVERRIDE {return Target.URI;};
667 virtual HashStringList GetExpectedHashes() const APT_OVERRIDE {return HashStringList();};
668
669 CleanupItem(pkgAcquire * const Owner, pkgAcqMetaClearSig * const TransactionManager, IndexTarget const &Target) :
670 pkgAcqTransactionItem(Owner, TransactionManager, Target)
671 {
672 Status = StatDone;
673 DestFile = GetFinalFileNameFromURI(Target.URI);
674 }
675 bool TransactionState(TransactionStates const state) APT_OVERRIDE
676 {
677 switch (state)
678 {
679 case TransactionStarted:
680 break;
681 case TransactionAbort:
682 break;
683 case TransactionCommit:
684 if (_config->FindB("Debug::Acquire::Transaction", false) == true)
685 std::clog << "rm " << DestFile << " # " << DescURI() << std::endl;
686 if (RemoveFile("TransItem::TransactionCommit", DestFile) == false)
687 return false;
688 break;
689 }
690 return true;
691 }
692 };
693 /*}}}*/
694
695 // Acquire::Item::Item - Constructor /*{{{*/
696 class pkgAcquire::Item::Private
697 {
698 public:
699 std::vector<std::string> PastRedirections;
700 };
701 APT_IGNORE_DEPRECATED_PUSH
702 pkgAcquire::Item::Item(pkgAcquire * const owner) :
703 FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), Local(false),
704 QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(new Private())
705 {
706 Owner->Add(this);
707 Status = StatIdle;
708 }
709 APT_IGNORE_DEPRECATED_POP
710 /*}}}*/
711 // Acquire::Item::~Item - Destructor /*{{{*/
712 pkgAcquire::Item::~Item()
713 {
714 Owner->Remove(this);
715 delete d;
716 }
717 /*}}}*/
718 std::string pkgAcquire::Item::Custom600Headers() const /*{{{*/
719 {
720 return std::string();
721 }
722 /*}}}*/
723 std::string pkgAcquire::Item::ShortDesc() const /*{{{*/
724 {
725 return DescURI();
726 }
727 /*}}}*/
728 APT_CONST void pkgAcquire::Item::Finished() /*{{{*/
729 {
730 }
731 /*}}}*/
732 APT_PURE pkgAcquire * pkgAcquire::Item::GetOwner() const /*{{{*/
733 {
734 return Owner;
735 }
736 /*}}}*/
737 APT_CONST pkgAcquire::ItemDesc &pkgAcquire::Item::GetItemDesc() /*{{{*/
738 {
739 return Desc;
740 }
741 /*}}}*/
742 APT_CONST bool pkgAcquire::Item::IsTrusted() const /*{{{*/
743 {
744 return false;
745 }
746 /*}}}*/
747 // Acquire::Item::Failed - Item failed to download /*{{{*/
748 // ---------------------------------------------------------------------
749 /* We return to an idle state if there are still other queues that could
750 fetch this object */
751 static void formatHashsum(std::ostream &out, HashString const &hs)
752 {
753 auto const type = hs.HashType();
754 if (type == "Checksum-FileSize")
755 out << " - Filesize";
756 else
757 out << " - " << type;
758 out << ':' << hs.HashValue();
759 if (hs.usable() == false)
760 out << " [weak]";
761 out << std::endl;
762 }
763 void pkgAcquire::Item::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
764 {
765 if (QueueCounter <= 1)
766 {
767       /* This indicates that the file is not available right now but might
768          be available sometime later. If we do a retry cycle then this should be
769 retried [CDROMs] */
770 if (Cnf != NULL && Cnf->LocalOnly == true &&
771 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
772 {
773 Status = StatIdle;
774 Dequeue();
775 return;
776 }
777
778 switch (Status)
779 {
780 case StatIdle:
781 case StatFetching:
782 case StatDone:
783 Status = StatError;
784 break;
785 case StatAuthError:
786 case StatError:
787 case StatTransientNetworkError:
788 break;
789 }
790 Complete = false;
791 Dequeue();
792 }
793
794 string const FailReason = LookupTag(Message, "FailReason");
795 enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, WEAK_HASHSUMS, REDIRECTION_LOOP, OTHER } failreason = OTHER;
796 if ( FailReason == "MaximumSizeExceeded")
797 failreason = MAXIMUM_SIZE_EXCEEDED;
798 else if ( FailReason == "WeakHashSums")
799 failreason = WEAK_HASHSUMS;
800 else if (FailReason == "RedirectionLoop")
801 failreason = REDIRECTION_LOOP;
802 else if (Status == StatAuthError)
803 failreason = HASHSUM_MISMATCH;
804
805 if(ErrorText.empty())
806 {
807 std::ostringstream out;
808 switch (failreason)
809 {
810 case HASHSUM_MISMATCH:
811 out << _("Hash Sum mismatch") << std::endl;
812 break;
813 case WEAK_HASHSUMS:
814 out << _("Insufficient information available to perform this download securely") << std::endl;
815 break;
816 case REDIRECTION_LOOP:
817 out << "Redirection loop encountered" << std::endl;
818 break;
819 case MAXIMUM_SIZE_EXCEEDED:
820 out << LookupTag(Message, "Message") << std::endl;
821 break;
822 case OTHER:
823 out << LookupTag(Message, "Message");
824 break;
825 }
826
827 if (Status == StatAuthError)
828 {
829 auto const ExpectedHashes = GetExpectedHashes();
830 if (ExpectedHashes.empty() == false)
831 {
832 out << "Hashes of expected file:" << std::endl;
833 for (auto const &hs: ExpectedHashes)
834 formatHashsum(out, hs);
835 }
836 if (failreason == HASHSUM_MISMATCH)
837 {
838 out << "Hashes of received file:" << std::endl;
839 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
840 {
841 std::string const tagname = std::string(*type) + "-Hash";
842 std::string const hashsum = LookupTag(Message, tagname.c_str());
843 if (hashsum.empty() == false)
844 formatHashsum(out, HashString(*type, hashsum));
845 }
846 }
847 auto const lastmod = LookupTag(Message, "Last-Modified", "");
848 if (lastmod.empty() == false)
849 out << "Last modification reported: " << lastmod << std::endl;
850 }
851 ErrorText = out.str();
852 }
853
854 switch (failreason)
855 {
856 case MAXIMUM_SIZE_EXCEEDED: RenameOnError(MaximumSizeExceeded); break;
857 case HASHSUM_MISMATCH: RenameOnError(HashSumMismatch); break;
858 case WEAK_HASHSUMS: break;
859 case REDIRECTION_LOOP: break;
860 case OTHER: break;
861 }
862
863 if (FailReason.empty() == false)
864 ReportMirrorFailureToCentral(*this, FailReason, ErrorText);
865 else
866 ReportMirrorFailureToCentral(*this, ErrorText, ErrorText);
867
868 if (QueueCounter > 1)
869 Status = StatIdle;
870 }
871 /*}}}*/
872 // Acquire::Item::Start - Item has begun to download /*{{{*/
873 // ---------------------------------------------------------------------
874 /* Stash status and the file size. Note that setting Complete means
875    sub-phases of the acquire process such as decompression are operating */
876 void pkgAcquire::Item::Start(string const &/*Message*/, unsigned long long const Size)
877 {
878 Status = StatFetching;
879 ErrorText.clear();
880 if (FileSize == 0 && Complete == false)
881 FileSize = Size;
882 }
883 /*}}}*/
884 // Acquire::Item::VerifyDone - check if Item was downloaded OK /*{{{*/
885 /* Note that hash-verification is 'hardcoded' in acquire-worker and has
886 * already passed if this method is called. */
887 bool pkgAcquire::Item::VerifyDone(std::string const &Message,
888 pkgAcquire::MethodConfig const * const /*Cnf*/)
889 {
890 std::string const FileName = LookupTag(Message,"Filename");
891 if (FileName.empty() == true)
892 {
893 Status = StatError;
894 ErrorText = "Method gave a blank filename";
895 return false;
896 }
897
898 return true;
899 }
900 /*}}}*/
901 // Acquire::Item::Done - Item downloaded OK /*{{{*/
902 void pkgAcquire::Item::Done(string const &/*Message*/, HashStringList const &Hashes,
903 pkgAcquire::MethodConfig const * const /*Cnf*/)
904 {
905 // We just downloaded something..
906 if (FileSize == 0)
907 {
908 unsigned long long const downloadedSize = Hashes.FileSize();
909 if (downloadedSize != 0)
910 {
911 FileSize = downloadedSize;
912 }
913 }
914 Status = StatDone;
915 ErrorText = string();
916 Owner->Dequeue(this);
917 }
918 /*}}}*/
919 // Acquire::Item::Rename - Rename a file /*{{{*/
920 // ---------------------------------------------------------------------
921 /* This helper function is used by a lot of item methods as their final
922 step */
923 bool pkgAcquire::Item::Rename(string const &From,string const &To)
924 {
925 if (From == To || rename(From.c_str(),To.c_str()) == 0)
926 return true;
927
928 std::string S;
929 strprintf(S, _("rename failed, %s (%s -> %s)."), strerror(errno),
930 From.c_str(),To.c_str());
931 Status = StatError;
932 if (ErrorText.empty())
933 ErrorText = S;
934 else
935 ErrorText = ErrorText + ": " + S;
936 return false;
937 }
938 /*}}}*/
939 void pkgAcquire::Item::Dequeue() /*{{{*/
940 {
941 Owner->Dequeue(this);
942 }
943 /*}}}*/
944 bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
945 {
946 if (RealFileExists(DestFile))
947 Rename(DestFile, DestFile + ".FAILED");
948
949 std::string errtext;
950 switch (error)
951 {
952 case HashSumMismatch:
953 errtext = _("Hash Sum mismatch");
954 break;
955 case SizeMismatch:
956 errtext = _("Size mismatch");
957 Status = StatAuthError;
958 break;
959 case InvalidFormat:
960 errtext = _("Invalid file format");
961 Status = StatError;
962          // do not report, as usually it's not the mirror's fault but a portal/proxy
963 break;
964 case SignatureError:
965 errtext = _("Signature error");
966 Status = StatError;
967 break;
968 case NotClearsigned:
969 strprintf(errtext, _("Clearsigned file isn't valid, got '%s' (does the network require authentication?)"), "NOSPLIT");
970 Status = StatAuthError;
971 break;
972 case MaximumSizeExceeded:
973 // the method is expected to report a good error for this
974 break;
975 case PDiffError:
976 // no handling here, done by callers
977 break;
978 }
979 if (ErrorText.empty())
980 ErrorText = errtext;
981 return false;
982 }
983 /*}}}*/
984 void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess)/*{{{*/
985 {
986 ActiveSubprocess = subprocess;
987 APT_IGNORE_DEPRECATED(Mode = ActiveSubprocess.c_str();)
988 }
989 /*}}}*/
990 // Acquire::Item::ReportMirrorFailure /*{{{*/
991 void pkgAcquire::Item::ReportMirrorFailure(std::string const &FailCode)
992 {
993 ReportMirrorFailureToCentral(*this, FailCode, FailCode);
994 }
995 /*}}}*/
996 std::string pkgAcquire::Item::HashSum() const /*{{{*/
997 {
998 HashStringList const hashes = GetExpectedHashes();
999 HashString const * const hs = hashes.find(NULL);
1000 return hs != NULL ? hs->toStr() : "";
1001 }
1002 /*}}}*/
1003 bool pkgAcquire::Item::IsRedirectionLoop(std::string const &NewURI) /*{{{*/
1004 {
1005 // store can fail due to permission errors and the item will "loop" then
1006 if (APT::String::Startswith(NewURI, "store:"))
1007 return false;
1008 if (d->PastRedirections.empty())
1009 {
1010 d->PastRedirections.push_back(NewURI);
1011 return false;
1012 }
1013 auto const LastURI = std::prev(d->PastRedirections.end());
1014    // redirections to the same file are a way of restarting/rescheduling,
1015 // individual methods will have to make sure that they aren't looping this way
1016 if (*LastURI == NewURI)
1017 return false;
1018 if (std::find(d->PastRedirections.begin(), LastURI, NewURI) != LastURI)
1019 return true;
1020 d->PastRedirections.push_back(NewURI);
1021 return false;
1022 }
1023 /*}}}*/
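// Illustrative sketch of the loop detection above for a hypothetical redirect
// chain, as successive calls on the same item:
//   IsRedirectionLoop("http://a.example/x") -> false  (remembered)
//   IsRedirectionLoop("http://b.example/x") -> false  (remembered)
//   IsRedirectionLoop("http://b.example/x") -> false  (same as the last URI: restart)
//   IsRedirectionLoop("http://a.example/x") -> true   (seen before the last URI: loop)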
1024 int pkgAcquire::Item::Priority() /*{{{*/
1025 {
1026 // Stage 1: Meta indices and diff indices
1027 // - those need to be fetched first to have progress reporting working
1028 // for the rest
1029 if (dynamic_cast<pkgAcqMetaSig*>(this) != nullptr
1030 || dynamic_cast<pkgAcqMetaBase*>(this) != nullptr
1031 || dynamic_cast<pkgAcqDiffIndex*>(this) != nullptr)
1032 return 1000;
1033 // Stage 2: Diff files
1034 // - fetch before complete indexes so we can apply the diffs while fetching
1035 // larger files.
1036 if (dynamic_cast<pkgAcqIndexDiffs*>(this) != nullptr ||
1037 dynamic_cast<pkgAcqIndexMergeDiffs*>(this) != nullptr)
1038 return 800;
1039
1040 // Stage 3: The rest - complete index files and other stuff
1041 return 500;
1042 }
1043 /*}}}*/
1044
1045 pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/
1046 pkgAcqMetaClearSig * const transactionManager, IndexTarget const &target) :
1047 pkgAcquire::Item(Owner), d(NULL), Target(target), TransactionManager(transactionManager)
1048 {
1049 if (TransactionManager != this)
1050 TransactionManager->Add(this);
1051 }
1052 /*}}}*/
1053 pkgAcqTransactionItem::~pkgAcqTransactionItem() /*{{{*/
1054 {
1055 }
1056 /*}}}*/
1057 HashStringList pkgAcqTransactionItem::GetExpectedHashesFor(std::string const &MetaKey) const /*{{{*/
1058 {
1059 return GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, MetaKey);
1060 }
1061 /*}}}*/
1062
1063 static void LoadLastMetaIndexParser(pkgAcqMetaClearSig * const TransactionManager, std::string const &FinalRelease, std::string const &FinalInRelease)/*{{{*/
1064 {
1065 if (TransactionManager->IMSHit == true)
1066 return;
1067 if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease))
1068 {
1069 TransactionManager->LastMetaIndexParser = TransactionManager->MetaIndexParser->UnloadedClone();
1070 if (TransactionManager->LastMetaIndexParser != NULL)
1071 {
1072 _error->PushToStack();
1073 if (RealFileExists(FinalInRelease))
1074 TransactionManager->LastMetaIndexParser->Load(FinalInRelease, NULL);
1075 else
1076 TransactionManager->LastMetaIndexParser->Load(FinalRelease, NULL);
1077          // it's unlikely to happen, but if what we have is bad, ignore it
1078 if (_error->PendingError())
1079 {
1080 delete TransactionManager->LastMetaIndexParser;
1081 TransactionManager->LastMetaIndexParser = NULL;
1082 }
1083 _error->RevertToStack();
1084 }
1085 }
1086 }
1087 /*}}}*/
1088
1089 // AcqMetaBase - Constructor /*{{{*/
1090 pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire * const Owner,
1091 pkgAcqMetaClearSig * const TransactionManager,
1092 IndexTarget const &DataTarget)
1093 : pkgAcqTransactionItem(Owner, TransactionManager, DataTarget), d(NULL),
1094 AuthPass(false), IMSHit(false), State(TransactionStarted)
1095 {
1096 }
1097 /*}}}*/
1098 // AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/
1099 void pkgAcqMetaBase::Add(pkgAcqTransactionItem * const I)
1100 {
1101 Transaction.push_back(I);
1102 }
1103 /*}}}*/
1104 // AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/
1105 void pkgAcqMetaBase::AbortTransaction()
1106 {
1107 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1108 std::clog << "AbortTransaction: " << TransactionManager << std::endl;
1109
1110 switch (TransactionManager->State)
1111 {
1112 case TransactionStarted: break;
1113 case TransactionAbort: _error->Fatal("Transaction %s was already aborted and is aborted again", TransactionManager->Target.URI.c_str()); return;
1114 case TransactionCommit: _error->Fatal("Transaction %s was already aborted and is now committed", TransactionManager->Target.URI.c_str()); return;
1115 }
1116 TransactionManager->State = TransactionAbort;
1117
1118 // ensure the toplevel is in error state too
1119 for (std::vector<pkgAcqTransactionItem*>::iterator I = Transaction.begin();
1120 I != Transaction.end(); ++I)
1121 {
1122 if ((*I)->Status != pkgAcquire::Item::StatFetching)
1123 Owner->Dequeue(*I);
1124 (*I)->TransactionState(TransactionAbort);
1125 }
1126 Transaction.clear();
1127 }
1128 /*}}}*/
1129 // AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/
1130 APT_PURE bool pkgAcqMetaBase::TransactionHasError() const
1131 {
1132 for (std::vector<pkgAcqTransactionItem*>::const_iterator I = Transaction.begin();
1133 I != Transaction.end(); ++I)
1134 {
1135 switch((*I)->Status) {
1136 case StatDone: break;
1137 case StatIdle: break;
1138 case StatAuthError: return true;
1139 case StatError: return true;
1140 case StatTransientNetworkError: return true;
1141 case StatFetching: break;
1142 }
1143 }
1144 return false;
1145 }
1146 /*}}}*/
1147 // AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/
1148 void pkgAcqMetaBase::CommitTransaction()
1149 {
1150 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1151 std::clog << "CommitTransaction: " << this << std::endl;
1152
1153 switch (TransactionManager->State)
1154 {
1155 case TransactionStarted: break;
1156 case TransactionAbort: _error->Fatal("Transaction %s was already committed and is now aborted", TransactionManager->Target.URI.c_str()); return;
1157 case TransactionCommit: _error->Fatal("Transaction %s was already committed and is again committed", TransactionManager->Target.URI.c_str()); return;
1158 }
1159 TransactionManager->State = TransactionCommit;
1160
1161 // move new files into place *and* remove files that are not
1162 // part of the transaction but are still on disk
1163 for (std::vector<pkgAcqTransactionItem*>::iterator I = Transaction.begin();
1164 I != Transaction.end(); ++I)
1165 {
1166 (*I)->TransactionState(TransactionCommit);
1167 }
1168 Transaction.clear();
1169 }
1170 /*}}}*/
1171 // AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/
1172 void pkgAcqMetaBase::TransactionStageCopy(pkgAcqTransactionItem * const I,
1173 const std::string &From,
1174 const std::string &To)
1175 {
1176 I->PartialFile = From;
1177 I->DestFile = To;
1178 }
1179 /*}}}*/
1180 // AcqMetaBase::TransactionStageRemoval - Stage a file for removal /*{{{*/
1181 void pkgAcqMetaBase::TransactionStageRemoval(pkgAcqTransactionItem * const I,
1182 const std::string &FinalFile)
1183 {
1184 I->PartialFile = "";
1185 I->DestFile = FinalFile;
1186 }
1187 /*}}}*/
1188 // AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/
1189 /* This method is called from ::Failed handlers. If it returns true,
1190    no fallback to other files or modes is performed */
1191 bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message)
1192 {
1193 string const Final = I->GetFinalFilename();
1194 std::string const GPGError = LookupTag(Message, "Message");
1195 if (FileExists(Final))
1196 {
1197 I->Status = StatTransientNetworkError;
1198 _error->Warning(_("An error occurred during the signature verification. "
1199 "The repository is not updated and the previous index files will be used. "
1200 "GPG error: %s: %s"),
1201 Desc.Description.c_str(),
1202 GPGError.c_str());
1203 RunScripts("APT::Update::Auth-Failure");
1204 return true;
1205 } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) {
1206 /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */
1207 _error->Error(_("GPG error: %s: %s"),
1208 Desc.Description.c_str(),
1209 GPGError.c_str());
1210 I->Status = StatAuthError;
1211 return true;
1212 } else {
1213 _error->Warning(_("GPG error: %s: %s"),
1214 Desc.Description.c_str(),
1215 GPGError.c_str());
1216 }
1217 // gpgv method failed
1218 ReportMirrorFailureToCentral(*this, "GPGFailure", GPGError);
1219 return false;
1220 }
1221 /*}}}*/
1222 // AcqMetaBase::Custom600Headers - Get header for AcqMetaBase /*{{{*/
1223 // ---------------------------------------------------------------------
1224 string pkgAcqMetaBase::Custom600Headers() const
1225 {
1226 std::string Header = "\nIndex-File: true";
1227 std::string MaximumSize;
1228 strprintf(MaximumSize, "\nMaximum-Size: %i",
1229 _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000));
1230 Header += MaximumSize;
1231
1232 string const FinalFile = GetFinalFilename();
1233 struct stat Buf;
1234 if (stat(FinalFile.c_str(),&Buf) == 0)
1235 Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
1236
1237 return Header;
1238 }
1239 /*}}}*/
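// Illustrative sketch: with the default Acquire::MaxReleaseFileSize and an
// already existing final file, the extra headers produced above look roughly like
//   Index-File: true
//   Maximum-Size: 10000000
//   Last-Modified: Sat, 01 Jan 2022 00:00:00 GMT
// (the Last-Modified line is only added if the final file can be stat'ed; the
// date shown is a made-up example in the RFC 1123 format TimeRFC1123 emits).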
1240 // AcqMetaBase::QueueForSignatureVerify /*{{{*/
1241 void pkgAcqMetaBase::QueueForSignatureVerify(pkgAcqTransactionItem * const I, std::string const &File, std::string const &Signature)
1242 {
1243 AuthPass = true;
1244 I->Desc.URI = "gpgv:" + Signature;
1245 I->DestFile = File;
1246 QueueURI(I->Desc);
1247 I->SetActiveSubprocess("gpgv");
1248 }
1249 /*}}}*/
1250 // AcqMetaBase::CheckDownloadDone /*{{{*/
1251 bool pkgAcqMetaBase::CheckDownloadDone(pkgAcqTransactionItem * const I, const std::string &Message, HashStringList const &Hashes) const
1252 {
1253 // We have just finished downloading a Release file (it is not
1254 // verified yet)
1255
1256 // Save the final base URI we got this Release file from
1257 if (I->UsedMirror.empty() == false && _config->FindB("Acquire::SameMirrorForAllIndexes", true))
1258 {
1259 if (APT::String::Endswith(I->Desc.URI, "InRelease"))
1260 {
1261 TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("InRelease"));
1262 TransactionManager->UsedMirror = I->UsedMirror;
1263 }
1264 else if (APT::String::Endswith(I->Desc.URI, "Release"))
1265 {
1266 TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("Release"));
1267 TransactionManager->UsedMirror = I->UsedMirror;
1268 }
1269 }
1270
1271 std::string const FileName = LookupTag(Message,"Filename");
1272 if (FileName != I->DestFile && RealFileExists(I->DestFile) == false)
1273 {
1274 I->Local = true;
1275 I->Desc.URI = "copy:" + FileName;
1276 I->QueueURI(I->Desc);
1277 return false;
1278 }
1279
1280 // make sure to verify against the right file on I-M-S hit
1281 bool IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"), false);
1282 if (IMSHit == false && Hashes.usable())
1283 {
1284       // detect IMS-Hits the server hasn't detected, via hash comparison
1285 std::string const FinalFile = I->GetFinalFilename();
1286 if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true)
1287 {
1288 IMSHit = true;
1289 RemoveFile("CheckDownloadDone", I->DestFile);
1290 }
1291 }
1292
1293 if(IMSHit == true)
1294 {
1295 // for simplicity, the transaction manager is always InRelease
1296 // even if it doesn't exist.
1297 TransactionManager->IMSHit = true;
1298 I->PartialFile = I->DestFile = I->GetFinalFilename();
1299 }
1300
1301 // set Item to complete as the remaining work is all local (verify etc)
1302 I->Complete = true;
1303
1304 return true;
1305 }
1306 /*}}}*/
1307 bool pkgAcqMetaBase::CheckAuthDone(string const &Message) /*{{{*/
1308 {
1309 // At this point, the gpgv method has succeeded, so there is a
1310 // valid signature from a key in the trusted keyring. We
1311 // perform additional verification of its contents, and use them
1312 // to verify the indexes we are about to download
1313 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1314 std::cerr << "Signature verification succeeded: " << DestFile << std::endl;
1315
1316 if (TransactionManager->IMSHit == false)
1317 {
1318 // open the last (In)Release if we have it
1319 std::string const FinalFile = GetFinalFilename();
1320 std::string FinalRelease;
1321 std::string FinalInRelease;
1322 if (APT::String::Endswith(FinalFile, "InRelease"))
1323 {
1324 FinalInRelease = FinalFile;
1325 FinalRelease = FinalFile.substr(0, FinalFile.length() - strlen("InRelease")) + "Release";
1326 }
1327 else
1328 {
1329 FinalInRelease = FinalFile.substr(0, FinalFile.length() - strlen("Release")) + "InRelease";
1330 FinalRelease = FinalFile;
1331 }
1332 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1333 }
1334
1335 bool const GoodAuth = TransactionManager->MetaIndexParser->Load(DestFile, &ErrorText);
1336 if (GoodAuth == false && AllowInsecureRepositories(InsecureType::WEAK, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == false)
1337 {
1338 Status = StatAuthError;
1339 return false;
1340 }
1341
1342 if (!VerifyVendor(Message))
1343 {
1344 Status = StatAuthError;
1345 return false;
1346 }
1347
1348 // Download further indexes with verification
1349 TransactionManager->QueueIndexes(GoodAuth);
1350
1351 return GoodAuth;
1352 }
1353 /*}}}*/
1354 void pkgAcqMetaClearSig::QueueIndexes(bool const verify) /*{{{*/
1355 {
1356 // at this point the real Items are loaded in the fetcher
1357 ExpectedAdditionalItems = 0;
1358
1359 std::set<std::string> targetsSeen;
1360 bool const hasReleaseFile = TransactionManager->MetaIndexParser != NULL;
1361 bool const metaBaseSupportsByHash = hasReleaseFile && TransactionManager->MetaIndexParser->GetSupportsAcquireByHash();
1362 bool hasHashes = true;
1363 auto IndexTargets = TransactionManager->MetaIndexParser->GetIndexTargets();
1364 if (hasReleaseFile && verify == false)
1365 hasHashes = std::any_of(IndexTargets.begin(), IndexTargets.end(),
1366 [&](IndexTarget const &Target) { return TransactionManager->MetaIndexParser->Exists(Target.MetaKey); });
1367 if (_config->FindB("Acquire::IndexTargets::Randomized", true) && likely(IndexTargets.empty() == false))
1368 {
1369 /* For fallback handling and to have some reasonable progress information
1370          we can't randomize everything, but at least the order within the same type
1371          can be randomized, as we shouldn't be telling the mirrors (and everyone else watching)
1372          which arch is native/foreign, our specific order of preference for translations, … */
1373 auto range_start = IndexTargets.begin();
1374 std::random_device rd;
1375 std::default_random_engine g(rd());
1376 do {
1377 auto const type = range_start->Option(IndexTarget::CREATED_BY);
1378 auto const range_end = std::find_if_not(range_start, IndexTargets.end(),
1379 [&type](IndexTarget const &T) { return type == T.Option(IndexTarget::CREATED_BY); });
1380 std::shuffle(range_start, range_end, g);
1381 range_start = range_end;
1382 } while (range_start != IndexTargets.end());
1383 }
1384 for (auto&& Target: IndexTargets)
1385 {
1386       // if we have already seen a target that this one is declared a fallback of
1387       // (matched via created-by), we skip acquiring the fallback (but we make sure we clean up)
1388 if (targetsSeen.find(Target.Option(IndexTarget::FALLBACK_OF)) != targetsSeen.end())
1389 {
1390 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1391 new CleanupItem(Owner, TransactionManager, Target);
1392 continue;
1393 }
1394       // 'all' is an implementation detail. Users shouldn't use this as an arch.
1395       // We need this support trickery here as e.g. Debian has binary-all files already,
1396       // but arch:all packages are still in the arch:any files, so we would waste precious
1397       // download time, bandwidth and diskspace for nothing, BUT Debian doesn't feature 'all'
1398 // in the set of supported architectures, so we can filter based on this property rather
1399 // than invent an entirely new flag we would need to carry for all of eternity.
1400 if (hasReleaseFile && Target.Option(IndexTarget::ARCHITECTURE) == "all")
1401 {
1402 if (TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(Target) == false)
1403 {
1404 new CleanupItem(Owner, TransactionManager, Target);
1405 continue;
1406 }
1407 }
1408
1409 bool trypdiff = Target.OptionBool(IndexTarget::PDIFFS);
1410 if (hasReleaseFile == true)
1411 {
1412 if (TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false)
1413 {
1414 // optional targets that we do not have in the Release file are skipped
1415 if (hasHashes == true && Target.IsOptional)
1416 {
1417 new CleanupItem(Owner, TransactionManager, Target);
1418 continue;
1419 }
1420
1421 std::string const &arch = Target.Option(IndexTarget::ARCHITECTURE);
1422 if (arch.empty() == false)
1423 {
1424 if (TransactionManager->MetaIndexParser->IsArchitectureSupported(arch) == false)
1425 {
1426 new CleanupItem(Owner, TransactionManager, Target);
1427 _error->Notice(_("Skipping acquire of configured file '%s' as repository '%s' doesn't support architecture '%s'"),
1428 Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str(), arch.c_str());
1429 continue;
1430 }
1431             // if the architecture is officially supported but currently no packages are available for it,
1432             // ignore silently, as this is pretty much the same as just shipping an empty file.
1433             // if we don't know which architectures are supported, we do NOT ignore it, to notify the user about this
1434 if (hasHashes == true && TransactionManager->MetaIndexParser->IsArchitectureSupported("*undefined*") == false)
1435 {
1436 new CleanupItem(Owner, TransactionManager, Target);
1437 continue;
1438 }
1439 }
1440
1441 if (hasHashes == true)
1442 {
1443 Status = StatAuthError;
1444 strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), Target.MetaKey.c_str());
1445 return;
1446 }
1447 else
1448 {
1449 new pkgAcqIndex(Owner, TransactionManager, Target);
1450 continue;
1451 }
1452 }
1453 else if (verify)
1454 {
1455 auto const hashes = GetExpectedHashesFor(Target.MetaKey);
1456 if (hashes.empty() == false)
1457 {
1458 if (hashes.usable() == false && TargetIsAllowedToBe(TransactionManager->Target, InsecureType::WEAK) == false)
1459 {
1460 new CleanupItem(Owner, TransactionManager, Target);
1461 _error->Warning(_("Skipping acquire of configured file '%s' as repository '%s' provides only weak security information for it"),
1462 Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str());
1463 continue;
1464 }
1465 // empty files are skipped as acquiring the very small compressed files is a waste of time
1466 else if (hashes.FileSize() == 0)
1467 {
1468 new CleanupItem(Owner, TransactionManager, Target);
1469 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1470 continue;
1471 }
1472 }
1473 }
1474
1475 // autoselect the compression method
1476 std::vector<std::string> types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
1477 types.erase(std::remove_if(types.begin(), types.end(), [&](std::string const &t) {
1478 if (t == "uncompressed")
1479 return TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false;
1480 std::string const MetaKey = Target.MetaKey + "." + t;
1481 return TransactionManager->MetaIndexParser->Exists(MetaKey) == false;
1482 }), types.end());
1483 if (types.empty() == false)
1484 {
1485 std::ostringstream os;
1486 // add the special compressiontype byhash first if supported
1487 std::string const useByHashConf = Target.Option(IndexTarget::BY_HASH);
1488 bool useByHash = false;
1489 if(useByHashConf == "force")
1490 useByHash = true;
1491 else
1492 useByHash = StringToBool(useByHashConf) == true && metaBaseSupportsByHash;
1493 if (useByHash == true)
1494 os << "by-hash ";
1495 std::copy(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " "));
1496 os << *types.rbegin();
1497 Target.Options["COMPRESSIONTYPES"] = os.str();
1498 }
1499 else
1500 Target.Options["COMPRESSIONTYPES"].clear();
1501
1502 std::string filename = GetExistingFilename(GetFinalFileNameFromURI(Target.URI));
1503 if (filename.empty() == false)
1504 {
1505 // if the Release file is a hit and we have an index it must be the current one
1506 if (TransactionManager->IMSHit == true)
1507 ;
1508 else if (TransactionManager->LastMetaIndexParser != NULL)
1509 {
1510 // see if the file changed since the last Release file
1511 // we use the uncompressed files as we might compress differently compared to the server,
1512 // so the hashes might not match, even if they contain the same data.
1513 HashStringList const newFile = GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, Target.MetaKey);
1514 HashStringList const oldFile = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey);
1515 if (newFile != oldFile)
1516 filename.clear();
1517 }
1518 else
1519 filename.clear();
1520 }
1521 else
1522 trypdiff = false; // no file to patch
1523
1524 if (filename.empty() == false)
1525 {
1526 new NoActionItem(Owner, Target, filename);
1527 std::string const idxfilename = GetFinalFileNameFromURI(GetDiffIndexURI(Target));
1528 if (FileExists(idxfilename))
1529 new NoActionItem(Owner, Target, idxfilename);
1530 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1531 continue;
1532 }
1533
1534 // check if we have patches available
1535 trypdiff &= TransactionManager->MetaIndexParser->Exists(GetDiffIndexFileName(Target.MetaKey));
1536 }
1537 else
1538 {
1539 // if we have no file to patch, no point in trying
1540 trypdiff &= (GetExistingFilename(GetFinalFileNameFromURI(Target.URI)).empty() == false);
1541 }
1542
1543 // no point in patching from local sources
1544 if (trypdiff)
1545 {
1546 std::string const proto = Target.URI.substr(0, strlen("file:/"));
1547 if (proto == "file:/" || proto == "copy:/" || proto == "cdrom:")
1548 trypdiff = false;
1549 }
1550
1551 // Queue the Index file (Packages, Sources, Translation-$foo, …)
1552 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1553 if (trypdiff)
1554 new pkgAcqDiffIndex(Owner, TransactionManager, Target);
1555 else
1556 new pkgAcqIndex(Owner, TransactionManager, Target);
1557 }
1558 }
1559 /*}}}*/
1560 bool pkgAcqMetaBase::VerifyVendor(string const &) /*{{{*/
1561 {
1562 if (TransactionManager->MetaIndexParser->GetValidUntil() > 0)
1563 {
1564 time_t const invalid_since = time(NULL) - TransactionManager->MetaIndexParser->GetValidUntil();
1565 if (invalid_since > 0)
1566 {
1567 std::string errmsg;
1568 strprintf(errmsg,
1569 // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
1570 // the time since then the file is invalid - formatted in the same way as in
1571 // the download progress display (e.g. 7d 3h 42min 1s)
1572 _("Release file for %s is expired (invalid since %s). "
1573 "Updates for this repository will not be applied."),
1574 Target.URI.c_str(), TimeToStr(invalid_since).c_str());
1575 if (ErrorText.empty())
1576 ErrorText = errmsg;
1577 return _error->Error("%s", errmsg.c_str());
1578 }
1579 }
1580
1581 /* Did we get a file older than what we have? This is a last-minute IMS hit and doubles
1582 as a prevention against downgrading us to older (still valid) files */
1583 if (TransactionManager->IMSHit == false && TransactionManager->LastMetaIndexParser != NULL &&
1584 TransactionManager->LastMetaIndexParser->GetDate() > TransactionManager->MetaIndexParser->GetDate())
1585 {
1586 TransactionManager->IMSHit = true;
1587 RemoveFile("VerifyVendor", DestFile);
1588 PartialFile = DestFile = GetFinalFilename();
1589 // load the 'old' file into the 'new' one instead of flipping pointers, as
1590 // the new one isn't owned by us while the old one is, so cleanup would be confused.
1591 TransactionManager->MetaIndexParser->swapLoad(TransactionManager->LastMetaIndexParser);
1592 delete TransactionManager->LastMetaIndexParser;
1593 TransactionManager->LastMetaIndexParser = NULL;
1594 }
1595
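// The debug output below can be enabled at runtime, e.g. with
//   apt-get update -o Debug::pkgAcquire::Auth=true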
1596 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1597 {
1598 std::cerr << "Got Codename: " << TransactionManager->MetaIndexParser->GetCodename() << std::endl;
1599 std::cerr << "Got Suite: " << TransactionManager->MetaIndexParser->GetSuite() << std::endl;
1600 std::cerr << "Expecting Dist: " << TransactionManager->MetaIndexParser->GetExpectedDist() << std::endl;
1601 }
1602
1603 // One day that might become fatal…
1604 auto const ExpectedDist = TransactionManager->MetaIndexParser->GetExpectedDist();
1605 auto const NowCodename = TransactionManager->MetaIndexParser->GetCodename();
1606 if (TransactionManager->MetaIndexParser->CheckDist(ExpectedDist) == false)
1607 _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
1608 Desc.Description.c_str(), ExpectedDist.c_str(), NowCodename.c_str());
1609 // might be okay, might not be
1610 if (TransactionManager->LastMetaIndexParser != nullptr)
1611 {
1612 auto const LastCodename = TransactionManager->LastMetaIndexParser->GetCodename();
1613 if (LastCodename.empty() == false && NowCodename.empty() == false && LastCodename != NowCodename)
1614 _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
1615 Desc.Description.c_str(), LastCodename.c_str(), NowCodename.c_str());
1616 }
1617 return true;
1618 }
1619 /*}}}*/
1620 pkgAcqMetaBase::~pkgAcqMetaBase()
1621 {
1622 }
1623
1624 pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire * const Owner, /*{{{*/
1625 IndexTarget const &ClearsignedTarget,
1626 IndexTarget const &DetachedDataTarget, IndexTarget const &DetachedSigTarget,
1627 metaIndex * const MetaIndexParser) :
1628 pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget),
1629 d(NULL), DetachedDataTarget(DetachedDataTarget),
1630 MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL)
1631 {
1632 // index targets + (worst case:) Release/Release.gpg
1633 ExpectedAdditionalItems = std::numeric_limits<decltype(ExpectedAdditionalItems)>::max();
1634 TransactionManager->Add(this);
1635 }
1636 /*}}}*/
1637 pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
1638 {
1639 if (LastMetaIndexParser != NULL)
1640 delete LastMetaIndexParser;
1641 }
1642 /*}}}*/
1643 // pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
1644 string pkgAcqMetaClearSig::Custom600Headers() const
1645 {
1646 string Header = pkgAcqMetaBase::Custom600Headers();
1647 Header += "\nFail-Ignore: true";
1648 std::string const key = TransactionManager->MetaIndexParser->GetSignedBy();
1649 if (key.empty() == false)
1650 Header += "\nSigned-By: " + key;
1651
1652 return Header;
1653 }
1654 /*}}}*/
1655 void pkgAcqMetaClearSig::Finished() /*{{{*/
1656 {
1657 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1658 std::clog << "Finished: " << DestFile <<std::endl;
1659 if(TransactionManager->State == TransactionStarted &&
1660 TransactionManager->TransactionHasError() == false)
1661 TransactionManager->CommitTransaction();
1662 }
1663 /*}}}*/
1664 bool pkgAcqMetaClearSig::VerifyDone(std::string const &Message, /*{{{*/
1665 pkgAcquire::MethodConfig const * const Cnf)
1666 {
1667 Item::VerifyDone(Message, Cnf);
1668
1669 if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile))
1670 return RenameOnError(NotClearsigned);
1671
1672 return true;
1673 }
1674 /*}}}*/
1675 // pkgAcqMetaClearSig::Done - We got a file /*{{{*/
1676 void pkgAcqMetaClearSig::Done(std::string const &Message,
1677 HashStringList const &Hashes,
1678 pkgAcquire::MethodConfig const * const Cnf)
1679 {
1680 Item::Done(Message, Hashes, Cnf);
1681
1682 if(AuthPass == false)
1683 {
1684 if(CheckDownloadDone(this, Message, Hashes) == true)
1685 QueueForSignatureVerify(this, DestFile, DestFile);
1686 return;
1687 }
1688 else if(CheckAuthDone(Message) == true)
1689 {
1690 if (TransactionManager->IMSHit == false)
1691 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
1692 else if (RealFileExists(GetFinalFilename()) == false)
1693 {
1694 // We got an IMS hit for the InRelease file, but we don't have one stored, which means
1695 // a valid Release/Release.gpg combo stepped in, which we have
1696 // to 'acquire' now to ensure list cleanup isn't removing them
1697 new NoActionItem(Owner, DetachedDataTarget);
1698 new NoActionItem(Owner, DetachedSigTarget);
1699 }
1700 }
1701 else if (Status != StatAuthError)
1702 {
1703 string const FinalFile = GetFinalFileNameFromURI(DetachedDataTarget.URI);
1704 string const OldFile = GetFinalFilename();
1705 if (TransactionManager->IMSHit == false)
1706 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
1707 else if (RealFileExists(OldFile) == false)
1708 new NoActionItem(Owner, DetachedDataTarget);
1709 else
1710 TransactionManager->TransactionStageCopy(this, OldFile, FinalFile);
1711 }
1712 }
1713 /*}}}*/
1714 void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) /*{{{*/
1715 {
1716 Item::Failed(Message, Cnf);
1717
1718 if (AuthPass == false)
1719 {
1720 if (Status == StatAuthError || Status == StatTransientNetworkError)
1721 {
1722 // if we expected a ClearTextSignature (InRelease) but got a network
1723 // error or a file that wasn't valid, we end up here (see VerifyDone).
1724 // As this is usually caused by web portals we do not try Release/Release.gpg
1725 // as that is going to fail anyway; instead we abort our try (LP#346386)
1726 TransactionManager->AbortTransaction();
1727 return;
1728 }
1729
1730 // Queue the 'old' InRelease file for removal if we try Release.gpg,
1731 // as otherwise the file will stay around and give a false-auth
1732 // impression (CVE-2012-0214)
1733 TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
1734 Status = StatDone;
1735
1736 new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget);
1737 }
1738 else
1739 {
1740 if(CheckStopAuthentication(this, Message))
1741 return;
1742
1743 if(AllowInsecureRepositories(InsecureType::UNSIGNED, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1744 {
1745 Status = StatDone;
1746
1747 /* InRelease files become Release files, otherwise
1748 * they would be considered as trusted later on */
1749 string const FinalRelease = GetFinalFileNameFromURI(DetachedDataTarget.URI);
1750 string const PartialRelease = GetPartialFileNameFromURI(DetachedDataTarget.URI);
1751 string const FinalReleasegpg = GetFinalFileNameFromURI(DetachedSigTarget.URI);
1752 string const FinalInRelease = GetFinalFilename();
1753 Rename(DestFile, PartialRelease);
1754 TransactionManager->TransactionStageCopy(this, PartialRelease, FinalRelease);
1755 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1756
1757 // we parse the indexes here because at this point the user wanted
1758 // a repository that may potentially harm him
1759 if (TransactionManager->MetaIndexParser->Load(PartialRelease, &ErrorText) == false || VerifyVendor(Message) == false)
1760 /* expired Release files are still a problem you need extra force for */;
1761 else
1762 TransactionManager->QueueIndexes(true);
1763 }
1764 }
1765 }
1766 /*}}}*/
1767
1768 pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner, /*{{{*/
1769 pkgAcqMetaClearSig * const TransactionManager,
1770 IndexTarget const &DataTarget,
1771 IndexTarget const &DetachedSigTarget) :
1772 pkgAcqMetaBase(Owner, TransactionManager, DataTarget), d(NULL),
1773 DetachedSigTarget(DetachedSigTarget)
1774 {
1775 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1776 std::clog << "New pkgAcqMetaIndex with TransactionManager "
1777 << this->TransactionManager << std::endl;
1778
1779 DestFile = GetPartialFileNameFromURI(DataTarget.URI);
1780
1781 // Create the item
1782 Desc.Description = DataTarget.Description;
1783 Desc.Owner = this;
1784 Desc.ShortDesc = DataTarget.ShortDesc;
1785 Desc.URI = DataTarget.URI;
1786 QueueURI(Desc);
1787 }
1788 /*}}}*/
1789 void pkgAcqMetaIndex::Done(string const &Message, /*{{{*/
1790 HashStringList const &Hashes,
1791 pkgAcquire::MethodConfig const * const Cfg)
1792 {
1793 Item::Done(Message,Hashes,Cfg);
1794
1795 if(CheckDownloadDone(this, Message, Hashes))
1796 {
1797 // we have a Release file, now download the Signature; all further
1798 // verification and queuing of additional downloads is done in the
1799 // pkgAcqMetaSig::Done() code
1800 new pkgAcqMetaSig(Owner, TransactionManager, DetachedSigTarget, this);
1801 }
1802 }
1803 /*}}}*/
1804 // pkgAcqMetaIndex::Failed - no Release file present /*{{{*/
1805 void pkgAcqMetaIndex::Failed(string const &Message,
1806 pkgAcquire::MethodConfig const * const Cnf)
1807 {
1808 pkgAcquire::Item::Failed(Message, Cnf);
1809 Status = StatDone;
1810
1811 // No Release file was present, so fall
1812 // back to queueing Packages files without verification;
1813 // only allow going further if the user explicitly wants it
1814 if(AllowInsecureRepositories(InsecureType::NORELEASE, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1815 {
1816 // ensure old Release files are removed
1817 TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
1818
1819 // queue without any kind of hashsum support
1820 TransactionManager->QueueIndexes(false);
1821 }
1822 }
1823 /*}}}*/
1824 std::string pkgAcqMetaIndex::DescURI() const /*{{{*/
1825 {
1826 return Target.URI;
1827 }
1828 /*}}}*/
1829 pkgAcqMetaIndex::~pkgAcqMetaIndex() {}
1830
1831 // AcqMetaSig::AcqMetaSig - Constructor /*{{{*/
1832 pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire * const Owner,
1833 pkgAcqMetaClearSig * const TransactionManager,
1834 IndexTarget const &Target,
1835 pkgAcqMetaIndex * const MetaIndex) :
1836 pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL), MetaIndex(MetaIndex)
1837 {
1838 DestFile = GetPartialFileNameFromURI(Target.URI);
1839
1840 // remove any partially downloaded sig-file in partial/.
1841 // it may confuse proxies and is too small to warrant a
1842 // partial download anyway
1843 RemoveFile("pkgAcqMetaSig", DestFile);
1844
1845 // set the TransactionManager
1846 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1847 std::clog << "New pkgAcqMetaSig with TransactionManager "
1848 << TransactionManager << std::endl;
1849
1850 // Create the item
1851 Desc.Description = Target.Description;
1852 Desc.Owner = this;
1853 Desc.ShortDesc = Target.ShortDesc;
1854 Desc.URI = Target.URI;
1855
1856 // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors),
1857 // so we skip the download step and go instantly to verification
1858 if (TransactionManager->IMSHit == true && RealFileExists(GetFinalFilename()))
1859 {
1860 Complete = true;
1861 Status = StatDone;
1862 PartialFile = DestFile = GetFinalFilename();
1863 MetaIndexFileSignature = DestFile;
1864 MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile);
1865 }
1866 else
1867 QueueURI(Desc);
1868 }
1869 /*}}}*/
1870 pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
1871 {
1872 }
1873 /*}}}*/
1874 // pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
1875 std::string pkgAcqMetaSig::Custom600Headers() const
1876 {
1877 std::string Header = pkgAcqTransactionItem::Custom600Headers();
1878 std::string const key = TransactionManager->MetaIndexParser->GetSignedBy();
1879 if (key.empty() == false)
1880 Header += "\nSigned-By: " + key;
1881 return Header;
1882 }
1883 /*}}}*/
1884 // AcqMetaSig::Done - The signature was downloaded/verified /*{{{*/
1885 void pkgAcqMetaSig::Done(string const &Message, HashStringList const &Hashes,
1886 pkgAcquire::MethodConfig const * const Cfg)
1887 {
1888 if (MetaIndexFileSignature.empty() == false)
1889 {
1890 DestFile = MetaIndexFileSignature;
1891 MetaIndexFileSignature.clear();
1892 }
1893 Item::Done(Message, Hashes, Cfg);
1894
1895 if(MetaIndex->AuthPass == false)
1896 {
1897 if(MetaIndex->CheckDownloadDone(this, Message, Hashes) == true)
1898 {
1899 // DestFile will be modified to point to MetaIndexFile for the
1900 // gpgv method, so we need to save it here
1901 MetaIndexFileSignature = DestFile;
1902 MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile);
1903 }
1904 return;
1905 }
1906 else if(MetaIndex->CheckAuthDone(Message) == true)
1907 {
1908 auto const Releasegpg = GetFinalFilename();
1909 auto const Release = MetaIndex->GetFinalFilename();
1910 // if this is an IMS-Hit on Release ensure we also have the Release.gpg file stored
1911 // (previously an unknown pubkey) – but only if the Release file exists locally (unlikely
1912 // event of InRelease removed from the mirror causing fallback but still an IMS-Hit)
1913 if (TransactionManager->IMSHit == false ||
1914 (FileExists(Releasegpg) == false && FileExists(Release) == true))
1915 {
1916 TransactionManager->TransactionStageCopy(this, DestFile, Releasegpg);
1917 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, Release);
1918 }
1919 }
1920 else if (MetaIndex->Status != StatAuthError)
1921 {
1922 std::string const FinalFile = MetaIndex->GetFinalFilename();
1923 if (TransactionManager->IMSHit == false)
1924 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalFile);
1925 else
1926 TransactionManager->TransactionStageCopy(MetaIndex, FinalFile, FinalFile);
1927 }
1928 }
1929 /*}}}*/
1930 void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
1931 {
1932 Item::Failed(Message,Cnf);
1933
1934 // check if we need to fail at this point
1935 if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message))
1936 return;
1937
1938 // ensures that a Release.gpg file in the lists/ is removed by the transaction
1939 TransactionManager->TransactionStageRemoval(this, DestFile);
1940
1941 // only allow going further if the user explicitly wants it
1942 if (AllowInsecureRepositories(InsecureType::UNSIGNED, MetaIndex->Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1943 {
1944 string const FinalRelease = MetaIndex->GetFinalFilename();
1945 string const FinalInRelease = TransactionManager->GetFinalFilename();
1946 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1947
1948 // we parse the indexes here because at this point the user wanted
1949 // a repository that may potentially harm him
1950 bool const GoodLoad = TransactionManager->MetaIndexParser->Load(MetaIndex->DestFile, &ErrorText);
1951 if (MetaIndex->VerifyVendor(Message) == false)
1952 /* expired Release files are still a problem you need extra force for */;
1953 else
1954 TransactionManager->QueueIndexes(GoodLoad);
1955
1956 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalRelease);
1957 }
1958 else if (TransactionManager->IMSHit == false)
1959 Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED");
1960
1961 // FIXME: this is used often (e.g. in pkgAcqIndexTrans) so refactor
1962 if (Cnf->LocalOnly == true ||
1963 StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
1964 {
1965 // Ignore this
1966 Status = StatDone;
1967 }
1968 }
1969 /*}}}*/
1970
1971
1972 // AcqBaseIndex - Constructor /*{{{*/
1973 pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire * const Owner,
1974 pkgAcqMetaClearSig * const TransactionManager,
1975 IndexTarget const &Target)
1976 : pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL)
1977 {
1978 }
1979 /*}}}*/
1980 void pkgAcqBaseIndex::Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
1981 {
1982 pkgAcquire::Item::Failed(Message, Cnf);
1983 if (Status != StatAuthError)
1984 return;
1985
1986 ErrorText.append("Release file created at: ");
1987 auto const timespec = TransactionManager->MetaIndexParser->GetDate();
1988 if (timespec == 0)
1989 ErrorText.append("<unknown>");
1990 else
1991 ErrorText.append(TimeRFC1123(timespec, true));
1992 ErrorText.append("\n");
1993 }
1994 /*}}}*/
1995 pkgAcqBaseIndex::~pkgAcqBaseIndex() {}
1996
1997 // AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
1998 // ---------------------------------------------------------------------
1999 /* Get the DiffIndex file first and see if there are patches available.
2000 * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
2001 * patches. If anything goes wrong in that process, it will fall back to
2002 * the original Packages file.
2003 */
2004 pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner,
2005 pkgAcqMetaClearSig * const TransactionManager,
2006 IndexTarget const &Target)
2007 : pkgAcqIndex(Owner, TransactionManager, Target, true), d(NULL), diffs(NULL)
2008 {
2009 // FIXME: Magic number as an upper bound on pdiffs we will reasonably acquire
2010 ExpectedAdditionalItems = 40;
2011 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2012
2013 CompressionExtensions.clear();
2014 {
2015 std::vector<std::string> types = APT::Configuration::getCompressionTypes();
2016 if (types.empty() == false)
2017 {
2018 std::ostringstream os;
2019 std::copy_if(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " "), [&](std::string const type) {
2020 if (type == "uncompressed")
2021 return true;
2022 return TransactionManager->MetaIndexParser->Exists(GetDiffIndexFileName(Target.MetaKey) + '.' + type);
2023 });
2024 os << *types.rbegin();
2025 CompressionExtensions = os.str();
2026 }
2027 }
2028 if (Target.Option(IndexTarget::COMPRESSIONTYPES).find("by-hash") != std::string::npos)
2029 CompressionExtensions = "by-hash " + CompressionExtensions;
2030 Init(GetDiffIndexURI(Target), GetDiffIndexFileName(Target.Description), Target.ShortDesc);
2031
2032 if(Debug)
2033 std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
2034 }
2035 /*}}}*/
2036 void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/
2037 {
2038 // list cleanup needs to know that this file as well as the already
2039 // present index is ours, so we create an empty diff to save it for us
2040 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, UsedMirror, Target.URI);
2041 }
2042 /*}}}*/
2043 static bool RemoveFileForBootstrapLinking(bool const Debug, std::string const &For, std::string const &Boot)/*{{{*/
2044 {
2045 if (FileExists(Boot) && RemoveFile("Bootstrap-linking", Boot) == false)
2046 {
2047 if (Debug)
2048 std::clog << "Bootstrap-linking for patching " << For
2049 << " by removing stale " << Boot << " failed!" << std::endl;
2050 return false;
2051 }
2052 return true;
2053 }
2054 /*}}}*/
2055 bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
2056 {
2057 ExpectedAdditionalItems = 0;
2058 // failing here is fine: our caller will take care of trying to
2059 // get the complete file if patching fails
2060 if(Debug)
2061 std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
2062 << std::endl;
2063
2064 FileFd Fd(IndexDiffFile, FileFd::ReadOnly, FileFd::Extension);
2065 pkgTagFile TF(&Fd);
2066 if (Fd.IsOpen() == false || Fd.Failed())
2067 return false;
2068
2069 pkgTagSection Tags;
2070 if(unlikely(TF.Step(Tags) == false))
2071 return false;
2072
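// For reference, a pdiff Index file is an RFC822-style stanza, roughly like
// (hypothetical hashes and patch names):
//   SHA256-Current: <hash> <size>
//   SHA256-History:
//    <hash> <size> 2016-01-01-0000.00
//   SHA256-Patches:
//    <hash> <size> 2016-01-01-0000.00
//   SHA256-Download:
//    <hash> <size> 2016-01-01-0000.00.gz
//   X-Patch-Precedence: merged      (optional)
// The loops below pick these tags apart per supported hash type.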
2073 HashStringList ServerHashes;
2074 unsigned long long ServerSize = 0;
2075
2076 auto const &posix = std::locale::classic();
2077 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
2078 {
2079 std::string tagname = *type;
2080 tagname.append("-Current");
2081 std::string const tmp = Tags.FindS(tagname.c_str());
2082 if (tmp.empty() == true)
2083 continue;
2084
2085 string hash;
2086 unsigned long long size;
2087 std::stringstream ss(tmp);
2088 ss.imbue(posix);
2089 ss >> hash >> size;
2090 if (unlikely(hash.empty() == true))
2091 continue;
2092 if (unlikely(ServerSize != 0 && ServerSize != size))
2093 continue;
2094 ServerHashes.push_back(HashString(*type, hash));
2095 ServerSize = size;
2096 }
2097
2098 if (ServerHashes.usable() == false)
2099 {
2100 if (Debug == true)
2101 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl;
2102 return false;
2103 }
2104
2105 std::string const CurrentPackagesFile = GetFinalFileNameFromURI(Target.URI);
2106 HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey);
2107 if (TargetFileHashes.usable() == false || ServerHashes != TargetFileHashes)
2108 {
2109 if (Debug == true)
2110 {
2111 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl;
2112 printHashSumComparison(CurrentPackagesFile, ServerHashes, TargetFileHashes);
2113 }
2114 return false;
2115 }
2116
2117 HashStringList LocalHashes;
2118 // try to avoid calculating the hash here as this is costly
2119 if (TransactionManager->LastMetaIndexParser != NULL)
2120 LocalHashes = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey);
2121 if (LocalHashes.usable() == false)
2122 {
2123 FileFd fd(CurrentPackagesFile, FileFd::ReadOnly, FileFd::Auto);
2124 Hashes LocalHashesCalc(ServerHashes);
2125 LocalHashesCalc.AddFD(fd);
2126 LocalHashes = LocalHashesCalc.GetHashStringList();
2127 }
2128
2129 if (ServerHashes == LocalHashes)
2130 {
2131 // we have the same hashes as the server so we are done here
2132 if(Debug)
2133 std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl;
2134 QueueOnIMSHit();
2135 return true;
2136 }
2137
2138 if(Debug)
2139 std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at "
2140 << CurrentPackagesFile << " " << LocalHashes.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl;
2141
2142 // historically, older hashes have more info than newer ones, so start
2143 // collecting with older ones first to avoid implementing complicated
2144 // information merging techniques… a failure is after all always
2145 // recoverable with a complete file and hashes aren't changed that often.
2146 std::vector<char const *> types;
2147 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
2148 types.push_back(*type);
2149
2150 // parse all of (provided) history
2151 vector<DiffInfo> available_patches;
2152 bool firstAcceptedHashes = true;
2153 for (auto type = types.crbegin(); type != types.crend(); ++type)
2154 {
2155 if (LocalHashes.find(*type) == NULL)
2156 continue;
2157
2158 std::string tagname = *type;
2159 tagname.append("-History");
2160 std::string const tmp = Tags.FindS(tagname.c_str());
2161 if (tmp.empty() == true)
2162 continue;
2163
2164 string hash, filename;
2165 unsigned long long size;
2166 std::stringstream ss(tmp);
2167 ss.imbue(posix);
2168
2169 while (ss >> hash >> size >> filename)
2170 {
2171 if (unlikely(hash.empty() == true || filename.empty() == true))
2172 continue;
2173
2174 // see if we have a record for this file already
2175 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2176 for (; cur != available_patches.end(); ++cur)
2177 {
2178 if (cur->file != filename)
2179 continue;
2180 cur->result_hashes.push_back(HashString(*type, hash));
2181 break;
2182 }
2183 if (cur != available_patches.end())
2184 continue;
2185 if (firstAcceptedHashes == true)
2186 {
2187 DiffInfo next;
2188 next.file = filename;
2189 next.result_hashes.push_back(HashString(*type, hash));
2190 next.result_hashes.FileSize(size);
2191 available_patches.push_back(next);
2192 }
2193 else
2194 {
2195 if (Debug == true)
2196 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2197 << " wasn't in the list for the first parsed hash! (history)" << std::endl;
2198 break;
2199 }
2200 }
2201 firstAcceptedHashes = false;
2202 }
2203
2204 if (unlikely(available_patches.empty() == true))
2205 {
2206 if (Debug)
2207 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
2208 << "Couldn't find any patches for the patch series." << std::endl;
2209 return false;
2210 }
2211
2212 for (auto type = types.crbegin(); type != types.crend(); ++type)
2213 {
2214 if (LocalHashes.find(*type) == NULL)
2215 continue;
2216
2217 std::string tagname = *type;
2218 tagname.append("-Patches");
2219 std::string const tmp = Tags.FindS(tagname.c_str());
2220 if (tmp.empty() == true)
2221 continue;
2222
2223 string hash, filename;
2224 unsigned long long size;
2225 std::stringstream ss(tmp);
2226 ss.imbue(posix);
2227
2228 while (ss >> hash >> size >> filename)
2229 {
2230 if (unlikely(hash.empty() == true || filename.empty() == true))
2231 continue;
2232
2233 // see if we have a record for this file already
2234 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2235 for (; cur != available_patches.end(); ++cur)
2236 {
2237 if (cur->file != filename)
2238 continue;
2239 if (cur->patch_hashes.empty())
2240 cur->patch_hashes.FileSize(size);
2241 cur->patch_hashes.push_back(HashString(*type, hash));
2242 break;
2243 }
2244 if (cur != available_patches.end())
2245 continue;
2246 if (Debug == true)
2247 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2248 << " wasn't in the list for the first parsed hash! (patches)" << std::endl;
2249 break;
2250 }
2251 }
2252
2253 for (auto type = types.crbegin(); type != types.crend(); ++type)
2254 {
2255 std::string tagname = *type;
2256 tagname.append("-Download");
2257 std::string const tmp = Tags.FindS(tagname.c_str());
2258 if (tmp.empty() == true)
2259 continue;
2260
2261 string hash, filename;
2262 unsigned long long size;
2263 std::stringstream ss(tmp);
2264 ss.imbue(posix);
2265
2266 // FIXME: pdiff as a whole only supports .gz compressed patches
2267 while (ss >> hash >> size >> filename)
2268 {
2269 if (unlikely(hash.empty() == true || filename.empty() == true))
2270 continue;
2271 if (unlikely(APT::String::Endswith(filename, ".gz") == false))
2272 continue;
2273 filename.erase(filename.length() - 3);
2274
2275 // see if we have a record for this file already
2276 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2277 for (; cur != available_patches.end(); ++cur)
2278 {
2279 if (cur->file != filename)
2280 continue;
2281 if (cur->download_hashes.empty())
2282 cur->download_hashes.FileSize(size);
2283 cur->download_hashes.push_back(HashString(*type, hash));
2284 break;
2285 }
2286 if (cur != available_patches.end())
2287 continue;
2288 if (Debug == true)
2289 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2290 << " wasn't in the list for the first parsed hash! (download)" << std::endl;
2291 break;
2292 }
2293 }
2294
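// At this point each DiffInfo in available_patches carries up to three hash sets:
// result_hashes (from the *-History lines), patch_hashes (from the *-Patches lines,
// describing the patch itself) and download_hashes (from the *-Download lines,
// describing the .gz file that is actually fetched).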
2295
2296 bool foundStart = false;
2297 for (std::vector<DiffInfo>::iterator cur = available_patches.begin();
2298 cur != available_patches.end(); ++cur)
2299 {
2300 if (LocalHashes != cur->result_hashes)
2301 continue;
2302
2303 available_patches.erase(available_patches.begin(), cur);
2304 foundStart = true;
2305 break;
2306 }
2307
2308 if (foundStart == false || unlikely(available_patches.empty() == true))
2309 {
2310 if (Debug)
2311 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
2312 << "Couldn't find the start of the patch series." << std::endl;
2313 return false;
2314 }
2315
2316 for (auto const &patch: available_patches)
2317 if (patch.result_hashes.usable() == false ||
2318 patch.patch_hashes.usable() == false ||
2319 patch.download_hashes.usable() == false)
2320 {
2321 if (Debug)
2322 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": provides no usable hashes for " << patch.file
2323 << " so fallback to complete download" << std::endl;
2324 return false;
2325 }
2326
2327 // patching with too many files is rather slow compared to a fast download
2328 unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
2329 if (fileLimit != 0 && fileLimit < available_patches.size())
2330 {
2331 if (Debug)
2332 std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
2333 << ") so fallback to complete download" << std::endl;
2334 return false;
2335 }
2336
2337 // calculate the size of all patches we have to get
2338 unsigned short const sizeLimitPercent = _config->FindI("Acquire::PDiffs::SizeLimit", 100);
2339 if (sizeLimitPercent > 0)
2340 {
2341 unsigned long long downloadSize = std::accumulate(available_patches.begin(),
2342 available_patches.end(), 0llu, [](unsigned long long const T, DiffInfo const &I) {
2343 return T + I.download_hashes.FileSize();
2344 });
2345 if (downloadSize != 0)
2346 {
2347 unsigned long long downloadSizeIdx = 0;
2348 auto const types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
2349 for (auto const &t : types)
2350 {
2351 std::string MetaKey = Target.MetaKey;
2352 if (t != "uncompressed")
2353 MetaKey += '.' + t;
2354 HashStringList const hsl = GetExpectedHashesFor(MetaKey);
2355 if (unlikely(hsl.usable() == false))
2356 continue;
2357 downloadSizeIdx = hsl.FileSize();
2358 break;
2359 }
2360 unsigned long long const sizeLimit = downloadSizeIdx * sizeLimitPercent;
2361 if ((sizeLimit/100) < downloadSize)
2362 {
2363 if (Debug)
2364 std::clog << "Need " << downloadSize << " compressed bytes (Limit is " << (sizeLimit/100) << ", "
2365 << "original is " << downloadSizeIdx << ") so fallback to complete download" << std::endl;
2366 return false;
2367 }
2368 }
2369 }
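// Rough example of the check above: with the default SizeLimit of 100 (percent),
// pdiffs are only used if their combined compressed size does not exceed the size of
// the full compressed index; e.g. a 40 MB Packages.xz with 50 MB of pending patches
// falls back to a complete download.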
2370
2371 // we have something, queue the diffs
2372 string::size_type const last_space = Description.rfind(" ");
2373 if(last_space != string::npos)
2374 Description.erase(last_space, Description.size()-last_space);
2375
2376 /* decide if we should download patches one by one or in one go:
2377 The first is good if the server merges patches, but many don't, so client-
2378 based merging can be attempted, in which case the second is better.
2379 "bad things" will happen if patches are merged on the server,
2380 but client-side merging is attempted as well */
2381 bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
2382 if (pdiff_merge == true)
2383 {
2384 // reprepro adds this flag if it has merged patches on the server
2385 std::string const precedence = Tags.FindS("X-Patch-Precedence");
2386 pdiff_merge = (precedence != "merged");
2387 }
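// Client-side merging can also be disabled manually, e.g. via
//   -o Acquire::PDiffs::Merge=false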
2388
2389 // clean the plate
2390 {
2391 std::string const Final = GetExistingFilename(CurrentPackagesFile);
2392 if (unlikely(Final.empty())) // because we wouldn't be called in such a case
2393 return false;
2394 std::string const PartialFile = GetPartialFileNameFromURI(Target.URI);
2395 std::string const PatchedFile = GetKeepCompressedFileName(PartialFile + "-patched", Target);
2396 if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile) == false ||
2397 RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile) == false)
2398 return false;
2399 for (auto const &ext : APT::Configuration::getCompressorExtensions())
2400 {
2401 if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile + ext) == false ||
2402 RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile + ext) == false)
2403 return false;
2404 }
2405 std::string const Ext = Final.substr(CurrentPackagesFile.length());
2406 std::string const Partial = PartialFile + Ext;
2407 if (symlink(Final.c_str(), Partial.c_str()) != 0)
2408 {
2409 if (Debug)
2410 std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
2411 << " by linking " << Final << " to " << Partial << " failed!" << std::endl;
2412 return false;
2413 }
2414 }
2415
2416 std::string indexURI = Desc.URI;
2417 auto const byhashidx = indexURI.find("/by-hash/");
2418 if (byhashidx != std::string::npos)
2419 indexURI = indexURI.substr(0, byhashidx - strlen(".diff"));
2420 else
2421 {
2422 auto end = indexURI.length() - strlen(".diff/Index");
2423 if (CurrentCompressionExtension != "uncompressed")
2424 end -= (1 + CurrentCompressionExtension.length());
2425 indexURI = indexURI.substr(0, end);
2426 }
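// Illustration with a hypothetical mirror: a Desc.URI of
//   http://deb.example.org/debian/dists/unstable/main/binary-amd64/Packages.diff/Index
// (uncompressed case) is reduced here to the index base
//   http://deb.example.org/debian/dists/unstable/main/binary-amd64/Packages
// from which the individual ".diff/<patch>.gz" URIs are built below.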
2427
2428 if (pdiff_merge == false)
2429 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, UsedMirror, indexURI, available_patches);
2430 else
2431 {
2432 diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
2433 for(size_t i = 0; i < available_patches.size(); ++i)
2434 (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager,
2435 Target, UsedMirror, indexURI,
2436 available_patches[i],
2437 diffs);
2438 }
2439
2440 Complete = false;
2441 Status = StatDone;
2442 Dequeue();
2443 return true;
2444 }
2445 /*}}}*/
2446 void pkgAcqDiffIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2447 {
2448 if (CommonFailed(GetDiffIndexURI(Target), GetDiffIndexFileName(Target.Description), Message, Cnf))
2449 return;
2450
2451 Status = StatDone;
2452 ExpectedAdditionalItems = 0;
2453
2454 if(Debug)
2455 std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
2456 << "Falling back to normal index file acquire" << std::endl;
2457
2458 new pkgAcqIndex(Owner, TransactionManager, Target);
2459 }
2460 /*}}}*/
2461 void pkgAcqDiffIndex::Done(string const &Message,HashStringList const &Hashes, /*{{{*/
2462 pkgAcquire::MethodConfig const * const Cnf)
2463 {
2464 if(Debug)
2465 std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl;
2466
2467 Item::Done(Message, Hashes, Cnf);
2468
2469 string const FinalFile = GetFinalFilename();
2470 if(StringToBool(LookupTag(Message,"IMS-Hit"),false))
2471 DestFile = FinalFile;
2472
2473 if(ParseDiffIndex(DestFile) == false)
2474 {
2475 Failed("Message: Couldn't parse pdiff index", Cnf);
2476 // queue for final move - this should happen even if we fail
2477 // while parsing (e.g. on sizelimit) and download the complete file.
2478 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
2479 return;
2480 }
2481
2482 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
2483
2484 Complete = true;
2485 Status = StatDone;
2486 Dequeue();
2487
2488 return;
2489 }
2490 /*}}}*/
2491 pkgAcqDiffIndex::~pkgAcqDiffIndex()
2492 {
2493 if (diffs != NULL)
2494 delete diffs;
2495 }
2496
2497 // AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/
2498 // ---------------------------------------------------------------------
2499 /* The package diff is added to the queue. One object is constructed
2500 * for each diff and for the index.
2501 */
2502 pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire * const Owner,
2503 pkgAcqMetaClearSig * const TransactionManager,
2504 IndexTarget const &Target,
2505 std::string const &indexUsedMirror, std::string const &indexURI,
2506 vector<DiffInfo> const &diffs)
2507 : pkgAcqBaseIndex(Owner, TransactionManager, Target), indexURI(indexURI),
2508 available_patches(diffs)
2509 {
2510 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
2511
2512 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2513
2514 Desc.Owner = this;
2515 Description = Target.Description;
2516 Desc.ShortDesc = Target.ShortDesc;
2517
2518 UsedMirror = indexUsedMirror;
2519 if (UsedMirror == "DIRECT")
2520 UsedMirror.clear();
2521 else if (UsedMirror.empty() == false && Description.find(" ") != string::npos)
2522 Description.replace(0, Description.find(" "), UsedMirror);
2523
2524 if(available_patches.empty() == true)
2525 {
2526 // we are done (yeah!), check hashes against the final file
2527 DestFile = GetKeepCompressedFileName(GetFinalFileNameFromURI(Target.URI), Target);
2528 Finish(true);
2529 }
2530 else
2531 {
2532 State = StateFetchDiff;
2533 QueueNextDiff();
2534 }
2535 }
2536 /*}}}*/
2537 void pkgAcqIndexDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2538 {
2539 pkgAcqBaseIndex::Failed(Message,Cnf);
2540 Status = StatDone;
2541
2542 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
2543 if(Debug)
2544 std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
2545 << "Falling back to normal index file acquire " << std::endl;
2546 RenameOnError(PDiffError);
2547 std::string const patchname = GetDiffsPatchFileName(DestFile);
2548 if (RealFileExists(patchname))
2549 Rename(patchname, patchname + ".FAILED");
2550 std::string const UnpatchedFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2551 if (UnpatchedFile.empty() == false && FileExists(UnpatchedFile))
2552 Rename(UnpatchedFile, UnpatchedFile + ".FAILED");
2553 new pkgAcqIndex(Owner, TransactionManager, Target);
2554 Finish();
2555 }
2556 /*}}}*/
2557 // Finish - helper that cleans the item out of the fetcher queue /*{{{*/
2558 void pkgAcqIndexDiffs::Finish(bool allDone)
2559 {
2560 if(Debug)
2561 std::clog << "pkgAcqIndexDiffs::Finish(): "
2562 << allDone << " "
2563 << Desc.URI << std::endl;
2564
2565 // we restore the original name; this is required, otherwise
2566 // the file will be cleaned up
2567 if(allDone)
2568 {
2569 std::string const Final = GetKeepCompressedFileName(GetFinalFilename(), Target);
2570 TransactionManager->TransactionStageCopy(this, DestFile, Final);
2571
2572 // this is for the "real" finish
2573 Complete = true;
2574 Status = StatDone;
2575 Dequeue();
2576 if(Debug)
2577 std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl;
2578 return;
2579 }
2580 else
2581 DestFile.clear();
2582
2583 if(Debug)
2584 std::clog << "Finishing: " << Desc.URI << std::endl;
2585 Complete = false;
2586 Status = StatDone;
2587 Dequeue();
2588 return;
2589 }
2590 /*}}}*/
2591 bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
2592 {
2593 // calculate the hashes of the just-patched file
2594 std::string const PartialFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2595 if(unlikely(PartialFile.empty()))
2596 {
2597 Failed("Message: The file " + GetPartialFileNameFromURI(Target.URI) + " isn't available", NULL);
2598 return false;
2599 }
2600
2601 FileFd fd(PartialFile, FileFd::ReadOnly, FileFd::Extension);
2602 Hashes LocalHashesCalc;
2603 LocalHashesCalc.AddFD(fd);
2604 HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
2605
2606 if(Debug)
2607 std::clog << "QueueNextDiff: " << PartialFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl;
2608
2609 HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey);
2610 if (unlikely(LocalHashes.usable() == false || TargetFileHashes.usable() == false))
2611 {
2612 Failed("Local/Expected hashes are not usable for " + PartialFile, NULL);
2613 return false;
2614 }
2615
2616 // final file reached before all patches are applied
2617 if(LocalHashes == TargetFileHashes)
2618 {
2619 Finish(true);
2620 return true;
2621 }
2622
2623 // remove all patches until the next matching patch is found
2624 // this requires the Index file to be ordered
2625 available_patches.erase(available_patches.begin(),
2626 std::find_if(available_patches.begin(), available_patches.end(), [&](DiffInfo const &I) {
2627 return I.result_hashes == LocalHashes;
2628 }));
2629
2630 // error checking and falling back if no patch was found
2631 if(available_patches.empty() == true)
2632 {
2633 Failed("No patches left to reach target for " + PartialFile, NULL);
2634 return false;
2635 }
2636
2637 // queue the right diff
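// e.g. <indexURI>.diff/2016-01-01-0000.00.gz (hypothetical patch name)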
2638 Desc.URI = indexURI + ".diff/" + available_patches[0].file + ".gz";
2639 Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
2640 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI + ".diff/" + available_patches[0].file), Target);
2641
2642 if(Debug)
2643 std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl;
2644
2645 QueueURI(Desc);
2646
2647 return true;
2648 }
2649 /*}}}*/
2650 void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/
2651 pkgAcquire::MethodConfig const * const Cnf)
2652 {
2653 if (Debug)
2654 std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl;
2655
2656 Item::Done(Message, Hashes, Cnf);
2657
2658 std::string const UncompressedUnpatchedFile = GetPartialFileNameFromURI(Target.URI);
2659 std::string const UnpatchedFile = GetExistingFilename(UncompressedUnpatchedFile);
2660 std::string const PatchFile = GetDiffsPatchFileName(UnpatchedFile);
2661 std::string const PatchedFile = GetKeepCompressedFileName(UncompressedUnpatchedFile, Target);
2662
2663 switch (State)
2664 {
2665 // success in downloading a diff, enter ApplyDiff state
2666 case StateFetchDiff:
2667 Rename(DestFile, PatchFile);
2668 DestFile = GetKeepCompressedFileName(UncompressedUnpatchedFile + "-patched", Target);
2669 if(Debug)
2670 std::clog << "Sending to rred method: " << UnpatchedFile << std::endl;
2671 State = StateApplyDiff;
2672 Local = true;
2673 Desc.URI = "rred:" + UnpatchedFile;
2674 QueueURI(Desc);
2675 SetActiveSubprocess("rred");
2676 return;
2677 // success in download/apply a diff, queue next (if needed)
2678 case StateApplyDiff:
2679 // remove the just-applied patch and the base file
2680 available_patches.erase(available_patches.begin());
2681 RemoveFile("pkgAcqIndexDiffs::Done", PatchFile);
2682 RemoveFile("pkgAcqIndexDiffs::Done", UnpatchedFile);
2683 if(Debug)
2684 std::clog << "Moving patched file in place: " << std::endl
2685 << DestFile << " -> " << PatchedFile << std::endl;
2686 Rename(DestFile, PatchedFile);
2687
2688 // see if there is more to download
2689 if(available_patches.empty() == false)
2690 {
2691 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, UsedMirror, indexURI, available_patches);
2692 Finish();
2693 } else {
2694 DestFile = PatchedFile;
2695 Finish(true);
2696 }
2697 return;
2698 }
2699 }
2700 /*}}}*/
2701 std::string pkgAcqIndexDiffs::Custom600Headers() const /*{{{*/
2702 {
2703 if(State != StateApplyDiff)
2704 return pkgAcqBaseIndex::Custom600Headers();
2705 std::ostringstream patchhashes;
2706 for (auto && hs : available_patches[0].result_hashes)
2707 patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
2708 for (auto && hs : available_patches[0].patch_hashes)
2709 patchhashes << "\nPatch-0-" << hs.HashType() << "-Hash: " << hs.HashValue();
2710 patchhashes << pkgAcqBaseIndex::Custom600Headers();
2711 return patchhashes.str();
2712 }
2713 /*}}}*/
2714 pkgAcqIndexDiffs::~pkgAcqIndexDiffs() {}
2715
2716 // AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
2717 pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire * const Owner,
2718 pkgAcqMetaClearSig * const TransactionManager,
2719 IndexTarget const &Target,
2720 std::string const &indexUsedMirror, std::string const &indexURI,
2721 DiffInfo const &patch,
2722 std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
2723 : pkgAcqBaseIndex(Owner, TransactionManager, Target), indexURI(indexURI),
2724 patch(patch), allPatches(allPatches), State(StateFetchDiff)
2725 {
2726 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2727
2728 Description = Target.Description;
2729 UsedMirror = indexUsedMirror;
2730 if (UsedMirror == "DIRECT")
2731 UsedMirror.clear();
2732 else if (UsedMirror.empty() == false && Description.find(" ") != string::npos)
2733 Description.replace(0, Description.find(" "), UsedMirror);
2734
2735 Desc.Owner = this;
2736 Desc.ShortDesc = Target.ShortDesc;
2737 Desc.URI = indexURI + ".diff/" + patch.file + ".gz";
2738 Desc.Description = Description + " " + patch.file + ".pdiff";
2739 DestFile = GetPartialFileNameFromURI(Target.URI + ".diff/" + patch.file + ".gz");
2740
2741 if(Debug)
2742 std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
2743
2744 QueueURI(Desc);
2745 }
2746 /*}}}*/
2747 void pkgAcqIndexMergeDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2748 {
2749 if(Debug)
2750 std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
2751
2752 pkgAcqBaseIndex::Failed(Message,Cnf);
2753 Status = StatDone;
2754
2755 // check if we are the first to fail, otherwise we are done here
2756 State = StateDoneDiff;
2757 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2758 I != allPatches->end(); ++I)
2759 if ((*I)->State == StateErrorDiff)
2760 {
2761 State = StateErrorDiff;
2762 return;
2763 }
2764
2765 // the first failure means we should fall back
2766 State = StateErrorDiff;
2767 if (Debug)
2768 std::clog << "Falling back to normal index file acquire" << std::endl;
2769 RenameOnError(PDiffError);
2770 if (RealFileExists(DestFile))
2771 Rename(DestFile, DestFile + ".FAILED");
2772 std::string const UnpatchedFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2773 if (UnpatchedFile.empty() == false && FileExists(UnpatchedFile))
2774 Rename(UnpatchedFile, UnpatchedFile + ".FAILED");
2775 DestFile.clear();
2776 new pkgAcqIndex(Owner, TransactionManager, Target);
2777 }
2778 /*}}}*/
2779 void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/
2780 pkgAcquire::MethodConfig const * const Cnf)
2781 {
2782 if(Debug)
2783 std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
2784
2785 Item::Done(Message, Hashes, Cnf);
2786
2787 if (std::any_of(allPatches->begin(), allPatches->end(),
2788 [](pkgAcqIndexMergeDiffs const * const P) { return P->State == StateErrorDiff; }))
2789 {
2790 if(Debug)
2791 std::clog << "Another patch failed already, no point in processing this one." << std::endl;
2792 State = StateErrorDiff;
2793 return;
2794 }
2795
2796 std::string const UncompressedUnpatchedFile = GetPartialFileNameFromURI(Target.URI);
2797 std::string const UnpatchedFile = GetExistingFilename(UncompressedUnpatchedFile);
2798 if (UnpatchedFile.empty())
2799 {
2800 _error->Fatal("Unpatched file %s doesn't exist (anymore)!", UncompressedUnpatchedFile.c_str());
2801 State = StateErrorDiff;
2802 return;
2803 }
2804 std::string const PatchFile = GetMergeDiffsPatchFileName(UnpatchedFile, patch.file);
2805 std::string const PatchedFile = GetKeepCompressedFileName(UncompressedUnpatchedFile, Target);
2806
2807 switch (State)
2808 {
2809 case StateFetchDiff:
2810 Rename(DestFile, PatchFile);
2811
2812 // check if this is the last completed diff
2813 State = StateDoneDiff;
2814 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2815 I != allPatches->end(); ++I)
2816 if ((*I)->State != StateDoneDiff)
2817 {
2818 if(Debug)
2819 std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl;
2820 return;
2821 }
2822 // this is the last completed diff, so we are ready to apply now
2823 DestFile = GetKeepCompressedFileName(UncompressedUnpatchedFile + "-patched", Target);
2824 if(Debug)
2825 std::clog << "Sending to rred method: " << UnpatchedFile << std::endl;
2826 State = StateApplyDiff;
2827 Local = true;
2828 Desc.URI = "rred:" + UnpatchedFile;
2829 QueueURI(Desc);
2830 SetActiveSubprocess("rred");
2831 return;
2832 case StateApplyDiff:
2833 // success in downloading & applying all diffs, finalize and clean up
2834 if(Debug)
2835 std::clog << "Queue patched file in place: " << std::endl
2836 << DestFile << " -> " << PatchedFile << std::endl;
2837
2838 // queue for copy by the transaction manager
2839 TransactionManager->TransactionStageCopy(this, DestFile, GetKeepCompressedFileName(GetFinalFilename(), Target));
2840
2841 // ensure the ed-style patches are gone regardless of list-cleanup
2842 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2843 I != allPatches->end(); ++I)
2844 RemoveFile("pkgAcqIndexMergeDiffs::Done", GetMergeDiffsPatchFileName(UnpatchedFile, (*I)->patch.file));
2845 RemoveFile("pkgAcqIndexMergeDiffs::Done", UnpatchedFile);
2846
2847 // all set and done
2848 Complete = true;
2849 if(Debug)
2850 std::clog << "allDone: " << DestFile << "\n" << std::endl;
2851 return;
2852 case StateDoneDiff: _error->Fatal("Done called for %s which is in an invalid Done state", PatchFile.c_str()); break;
2853 case StateErrorDiff: _error->Fatal("Done called for %s which is in an invalid Error state", PatchFile.c_str()); break;
2854 }
2855 }
2856 /*}}}*/
2857 std::string pkgAcqIndexMergeDiffs::Custom600Headers() const /*{{{*/
2858 {
2859 if(State != StateApplyDiff)
2860 return pkgAcqBaseIndex::Custom600Headers();
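// For the rred call this produces header lines roughly like (hypothetical values):
//   Start-SHA256-Hash: <hash of the index we start from>
//   Patch-0-SHA256-Hash: <hash of the first patch>
//   Patch-1-SHA256-Hash: <hash of the second patch>
// with one Patch-N group per queued patch, in order, and one line per hash type.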
2861 std::ostringstream patchhashes;
2862 unsigned int seen_patches = 0;
2863 for (auto && hs : (*allPatches)[0]->patch.result_hashes)
2864 patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
2865 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2866 I != allPatches->end(); ++I)
2867 {
2868 HashStringList const ExpectedHashes = (*I)->patch.patch_hashes;
2869 for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
2870 patchhashes << "\nPatch-" << std::to_string(seen_patches) << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
2871 ++seen_patches;
2872 }
2873 patchhashes << pkgAcqBaseIndex::Custom600Headers();
2874 return patchhashes.str();
2875 }
2876 /*}}}*/
2877 pkgAcqIndexMergeDiffs::~pkgAcqIndexMergeDiffs() {}
2878
2879 // AcqIndex::AcqIndex - Constructor /*{{{*/
2880 pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner,
2881 pkgAcqMetaClearSig * const TransactionManager,
2882 IndexTarget const &Target, bool const Derived)
2883 : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL), Stage(STAGE_DOWNLOAD),
2884 CompressionExtensions(Target.Option(IndexTarget::COMPRESSIONTYPES))
2885 {
2886 if (Derived)
2887 return;
2888 Init(Target.URI, Target.Description, Target.ShortDesc);
2889
2890 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
2891 std::clog << "New pkgIndex with TransactionManager "
2892 << TransactionManager << std::endl;
2893 }
2894 /*}}}*/
2895 // AcqIndex::Init - deferred Constructor /*{{{*/
2896 static void NextCompressionExtension(std::string &CurrentCompressionExtension, std::string &CompressionExtensions, bool const preview)
2897 {
2898 size_t const nextExt = CompressionExtensions.find(' ');
2899 if (nextExt == std::string::npos)
2900 {
2901 CurrentCompressionExtension = CompressionExtensions;
2902 if (preview == false)
2903 CompressionExtensions.clear();
2904 }
2905 else
2906 {
2907 CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt);
2908 if (preview == false)
2909 CompressionExtensions = CompressionExtensions.substr(nextExt+1);
2910 }
2911 }
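// Sketch of the helper above: given CompressionExtensions = "by-hash xz gz uncompressed",
// each call peels off the first token ("by-hash", then "xz", ...) into
// CurrentCompressionExtension; with preview == true the list is only peeked at and
// left untouched.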
2912 void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
2913 string const &ShortDesc)
2914 {
2915 Stage = STAGE_DOWNLOAD;
2916
2917 DestFile = GetPartialFileNameFromURI(URI);
2918 NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false);
2919
2920 if (CurrentCompressionExtension == "uncompressed")
2921 {
2922 Desc.URI = URI;
2923 }
2924 else if (CurrentCompressionExtension == "by-hash")
2925 {
2926 NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, true);
2927 if(unlikely(CurrentCompressionExtension.empty()))
2928 return;
2929 if (CurrentCompressionExtension != "uncompressed")
2930 {
2931 Desc.URI = URI + '.' + CurrentCompressionExtension;
2932 DestFile = DestFile + '.' + CurrentCompressionExtension;
2933 }
2934 else
2935 Desc.URI = URI;
2936
2937 HashStringList const Hashes = GetExpectedHashes();
2938 HashString const * const TargetHash = Hashes.find(NULL);
2939 if (unlikely(TargetHash == nullptr))
2940 return;
2941 std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
2942 size_t const trailing_slash = Desc.URI.find_last_of("/");
2943 if (unlikely(trailing_slash == std::string::npos))
2944 return;
2945 Desc.URI = Desc.URI.replace(
2946 trailing_slash,
2947 Desc.URI.substr(trailing_slash+1).size()+1,
2948 ByHash);
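// Example of the rewrite above (hypothetical hash value): for a SHA256 target hash,
//   .../dists/unstable/main/binary-amd64/Packages.xz
// becomes
//   .../dists/unstable/main/binary-amd64/by-hash/SHA256/0123abcd...
// i.e. the last path component is replaced by the by-hash path.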
2949 }
2950 else if (unlikely(CurrentCompressionExtension.empty()))
2951 return;
2952 else
2953 {
2954 Desc.URI = URI + '.' + CurrentCompressionExtension;
2955 DestFile = DestFile + '.' + CurrentCompressionExtension;
2956 }
2957
2958 // store file size of the download to ensure the fetcher gives
2959 // accurate progress reporting
2960 FileSize = GetExpectedHashes().FileSize();
2961
2962 Desc.Description = URIDesc;
2963 Desc.Owner = this;
2964 Desc.ShortDesc = ShortDesc;
2965
2966 QueueURI(Desc);
2967 }
2968 /*}}}*/
2969 // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
2970 // ---------------------------------------------------------------------
2971 /* Besides the Index-File and Fail-Ignore flags, the main header we use is the last-modified header. */
2972 string pkgAcqIndex::Custom600Headers() const
2973 {
2974
2975 string msg = "\nIndex-File: true";
2976
2977 if (TransactionManager->LastMetaIndexParser == NULL)
2978 {
2979 std::string const Final = GetFinalFilename();
2980
2981 struct stat Buf;
2982 if (stat(Final.c_str(),&Buf) == 0)
2983 msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
2984 }
2985
2986 if(Target.IsOptional)
2987 msg += "\nFail-Ignore: true";
2988
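// The resulting acquire-method header block looks roughly like:
//   Index-File: true
//   Last-Modified: <RFC1123 date of the final file>   (only if it exists and no previous Release file is known)
//   Fail-Ignore: true                                  (only for optional targets)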
2989 return msg;
2990 }
2991 /*}}}*/
2992 // AcqIndex::Failed - getting the indexfile failed /*{{{*/
2993 bool pkgAcqIndex::CommonFailed(std::string const &TargetURI, std::string const TargetDesc,
2994 std::string const &Message, pkgAcquire::MethodConfig const * const Cnf)
2995 {
2996 pkgAcqBaseIndex::Failed(Message,Cnf);
2997
2998 if (UsedMirror.empty() == false && UsedMirror != "DIRECT" &&
2999 LookupTag(Message, "FailReason") == "HttpError404")
3000 {
3001 UsedMirror = "DIRECT";
3002 if (Desc.URI.find("/by-hash/") != std::string::npos)
3003 CompressionExtensions = "by-hash " + CompressionExtensions;
3004 else
3005 CompressionExtensions = CurrentCompressionExtension + ' ' + CompressionExtensions;
3006 Init(TargetURI, TargetDesc, Desc.ShortDesc);
3007 Status = StatIdle;
3008 return true;
3009 }
3010
3011 // authorisation failures will not be fixed by other compression types
3012 if (Status != StatAuthError)
3013 {
3014 if (CompressionExtensions.empty() == false)
3015 {
3016 Init(TargetURI, Desc.Description, Desc.ShortDesc);
3017 Status = StatIdle;
3018 return true;
3019 }
3020 }
3021 return false;
3022 }
3023 void pkgAcqIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
3024 {
3025 if (CommonFailed(Target.URI, Target.Description, Message, Cnf))
3026 return;
3027
3028 if(Target.IsOptional && GetExpectedHashes().empty() && Stage == STAGE_DOWNLOAD)
3029 Status = StatDone;
3030 else
3031 TransactionManager->AbortTransaction();
3032 }
3033 /*}}}*/
3034 // AcqIndex::Done - Finished a fetch /*{{{*/
3035 // ---------------------------------------------------------------------
3036 /* This goes through a number of states. On the initial fetch the
3037 method could possibly return an alternate filename which points
3038 to the uncompressed version of the file. If so, the file
3039 is copied into the partial directory. In all other cases the file
3040 is decompressed from the compressed URI. */
3041 void pkgAcqIndex::Done(string const &Message,
3042 HashStringList const &Hashes,
3043 pkgAcquire::MethodConfig const * const Cfg)
3044 {
3045 Item::Done(Message,Hashes,Cfg);
3046
3047 switch(Stage)
3048 {
3049 case STAGE_DOWNLOAD:
3050 StageDownloadDone(Message);
3051 break;
3052 case STAGE_DECOMPRESS_AND_VERIFY:
3053 StageDecompressDone();
3054 break;
3055 }
3056 }
3057 /*}}}*/
3058 // AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/
3059 void pkgAcqIndex::StageDownloadDone(string const &Message)
3060 {
3061 Local = true;
3062 Complete = true;
3063
3064 std::string const AltFilename = LookupTag(Message,"Alt-Filename");
3065 std::string Filename = LookupTag(Message,"Filename");
3066
3067 // we need to verify the file against the current Release file again
3068 // on an if-modified-since hit to avoid an attack with stale data against us
3069 if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
3070 {
3071 // link FinalFile into partial/ so that we check the hash again
3072 string const FinalFile = GetExistingFilename(GetFinalFileNameFromURI(Target.URI));
3073 if (symlink(FinalFile.c_str(), DestFile.c_str()) != 0)
3074 _error->WarningE("pkgAcqIndex::StageDownloadDone", "Symlinking final file %s back to %s failed", FinalFile.c_str(), DestFile.c_str());
3075 else
3076 {
3077 EraseFileName = DestFile;
3078 Filename = DestFile;
3079 }
3080 Stage = STAGE_DECOMPRESS_AND_VERIFY;
3081 Desc.URI = "store:" + Filename;
3082 QueueURI(Desc);
3083 SetActiveSubprocess(::URI(Desc.URI).Access);
3084 return;
3085 }
3086 // methods like file:// give us an alternative (uncompressed) file
3087 else if (Target.KeepCompressed == false && AltFilename.empty() == false)
3088 {
3089 Filename = AltFilename;
3090 EraseFileName.clear();
3091 }
3092 // Methods like "file:" will give us a (compressed) FileName that is
3093 // not the "DestFile" we set; in this case we uncompress from the local file
3094 else if (Filename != DestFile && RealFileExists(DestFile) == false)
3095 {
3096 // symlinking ensures that the filename can be used for compression detection,
3097 // which is e.g. needed for by-hash files that carry no compression extension
3098 if (symlink(Filename.c_str(),DestFile.c_str()) != 0)
3099 _error->WarningE("pkgAcqIndex::StageDownloadDone", "Symlinking file %s to %s failed", Filename.c_str(), DestFile.c_str());
3100 else
3101 {
3102 EraseFileName = DestFile;
3103 Filename = DestFile;
3104 }
3105 }
3106
3107 Stage = STAGE_DECOMPRESS_AND_VERIFY;
3108 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
3109 if (Filename != DestFile && flExtension(Filename) == flExtension(DestFile))
3110 Desc.URI = "copy:" + Filename;
3111 else
3112 Desc.URI = "store:" + Filename;
3113 if (DestFile == Filename)
3114 {
3115 if (CurrentCompressionExtension == "uncompressed")
3116 return StageDecompressDone();
3117 DestFile = "/dev/null";
3118 }
3119
3120 if (EraseFileName.empty() && Filename != AltFilename)
3121 EraseFileName = Filename;
3122
3123 // queue uri for the next stage
3124 QueueURI(Desc);
3125 SetActiveSubprocess(::URI(Desc.URI).Access);
3126 }
3127 /*}}}*/
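// Small decision table (illustrative, comment only) for the second-stage URI
// chosen above; the real paths come from GetPartialFileNameFromURI() and
// GetKeepCompressedFileName(), the ones below are placeholders:
//
//   Filename                        DestFile                        queued as
//   .../partial/..._Packages.xz     .../partial/..._Packages        store: (decompress)
//   .../lists/..._Packages.xz       .../partial/..._Packages.xz     copy:  (same extension)
//   .../partial/..._Packages        same file, uncompressed target  no queue, StageDecompressDone()
//
// "store:" hands the file to the decompressor method, "copy:" merely moves it.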
3128 // AcqIndex::StageDecompressDone - Final verification /*{{{*/
3129 void pkgAcqIndex::StageDecompressDone()
3130 {
3131 if (DestFile == "/dev/null")
3132 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
3133
3134 // Done, queue for rename on transaction finished
3135 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
3136 }
3137 /*}}}*/
3138 pkgAcqIndex::~pkgAcqIndex() {}
3139
3140
3141 // AcqArchive::AcqArchive - Constructor /*{{{*/
3142 // ---------------------------------------------------------------------
3143 /* This just sets up the initial fetch environment and queues the first
3144 possibility */
3145 pkgAcqArchive::pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sources,
3146 pkgRecords * const Recs,pkgCache::VerIterator const &Version,
3147 string &StoreFilename) :
3148 Item(Owner), d(NULL), LocalSource(false), Version(Version), Sources(Sources), Recs(Recs),
3149 StoreFilename(StoreFilename), Vf(Version.FileList()),
3150 Trusted(false)
3151 {
3152 Retries = _config->FindI("Acquire::Retries",0);
3153
3154 if (Version.Arch() == 0)
3155 {
3156 _error->Error(_("I wasn't able to locate a file for the %s package. "
3157 "This might mean you need to manually fix this package. "
3158 "(due to missing arch)"),
3159 Version.ParentPkg().FullName().c_str());
3160 return;
3161 }
3162
3163 /* We need to find a filename to determine the extension. We make the
3164 assumption here that all the available sources for this version share
3165 the same extension. */
3166 // Skip non-source entries; they do not have file fields.
3167 for (; Vf.end() == false; ++Vf)
3168 {
3169 if (Vf.File().Flagged(pkgCache::Flag::NotSource))
3170 continue;
3171 break;
3172 }
3173
3174 // Does not really matter here; we are going to fail out below
3175 if (Vf.end() != true)
3176 {
3177 // If this fails to get a file name we will bomb out below.
3178 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
3179 if (_error->PendingError() == true)
3180 return;
3181
3182 // Generate the final file name as: package_version_arch.foo
3183 StoreFilename = QuoteString(Version.ParentPkg().Name(),"_:") + '_' +
3184 QuoteString(Version.VerStr(),"_:") + '_' +
3185 QuoteString(Version.Arch(),"_:.") +
3186 "." + flExtension(Parse.FileName());
3187 }
3188
3189 // check if we have one trusted source for the package. If so, switch
3190 // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode
3191 bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false);
3192 bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false);
3193 bool seenUntrusted = false;
3194 for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
3195 {
3196 pkgIndexFile *Index;
3197 if (Sources->FindIndex(i.File(),Index) == false)
3198 continue;
3199
3200 if (debugAuth == true)
3201 std::cerr << "Checking index: " << Index->Describe()
3202 << "(Trusted=" << Index->IsTrusted() << ")" << std::endl;
3203
3204 if (Index->IsTrusted() == true)
3205 {
3206 Trusted = true;
3207 if (allowUnauth == false)
3208 break;
3209 }
3210 else
3211 seenUntrusted = true;
3212 }
3213
3214 // "allow-unauthenticated" restores apt's old fetching behaviour,
3215 // which means that e.g. unauthenticated file:// uris get a higher
3216 // priority than authenticated http:// uris
3217 if (allowUnauth == true && seenUntrusted == true)
3218 Trusted = false;
3219
3220 // Select a source
3221 if (QueueNext() == false && _error->PendingError() == false)
3222 _error->Error(_("Can't find a source to download version '%s' of '%s'"),
3223 Version.VerStr(), Version.ParentPkg().FullName(false).c_str());
3224 }
3225 /*}}}*/
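// Illustrative example (comment only) of the Trusted handling above. Suppose
// a version is available from a signed http repository and from an unsigned
// local file:// repository:
//
//   APT::Get::AllowUnauthenticated "false";   // Trusted stays true; QueueNext()
//                                             // will skip the unsigned source
//   APT::Get::AllowUnauthenticated "true";    // Trusted is dropped again, so the
//                                             // unsigned source may be used as well
//
// The apt.conf lines are example values for the option already read above.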
3226 // AcqArchive::QueueNext - Queue the next file source /*{{{*/
3227 // ---------------------------------------------------------------------
3228 /* This queues the next available file version for download. It checks if
3229 the archive is already available in the cache and stashes the expected
3230 hashes for checking later. */
3231 bool pkgAcqArchive::QueueNext()
3232 {
3233 for (; Vf.end() == false; ++Vf)
3234 {
3235 pkgCache::PkgFileIterator const PkgF = Vf.File();
3236 // Ignore non-source entries
3237 if (PkgF.Flagged(pkgCache::Flag::NotSource))
3238 continue;
3239
3240 // Try to cross match against the source list
3241 pkgIndexFile *Index;
3242 if (Sources->FindIndex(PkgF, Index) == false)
3243 continue;
3244 LocalSource = PkgF.Flagged(pkgCache::Flag::LocalSource);
3245
3246 // only try to get a trusted package from another source if that source
3247 // is also trusted
3248 if(Trusted && !Index->IsTrusted())
3249 continue;
3250
3251 // Grab the text package record
3252 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
3253 if (_error->PendingError() == true)
3254 return false;
3255
3256 string PkgFile = Parse.FileName();
3257 ExpectedHashes = Parse.Hashes();
3258
3259 if (PkgFile.empty() == true)
3260 return _error->Error(_("The package index files are corrupted. No Filename: "
3261 "field for package %s."),
3262 Version.ParentPkg().Name());
3263
3264 Desc.URI = Index->ArchiveURI(PkgFile);
3265 Desc.Description = Index->ArchiveInfo(Version);
3266 Desc.Owner = this;
3267 Desc.ShortDesc = Version.ParentPkg().FullName(true);
3268
3269 // See if we already have the file. (Legacy filenames)
3270 FileSize = Version->Size;
3271 string FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(PkgFile);
3272 struct stat Buf;
3273 if (stat(FinalFile.c_str(),&Buf) == 0)
3274 {
3275 // Make sure the size matches
3276 if ((unsigned long long)Buf.st_size == Version->Size)
3277 {
3278 Complete = true;
3279 Local = true;
3280 Status = StatDone;
3281 StoreFilename = DestFile = FinalFile;
3282 return true;
3283 }
3284
3285 /* Hmm, we have a file and its size does not match; this means it is
3286 an old-style file left behind by a mismatched arch */
3287 RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
3288 }
3289
3290 // Check it again using the new style output filenames
3291 FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename);
3292 if (stat(FinalFile.c_str(),&Buf) == 0)
3293 {
3294 // Make sure the size matches
3295 if ((unsigned long long)Buf.st_size == Version->Size)
3296 {
3297 Complete = true;
3298 Local = true;
3299 Status = StatDone;
3300 StoreFilename = DestFile = FinalFile;
3301 return true;
3302 }
3303
3304 /* Hmm, we have a file and its size does not match; this shouldn't
3305 happen. */
3306 RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
3307 }
3308
3309 DestFile = _config->FindDir("Dir::Cache::Archives") + "partial/" + flNotDir(StoreFilename);
3310
3311 // Check the destination file
3312 if (stat(DestFile.c_str(),&Buf) == 0)
3313 {
3314 // Hmm, the partial file is too big, erase it
3315 if ((unsigned long long)Buf.st_size > Version->Size)
3316 RemoveFile("pkgAcqArchive::QueueNext", DestFile);
3317 else
3318 PartialSize = Buf.st_size;
3319 }
3320
3321 // Disables download of archives - useful if no real installation follows,
3322 // e.g. if we are just interested in proposed installation order
3323 if (_config->FindB("Debug::pkgAcqArchive::NoQueue", false) == true)
3324 {
3325 Complete = true;
3326 Local = true;
3327 Status = StatDone;
3328 StoreFilename = DestFile = FinalFile;
3329 return true;
3330 }
3331
3332 // Create the item
3333 Local = false;
3334 ++Vf;
3335 QueueURI(Desc);
3336 return true;
3337 }
3338 return false;
3339 }
3340 /*}}}*/
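// Rough summary (comment only) of the per-source lookup order implemented in
// QueueNext() above, with example paths assuming the stock Dir::Cache::Archives
// of /var/cache/apt/archives/:
//
//   1. <archives>/<basename of the Filename: field>        reused if the size matches
//   2. <archives>/<package>_<version>_<arch>.deb           reused if the size matches
//   3. <archives>/partial/<package>_<version>_<arch>.deb   resumed, or erased if too big
//   4. otherwise the archive URI is queued for download
//
// Mismatching files in steps 1 and 2 are removed before falling through.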
3341 // AcqArchive::Done - Finished fetching /*{{{*/
3342 // ---------------------------------------------------------------------
3343 /* */
3344 void pkgAcqArchive::Done(string const &Message, HashStringList const &Hashes,
3345 pkgAcquire::MethodConfig const * const Cfg)
3346 {
3347 Item::Done(Message, Hashes, Cfg);
3348
3349 // Grab the output filename
3350 std::string const FileName = LookupTag(Message,"Filename");
3351 if (DestFile != FileName && RealFileExists(DestFile) == false)
3352 {
3353 StoreFilename = DestFile = FileName;
3354 Local = true;
3355 Complete = true;
3356 return;
3357 }
3358
3359 // Done, move it into position
3360 string const FinalFile = GetFinalFilename();
3361 Rename(DestFile,FinalFile);
3362 StoreFilename = DestFile = FinalFile;
3363 Complete = true;
3364 }
3365 /*}}}*/
3366 // AcqArchive::Failed - Failure handler /*{{{*/
3367 // ---------------------------------------------------------------------
3368 /* Here we try other sources */
3369 void pkgAcqArchive::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
3370 {
3371 Item::Failed(Message,Cnf);
3372
3373 /* We don't really want to retry on failed media swaps; this prevents
3374 that. Note that permanent failures are not
3375 recorded. */
3376 if (Cnf->Removable == true &&
3377 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3378 {
3379 // Vf = Version.FileList();
3380 while (Vf.end() == false) ++Vf;
3381 StoreFilename = string();
3382 return;
3383 }
3384
3385 Status = StatIdle;
3386 if (QueueNext() == false)
3387 {
3388 // This is the retry counter
3389 if (Retries != 0 &&
3390 Cnf->LocalOnly == false &&
3391 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3392 {
3393 Retries--;
3394 Vf = Version.FileList();
3395 if (QueueNext() == true)
3396 return;
3397 }
3398
3399 StoreFilename = string();
3400 Status = StatError;
3401 }
3402 }
3403 /*}}}*/
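// Example configuration (illustrative) for the retry branch above: Retries is
// read from Acquire::Retries in the constructor, so e.g.
//
//   Acquire::Retries "3";
//
// in apt.conf allows up to three additional passes over the version's file
// list after a transient, non-local failure.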
3404 APT_PURE bool pkgAcqArchive::IsTrusted() const /*{{{*/
3405 {
3406 return Trusted;
3407 }
3408 /*}}}*/
3409 void pkgAcqArchive::Finished() /*{{{*/
3410 {
3411 if (Status == pkgAcquire::Item::StatDone &&
3412 Complete == true)
3413 return;
3414 StoreFilename = string();
3415 }
3416 /*}}}*/
3417 std::string pkgAcqArchive::DescURI() const /*{{{*/
3418 {
3419 return Desc.URI;
3420 }
3421 /*}}}*/
3422 std::string pkgAcqArchive::ShortDesc() const /*{{{*/
3423 {
3424 return Desc.ShortDesc;
3425 }
3426 /*}}}*/
3427 pkgAcqArchive::~pkgAcqArchive() {}
3428
3429 // AcqChangelog::pkgAcqChangelog - Constructors /*{{{*/
3430 class pkgAcqChangelog::Private
3431 {
3432 public:
3433 std::string FinalFile;
3434 };
3435 pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::VerIterator const &Ver,
3436 std::string const &DestDir, std::string const &DestFilename) :
3437 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(Ver.SourcePkgName()), SrcVersion(Ver.SourceVerStr())
3438 {
3439 Desc.URI = URI(Ver);
3440 Init(DestDir, DestFilename);
3441 }
3442 // some parameters are char* here as they likely come from char* interfaces – which can also return NULL
3443 pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::RlsFileIterator const &RlsFile,
3444 char const * const Component, char const * const SrcName, char const * const SrcVersion,
3445 const string &DestDir, const string &DestFilename) :
3446 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(SrcName), SrcVersion(SrcVersion)
3447 {
3448 Desc.URI = URI(RlsFile, Component, SrcName, SrcVersion);
3449 Init(DestDir, DestFilename);
3450 }
3451 pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner,
3452 std::string const &URI, char const * const SrcName, char const * const SrcVersion,
3453 const string &DestDir, const string &DestFilename) :
3454 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(SrcName), SrcVersion(SrcVersion)
3455 {
3456 Desc.URI = URI;
3457 Init(DestDir, DestFilename);
3458 }
3459 void pkgAcqChangelog::Init(std::string const &DestDir, std::string const &DestFilename)
3460 {
3461 if (Desc.URI.empty())
3462 {
3463 Status = StatError;
3464 // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
3465 strprintf(ErrorText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
3466 // Let the error message print something sensible rather than "Failed to fetch /"
3467 if (DestFilename.empty())
3468 DestFile = SrcName + ".changelog";
3469 else
3470 DestFile = DestFilename;
3471 Desc.URI = "changelog:/" + DestFile;
3472 return;
3473 }
3474
3475 std::string DestFileName;
3476 if (DestFilename.empty())
3477 DestFileName = flCombine(DestFile, SrcName + ".changelog");
3478 else
3479 DestFileName = flCombine(DestFile, DestFilename);
3480
3481 std::string const SandboxUser = _config->Find("APT::Sandbox::User");
3482 std::string const systemTemp = GetTempDir(SandboxUser);
3483 char tmpname[1000];
3484 snprintf(tmpname, sizeof(tmpname), "%s/apt-changelog-XXXXXX", systemTemp.c_str());
3485 if (NULL == mkdtemp(tmpname))
3486 {
3487 _error->Errno("mkdtemp", "mkdtemp failed in changelog acquire of %s %s", SrcName.c_str(), SrcVersion.c_str());
3488 Status = StatError;
3489 return;
3490 }
3491 TemporaryDirectory = tmpname;
3492
3493 ChangeOwnerAndPermissionOfFile("Item::QueueURI", TemporaryDirectory.c_str(),
3494 SandboxUser.c_str(), ROOT_GROUP, 0700);
3495
3496 DestFile = flCombine(TemporaryDirectory, DestFileName);
3497 if (DestDir.empty() == false)
3498 {
3499 d->FinalFile = flCombine(DestDir, DestFileName);
3500 if (RealFileExists(d->FinalFile))
3501 {
3502 FileFd file1, file2;
3503 if (file1.Open(DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Exclusive) &&
3504 file2.Open(d->FinalFile, FileFd::ReadOnly) && CopyFile(file2, file1))
3505 {
3506 struct timeval times[2];
3507 times[0].tv_sec = times[1].tv_sec = file2.ModificationTime();
3508 times[0].tv_usec = times[1].tv_usec = 0;
3509 utimes(DestFile.c_str(), times);
3510 }
3511 }
3512 }
3513
3514 Desc.ShortDesc = "Changelog";
3515 strprintf(Desc.Description, "%s %s %s Changelog", URI::SiteOnly(Desc.URI).c_str(), SrcName.c_str(), SrcVersion.c_str());
3516 Desc.Owner = this;
3517 QueueURI(Desc);
3518 }
3519 /*}}}*/
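// Minimal usage sketch (comment only; assumes the usual pkgAcquire setup with
// Run(), error handling omitted, and a valid VerIterator Ver). The item fetches
// into a private temporary directory and, because a DestDir is given, Done()
// moves the changelog to <DestDir>/<source>.changelog:
//
//   pkgAcquire Fetcher;
//   new pkgAcqChangelog(&Fetcher, Ver, "/tmp/changelogs");
//   Fetcher.Run();
//
// The destination directory is only an example value.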
3520 std::string pkgAcqChangelog::URI(pkgCache::VerIterator const &Ver) /*{{{*/
3521 {
3522 std::string const confOnline = "Acquire::Changelogs::AlwaysOnline";
3523 bool AlwaysOnline = _config->FindB(confOnline, false);
3524 if (AlwaysOnline == false)
3525 for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
3526 {
3527 pkgCache::PkgFileIterator const PF = VF.File();
3528 if (PF.Flagged(pkgCache::Flag::NotSource) || PF->Release == 0)
3529 continue;
3530 pkgCache::RlsFileIterator const RF = PF.ReleaseFile();
3531 if (RF->Origin != 0 && _config->FindB(confOnline + "::Origin::" + RF.Origin(), false))
3532 {
3533 AlwaysOnline = true;
3534 break;
3535 }
3536 }
3537 if (AlwaysOnline == false)
3538 {
3539 pkgCache::PkgIterator const Pkg = Ver.ParentPkg();
3540 if (Pkg->CurrentVer != 0 && Pkg.CurrentVer() == Ver)
3541 {
3542 std::string const root = _config->FindDir("Dir");
3543 std::string const basename = root + std::string("usr/share/doc/") + Pkg.Name() + "/changelog";
3544 std::string const debianname = basename + ".Debian";
3545 if (FileExists(debianname))
3546 return "copy://" + debianname;
3547 else if (FileExists(debianname + ".gz"))
3548 return "gzip://" + debianname + ".gz";
3549 else if (FileExists(basename))
3550 return "copy://" + basename;
3551 else if (FileExists(basename + ".gz"))
3552 return "gzip://" + basename + ".gz";
3553 }
3554 }
3555
3556 char const * const SrcName = Ver.SourcePkgName();
3557 char const * const SrcVersion = Ver.SourceVerStr();
3558 // find the first source for this version which promises a changelog
3559 for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
3560 {
3561 pkgCache::PkgFileIterator const PF = VF.File();
3562 if (PF.Flagged(pkgCache::Flag::NotSource) || PF->Release == 0)
3563 continue;
3564 pkgCache::RlsFileIterator const RF = PF.ReleaseFile();
3565 std::string const uri = URI(RF, PF.Component(), SrcName, SrcVersion);
3566 if (uri.empty())
3567 continue;
3568 return uri;
3569 }
3570 return "";
3571 }
3572 std::string pkgAcqChangelog::URITemplate(pkgCache::RlsFileIterator const &Rls)
3573 {
3574 if (Rls.end() == true || (Rls->Label == 0 && Rls->Origin == 0))
3575 return "";
3576 std::string const serverConfig = "Acquire::Changelogs::URI";
3577 std::string server;
3578 #define APT_EMPTY_SERVER \
3579 if (server.empty() == false) \
3580 { \
3581 if (server != "no") \
3582 return server; \
3583 return ""; \
3584 }
3585 #define APT_CHECK_SERVER(X, Y) \
3586 if (Rls->X != 0) \
3587 { \
3588 std::string const specialServerConfig = serverConfig + "::" + Y + #X + "::" + Rls.X(); \
3589 server = _config->Find(specialServerConfig); \
3590 APT_EMPTY_SERVER \
3591 }
3592 // this way e.g. Debian-Security can fall back to Debian
3593 APT_CHECK_SERVER(Label, "Override::")
3594 APT_CHECK_SERVER(Origin, "Override::")
3595
3596 if (RealFileExists(Rls.FileName()))
3597 {
3598 _error->PushToStack();
3599 FileFd rf;
3600 /* This can be costly. A caller wanting to get millions of URIs might
3601 want to do this on its own once and use Override settings.
3602 We don't do this here as Origin/Label are not as unique as they
3603 should be, so this could produce request-order-dependent anomalies */
3604 if (OpenMaybeClearSignedFile(Rls.FileName(), rf) == true)
3605 {
3606 pkgTagFile TagFile(&rf, rf.Size());
3607 pkgTagSection Section;
3608 if (TagFile.Step(Section) == true)
3609 server = Section.FindS("Changelogs");
3610 }
3611 _error->RevertToStack();
3612 APT_EMPTY_SERVER
3613 }
3614
3615 APT_CHECK_SERVER(Label, "")
3616 APT_CHECK_SERVER(Origin, "")
3617 #undef APT_CHECK_SERVER
3618 #undef APT_EMPTY_SERVER
3619 return "";
3620 }
3621 std::string pkgAcqChangelog::URI(pkgCache::RlsFileIterator const &Rls,
3622 char const * const Component, char const * const SrcName,
3623 char const * const SrcVersion)
3624 {
3625 return URI(URITemplate(Rls), Component, SrcName, SrcVersion);
3626 }
3627 std::string pkgAcqChangelog::URI(std::string const &Template,
3628 char const * const Component, char const * const SrcName,
3629 char const * const SrcVersion)
3630 {
3631 if (Template.find("@CHANGEPATH@") == std::string::npos)
3632 return "";
3633
3634 // the path is: COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER, e.g. main/a/apt/apt_1.1 or contrib/liba/libapt/libapt_2.0
3635 std::string Src = SrcName;
3636 std::string path = APT::String::Startswith(SrcName, "lib") ? Src.substr(0, 4) : Src.substr(0,1);
3637 path.append("/").append(Src).append("/");
3638 path.append(Src).append("_").append(StripEpoch(SrcVersion));
3639 // we omit component for releases without one (= flat-style repositories)
3640 if (Component != NULL && strlen(Component) != 0)
3641 path = std::string(Component) + "/" + path;
3642
3643 return SubstVar(Template, "@CHANGEPATH@", path);
3644 }
3645 /*}}}*/
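// Worked example (illustrative values) for the template handling above. With a
// Release file or apt.conf entry such as
//
//   Acquire::Changelogs::URI::Origin::Debian "http://changelogs.example.org/@CHANGEPATH@_changelog";
//
// URI(template, "main", "apt", "1.1") expands @CHANGEPATH@ to main/a/apt/apt_1.1,
// giving http://changelogs.example.org/main/a/apt/apt_1.1_changelog, while a
// "lib" source like libapt 2.0 in contrib maps to contrib/liba/libapt/libapt_2.0.
// The host name is a placeholder, not a real changelog server.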
3646 // AcqChangelog::Failed - Failure handler /*{{{*/
3647 void pkgAcqChangelog::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf)
3648 {
3649 Item::Failed(Message,Cnf);
3650
3651 std::string errText;
3652 // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
3653 strprintf(errText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
3654
3655 // Error is probably something techy like 404 Not Found
3656 if (ErrorText.empty())
3657 ErrorText = errText;
3658 else
3659 ErrorText = errText + " (" + ErrorText + ")";
3660 }
3661 /*}}}*/
3662 // AcqChangelog::Done - Item downloaded OK /*{{{*/
3663 void pkgAcqChangelog::Done(string const &Message,HashStringList const &CalcHashes,
3664 pkgAcquire::MethodConfig const * const Cnf)
3665 {
3666 Item::Done(Message,CalcHashes,Cnf);
3667 if (d->FinalFile.empty() == false)
3668 {
3669 if (RemoveFile("pkgAcqChangelog::Done", d->FinalFile) == false ||
3670 Rename(DestFile, d->FinalFile) == false)
3671 Status = StatError;
3672 }
3673
3674 Complete = true;
3675 }
3676 /*}}}*/
3677 pkgAcqChangelog::~pkgAcqChangelog() /*{{{*/
3678 {
3679 if (TemporaryDirectory.empty() == false)
3680 {
3681 RemoveFile("~pkgAcqChangelog", DestFile);
3682 rmdir(TemporaryDirectory.c_str());
3683 }
3684 delete d;
3685 }
3686 /*}}}*/
3687
3688 // AcqFile::pkgAcqFile - Constructor /*{{{*/
3689 pkgAcqFile::pkgAcqFile(pkgAcquire * const Owner,string const &URI, HashStringList const &Hashes,
3690 unsigned long long const Size,string const &Dsc,string const &ShortDesc,
3691 const string &DestDir, const string &DestFilename,
3692 bool const IsIndexFile) :
3693 Item(Owner), d(NULL), IsIndexFile(IsIndexFile), ExpectedHashes(Hashes)
3694 {
3695 Retries = _config->FindI("Acquire::Retries",0);
3696
3697 if(!DestFilename.empty())
3698 DestFile = DestFilename;
3699 else if(!DestDir.empty())
3700 DestFile = DestDir + "/" + flNotDir(URI);
3701 else
3702 DestFile = flNotDir(URI);
3703
3704 // Create the item
3705 Desc.URI = URI;
3706 Desc.Description = Dsc;
3707 Desc.Owner = this;
3708
3709 // Set the short description to the archive component
3710 Desc.ShortDesc = ShortDesc;
3711
3712 // Get the transfer sizes
3713 FileSize = Size;
3714 struct stat Buf;
3715 if (stat(DestFile.c_str(),&Buf) == 0)
3716 {
3717 // Hmm, the partial file is too big, erase it
3718 if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
3719 RemoveFile("pkgAcqFile", DestFile);
3720 else
3721 PartialSize = Buf.st_size;
3722 }
3723
3724 QueueURI(Desc);
3725 }
3726 /*}}}*/
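// Minimal usage sketch (comment only; URI, description and filename are
// placeholders, error handling omitted). pkgAcqFile is the generic single-file
// item; with an empty DestDir and an explicit DestFilename the result ends up
// in the current directory:
//
//   pkgAcquire Fetcher;
//   HashStringList NoHashes;   // empty list: no hash verification
//   new pkgAcqFile(&Fetcher, "http://example.org/pool/f/foo/foo_1.0.tar.gz",
//                  NoHashes, 0 /*unknown size*/, "foo 1.0 tarball", "foo",
//                  "", "foo_1.0.tar.gz", false);
//   Fetcher.Run();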
3727 // AcqFile::Done - Item downloaded OK /*{{{*/
3728 void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes,
3729 pkgAcquire::MethodConfig const * const Cnf)
3730 {
3731 Item::Done(Message,CalcHashes,Cnf);
3732
3733 std::string const FileName = LookupTag(Message,"Filename");
3734 Complete = true;
3735
3736 // The file's timestamp matches
3737 if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
3738 return;
3739
3740 // We have to copy it into place
3741 if (RealFileExists(DestFile.c_str()) == false)
3742 {
3743 Local = true;
3744 if (_config->FindB("Acquire::Source-Symlinks",true) == false ||
3745 Cnf->Removable == true)
3746 {
3747 Desc.URI = "copy:" + FileName;
3748 QueueURI(Desc);
3749 return;
3750 }
3751
3752 // Erase the file if it is a symlink so we can overwrite it
3753 struct stat St;
3754 if (lstat(DestFile.c_str(),&St) == 0)
3755 {
3756 if (S_ISLNK(St.st_mode) != 0)
3757 RemoveFile("pkgAcqFile::Done", DestFile);
3758 }
3759
3760 // Symlink the file
3761 if (symlink(FileName.c_str(),DestFile.c_str()) != 0)
3762 {
3763 _error->PushToStack();
3764 _error->Errno("pkgAcqFile::Done", "Symlinking file %s failed", DestFile.c_str());
3765 std::stringstream msg;
3766 _error->DumpErrors(msg, GlobalError::DEBUG, false);
3767 _error->RevertToStack();
3768 ErrorText = msg.str();
3769 Status = StatError;
3770 Complete = false;
3771 }
3772 }
3773 }
3774 /*}}}*/
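// Note on the symlink fallback above (illustrative config value): for local
// sources the fetched file is normally just symlinked into place; setting
//
//   Acquire::Source-Symlinks "false";
//
// in apt.conf (or fetching from removable media) forces a real copy via the
// copy: method instead.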
3775 // AcqFile::Failed - Failure handler /*{{{*/
3776 // ---------------------------------------------------------------------
3777 /* Here we try other sources */
3778 void pkgAcqFile::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf)
3779 {
3780 Item::Failed(Message,Cnf);
3781
3782 // This is the retry counter
3783 if (Retries != 0 &&
3784 Cnf->LocalOnly == false &&
3785 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3786 {
3787 --Retries;
3788 QueueURI(Desc);
3789 Status = StatIdle;
3790 return;
3791 }
3792
3793 }
3794 /*}}}*/
3795 string pkgAcqFile::Custom600Headers() const /*{{{*/
3796 {
3797 if (IsIndexFile)
3798 return "\nIndex-File: true";
3799 return "";
3800 }
3801 /*}}}*/
3802 pkgAcqFile::~pkgAcqFile() {}