1// -*- mode: cpp; mode: fold -*-
2// Description /*{{{*/
3// $Id: acquire-item.cc,v 1.46.2.9 2004/01/16 18:51:11 mdz Exp $
4/* ######################################################################
5
6 Acquire Item - Item to acquire
7
8 Each item can download to exactly one file at a time. This means you
9 cannot create an item that fetches two URIs to two files at the same
10 time. The pkgAcqIndex class creates a second class upon instantiation
11 to fetch the other index files because of this.
12
13 ##################################################################### */
14 /*}}}*/
15// Include Files /*{{{*/
16#include <config.h>
17
18#include <apt-pkg/acquire-item.h>
19#include <apt-pkg/configuration.h>
20#include <apt-pkg/aptconfiguration.h>
21#include <apt-pkg/sourcelist.h>
22#include <apt-pkg/error.h>
23#include <apt-pkg/strutl.h>
24#include <apt-pkg/fileutl.h>
25#include <apt-pkg/tagfile.h>
26#include <apt-pkg/metaindex.h>
27#include <apt-pkg/acquire.h>
28#include <apt-pkg/hashes.h>
29#include <apt-pkg/indexfile.h>
30#include <apt-pkg/pkgcache.h>
31#include <apt-pkg/cacheiterators.h>
32#include <apt-pkg/pkgrecords.h>
33#include <apt-pkg/gpgv.h>
34
35#include <algorithm>
36#include <stddef.h>
37#include <stdlib.h>
38#include <string.h>
39#include <iostream>
40#include <vector>
41#include <sys/stat.h>
42#include <unistd.h>
43#include <errno.h>
44#include <string>
45#include <stdio.h>
46#include <ctime>
47#include <sstream>
48#include <numeric>
49#include <random>
50
51#include <apti18n.h>
52 /*}}}*/
53
54using namespace std;
55
56static void printHashSumComparison(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/
57{
58 if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false)
59 return;
60 std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl;
61 for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs)
62 std::cerr << "\t- " << hs->toStr() << std::endl;
63 std::cerr << " Actual Hash: " << std::endl;
64 for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs)
65 std::cerr << "\t- " << hs->toStr() << std::endl;
66}
67 /*}}}*/
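// Illustrative usage note (not upstream documentation): the comparison above is
// only printed when the debug option is enabled at runtime, e.g. via
//   apt-get update -o Debug::Acquire::HashSumMismatch=true
// which then emits one Expected/Actual hash list pair per mismatching URI.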
68static std::string GetPartialFileName(std::string const &file) /*{{{*/
69{
70 std::string DestFile = _config->FindDir("Dir::State::lists") + "partial/";
71 DestFile += file;
72 return DestFile;
73}
74 /*}}}*/
75static std::string GetPartialFileNameFromURI(std::string const &uri) /*{{{*/
76{
77 return GetPartialFileName(URItoFileName(uri));
78}
79 /*}}}*/
80static std::string GetFinalFileNameFromURI(std::string const &uri) /*{{{*/
81{
82 return _config->FindDir("Dir::State::lists") + URItoFileName(uri);
83}
84 /*}}}*/
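// Example of the resulting paths (sketch with assumed configuration defaults):
// with Dir::State::lists = /var/lib/apt/lists/, the URI
// http://deb.debian.org/debian/dists/stable/InRelease is flattened by
// URItoFileName() into deb.debian.org_debian_dists_stable_InRelease, giving
// .../lists/partial/<name> while in flight and .../lists/<name> once final.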
85static std::string GetKeepCompressedFileName(std::string file, IndexTarget const &Target)/*{{{*/
86{
87 if (Target.KeepCompressed == false)
88 return file;
89
90 std::string const KeepCompressedAs = Target.Option(IndexTarget::KEEPCOMPRESSEDAS);
91 if (KeepCompressedAs.empty() == false)
92 {
93 std::string const ext = KeepCompressedAs.substr(0, KeepCompressedAs.find(' '));
94 if (ext != "uncompressed")
95 file.append(".").append(ext);
96 }
97 return file;
98}
99 /*}}}*/
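// Sketch of the mapping implemented above (example option value assumed): with
// Target.KeepCompressed == true and KEEPCOMPRESSEDAS = "xz gz uncompressed" only
// the first (preferred) token matters, so "Packages" becomes "Packages.xz"; a
// leading "uncompressed" token leaves the filename unchanged.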
100static std::string GetMergeDiffsPatchFileName(std::string const &Final, std::string const &Patch)/*{{{*/
101{
102 // rred expects the patch as $FinalFile.ed.$patchname.gz
103 return Final + ".ed." + Patch + ".gz";
104}
105 /*}}}*/
106static std::string GetDiffsPatchFileName(std::string const &Final) /*{{{*/
107{
108 // rred expects the patch as $FinalFile.ed
109 return Final + ".ed";
110}
111 /*}}}*/
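// Naming example derived from the two helpers above (patch name illustrative):
// Final = .../deb.debian.org_..._Packages and Patch = 2016-01-01-0000.00 yield
// .../Packages.ed.2016-01-01-0000.00.gz for the merged-patches case
// (pkgAcqIndexMergeDiffs) and .../Packages.ed for the single pkgAcqIndexDiffs case.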
112static std::string GetExistingFilename(std::string const &File) /*{{{*/
113{
114 if (RealFileExists(File))
115 return File;
116 for (auto const &type : APT::Configuration::getCompressorExtensions())
117 {
118 std::string const Final = File + type;
119 if (RealFileExists(Final))
120 return Final;
121 }
122 return "";
123}
124 /*}}}*/
125static std::string GetDiffIndexFileName(std::string const &Name) /*{{{*/
126{
127 return Name + ".diff/Index";
128}
129 /*}}}*/
130static std::string GetDiffIndexURI(IndexTarget const &Target) /*{{{*/
131{
132 return Target.URI + ".diff/Index";
133}
134 /*}}}*/
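// Example (illustrative values): for a Packages target with
// URI http://deb.debian.org/debian/dists/stable/main/binary-amd64/Packages and
// MetaKey main/binary-amd64/Packages these helpers yield .../Packages.diff/Index
// as the download URI and main/binary-amd64/Packages.diff/Index as Release entry.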
135
136static void ReportMirrorFailureToCentral(pkgAcquire::Item const &I, std::string const &FailCode, std::string const &Details)/*{{{*/
137{
138 // we only act if a mirror was used at all
139 if(I.UsedMirror.empty())
140 return;
141#if 0
142 std::cerr << "\nReportMirrorFailure: "
143 << UsedMirror
144 << " Uri: " << DescURI()
145 << " FailCode: "
146 << FailCode << std::endl;
147#endif
148 string const report = _config->Find("Methods::Mirror::ProblemReporting",
149 LIBEXEC_DIR "/apt-report-mirror-failure");
150 if(!FileExists(report))
151 return;
152
153 std::vector<char const*> const Args = {
154 report.c_str(),
155 I.UsedMirror.c_str(),
156 I.DescURI().c_str(),
157 FailCode.c_str(),
158 Details.c_str(),
159 NULL
160 };
161
162 pid_t pid = ExecFork();
163 if(pid < 0)
164 {
165 _error->Error("ReportMirrorFailure Fork failed");
166 return;
167 }
168 else if(pid == 0)
169 {
170 execvp(Args[0], (char**)Args.data());
171 std::cerr << "Could not exec " << Args[0] << std::endl;
172 _exit(100);
173 }
174 if(!ExecWait(pid, "report-mirror-failure"))
175 _error->Warning("Couldn't report problem to '%s'", report.c_str());
176}
177 /*}}}*/
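// The hook configured above is invoked roughly as (argv layout as built in Args):
//   <Methods::Mirror::ProblemReporting script> <UsedMirror> <DescURI> <FailCode> <Details>
// defaulting to LIBEXEC_DIR "/apt-report-mirror-failure"; if that script is not
// present the failure is simply not reported anywhere.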
178
179static APT_NONNULL(2) bool MessageInsecureRepository(bool const isError, char const * const msg, std::string const &repo)/*{{{*/
180{
181 std::string m;
182 strprintf(m, msg, repo.c_str());
183 if (isError)
184 {
185 _error->Error("%s", m.c_str());
186 _error->Notice("%s", _("Updating from such a repository can't be done securely, and is therefore disabled by default."));
187 }
188 else
189 {
190 _error->Warning("%s", m.c_str());
191 _error->Notice("%s", _("Data from such a repository can't be authenticated and is therefore potentially dangerous to use."));
192 }
193 _error->Notice("%s", _("See apt-secure(8) manpage for repository creation and user configuration details."));
194 return false;
195}
196 /*}}}*/
197// AllowInsecureRepositories /*{{{*/
198enum class InsecureType { UNSIGNED, WEAK, NORELEASE };
199static bool TargetIsAllowedToBe(IndexTarget const &Target, InsecureType const type)
200{
201 if (_config->FindB("Acquire::AllowInsecureRepositories"))
202 return true;
203
204 if (Target.OptionBool(IndexTarget::ALLOW_INSECURE))
205 return true;
206
207 switch (type)
208 {
209 case InsecureType::UNSIGNED: break;
210 case InsecureType::NORELEASE: break;
211 case InsecureType::WEAK:
212 if (_config->FindB("Acquire::AllowWeakRepositories"))
213 return true;
214 if (Target.OptionBool(IndexTarget::ALLOW_WEAK))
215 return true;
216 break;
217 }
218 return false;
219}
220static bool APT_NONNULL(3, 4, 5) AllowInsecureRepositories(InsecureType const msg, std::string const &repo,
221 metaIndex const * const MetaIndexParser, pkgAcqMetaClearSig * const TransactionManager, pkgAcquire::Item * const I)
222{
223 // we skip weak downgrades as it's unlikely that a repository gets really weaker –
224 // it's more realistic that apt got pickier in a newer version
225 if (msg != InsecureType::WEAK)
226 {
227 std::string const FinalInRelease = TransactionManager->GetFinalFilename();
228 std::string const FinalReleasegpg = FinalInRelease.substr(0, FinalInRelease.length() - strlen("InRelease")) + "Release.gpg";
229 if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease))
230 {
231 char const * msgstr = nullptr;
232 switch (msg)
233 {
234 case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is no longer signed."); break;
235 case InsecureType::NORELEASE: msgstr = _("The repository '%s' no longer has a Release file."); break;
236 case InsecureType::WEAK: /* unreachable */ break;
237 }
238 if (_config->FindB("Acquire::AllowDowngradeToInsecureRepositories") ||
239 TransactionManager->Target.OptionBool(IndexTarget::ALLOW_DOWNGRADE_TO_INSECURE))
240 {
241 // meh, the user wants to take risks (we still mark the packages
242 // from this repository as unauthenticated)
243 _error->Warning(msgstr, repo.c_str());
244 _error->Warning(_("This is normally not allowed, but the option "
245 "Acquire::AllowDowngradeToInsecureRepositories was "
246 "given to override it."));
247 } else {
248 MessageInsecureRepository(true, msgstr, repo);
249 TransactionManager->AbortTransaction();
250 I->Status = pkgAcquire::Item::StatError;
251 return false;
252 }
253 }
254 }
255
256 if(MetaIndexParser->GetTrusted() == metaIndex::TRI_YES)
257 return true;
258
259 char const * msgstr = nullptr;
260 switch (msg)
261 {
262 case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is not signed."); break;
263 case InsecureType::NORELEASE: msgstr = _("The repository '%s' does not have a Release file."); break;
264 case InsecureType::WEAK: msgstr = _("The repository '%s' provides only weak security information."); break;
265 }
266
267 if (TargetIsAllowedToBe(TransactionManager->Target, msg) == true)
268 {
269 //MessageInsecureRepository(false, msgstr, repo);
270 return true;
271 }
272
273 MessageInsecureRepository(true, msgstr, repo);
274 TransactionManager->AbortTransaction();
275 I->Status = pkgAcquire::Item::StatError;
276 return false;
277}
278 /*}}}*/
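// Minimal apt.conf sketch of the knobs consulted above (shown with their
// effective defaults here, not as a recommendation):
//   Acquire::AllowInsecureRepositories "false";            // unsigned / no Release file
//   Acquire::AllowWeakRepositories "false";                // only weak hash information
//   Acquire::AllowDowngradeToInsecureRepositories "false"; // previously signed, now unsigned
// The per-source counterparts are the IndexTarget options ALLOW_INSECURE,
// ALLOW_WEAK and ALLOW_DOWNGRADE_TO_INSECURE checked in the same code paths.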
279static HashStringList GetExpectedHashesFromFor(metaIndex * const Parser, std::string const &MetaKey)/*{{{*/
280{
281 if (Parser == NULL)
282 return HashStringList();
283 metaIndex::checkSum * const R = Parser->Lookup(MetaKey);
284 if (R == NULL)
285 return HashStringList();
286 return R->Hashes;
287}
288 /*}}}*/
289
290// all ::HashesRequired and ::GetExpectedHashes implementations /*{{{*/
291/* ::GetExpectedHashes is abstract and has to be implemented by all subclasses.
292 It is best to implement it as broadly as possible, while ::HashesRequired defaults
293 to true and should be as restrictive as possible for false cases. Note that if
294 a hash is returned by ::GetExpectedHashes it must match. Only if it doesn't
295 return one is ::HashesRequired called to evaluate whether it's okay to have no hashes. */
296APT_CONST bool pkgAcqTransactionItem::HashesRequired() const
297{
298 /* signed repositories obviously have a parser and good hashes.
299 unsigned repositories, too, as even if we can't trust them for security,
300 we can at least trust them for integrity of the download itself.
301 Only repositories without a Release file can (obviously) not have
302 hashes – and they are very uncommon and strongly discouraged */
303 if (TransactionManager->MetaIndexParser->GetLoadedSuccessfully() != metaIndex::TRI_YES)
304 return false;
305 if (TargetIsAllowedToBe(Target, InsecureType::WEAK))
306 {
307 /* If we allow weak hashes, we check that we have some (weak) and then
308 declare hashes not needed. That will tip us in the right direction
309 as if hashes exist, they will be used, even if not required */
310 auto const hsl = GetExpectedHashes();
311 if (hsl.usable())
312 return true;
313 if (hsl.empty() == false)
314 return false;
315 }
316 return true;
317}
318HashStringList pkgAcqTransactionItem::GetExpectedHashes() const
319{
320 return GetExpectedHashesFor(GetMetaKey());
321}
322
323APT_CONST bool pkgAcqMetaBase::HashesRequired() const
324{
325 // Release and co have no hashes 'by design'.
326 return false;
327}
328HashStringList pkgAcqMetaBase::GetExpectedHashes() const
329{
330 return HashStringList();
331}
332
333APT_CONST bool pkgAcqIndexDiffs::HashesRequired() const
334{
335 /* We can't check hashes of rred result as we don't know what the
336 hash of the file will be. We just know the hash of the patch(es),
337 the hash of the file they will apply to and the hash of the resulting
338 file. */
339 if (State == StateFetchDiff)
340 return true;
341 return false;
342}
343HashStringList pkgAcqIndexDiffs::GetExpectedHashes() const
344{
345 if (State == StateFetchDiff)
346 return available_patches[0].download_hashes;
347 return HashStringList();
348}
349
350APT_CONST bool pkgAcqIndexMergeDiffs::HashesRequired() const
351{
352 /* @see #pkgAcqIndexDiffs::HashesRequired, with the difference that
353 we can check the rred result after all patches are applied as
354 we know the expected result rather than potentially apply more patches */
355 if (State == StateFetchDiff)
356 return true;
357 return State == StateApplyDiff;
358}
359HashStringList pkgAcqIndexMergeDiffs::GetExpectedHashes() const
360{
361 if (State == StateFetchDiff)
362 return patch.download_hashes;
363 else if (State == StateApplyDiff)
364 return GetExpectedHashesFor(Target.MetaKey);
365 return HashStringList();
366}
367
368APT_CONST bool pkgAcqArchive::HashesRequired() const
369{
370 return LocalSource == false;
371}
372HashStringList pkgAcqArchive::GetExpectedHashes() const
373{
374 // figured out while parsing the records
375 return ExpectedHashes;
376}
377
378APT_CONST bool pkgAcqFile::HashesRequired() const
379{
380 // supplied as parameter at creation time, so the caller decides
381 return ExpectedHashes.usable();
382}
383HashStringList pkgAcqFile::GetExpectedHashes() const
384{
385 return ExpectedHashes;
386}
387 /*}}}*/
388// Acquire::Item::QueueURI and specialisations from child classes /*{{{*/
389bool pkgAcquire::Item::QueueURI(pkgAcquire::ItemDesc &Item)
390{
391 Owner->Enqueue(Item);
392 return true;
393}
394/* The idea here is that an item isn't queued if it exists on disk and the
395 transaction manager was a hit as this means that the files it contains
396 the checksums for can't be updated either (or they are and we are asking
397 for a hashsum mismatch to happen which helps nobody) */
398bool pkgAcqTransactionItem::QueueURI(pkgAcquire::ItemDesc &Item)
399{
400 if (TransactionManager->State != TransactionStarted)
401 {
402 if (_config->FindB("Debug::Acquire::Transaction", false))
403 std::clog << "Skip " << Target.URI << " as transaction was already dealt with!" << std::endl;
404 return false;
405 }
406 std::string const FinalFile = GetFinalFilename();
407 if (TransactionManager->IMSHit == true && FileExists(FinalFile) == true)
408 {
409 PartialFile = DestFile = FinalFile;
410 Status = StatDone;
411 return false;
412 }
413 // If we got the InRelease file via a mirror, pick all indexes directly from this mirror, too
414 if (TransactionManager->BaseURI.empty() == false && UsedMirror.empty() &&
415 URI::SiteOnly(Item.URI) != URI::SiteOnly(TransactionManager->BaseURI))
416 {
417 // this ensures we rewrite only once and only the first step
418 auto const OldBaseURI = Target.Option(IndexTarget::BASE_URI);
419 if (OldBaseURI.empty() == false && APT::String::Startswith(Item.URI, OldBaseURI))
420 {
421 auto const ExtraPath = Item.URI.substr(OldBaseURI.length());
422 Item.URI = flCombine(TransactionManager->BaseURI, ExtraPath);
423 UsedMirror = TransactionManager->UsedMirror;
424 if (Item.Description.find(" ") != string::npos)
425 Item.Description.replace(0, Item.Description.find(" "), UsedMirror);
426 }
427 }
428 return pkgAcquire::Item::QueueURI(Item);
429}
430/* The transaction manager InRelease itself (or its older sisters-in-law
431 Release & Release.gpg) is always queued as this allows us to rerun gpgv
432 on it to verify that we aren't stalled with old files */
433bool pkgAcqMetaBase::QueueURI(pkgAcquire::ItemDesc &Item)
434{
435 return pkgAcquire::Item::QueueURI(Item);
436}
437/* the Diff/Index also needs to queue the up-to-date complete index file
438 to ensure that the list cleaner doesn't eat it */
439bool pkgAcqDiffIndex::QueueURI(pkgAcquire::ItemDesc &Item)
440{
441 if (pkgAcqTransactionItem::QueueURI(Item) == true)
442 return true;
443 QueueOnIMSHit();
444 return false;
445}
446 /*}}}*/
447// Acquire::Item::GetFinalFilename and specialisations for child classes /*{{{*/
448std::string pkgAcquire::Item::GetFinalFilename() const
449{
450 // Beware: Desc.URI is modified by redirections
451 return GetFinalFileNameFromURI(Desc.URI);
452}
453std::string pkgAcqDiffIndex::GetFinalFilename() const
454{
455 std::string const FinalFile = GetFinalFileNameFromURI(GetDiffIndexURI(Target));
456 // we don't want to recompress, so let's keep whatever we got
457 if (CurrentCompressionExtension == "uncompressed")
458 return FinalFile;
459 return FinalFile + "." + CurrentCompressionExtension;
460}
461std::string pkgAcqIndex::GetFinalFilename() const
462{
463 std::string const FinalFile = GetFinalFileNameFromURI(Target.URI);
464 return GetKeepCompressedFileName(FinalFile, Target);
465}
466std::string pkgAcqMetaSig::GetFinalFilename() const
467{
468 return GetFinalFileNameFromURI(Target.URI);
469}
470std::string pkgAcqBaseIndex::GetFinalFilename() const
471{
472 return GetFinalFileNameFromURI(Target.URI);
473}
474std::string pkgAcqMetaBase::GetFinalFilename() const
475{
476 return GetFinalFileNameFromURI(Target.URI);
477}
478std::string pkgAcqArchive::GetFinalFilename() const
479{
480 return _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename);
481}
482 /*}}}*/
483// pkgAcqTransactionItem::GetMetaKey and specialisations for child classes /*{{{*/
484std::string pkgAcqTransactionItem::GetMetaKey() const
485{
486 return Target.MetaKey;
487}
488std::string pkgAcqIndex::GetMetaKey() const
489{
490 if (Stage == STAGE_DECOMPRESS_AND_VERIFY || CurrentCompressionExtension == "uncompressed")
491 return Target.MetaKey;
492 return Target.MetaKey + "." + CurrentCompressionExtension;
493}
494std::string pkgAcqDiffIndex::GetMetaKey() const
495{
496 auto const metakey = GetDiffIndexFileName(Target.MetaKey);
497 if (CurrentCompressionExtension == "uncompressed")
498 return metakey;
499 return metakey + "." + CurrentCompressionExtension;
500}
501 /*}}}*/
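// Example (illustrative): a Packages index fetched as .xz uses the Release entry
// main/binary-amd64/Packages.xz while downloading the compressed file and the plain
// main/binary-amd64/Packages entry in the decompress-and-verify stage; the diff
// index analogously maps to main/binary-amd64/Packages.diff/Index(.xz).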
502//pkgAcqTransactionItem::TransactionState and specialisations for child classes /*{{{*/
503bool pkgAcqTransactionItem::TransactionState(TransactionStates const state)
504{
505 bool const Debug = _config->FindB("Debug::Acquire::Transaction", false);
506 switch(state)
507 {
508 case TransactionStarted: _error->Fatal("Item %s changed to invalid transaction start state!", Target.URI.c_str()); break;
509 case TransactionAbort:
510 if(Debug == true)
511 std::clog << " Cancel: " << DestFile << std::endl;
512 if (Status == pkgAcquire::Item::StatIdle)
513 {
514 Status = pkgAcquire::Item::StatDone;
515 Dequeue();
516 }
517 break;
518 case TransactionCommit:
519 if(PartialFile.empty() == false)
520 {
521 bool sameFile = (PartialFile == DestFile);
522 // we use symlinks on IMS-Hit to avoid copies
523 if (RealFileExists(DestFile))
524 {
525 struct stat Buf;
526 if (lstat(PartialFile.c_str(), &Buf) != -1)
527 {
528 if (S_ISLNK(Buf.st_mode) && Buf.st_size > 0)
529 {
530 char partial[Buf.st_size + 1];
531 ssize_t const sp = readlink(PartialFile.c_str(), partial, Buf.st_size);
532 if (sp == -1)
533 _error->Errno("pkgAcqTransactionItem::TransactionState-sp", _("Failed to readlink %s"), PartialFile.c_str());
534 else
535 {
536 partial[sp] = '\0';
537 sameFile = (DestFile == partial);
538 }
539 }
540 }
541 else
542 _error->Errno("pkgAcqTransactionItem::TransactionState-stat", _("Failed to stat %s"), PartialFile.c_str());
543 }
544 if (sameFile == false)
545 {
546 // ensure that even without lists-cleanup all compressions are nuked
547 std::string FinalFile = GetFinalFileNameFromURI(Target.URI);
548 if (FileExists(FinalFile))
549 {
550 if(Debug == true)
551 std::clog << "rm " << FinalFile << " # " << DescURI() << std::endl;
552 if (RemoveFile("TransactionStates-Cleanup", FinalFile) == false)
553 return false;
554 }
555 for (auto const &ext: APT::Configuration::getCompressorExtensions())
556 {
557 auto const Final = FinalFile + ext;
558 if (FileExists(Final))
559 {
560 if(Debug == true)
561 std::clog << "rm " << Final << " # " << DescURI() << std::endl;
562 if (RemoveFile("TransactionStates-Cleanup", Final) == false)
563 return false;
564 }
565 }
566 if(Debug == true)
567 std::clog << "mv " << PartialFile << " -> "<< DestFile << " # " << DescURI() << std::endl;
568 if (Rename(PartialFile, DestFile) == false)
569 return false;
570 }
571 else if(Debug == true)
572 std::clog << "keep " << PartialFile << " # " << DescURI() << std::endl;
573
574 } else {
575 if(Debug == true)
576 std::clog << "rm " << DestFile << " # " << DescURI() << std::endl;
577 if (RemoveFile("TransItem::TransactionCommit", DestFile) == false)
578 return false;
579 }
580 break;
581 }
582 return true;
583}
584bool pkgAcqMetaBase::TransactionState(TransactionStates const state)
585{
586 // Do not remove InRelease on IMSHit of Release.gpg [yes, this is very edgecasey]
587 if (TransactionManager->IMSHit == false)
588 return pkgAcqTransactionItem::TransactionState(state);
589 return true;
590}
591bool pkgAcqIndex::TransactionState(TransactionStates const state)
592{
593 if (pkgAcqTransactionItem::TransactionState(state) == false)
594 return false;
595
596 switch (state)
597 {
598 case TransactionStarted: _error->Fatal("AcqIndex %s changed to invalid transaction start state!", Target.URI.c_str()); break;
599 case TransactionAbort:
600 if (Stage == STAGE_DECOMPRESS_AND_VERIFY)
601 {
602 // keep the compressed file, but drop the decompressed
603 EraseFileName.clear();
604 if (PartialFile.empty() == false && flExtension(PartialFile) != CurrentCompressionExtension)
605 RemoveFile("TransactionAbort", PartialFile);
606 }
607 break;
608 case TransactionCommit:
609 if (EraseFileName.empty() == false)
610 RemoveFile("AcqIndex::TransactionCommit", EraseFileName);
611 break;
612 }
613 return true;
614}
615bool pkgAcqDiffIndex::TransactionState(TransactionStates const state)
616{
617 if (pkgAcqTransactionItem::TransactionState(state) == false)
618 return false;
619
620 switch (state)
621 {
622 case TransactionStarted: _error->Fatal("Item %s changed to invalid transaction start state!", Target.URI.c_str()); break;
623 case TransactionCommit:
624 break;
625 case TransactionAbort:
626 std::string const Partial = GetPartialFileNameFromURI(Target.URI);
627 RemoveFile("TransactionAbort", Partial);
628 break;
629 }
630
631 return true;
632}
633 /*}}}*/
634
635class APT_HIDDEN NoActionItem : public pkgAcquire::Item /*{{{*/
636/* The sole purpose of this class is having an item which does nothing to
637 reach its done state to prevent cleanup deleting the mentioned file.
638 Handy in cases in which we know we have the file already, like IMS-Hits. */
639{
640 IndexTarget const Target;
641 public:
642 virtual std::string DescURI() const APT_OVERRIDE {return Target.URI;};
643 virtual HashStringList GetExpectedHashes() const APT_OVERRIDE {return HashStringList();};
644
645 NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target) :
646 pkgAcquire::Item(Owner), Target(Target)
647 {
648 Status = StatDone;
649 DestFile = GetFinalFileNameFromURI(Target.URI);
650 }
651 NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target, std::string const &FinalFile) :
652 pkgAcquire::Item(Owner), Target(Target)
653 {
654 Status = StatDone;
655 DestFile = FinalFile;
656 }
657};
658 /*}}}*/
659class APT_HIDDEN CleanupItem : public pkgAcqTransactionItem /*{{{*/
660/* This class ensures that a file which was configured but isn't downloaded
661 for various reasons isn't kept in an old version in the lists directory.
662 In a way it's the reverse of NoActionItem as it helps with removing files
663 even if the lists-cleanup is deactivated. */
664{
665 public:
666 virtual std::string DescURI() const APT_OVERRIDE {return Target.URI;};
667 virtual HashStringList GetExpectedHashes() const APT_OVERRIDE {return HashStringList();};
668
669 CleanupItem(pkgAcquire * const Owner, pkgAcqMetaClearSig * const TransactionManager, IndexTarget const &Target) :
670 pkgAcqTransactionItem(Owner, TransactionManager, Target)
671 {
672 Status = StatDone;
673 DestFile = GetFinalFileNameFromURI(Target.URI);
674 }
675 bool TransactionState(TransactionStates const state) APT_OVERRIDE
676 {
677 switch (state)
678 {
679 case TransactionStarted:
680 break;
681 case TransactionAbort:
682 break;
683 case TransactionCommit:
684 if (_config->FindB("Debug::Acquire::Transaction", false) == true)
685 std::clog << "rm " << DestFile << " # " << DescURI() << std::endl;
686 if (RemoveFile("TransItem::TransactionCommit", DestFile) == false)
687 return false;
688 break;
689 }
690 return true;
691 }
692};
693 /*}}}*/
694
695// Acquire::Item::Item - Constructor /*{{{*/
696class pkgAcquire::Item::Private
697{
698public:
699 std::vector<std::string> PastRedirections;
700};
701APT_IGNORE_DEPRECATED_PUSH
702pkgAcquire::Item::Item(pkgAcquire * const owner) :
703 FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), Local(false),
704 QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(new Private())
705{
706 Owner->Add(this);
707 Status = StatIdle;
708}
709APT_IGNORE_DEPRECATED_POP
710 /*}}}*/
711// Acquire::Item::~Item - Destructor /*{{{*/
712pkgAcquire::Item::~Item()
713{
714 Owner->Remove(this);
715 delete d;
716}
717 /*}}}*/
718std::string pkgAcquire::Item::Custom600Headers() const /*{{{*/
719{
720 return std::string();
721}
722 /*}}}*/
723std::string pkgAcquire::Item::ShortDesc() const /*{{{*/
724{
725 return DescURI();
726}
727 /*}}}*/
728APT_CONST void pkgAcquire::Item::Finished() /*{{{*/
729{
730}
731 /*}}}*/
732APT_PURE pkgAcquire * pkgAcquire::Item::GetOwner() const /*{{{*/
733{
734 return Owner;
735}
736 /*}}}*/
737APT_CONST pkgAcquire::ItemDesc &pkgAcquire::Item::GetItemDesc() /*{{{*/
738{
739 return Desc;
740}
741 /*}}}*/
742APT_CONST bool pkgAcquire::Item::IsTrusted() const /*{{{*/
743{
744 return false;
745}
746 /*}}}*/
747// Acquire::Item::Failed - Item failed to download /*{{{*/
748// ---------------------------------------------------------------------
749/* We return to an idle state if there are still other queues that could
750 fetch this object */
751static void formatHashsum(std::ostream &out, HashString const &hs)
752{
753 auto const type = hs.HashType();
754 if (type == "Checksum-FileSize")
755 out << " - Filesize";
756 else
757 out << " - " << type;
758 out << ':' << hs.HashValue();
759 if (hs.usable() == false)
760 out << " [weak]";
761 out << std::endl;
762}
763void pkgAcquire::Item::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
764{
765 if (QueueCounter <= 1)
766 {
767 /* This indicates that the file is not available right now but might
768 be sometime later. If we do a retry cycle then this should be
769 retried [CDROMs] */
770 if (Cnf != NULL && Cnf->LocalOnly == true &&
771 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
772 {
773 Status = StatIdle;
774 Dequeue();
775 return;
776 }
777
778 switch (Status)
779 {
780 case StatIdle:
781 case StatFetching:
782 case StatDone:
783 Status = StatError;
784 break;
785 case StatAuthError:
786 case StatError:
787 case StatTransientNetworkError:
788 break;
789 }
790 Complete = false;
791 Dequeue();
792 }
793
794 string const FailReason = LookupTag(Message, "FailReason");
795 enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, WEAK_HASHSUMS, REDIRECTION_LOOP, OTHER } failreason = OTHER;
796 if ( FailReason == "MaximumSizeExceeded")
797 failreason = MAXIMUM_SIZE_EXCEEDED;
798 else if ( FailReason == "WeakHashSums")
799 failreason = WEAK_HASHSUMS;
800 else if (FailReason == "RedirectionLoop")
801 failreason = REDIRECTION_LOOP;
802 else if (Status == StatAuthError)
803 failreason = HASHSUM_MISMATCH;
804
805 if(ErrorText.empty())
806 {
807 std::ostringstream out;
808 switch (failreason)
809 {
810 case HASHSUM_MISMATCH:
811 out << _("Hash Sum mismatch") << std::endl;
812 break;
813 case WEAK_HASHSUMS:
814 out << _("Insufficient information available to perform this download securely") << std::endl;
815 break;
816 case REDIRECTION_LOOP:
817 out << "Redirection loop encountered" << std::endl;
818 break;
819 case MAXIMUM_SIZE_EXCEEDED:
820 out << LookupTag(Message, "Message") << std::endl;
821 break;
822 case OTHER:
823 out << LookupTag(Message, "Message");
824 break;
825 }
826
827 if (Status == StatAuthError)
828 {
829 auto const ExpectedHashes = GetExpectedHashes();
830 if (ExpectedHashes.empty() == false)
831 {
832 out << "Hashes of expected file:" << std::endl;
833 for (auto const &hs: ExpectedHashes)
834 formatHashsum(out, hs);
835 }
836 if (failreason == HASHSUM_MISMATCH)
837 {
838 out << "Hashes of received file:" << std::endl;
839 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
840 {
841 std::string const tagname = std::string(*type) + "-Hash";
842 std::string const hashsum = LookupTag(Message, tagname.c_str());
843 if (hashsum.empty() == false)
844 formatHashsum(out, HashString(*type, hashsum));
845 }
846 }
847 auto const lastmod = LookupTag(Message, "Last-Modified", "");
848 if (lastmod.empty() == false)
849 out << "Last modification reported: " << lastmod << std::endl;
850 }
851 ErrorText = out.str();
852 }
853
854 switch (failreason)
855 {
856 case MAXIMUM_SIZE_EXCEEDED: RenameOnError(MaximumSizeExceeded); break;
857 case HASHSUM_MISMATCH: RenameOnError(HashSumMismatch); break;
858 case WEAK_HASHSUMS: break;
859 case REDIRECTION_LOOP: break;
860 case OTHER: break;
861 }
862
863 if (FailReason.empty() == false)
864 ReportMirrorFailureToCentral(*this, FailReason, ErrorText);
865 else
866 ReportMirrorFailureToCentral(*this, ErrorText, ErrorText);
867
868 if (QueueCounter > 1)
869 Status = StatIdle;
870}
871 /*}}}*/
872// Acquire::Item::Start - Item has begun to download /*{{{*/
873// ---------------------------------------------------------------------
874/* Stash status and the file size. Note that setting Complete means
875 sub-phases of the acquire process such as decompression are operating */
876void pkgAcquire::Item::Start(string const &/*Message*/, unsigned long long const Size)
877{
878 Status = StatFetching;
879 ErrorText.clear();
880 if (FileSize == 0 && Complete == false)
881 FileSize = Size;
882}
883 /*}}}*/
884// Acquire::Item::VerifyDone - check if Item was downloaded OK /*{{{*/
885/* Note that hash-verification is 'hardcoded' in acquire-worker and has
886 * already passed if this method is called. */
887bool pkgAcquire::Item::VerifyDone(std::string const &Message,
888 pkgAcquire::MethodConfig const * const /*Cnf*/)
889{
890 std::string const FileName = LookupTag(Message,"Filename");
891 if (FileName.empty() == true)
892 {
893 Status = StatError;
894 ErrorText = "Method gave a blank filename";
895 return false;
896 }
897
898 return true;
899}
900 /*}}}*/
901// Acquire::Item::Done - Item downloaded OK /*{{{*/
902void pkgAcquire::Item::Done(string const &/*Message*/, HashStringList const &Hashes,
903 pkgAcquire::MethodConfig const * const /*Cnf*/)
904{
905 // We just downloaded something..
906 if (FileSize == 0)
907 {
908 unsigned long long const downloadedSize = Hashes.FileSize();
909 if (downloadedSize != 0)
910 {
911 FileSize = downloadedSize;
912 }
913 }
914 Status = StatDone;
915 ErrorText = string();
916 Owner->Dequeue(this);
917}
918 /*}}}*/
919// Acquire::Item::Rename - Rename a file /*{{{*/
920// ---------------------------------------------------------------------
921/* This helper function is used by a lot of item methods as their final
922 step */
923bool pkgAcquire::Item::Rename(string const &From,string const &To)
924{
925 if (From == To || rename(From.c_str(),To.c_str()) == 0)
926 return true;
927
928 std::string S;
929 strprintf(S, _("rename failed, %s (%s -> %s)."), strerror(errno),
930 From.c_str(),To.c_str());
931 Status = StatError;
932 if (ErrorText.empty())
933 ErrorText = S;
934 else
935 ErrorText = ErrorText + ": " + S;
936 return false;
937}
938 /*}}}*/
939void pkgAcquire::Item::Dequeue() /*{{{*/
940{
941 Owner->Dequeue(this);
942}
943 /*}}}*/
944bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
945{
946 if (RealFileExists(DestFile))
947 Rename(DestFile, DestFile + ".FAILED");
948
949 std::string errtext;
950 switch (error)
951 {
952 case HashSumMismatch:
953 errtext = _("Hash Sum mismatch");
954 break;
955 case SizeMismatch:
956 errtext = _("Size mismatch");
957 Status = StatAuthError;
958 break;
959 case InvalidFormat:
960 errtext = _("Invalid file format");
961 Status = StatError;
962 // do not report as usually it's not the mirror's fault, but the portal/proxy
963 break;
964 case SignatureError:
965 errtext = _("Signature error");
966 Status = StatError;
967 break;
968 case NotClearsigned:
969 strprintf(errtext, _("Clearsigned file isn't valid, got '%s' (does the network require authentication?)"), "NOSPLIT");
970 Status = StatAuthError;
971 break;
972 case MaximumSizeExceeded:
973 // the method is expected to report a good error for this
974 break;
975 case PDiffError:
976 // no handling here, done by callers
977 break;
978 }
979 if (ErrorText.empty())
980 ErrorText = errtext;
981 return false;
982}
983 /*}}}*/
984void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess)/*{{{*/
985{
986 ActiveSubprocess = subprocess;
987 APT_IGNORE_DEPRECATED(Mode = ActiveSubprocess.c_str();)
988}
989 /*}}}*/
990// Acquire::Item::ReportMirrorFailure /*{{{*/
991void pkgAcquire::Item::ReportMirrorFailure(std::string const &FailCode)
992{
993 ReportMirrorFailureToCentral(*this, FailCode, FailCode);
994}
995 /*}}}*/
996std::string pkgAcquire::Item::HashSum() const /*{{{*/
997{
998 HashStringList const hashes = GetExpectedHashes();
999 HashString const * const hs = hashes.find(NULL);
1000 return hs != NULL ? hs->toStr() : "";
1001}
1002 /*}}}*/
1003bool pkgAcquire::Item::IsRedirectionLoop(std::string const &NewURI) /*{{{*/
1004{
1005 // store can fail due to permission errors and the item will "loop" then
1006 if (APT::String::Startswith(NewURI, "store:"))
1007 return false;
1008 if (d->PastRedirections.empty())
1009 {
1010 d->PastRedirections.push_back(NewURI);
1011 return false;
1012 }
1013 auto const LastURI = std::prev(d->PastRedirections.end());
1014 // redirections to the same file are a way of restarting/rescheduling,
1015 // individual methods will have to make sure that they aren't looping this way
1016 if (*LastURI == NewURI)
1017 return false;
1018 if (std::find(d->PastRedirections.begin(), LastURI, NewURI) != LastURI)
1019 return true;
1020 d->PastRedirections.push_back(NewURI);
1021 return false;
1022}
1023 /*}}}*/
1024int pkgAcquire::Item::Priority() /*{{{*/
1025{
1026 // Stage 1: Meta indices and diff indices
1027 // - those need to be fetched first to have progress reporting working
1028 // for the rest
1029 if (dynamic_cast<pkgAcqMetaSig*>(this) != nullptr
1030 || dynamic_cast<pkgAcqMetaBase*>(this) != nullptr
1031 || dynamic_cast<pkgAcqDiffIndex*>(this) != nullptr)
1032 return 1000;
1033 // Stage 2: Diff files
1034 // - fetch before complete indexes so we can apply the diffs while fetching
1035 // larger files.
1036 if (dynamic_cast<pkgAcqIndexDiffs*>(this) != nullptr ||
1037 dynamic_cast<pkgAcqIndexMergeDiffs*>(this) != nullptr)
1038 return 800;
1039
1040 // Stage 3: The rest - complete index files and other stuff
1041 return 500;
1042}
1043 /*}}}*/
1044
1045pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/
1046 pkgAcqMetaClearSig * const transactionManager, IndexTarget const &target) :
1047 pkgAcquire::Item(Owner), d(NULL), Target(target), TransactionManager(transactionManager)
1048{
1049 if (TransactionManager != this)
1050 TransactionManager->Add(this);
1051}
1052 /*}}}*/
1053pkgAcqTransactionItem::~pkgAcqTransactionItem() /*{{{*/
1054{
1055}
1056 /*}}}*/
1057HashStringList pkgAcqTransactionItem::GetExpectedHashesFor(std::string const &MetaKey) const /*{{{*/
1058{
1059 return GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, MetaKey);
1060}
1061 /*}}}*/
1062
1063static void LoadLastMetaIndexParser(pkgAcqMetaClearSig * const TransactionManager, std::string const &FinalRelease, std::string const &FinalInRelease)/*{{{*/
1064{
1065 if (TransactionManager->IMSHit == true)
1066 return;
1067 if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease))
1068 {
1069 TransactionManager->LastMetaIndexParser = TransactionManager->MetaIndexParser->UnloadedClone();
1070 if (TransactionManager->LastMetaIndexParser != NULL)
1071 {
1072 _error->PushToStack();
1073 if (RealFileExists(FinalInRelease))
1074 TransactionManager->LastMetaIndexParser->Load(FinalInRelease, NULL);
1075 else
1076 TransactionManager->LastMetaIndexParser->Load(FinalRelease, NULL);
1077 // it's unlikely to happen, but if what we have is bad, ignore it
1078 if (_error->PendingError())
1079 {
1080 delete TransactionManager->LastMetaIndexParser;
1081 TransactionManager->LastMetaIndexParser = NULL;
1082 }
1083 _error->RevertToStack();
1084 }
1085 }
1086}
1087 /*}}}*/
1088
1089// AcqMetaBase - Constructor /*{{{*/
1090pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire * const Owner,
1091 pkgAcqMetaClearSig * const TransactionManager,
1092 IndexTarget const &DataTarget)
1093: pkgAcqTransactionItem(Owner, TransactionManager, DataTarget), d(NULL),
1094 AuthPass(false), IMSHit(false), State(TransactionStarted)
1095{
1096}
1097 /*}}}*/
1098// AcqMetaBase::Add - Add an item to the current Transaction /*{{{*/
1099void pkgAcqMetaBase::Add(pkgAcqTransactionItem * const I)
1100{
1101 Transaction.push_back(I);
1102}
1103 /*}}}*/
1104// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/
1105void pkgAcqMetaBase::AbortTransaction()
1106{
1107 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1108 std::clog << "AbortTransaction: " << TransactionManager << std::endl;
1109
1110 switch (TransactionManager->State)
1111 {
1112 case TransactionStarted: break;
1113 case TransactionAbort: _error->Fatal("Transaction %s was already aborted and is aborted again", TransactionManager->Target.URI.c_str()); return;
1114 case TransactionCommit: _error->Fatal("Transaction %s was already aborted and is now committed", TransactionManager->Target.URI.c_str()); return;
1115 }
1116 TransactionManager->State = TransactionAbort;
1117
1118 // ensure the toplevel is in error state too
1119 for (std::vector<pkgAcqTransactionItem*>::iterator I = Transaction.begin();
1120 I != Transaction.end(); ++I)
1121 {
1122 if ((*I)->Status != pkgAcquire::Item::StatFetching)
1123 Owner->Dequeue(*I);
1124 (*I)->TransactionState(TransactionAbort);
1125 }
1126 Transaction.clear();
1127}
1128 /*}}}*/
1129// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/
1130APT_PURE bool pkgAcqMetaBase::TransactionHasError() const
1131{
1132 for (std::vector<pkgAcqTransactionItem*>::const_iterator I = Transaction.begin();
1133 I != Transaction.end(); ++I)
1134 {
1135 switch((*I)->Status) {
1136 case StatDone: break;
1137 case StatIdle: break;
1138 case StatAuthError: return true;
1139 case StatError: return true;
1140 case StatTransientNetworkError: return true;
1141 case StatFetching: break;
1142 }
1143 }
1144 return false;
1145}
1146 /*}}}*/
1147// AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/
1148void pkgAcqMetaBase::CommitTransaction()
1149{
1150 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1151 std::clog << "CommitTransaction: " << this << std::endl;
1152
1153 switch (TransactionManager->State)
1154 {
1155 case TransactionStarted: break;
1156 case TransactionAbort: _error->Fatal("Transaction %s was already committed and is now aborted", TransactionManager->Target.URI.c_str()); return;
1157 case TransactionCommit: _error->Fatal("Transaction %s was already committed and is again committed", TransactionManager->Target.URI.c_str()); return;
1158 }
1159 TransactionManager->State = TransactionCommit;
1160
1161 // move new files into place *and* remove files that are not
1162 // part of the transaction but are still on disk
1163 for (std::vector<pkgAcqTransactionItem*>::iterator I = Transaction.begin();
1164 I != Transaction.end(); ++I)
1165 {
1166 (*I)->TransactionState(TransactionCommit);
1167 }
1168 Transaction.clear();
1169}
1170 /*}}}*/
1171// AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/
1172void pkgAcqMetaBase::TransactionStageCopy(pkgAcqTransactionItem * const I,
1173 const std::string &From,
1174 const std::string &To)
1175{
1176 I->PartialFile = From;
1177 I->DestFile = To;
1178}
1179 /*}}}*/
1180// AcqMetaBase::TransactionStageRemoval - Stage a file for removal /*{{{*/
1181void pkgAcqMetaBase::TransactionStageRemoval(pkgAcqTransactionItem * const I,
1182 const std::string &FinalFile)
1183{
1184 I->PartialFile = "";
1185 I->DestFile = FinalFile;
1186}
1187 /*}}}*/
1188// AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/
1189/* This method is called from ::Failed handlers. If it returns true,
1190 no fallback to other files or modi is performed */
1191bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message)
1192{
1193 string const Final = I->GetFinalFilename();
1194 std::string const GPGError = LookupTag(Message, "Message");
1195 if (FileExists(Final))
1196 {
1197 I->Status = StatTransientNetworkError;
1198 _error->Warning(_("An error occurred during the signature verification. "
1199 "The repository is not updated and the previous index files will be used. "
1200 "GPG error: %s: %s"),
1201 Desc.Description.c_str(),
1202 GPGError.c_str());
1203 RunScripts("APT::Update::Auth-Failure");
1204 return true;
1205 } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) {
1206 /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */
1207 _error->Error(_("GPG error: %s: %s"),
1208 Desc.Description.c_str(),
1209 GPGError.c_str());
1210 I->Status = StatAuthError;
1211 return true;
1212 } else {
1213 _error->Warning(_("GPG error: %s: %s"),
1214 Desc.Description.c_str(),
1215 GPGError.c_str());
1216 }
1217 // gpgv method failed
1218 ReportMirrorFailureToCentral(*this, "GPGFailure", GPGError);
1219 return false;
1220}
1221 /*}}}*/
1222// AcqMetaBase::Custom600Headers - Get header for AcqMetaBase /*{{{*/
1223// ---------------------------------------------------------------------
1224string pkgAcqMetaBase::Custom600Headers() const
1225{
1226 std::string Header = "\nIndex-File: true";
1227 std::string MaximumSize;
1228 strprintf(MaximumSize, "\nMaximum-Size: %i",
1229 _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000));
1230 Header += MaximumSize;
1231
1232 string const FinalFile = GetFinalFilename();
1233 struct stat Buf;
1234 if (stat(FinalFile.c_str(),&Buf) == 0)
1235 Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
1236
1237 return Header;
1238}
1239 /*}}}*/
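// A resulting header block might look like this (sketch; values depend on
// configuration and on whether the final file already exists on disk):
//   Index-File: true
//   Maximum-Size: 10000000
//   Last-Modified: Thu, 01 Jan 2015 00:00:00 GMT
// with Maximum-Size coming from Acquire::MaxReleaseFileSize (default 10*1000*1000)
// and Last-Modified only emitted when the previous Release file is still around.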
1240// AcqMetaBase::QueueForSignatureVerify /*{{{*/
1241void pkgAcqMetaBase::QueueForSignatureVerify(pkgAcqTransactionItem * const I, std::string const &File, std::string const &Signature)
1242{
1243 AuthPass = true;
1244 I->Desc.URI = "gpgv:" + Signature;
1245 I->DestFile = File;
1246 QueueURI(I->Desc);
1247 I->SetActiveSubprocess("gpgv");
1248}
1249 /*}}}*/
1250// AcqMetaBase::CheckDownloadDone /*{{{*/
1251bool pkgAcqMetaBase::CheckDownloadDone(pkgAcqTransactionItem * const I, const std::string &Message, HashStringList const &Hashes) const
1252{
1253 // We have just finished downloading a Release file (it is not
1254 // verified yet)
1255
1256 // Save the final base URI we got this Release file from
1257 if (I->UsedMirror.empty() == false && _config->FindB("Acquire::SameMirrorForAllIndexes", true))
1258 {
1259 if (APT::String::Endswith(I->Desc.URI, "InRelease"))
1260 {
1261 TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("InRelease"));
1262 TransactionManager->UsedMirror = I->UsedMirror;
1263 }
1264 else if (APT::String::Endswith(I->Desc.URI, "Release"))
1265 {
1266 TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("Release"));
1267 TransactionManager->UsedMirror = I->UsedMirror;
1268 }
1269 }
1270
1271 std::string const FileName = LookupTag(Message,"Filename");
1272 if (FileName != I->DestFile && RealFileExists(I->DestFile) == false)
1273 {
1274 I->Local = true;
1275 I->Desc.URI = "copy:" + FileName;
1276 I->QueueURI(I->Desc);
1277 return false;
1278 }
1279
1280 // make sure to verify against the right file on I-M-S hit
1281 bool IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"), false);
1282 if (IMSHit == false && Hashes.usable())
1283 {
1284 // detect IMS-Hits the server hasn't detected, by Hash comparison
1285 std::string const FinalFile = I->GetFinalFilename();
1286 if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true)
1287 {
1288 IMSHit = true;
1289 RemoveFile("CheckDownloadDone", I->DestFile);
1290 }
1291 }
1292
1293 if(IMSHit == true)
1294 {
1295 // for simplicity, the transaction manager is always InRelease
1296 // even if it doesn't exist.
1297 I->PartialFile = I->DestFile = I->GetFinalFilename();
1298 }
1299
1300 // set Item to complete as the remaining work is all local (verify etc)
1301 I->Complete = true;
1302
1303 return true;
1304}
1305 /*}}}*/
1306bool pkgAcqMetaBase::CheckAuthDone(string const &Message) /*{{{*/
1307{
1308 // At this point, the gpgv method has succeeded, so there is a
1309 // valid signature from a key in the trusted keyring. We
1310 // perform additional verification of its contents, and use them
1311 // to verify the indexes we are about to download
1312 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1313 std::cerr << "Signature verification succeeded: " << DestFile << std::endl;
1314
1315 if (TransactionManager->IMSHit == false)
1316 {
1317 // open the last (In)Release if we have it
1318 std::string const FinalFile = GetFinalFilename();
1319 std::string FinalRelease;
1320 std::string FinalInRelease;
1321 if (APT::String::Endswith(FinalFile, "InRelease"))
1322 {
1323 FinalInRelease = FinalFile;
1324 FinalRelease = FinalFile.substr(0, FinalFile.length() - strlen("InRelease")) + "Release";
1325 }
1326 else
1327 {
1328 FinalInRelease = FinalFile.substr(0, FinalFile.length() - strlen("Release")) + "InRelease";
1329 FinalRelease = FinalFile;
1330 }
1331 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1332 }
1333
1334 bool const GoodAuth = TransactionManager->MetaIndexParser->Load(DestFile, &ErrorText);
1335 if (GoodAuth == false && AllowInsecureRepositories(InsecureType::WEAK, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == false)
1336 {
1337 Status = StatAuthError;
1338 return false;
1339 }
1340
1341 if (!VerifyVendor(Message))
1342 {
1343 Status = StatAuthError;
1344 return false;
1345 }
1346
1347 // Download further indexes with verification
1348 TransactionManager->QueueIndexes(GoodAuth);
1349
1350 return GoodAuth;
1351}
1352 /*}}}*/
1353void pkgAcqMetaClearSig::QueueIndexes(bool const verify) /*{{{*/
1354{
1355 // at this point the real Items are loaded in the fetcher
1356 ExpectedAdditionalItems = 0;
1357
1358 std::set<std::string> targetsSeen;
1359 bool const hasReleaseFile = TransactionManager->MetaIndexParser != NULL;
1360 bool const metaBaseSupportsByHash = hasReleaseFile && TransactionManager->MetaIndexParser->GetSupportsAcquireByHash();
1361 bool hasHashes = true;
1362 auto IndexTargets = TransactionManager->MetaIndexParser->GetIndexTargets();
1363 if (hasReleaseFile && verify == false)
1364 hasHashes = std::any_of(IndexTargets.begin(), IndexTargets.end(),
1365 [&](IndexTarget const &Target) { return TransactionManager->MetaIndexParser->Exists(Target.MetaKey); });
1366 if (_config->FindB("Acquire::IndexTargets::Randomized", true) && likely(IndexTargets.empty() == false))
1367 {
1368 /* For fallback handling and to have some reasonable progress information
1369 we can't randomize everything, but at least the order within the same type
1370 can be randomized, as we shouldn't be telling the mirrors (and everyone else watching)
1371 which arch is native/foreign, our specific order of preference for translations, … */
1372 auto range_start = IndexTargets.begin();
1373 std::random_device rd;
1374 std::default_random_engine g(rd());
1375 do {
1376 auto const type = range_start->Option(IndexTarget::CREATED_BY);
1377 auto const range_end = std::find_if_not(range_start, IndexTargets.end(),
1378 [&type](IndexTarget const &T) { return type == T.Option(IndexTarget::CREATED_BY); });
1379 std::shuffle(range_start, range_end, g);
1380 range_start = range_end;
1381 } while (range_start != IndexTargets.end());
1382 }
1383 for (auto&& Target: IndexTargets)
1384 {
1385 // if we have seen a target created by the target this one here is declared a
1386 // fallback of, we skip acquiring the fallback (but we make sure we clean up)
1387 if (targetsSeen.find(Target.Option(IndexTarget::FALLBACK_OF)) != targetsSeen.end())
1388 {
1389 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1390 new CleanupItem(Owner, TransactionManager, Target);
1391 continue;
1392 }
1393 // 'all' is an implementation detail. Users shouldn't use this as an arch.
1394 // We need this support trickery here as e.g. Debian has binary-all files already,
1395 // but arch:all packages are still in the arch:any files, so we would waste precious
1396 // download time, bandwidth and diskspace for nothing, BUT Debian doesn't feature 'all'
1397 // in the set of supported architectures, so we can filter based on this property rather
1398 // than invent an entirely new flag we would need to carry for all of eternity.
1399 if (hasReleaseFile && Target.Option(IndexTarget::ARCHITECTURE) == "all")
1400 {
1401 if (TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(Target) == false)
1402 {
1403 new CleanupItem(Owner, TransactionManager, Target);
1404 continue;
1405 }
1406 }
1407
1408 bool trypdiff = Target.OptionBool(IndexTarget::PDIFFS);
1409 if (hasReleaseFile == true)
1410 {
1411 if (TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false)
1412 {
1413 // optional targets that we do not have in the Release file are skipped
1414 if (Target.IsOptional)
1415 {
1416 new CleanupItem(Owner, TransactionManager, Target);
1417 continue;
1418 }
1419
1420 std::string const &arch = Target.Option(IndexTarget::ARCHITECTURE);
1421 if (arch.empty() == false)
1422 {
1423 if (TransactionManager->MetaIndexParser->IsArchitectureSupported(arch) == false)
1424 {
1425 new CleanupItem(Owner, TransactionManager, Target);
1426 _error->Notice(_("Skipping acquire of configured file '%s' as repository '%s' doesn't support architecture '%s'"),
1427 Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str(), arch.c_str());
1428 continue;
1429 }
1430 // if the architecture is officially supported but currently no packages for it are available,
1431 // ignore silently as this is pretty much the same as just shipping an empty file.
1432 // if we don't know which architectures are supported, we do NOT ignore it, so the user is notified about this
1433 if (hasHashes == true && TransactionManager->MetaIndexParser->IsArchitectureSupported("*undefined*") == false)
1434 {
1435 new CleanupItem(Owner, TransactionManager, Target);
1436 continue;
1437 }
1438 }
1439
1440 if (hasHashes == true)
1441 {
1442 Status = StatAuthError;
1443 strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), Target.MetaKey.c_str());
1444 return;
1445 }
1446 else
1447 {
1448 new pkgAcqIndex(Owner, TransactionManager, Target);
1449 continue;
1450 }
1451 }
1452 else if (verify)
1453 {
1454 auto const hashes = GetExpectedHashesFor(Target.MetaKey);
1455 if (hashes.empty() == false)
1456 {
1457 if (hashes.usable() == false && TargetIsAllowedToBe(TransactionManager->Target, InsecureType::WEAK) == false)
1458 {
1459 new CleanupItem(Owner, TransactionManager, Target);
1460 _error->Warning(_("Skipping acquire of configured file '%s' as repository '%s' provides only weak security information for it"),
1461 Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str());
1462 continue;
1463 }
1464 // empty files are skipped as acquiring the very small compressed files is a waste of time
1465 else if (hashes.FileSize() == 0)
1466 {
1467 new CleanupItem(Owner, TransactionManager, Target);
1468 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1469 continue;
1470 }
1471 }
1472 }
1473
1474 // autoselect the compression method
1475 std::vector<std::string> types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
1476 types.erase(std::remove_if(types.begin(), types.end(), [&](std::string const &t) {
1477 if (t == "uncompressed")
1478 return TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false;
1479 std::string const MetaKey = Target.MetaKey + "." + t;
1480 return TransactionManager->MetaIndexParser->Exists(MetaKey) == false;
1481 }), types.end());
1482 if (types.empty() == false)
1483 {
1484 std::ostringstream os;
1485 // add the special compressiontype byhash first if supported
1486 std::string const useByHashConf = Target.Option(IndexTarget::BY_HASH);
1487 bool useByHash = false;
1488 if(useByHashConf == "force")
1489 useByHash = true;
1490 else
1491 useByHash = StringToBool(useByHashConf) == true && metaBaseSupportsByHash;
1492 if (useByHash == true)
1493 os << "by-hash ";
1494 std::copy(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " "));
1495 os << *types.rbegin();
1496 Target.Options["COMPRESSIONTYPES"] = os.str();
1497 }
1498 else
1499 Target.Options["COMPRESSIONTYPES"].clear();
1500
1501 std::string filename = GetExistingFilename(GetFinalFileNameFromURI(Target.URI));
1502 if (filename.empty() == false)
1503 {
1504 // if the Release file is a hit and we have an index it must be the current one
1505 if (TransactionManager->IMSHit == true)
1506 ;
1507 else if (TransactionManager->LastMetaIndexParser != NULL)
1508 {
1509 // see if the file changed since the last Release file
1510 // we use the uncompressed files as we might compress differently compared to the server,
1511 // so the hashes might not match, even if they contain the same data.
1512 HashStringList const newFile = GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, Target.MetaKey);
1513 HashStringList const oldFile = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey);
1514 if (newFile != oldFile)
1515 filename.clear();
1516 }
1517 else
1518 filename.clear();
1519 }
1520 else
1521 trypdiff = false; // no file to patch
1522
1523 if (filename.empty() == false)
1524 {
1525 new NoActionItem(Owner, Target, filename);
1526 std::string const idxfilename = GetFinalFileNameFromURI(GetDiffIndexURI(Target));
1527 if (FileExists(idxfilename))
1528 new NoActionItem(Owner, Target, idxfilename);
1529 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1530 continue;
1531 }
1532
1533 // check if we have patches available
1534 trypdiff &= TransactionManager->MetaIndexParser->Exists(GetDiffIndexFileName(Target.MetaKey));
1535 }
1536 else
1537 {
1538 // if the source wanted these files they should have given us a release file :/
1539 if (Target.IsOptional)
1540 {
1541 new CleanupItem(Owner, TransactionManager, Target);
1542 continue;
1543 }
1544
1545 // if we have no file to patch, no point in trying
1546 trypdiff &= (GetExistingFilename(GetFinalFileNameFromURI(Target.URI)).empty() == false);
1547 }
1548
1549 // no point in patching from local sources
1550 if (trypdiff)
1551 {
1552 std::string const proto = Target.URI.substr(0, strlen("file:/"));
1553 if (proto == "file:/" || proto == "copy:/" || proto == "cdrom:")
1554 trypdiff = false;
1555 }
1556
1557 // Queue the Index file (Packages, Sources, Translation-$foo, …)
1558 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1559 if (trypdiff)
1560 new pkgAcqDiffIndex(Owner, TransactionManager, Target);
1561 else
1562 new pkgAcqIndex(Owner, TransactionManager, Target);
1563 }
1564}
1565 /*}}}*/
1566bool pkgAcqMetaBase::VerifyVendor(string const &) /*{{{*/
1567{
1568 if (TransactionManager->MetaIndexParser->GetValidUntil() > 0)
1569 {
1570 time_t const invalid_since = time(NULL) - TransactionManager->MetaIndexParser->GetValidUntil();
1571 if (invalid_since > 0)
1572 {
1573 std::string errmsg;
1574 strprintf(errmsg,
1575 // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
1576 // the time since then the file is invalid - formatted in the same way as in
1577 // the download progress display (e.g. 7d 3h 42min 1s)
1578 _("Release file for %s is expired (invalid since %s). "
1579 "Updates for this repository will not be applied."),
1580 Target.URI.c_str(), TimeToStr(invalid_since).c_str());
1581 if (ErrorText.empty())
1582 ErrorText = errmsg;
1583 return _error->Error("%s", errmsg.c_str());
1584 }
1585 }
1586
1587 /* Did we get a file older than what we have? This is a last minute IMS hit and doubles
1588 as a prevention of downgrading us to older (still valid) files */
1589 if (TransactionManager->IMSHit == false && TransactionManager->LastMetaIndexParser != NULL &&
1590 TransactionManager->LastMetaIndexParser->GetDate() > TransactionManager->MetaIndexParser->GetDate())
1591 {
1592 TransactionManager->IMSHit = true;
1593 RemoveFile("VerifyVendor", DestFile);
1594 PartialFile = DestFile = GetFinalFilename();
1595 // load the 'old' file into the 'new' one instead of flipping pointers as
1596 // the new one isn't owned by us while the old one is, so cleanup would get confused.
1597 TransactionManager->MetaIndexParser->swapLoad(TransactionManager->LastMetaIndexParser);
1598 delete TransactionManager->LastMetaIndexParser;
1599 TransactionManager->LastMetaIndexParser = NULL;
1600 }
1601
1602 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1603 {
1604 std::cerr << "Got Codename: " << TransactionManager->MetaIndexParser->GetCodename() << std::endl;
1605 std::cerr << "Got Suite: " << TransactionManager->MetaIndexParser->GetSuite() << std::endl;
1606 std::cerr << "Expecting Dist: " << TransactionManager->MetaIndexParser->GetExpectedDist() << std::endl;
1607 }
1608
1609 // One day that might become fatal…
1610 auto const ExpectedDist = TransactionManager->MetaIndexParser->GetExpectedDist();
1611 auto const NowCodename = TransactionManager->MetaIndexParser->GetCodename();
1612 if (TransactionManager->MetaIndexParser->CheckDist(ExpectedDist) == false)
1613 _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
1614 Desc.Description.c_str(), ExpectedDist.c_str(), NowCodename.c_str());
1615 // might be okay, might not be
1616 if (TransactionManager->LastMetaIndexParser != nullptr)
1617 {
1618 auto const LastCodename = TransactionManager->LastMetaIndexParser->GetCodename();
1619 if (LastCodename.empty() == false && NowCodename.empty() == false && LastCodename != NowCodename)
1620 _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
1621 Desc.Description.c_str(), LastCodename.c_str(), NowCodename.c_str());
1622 }
1623 return true;
1624}
1625 /*}}}*/
1626pkgAcqMetaBase::~pkgAcqMetaBase()
1627{
1628}
1629
1630pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire * const Owner, /*{{{*/
1631 IndexTarget const &ClearsignedTarget,
1632 IndexTarget const &DetachedDataTarget, IndexTarget const &DetachedSigTarget,
1633 metaIndex * const MetaIndexParser) :
1634 pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget),
1635 d(NULL), DetachedDataTarget(DetachedDataTarget),
1636 MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL)
1637{
1638 // index targets + (worst case:) Release/Release.gpg
1639 ExpectedAdditionalItems = std::numeric_limits<decltype(ExpectedAdditionalItems)>::max();
1640 TransactionManager->Add(this);
1641}
1642 /*}}}*/
1643pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
1644{
1645 if (LastMetaIndexParser != NULL)
1646 delete LastMetaIndexParser;
1647}
1648 /*}}}*/
1649// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
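// A minimal sketch of what this adds (values hypothetical): on top of whatever
// pkgAcqMetaBase::Custom600Headers() already contributes, it appends
//   Fail-Ignore: true
//   Signed-By: 0123456789ABCDEF0123456789ABCDEF01234567
// where Signed-By only appears if the metaindex parser reports a Signed-By key.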
1650string pkgAcqMetaClearSig::Custom600Headers() const
1651{
1652 string Header = pkgAcqMetaBase::Custom600Headers();
1653 Header += "\nFail-Ignore: true";
1654 std::string const key = TransactionManager->MetaIndexParser->GetSignedBy();
1655 if (key.empty() == false)
1656 Header += "\nSigned-By: " + key;
1657
1658 return Header;
1659}
1660 /*}}}*/
1661void pkgAcqMetaClearSig::Finished() /*{{{*/
1662{
1663 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1664 std::clog << "Finished: " << DestFile <<std::endl;
1665 if(TransactionManager->State == TransactionStarted &&
1666 TransactionManager->TransactionHasError() == false)
1667 TransactionManager->CommitTransaction();
1668}
1669 /*}}}*/
1670bool pkgAcqMetaClearSig::VerifyDone(std::string const &Message, /*{{{*/
1671 pkgAcquire::MethodConfig const * const Cnf)
1672{
1673 Item::VerifyDone(Message, Cnf);
1674
1675 if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile))
1676 return RenameOnError(NotClearsigned);
1677
1678 return true;
1679}
1680 /*}}}*/
1681// pkgAcqMetaClearSig::Done - We got a file /*{{{*/
1682void pkgAcqMetaClearSig::Done(std::string const &Message,
1683 HashStringList const &Hashes,
1684 pkgAcquire::MethodConfig const * const Cnf)
1685{
1686 Item::Done(Message, Hashes, Cnf);
1687
1688 if(AuthPass == false)
1689 {
1690 if(CheckDownloadDone(this, Message, Hashes) == true)
1691 QueueForSignatureVerify(this, DestFile, DestFile);
1692 return;
1693 }
1694 else if(CheckAuthDone(Message) == true)
1695 {
1696 if (TransactionManager->IMSHit == false)
1697 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
1698 else if (RealFileExists(GetFinalFilename()) == false)
1699 {
1700 // We got an IMS hit for the InRelease file, but we don't have one on disk, which means
1701 // a valid Release/Release.gpg combo stepped in, which we have
1702 // to 'acquire' now to ensure list cleanup isn't removing them
1703 new NoActionItem(Owner, DetachedDataTarget);
1704 new NoActionItem(Owner, DetachedSigTarget);
1705 }
1706 }
1707 else if (Status != StatAuthError)
1708 {
1709 string const FinalFile = GetFinalFileNameFromURI(DetachedDataTarget.URI);
1710 string const OldFile = GetFinalFilename();
1711 if (TransactionManager->IMSHit == false)
1712 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
1713 else if (RealFileExists(OldFile) == false)
1714 new NoActionItem(Owner, DetachedDataTarget);
1715 else
1716 TransactionManager->TransactionStageCopy(this, OldFile, FinalFile);
1717 }
1718}
1719 /*}}}*/
1720void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) /*{{{*/
1721{
1722 Item::Failed(Message, Cnf);
1723
1724 if (AuthPass == false)
1725 {
1726 if (Status == StatAuthError || Status == StatTransientNetworkError)
1727 {
1728 // if we expected a ClearTextSignature (InRelease) but got a network
1729 // error or got a file that wasn't valid, we end up here (see VerifyDone).
1730 // As this is usually caused by web portals we do not try Release/Release.gpg
1731 // as that is going to fail anyway and instead abort our attempt (LP#346386)
1732 TransactionManager->AbortTransaction();
1733 return;
1734 }
1735
1736 // Queue the 'old' InRelease file for removal if we try Release.gpg
1737 // as otherwise the file will stay around and give a false-auth
1738 // impression (CVE-2012-0214)
1739 TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
1740 Status = StatDone;
1741
1742 new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget);
1743 }
1744 else
1745 {
1746 if(CheckStopAuthentication(this, Message))
1747 return;
1748
1749 if(AllowInsecureRepositories(InsecureType::UNSIGNED, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1750 {
1751 Status = StatDone;
1752
1753 /* InRelease files become Release files, otherwise
1754 * they would be considered as trusted later on */
1755 string const FinalRelease = GetFinalFileNameFromURI(DetachedDataTarget.URI);
1756 string const PartialRelease = GetPartialFileNameFromURI(DetachedDataTarget.URI);
1757 string const FinalReleasegpg = GetFinalFileNameFromURI(DetachedSigTarget.URI);
1758 string const FinalInRelease = GetFinalFilename();
1759 Rename(DestFile, PartialRelease);
1760 TransactionManager->TransactionStageCopy(this, PartialRelease, FinalRelease);
1761 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1762
1763 // we parse the indexes here because at this point the user wanted
1764 // a repository that may potentially harm him
1765 if (TransactionManager->MetaIndexParser->Load(PartialRelease, &ErrorText) == false || VerifyVendor(Message) == false)
1766 /* expired Release files are still a problem you need extra force for */;
1767 else
1768 TransactionManager->QueueIndexes(true);
1769 }
1770 }
1771}
1772 /*}}}*/
1773
1774pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner, /*{{{*/
1775 pkgAcqMetaClearSig * const TransactionManager,
1776 IndexTarget const &DataTarget,
1777 IndexTarget const &DetachedSigTarget) :
1778 pkgAcqMetaBase(Owner, TransactionManager, DataTarget), d(NULL),
1779 DetachedSigTarget(DetachedSigTarget)
1780{
1781 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1782 std::clog << "New pkgAcqMetaIndex with TransactionManager "
1783 << this->TransactionManager << std::endl;
1784
1785 DestFile = GetPartialFileNameFromURI(DataTarget.URI);
1786
1787 // Create the item
1788 Desc.Description = DataTarget.Description;
1789 Desc.Owner = this;
1790 Desc.ShortDesc = DataTarget.ShortDesc;
1791 Desc.URI = DataTarget.URI;
1792 QueueURI(Desc);
1793}
1794 /*}}}*/
1795void pkgAcqMetaIndex::Done(string const &Message, /*{{{*/
1796 HashStringList const &Hashes,
1797 pkgAcquire::MethodConfig const * const Cfg)
1798{
1799 Item::Done(Message,Hashes,Cfg);
1800
1801 if(CheckDownloadDone(this, Message, Hashes))
1802 {
1803 // we have a Release file, now download the Signature; all further
1804 // verification and queueing of additional downloads will be done in the
1805 // pkgAcqMetaSig::Done() code
1806 new pkgAcqMetaSig(Owner, TransactionManager, DetachedSigTarget, this);
1807 }
1808}
1809 /*}}}*/
1810// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/
1811void pkgAcqMetaIndex::Failed(string const &Message,
1812 pkgAcquire::MethodConfig const * const Cnf)
1813{
1814 pkgAcquire::Item::Failed(Message, Cnf);
1815 Status = StatDone;
1816
1817 // No Release file was present, so fall
1818 // back to queueing Packages files without verification;
1819 // only allow going further if the user explicitly wants it
1820 if(AllowInsecureRepositories(InsecureType::NORELEASE, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1821 {
1822 // ensure old Release files are removed
1823 TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
1824
1825 // queue without any kind of hashsum support
1826 TransactionManager->QueueIndexes(false);
1827 }
1828}
1829 /*}}}*/
1830std::string pkgAcqMetaIndex::DescURI() const /*{{{*/
1831{
1832 return Target.URI;
1833}
1834 /*}}}*/
1835pkgAcqMetaIndex::~pkgAcqMetaIndex() {}
1836
1837// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/
1838pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire * const Owner,
1839 pkgAcqMetaClearSig * const TransactionManager,
1840 IndexTarget const &Target,
1841 pkgAcqMetaIndex * const MetaIndex) :
1842 pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL), MetaIndex(MetaIndex)
1843{
1844 DestFile = GetPartialFileNameFromURI(Target.URI);
1845
1846 // remove any partially downloaded sig-file in partial/ as
1847 // it may confuse proxies and is too small to warrant a
1848 // partial download anyway
1849 RemoveFile("pkgAcqMetaSig", DestFile);
1850
1851 // set the TransactionManager
1852 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1853 std::clog << "New pkgAcqMetaSig with TransactionManager "
1854 << TransactionManager << std::endl;
1855
1856 // Create the item
1857 Desc.Description = Target.Description;
1858 Desc.Owner = this;
1859 Desc.ShortDesc = Target.ShortDesc;
1860 Desc.URI = Target.URI;
1861
1862 // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors),
1863 // so we skip the download step and go instantly to verification
1864 if (TransactionManager->IMSHit == true && RealFileExists(GetFinalFilename()))
1865 {
1866 Complete = true;
1867 Status = StatDone;
1868 PartialFile = DestFile = GetFinalFilename();
1869 MetaIndexFileSignature = DestFile;
1870 MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile);
1871 }
1872 else
1873 QueueURI(Desc);
1874}
1875 /*}}}*/
1876pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
1877{
1878}
1879 /*}}}*/
1880// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
1881std::string pkgAcqMetaSig::Custom600Headers() const
1882{
1883 std::string Header = pkgAcqTransactionItem::Custom600Headers();
1884 std::string const key = TransactionManager->MetaIndexParser->GetSignedBy();
1885 if (key.empty() == false)
1886 Header += "\nSigned-By: " + key;
1887 return Header;
1888}
1889 /*}}}*/
1890// AcqMetaSig::Done - The signature was downloaded/verified /*{{{*/
1891void pkgAcqMetaSig::Done(string const &Message, HashStringList const &Hashes,
1892 pkgAcquire::MethodConfig const * const Cfg)
1893{
1894 if (MetaIndexFileSignature.empty() == false)
1895 {
1896 DestFile = MetaIndexFileSignature;
1897 MetaIndexFileSignature.clear();
1898 }
1899 Item::Done(Message, Hashes, Cfg);
1900
1901 if(MetaIndex->AuthPass == false)
1902 {
1903 if(MetaIndex->CheckDownloadDone(this, Message, Hashes) == true)
1904 {
1905 // destfile will be modified to point to MetaIndexFile for the
1906 // gpgv method, so we need to save it here
1907 MetaIndexFileSignature = DestFile;
1908 MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile);
1909 }
1910 return;
1911 }
1912 else if(MetaIndex->CheckAuthDone(Message) == true)
1913 {
1914 auto const Releasegpg = GetFinalFilename();
1915 auto const Release = MetaIndex->GetFinalFilename();
1916 // if this is an IMS-Hit on Release ensure we also have the Release.gpg file stored
1917 // (previously an unknown pubkey) – but only if the Release file exists locally (unlikely
1918 // event of InRelease removed from the mirror causing fallback but still an IMS-Hit)
1919 if (TransactionManager->IMSHit == false ||
1920 (FileExists(Releasegpg) == false && FileExists(Release) == true))
1921 {
1922 TransactionManager->TransactionStageCopy(this, DestFile, Releasegpg);
1923 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, Release);
1924 }
1925 }
1926 else if (MetaIndex->Status != StatAuthError)
1927 {
1928 std::string const FinalFile = MetaIndex->GetFinalFilename();
1929 if (TransactionManager->IMSHit == false)
1930 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalFile);
1931 else
1932 TransactionManager->TransactionStageCopy(MetaIndex, FinalFile, FinalFile);
1933 }
1934}
1935 /*}}}*/
1936void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
1937{
1938 Item::Failed(Message,Cnf);
1939
1940 // check if we need to fail at this point
1941 if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message))
1942 return;
1943
1944 // ensures that a Release.gpg file in the lists/ is removed by the transaction
1945 TransactionManager->TransactionStageRemoval(this, DestFile);
1946
1947 // only allow going further if the user explicitly wants it
1948 if (AllowInsecureRepositories(InsecureType::UNSIGNED, MetaIndex->Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1949 {
1950 string const FinalRelease = MetaIndex->GetFinalFilename();
1951 string const FinalInRelease = TransactionManager->GetFinalFilename();
1952 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1953
1954 // we parse the indexes here because at this point the user wanted
1955 // a repository that may potentially harm him
1956 bool const GoodLoad = TransactionManager->MetaIndexParser->Load(MetaIndex->DestFile, &ErrorText);
1957 if (MetaIndex->VerifyVendor(Message) == false)
1958 /* expired Release files are still a problem you need extra force for */;
1959 else
1960 TransactionManager->QueueIndexes(GoodLoad);
1961
1962 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalRelease);
1963 }
1964 else if (TransactionManager->IMSHit == false)
1965 Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED");
1966
1967 // FIXME: this is used often (e.g. in pkgAcqIndexTrans) so refactor
1968 if (Cnf->LocalOnly == true ||
1969 StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
1970 {
1971 // Ignore this
1972 Status = StatDone;
1973 }
1974}
1975 /*}}}*/
1976
1977
1978// AcqBaseIndex - Constructor /*{{{*/
1979pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire * const Owner,
1980 pkgAcqMetaClearSig * const TransactionManager,
1981 IndexTarget const &Target)
1982: pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL)
1983{
1984}
1985 /*}}}*/
1986void pkgAcqBaseIndex::Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
1987{
1988 pkgAcquire::Item::Failed(Message, Cnf);
1989 if (Status != StatAuthError)
1990 return;
1991
1992 ErrorText.append("Release file created at: ");
1993 auto const timespec = TransactionManager->MetaIndexParser->GetDate();
1994 if (timespec == 0)
1995 ErrorText.append("<unknown>");
1996 else
1997 ErrorText.append(TimeRFC1123(timespec, true));
1998 ErrorText.append("\n");
1999}
2000 /*}}}*/
2001pkgAcqBaseIndex::~pkgAcqBaseIndex() {}
2002
2003// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
2004// ---------------------------------------------------------------------
2005 /* Get the DiffIndex file first and see if there are patches available.
2006 * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
2007 * patches. If anything goes wrong in that process, it will fall back to
2008 * the original packages file
2009 */
2010pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner,
2011 pkgAcqMetaClearSig * const TransactionManager,
2012 IndexTarget const &Target)
2013 : pkgAcqIndex(Owner, TransactionManager, Target, true), d(NULL), diffs(NULL)
2014{
2015 // FIXME: Magic number as an upper bound on pdiffs we will reasonably acquire
2016 ExpectedAdditionalItems = 40;
2017 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2018
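// Rebuild the compression type list below so we only request .diff/Index variants
// the Release file actually advertises; "uncompressed" and the last configured type
// are always kept, e.g. (hypothetical) "xz bz2 gz uncompressed" may shrink to "xz uncompressed".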
2019 CompressionExtensions.clear();
2020 {
2021 std::vector<std::string> types = APT::Configuration::getCompressionTypes();
2022 if (types.empty() == false)
2023 {
2024 std::ostringstream os;
2025 std::copy_if(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " "), [&](std::string const type) {
2026 if (type == "uncompressed")
2027 return true;
2028 return TransactionManager->MetaIndexParser->Exists(GetDiffIndexFileName(Target.MetaKey) + '.' + type);
2029 });
2030 os << *types.rbegin();
2031 CompressionExtensions = os.str();
2032 }
2033 }
2034 if (Target.Option(IndexTarget::COMPRESSIONTYPES).find("by-hash") != std::string::npos)
2035 CompressionExtensions = "by-hash " + CompressionExtensions;
2036 Init(GetDiffIndexURI(Target), GetDiffIndexFileName(Target.Description), Target.ShortDesc);
2037
2038 if(Debug)
2039 std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
2040}
2041 /*}}}*/
2042void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/
2043{
2044 // list cleanup needs to know that this file as well as the already
2045 // present index is ours, so we create an empty diff to save it for us
2046 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, UsedMirror, Target.URI);
2047}
2048 /*}}}*/
2049static bool RemoveFileForBootstrapLinking(bool const Debug, std::string const &For, std::string const &Boot)/*{{{*/
2050{
2051 if (FileExists(Boot) && RemoveFile("Bootstrap-linking", Boot) == false)
2052 {
2053 if (Debug)
2054 std::clog << "Bootstrap-linking for patching " << For
2055 << " by removing stale " << Boot << " failed!" << std::endl;
2056 return false;
2057 }
2058 return true;
2059}
2060 /*}}}*/
2061bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
2062{
2063 ExpectedAdditionalItems = 0;
2064 // failing here is fine: our caller will take care of trying to
2065 // get the complete file if patching fails
2066 if(Debug)
2067 std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
2068 << std::endl;
2069
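// For orientation, a pdiff Index file as parsed below roughly looks like this
// (field names as used in the code, file names and values hypothetical):
//   SHA256-Current: <hash of the current full index> <size>
//   SHA256-History:
//    <hash> <size> 2024-01-01-0800.32
//   SHA256-Patches:
//    <hash> <size> 2024-01-01-0800.32
//   SHA256-Download:
//    <hash> <size> 2024-01-01-0800.32.gz
// The *-Current stanza is handled first, the per-patch stanzas afterwards.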
2070 FileFd Fd(IndexDiffFile, FileFd::ReadOnly, FileFd::Extension);
2071 pkgTagFile TF(&Fd);
2072 if (Fd.IsOpen() == false || Fd.Failed())
2073 return false;
2074
2075 pkgTagSection Tags;
2076 if(unlikely(TF.Step(Tags) == false))
2077 return false;
2078
2079 HashStringList ServerHashes;
2080 unsigned long long ServerSize = 0;
2081
2082 auto const &posix = std::locale::classic();
2083 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
2084 {
2085 std::string tagname = *type;
2086 tagname.append("-Current");
2087 std::string const tmp = Tags.FindS(tagname.c_str());
2088 if (tmp.empty() == true)
2089 continue;
2090
2091 string hash;
2092 unsigned long long size;
2093 std::stringstream ss(tmp);
2094 ss.imbue(posix);
2095 ss >> hash >> size;
2096 if (unlikely(hash.empty() == true))
2097 continue;
2098 if (unlikely(ServerSize != 0 && ServerSize != size))
2099 continue;
2100 ServerHashes.push_back(HashString(*type, hash));
2101 ServerSize = size;
2102 }
2103
2104 if (ServerHashes.usable() == false)
2105 {
2106 if (Debug == true)
2107 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl;
2108 return false;
2109 }
2110
2111 std::string const CurrentPackagesFile = GetFinalFileNameFromURI(Target.URI);
2112 HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey);
2113 if (TargetFileHashes.usable() == false || ServerHashes != TargetFileHashes)
2114 {
2115 if (Debug == true)
2116 {
2117 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl;
2118 printHashSumComparison(CurrentPackagesFile, ServerHashes, TargetFileHashes);
2119 }
2120 return false;
2121 }
2122
2123 HashStringList LocalHashes;
2124 // try to avoid calculating the hash here as this is costly
2125 if (TransactionManager->LastMetaIndexParser != NULL)
2126 LocalHashes = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey);
2127 if (LocalHashes.usable() == false)
2128 {
2129 FileFd fd(CurrentPackagesFile, FileFd::ReadOnly, FileFd::Auto);
2130 Hashes LocalHashesCalc(ServerHashes);
2131 LocalHashesCalc.AddFD(fd);
2132 LocalHashes = LocalHashesCalc.GetHashStringList();
2133 }
2134
2135 if (ServerHashes == LocalHashes)
2136 {
2137 // we have the same hashes as the server so we are done here
2138 if(Debug)
2139 std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl;
2140 QueueOnIMSHit();
2141 return true;
2142 }
2143
2144 if(Debug)
2145 std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at "
2146 << CurrentPackagesFile << " " << LocalHashes.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl;
2147
2148 // historically, older hashes have more info than newer ones, so start
2149 // collecting with older ones first to avoid implementing complicated
2150 // information merging techniques… a failure is after all always
2151 // recoverable with a complete file and hashes aren't changed that often.
2152 std::vector<char const *> types;
2153 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
2154 types.push_back(*type);
2155
2156 // parse all of (provided) history
2157 vector<DiffInfo> available_patches;
2158 bool firstAcceptedHashes = true;
2159 for (auto type = types.crbegin(); type != types.crend(); ++type)
2160 {
2161 if (LocalHashes.find(*type) == NULL)
2162 continue;
2163
2164 std::string tagname = *type;
2165 tagname.append("-History");
2166 std::string const tmp = Tags.FindS(tagname.c_str());
2167 if (tmp.empty() == true)
2168 continue;
2169
2170 string hash, filename;
2171 unsigned long long size;
2172 std::stringstream ss(tmp);
2173 ss.imbue(posix);
2174
2175 while (ss >> hash >> size >> filename)
2176 {
2177 if (unlikely(hash.empty() == true || filename.empty() == true))
2178 continue;
2179
2180 // see if we have a record for this file already
2181 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2182 for (; cur != available_patches.end(); ++cur)
2183 {
2184 if (cur->file != filename)
2185 continue;
2186 cur->result_hashes.push_back(HashString(*type, hash));
2187 break;
2188 }
2189 if (cur != available_patches.end())
2190 continue;
2191 if (firstAcceptedHashes == true)
2192 {
2193 DiffInfo next;
2194 next.file = filename;
2195 next.result_hashes.push_back(HashString(*type, hash));
2196 next.result_hashes.FileSize(size);
2197 available_patches.push_back(next);
2198 }
2199 else
2200 {
2201 if (Debug == true)
2202 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2203 << " wasn't in the list for the first parsed hash! (history)" << std::endl;
2204 break;
2205 }
2206 }
2207 firstAcceptedHashes = false;
2208 }
2209
2210 if (unlikely(available_patches.empty() == true))
2211 {
2212 if (Debug)
2213 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
2214 << "Couldn't find any patches for the patch series." << std::endl;
2215 return false;
2216 }
2217
2218 for (auto type = types.crbegin(); type != types.crend(); ++type)
2219 {
2220 if (LocalHashes.find(*type) == NULL)
2221 continue;
2222
2223 std::string tagname = *type;
2224 tagname.append("-Patches");
2225 std::string const tmp = Tags.FindS(tagname.c_str());
2226 if (tmp.empty() == true)
2227 continue;
2228
2229 string hash, filename;
2230 unsigned long long size;
2231 std::stringstream ss(tmp);
2232 ss.imbue(posix);
2233
2234 while (ss >> hash >> size >> filename)
2235 {
2236 if (unlikely(hash.empty() == true || filename.empty() == true))
2237 continue;
2238
2239 // see if we have a record for this file already
2240 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2241 for (; cur != available_patches.end(); ++cur)
2242 {
2243 if (cur->file != filename)
2244 continue;
2245 if (cur->patch_hashes.empty())
2246 cur->patch_hashes.FileSize(size);
2247 cur->patch_hashes.push_back(HashString(*type, hash));
2248 break;
2249 }
2250 if (cur != available_patches.end())
2251 continue;
2252 if (Debug == true)
2253 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2254 << " wasn't in the list for the first parsed hash! (patches)" << std::endl;
2255 break;
2256 }
2257 }
2258
2259 for (auto type = types.crbegin(); type != types.crend(); ++type)
2260 {
2261 std::string tagname = *type;
2262 tagname.append("-Download");
2263 std::string const tmp = Tags.FindS(tagname.c_str());
2264 if (tmp.empty() == true)
2265 continue;
2266
2267 string hash, filename;
2268 unsigned long long size;
2269 std::stringstream ss(tmp);
2270 ss.imbue(posix);
2271
2272 // FIXME: the pdiff support handles only .gz compressed patches
2273 while (ss >> hash >> size >> filename)
2274 {
2275 if (unlikely(hash.empty() == true || filename.empty() == true))
2276 continue;
2277 if (unlikely(APT::String::Endswith(filename, ".gz") == false))
2278 continue;
2279 filename.erase(filename.length() - 3);
2280
2281 // see if we have a record for this file already
2282 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2283 for (; cur != available_patches.end(); ++cur)
2284 {
2285 if (cur->file != filename)
2286 continue;
2287 if (cur->download_hashes.empty())
2288 cur->download_hashes.FileSize(size);
2289 cur->download_hashes.push_back(HashString(*type, hash));
2290 break;
2291 }
2292 if (cur != available_patches.end())
2293 continue;
2294 if (Debug == true)
2295 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2296 << " wasn't in the list for the first parsed hash! (download)" << std::endl;
2297 break;
2298 }
2299 }
2300
2301
2302 bool foundStart = false;
2303 for (std::vector<DiffInfo>::iterator cur = available_patches.begin();
2304 cur != available_patches.end(); ++cur)
2305 {
2306 if (LocalHashes != cur->result_hashes)
2307 continue;
2308
2309 available_patches.erase(available_patches.begin(), cur);
2310 foundStart = true;
2311 break;
2312 }
2313
2314 if (foundStart == false || unlikely(available_patches.empty() == true))
2315 {
2316 if (Debug)
2317 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
2318 << "Couldn't find the start of the patch series." << std::endl;
2319 return false;
2320 }
2321
2322 for (auto const &patch: available_patches)
2323 if (patch.result_hashes.usable() == false ||
2324 patch.patch_hashes.usable() == false ||
2325 patch.download_hashes.usable() == false)
2326 {
2327 if (Debug)
2328 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": provides no usable hashes for " << patch.file
2329 << " so fallback to complete download" << std::endl;
2330 return false;
2331 }
2332
2333 // patching with too many files is rather slow compared to a fast download
2334 unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
2335 if (fileLimit != 0 && fileLimit < available_patches.size())
2336 {
2337 if (Debug)
2338 std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
2339 << ") so fallback to complete download" << std::endl;
2340 return false;
2341 }
2342
2343 // calculate the size of all patches we have to get
2344 unsigned short const sizeLimitPercent = _config->FindI("Acquire::PDiffs::SizeLimit", 100);
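// e.g. (hypothetical numbers) with the default SizeLimit of 100 the patches may
// sum up to at most 100% of the compressed index size: 12 MB of pending pdiffs
// against a 10 MB Packages.xz would fall back to a complete download below.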
2345 if (sizeLimitPercent > 0)
2346 {
2347 unsigned long long downloadSize = std::accumulate(available_patches.begin(),
2348 available_patches.end(), 0llu, [](unsigned long long const T, DiffInfo const &I) {
2349 return T + I.download_hashes.FileSize();
2350 });
2351 if (downloadSize != 0)
2352 {
2353 unsigned long long downloadSizeIdx = 0;
2354 auto const types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
2355 for (auto const &t : types)
2356 {
2357 std::string MetaKey = Target.MetaKey;
2358 if (t != "uncompressed")
2359 MetaKey += '.' + t;
2360 HashStringList const hsl = GetExpectedHashesFor(MetaKey);
2361 if (unlikely(hsl.usable() == false))
2362 continue;
2363 downloadSizeIdx = hsl.FileSize();
2364 break;
2365 }
2366 unsigned long long const sizeLimit = downloadSizeIdx * sizeLimitPercent;
2367 if ((sizeLimit/100) < downloadSize)
2368 {
2369 if (Debug)
2370 std::clog << "Need " << downloadSize << " compressed bytes (Limit is " << (sizeLimit/100) << ", "
2371 << "original is " << downloadSizeIdx << ") so fallback to complete download" << std::endl;
2372 return false;
2373 }
2374 }
2375 }
2376
2377 // we have something, queue the diffs
2378 string::size_type const last_space = Description.rfind(" ");
2379 if(last_space != string::npos)
2380 Description.erase(last_space, Description.size()-last_space);
2381
2382 /* decide if we should download patches one by one or in one go:
2383 The first is good if the server merges patches, but many don't, so client
2384 based merging can be attempted, in which case the second is better.
2385 "bad things" will happen if patches are merged on the server,
2386 but client side merging is attempted as well */
2387 bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
2388 if (pdiff_merge == true)
2389 {
2390 // reprepro adds this flag if it has merged patches on the server
2391 std::string const precedence = Tags.FindS("X-Patch-Precedence");
2392 pdiff_merge = (precedence != "merged");
2393 }
2394
2395 // clean the plate
2396 {
2397 std::string const Final = GetExistingFilename(CurrentPackagesFile);
2398 if (unlikely(Final.empty())) // because we wouldn't be called in such a case
2399 return false;
2400 std::string const PartialFile = GetPartialFileNameFromURI(Target.URI);
2401 std::string const PatchedFile = GetKeepCompressedFileName(PartialFile + "-patched", Target);
2402 if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile) == false ||
2403 RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile) == false)
2404 return false;
2405 for (auto const &ext : APT::Configuration::getCompressorExtensions())
2406 {
2407 if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile + ext) == false ||
2408 RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile + ext) == false)
2409 return false;
2410 }
2411 std::string const Ext = Final.substr(CurrentPackagesFile.length());
2412 std::string const Partial = PartialFile + Ext;
2413 if (symlink(Final.c_str(), Partial.c_str()) != 0)
2414 {
2415 if (Debug)
2416 std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
2417 << " by linking " << Final << " to " << Partial << " failed!" << std::endl;
2418 return false;
2419 }
2420 }
2421
2422 std::string indexURI = Desc.URI;
2423 auto const byhashidx = indexURI.find("/by-hash/");
2424 if (byhashidx != std::string::npos)
2425 indexURI = indexURI.substr(0, byhashidx - strlen(".diff"));
2426 else
2427 {
2428 auto end = indexURI.length() - strlen(".diff/Index");
2429 if (CurrentCompressionExtension != "uncompressed")
2430 end -= (1 + CurrentCompressionExtension.length());
2431 indexURI = indexURI.substr(0, end);
2432 }
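// indexURI now names the index that will be patched, e.g. (hypothetical)
// …/binary-amd64/Packages.diff/Index.xz (or its by-hash equivalent) has been
// reduced to …/binary-amd64/Packages above.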
2433
2434 if (pdiff_merge == false)
2435 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, UsedMirror, indexURI, available_patches);
2436 else
2437 {
2438 diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
2439 for(size_t i = 0; i < available_patches.size(); ++i)
2440 (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager,
2441 Target, UsedMirror, indexURI,
2442 available_patches[i],
2443 diffs);
2444 }
2445
2446 Complete = false;
2447 Status = StatDone;
2448 Dequeue();
2449 return true;
2450}
2451 /*}}}*/
2452void pkgAcqDiffIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2453{
2454 if (CommonFailed(GetDiffIndexURI(Target), GetDiffIndexFileName(Target.Description), Message, Cnf))
2455 return;
2456
2457 Status = StatDone;
2458 ExpectedAdditionalItems = 0;
2459
2460 if(Debug)
2461 std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
2462 << "Falling back to normal index file acquire" << std::endl;
2463
2464 new pkgAcqIndex(Owner, TransactionManager, Target);
2465}
2466 /*}}}*/
2467void pkgAcqDiffIndex::Done(string const &Message,HashStringList const &Hashes, /*{{{*/
2468 pkgAcquire::MethodConfig const * const Cnf)
2469{
2470 if(Debug)
2471 std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl;
2472
2473 Item::Done(Message, Hashes, Cnf);
2474
2475 string const FinalFile = GetFinalFilename();
2476 if(StringToBool(LookupTag(Message,"IMS-Hit"),false))
2477 DestFile = FinalFile;
2478
2479 if(ParseDiffIndex(DestFile) == false)
2480 {
2481 Failed("Message: Couldn't parse pdiff index", Cnf);
2482 // queue for final move - this should happen even if we fail
2483 // while parsing (e.g. on sizelimit) and download the complete file.
2484 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
2485 return;
2486 }
2487
2488 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
2489
2490 Complete = true;
2491 Status = StatDone;
2492 Dequeue();
2493
2494 return;
2495}
2496 /*}}}*/
2497pkgAcqDiffIndex::~pkgAcqDiffIndex()
2498{
2499 if (diffs != NULL)
2500 delete diffs;
2501}
2502
2503// AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/
2504// ---------------------------------------------------------------------
2505 /* The package diff is added to the queue. One object is constructed
2506 * for each diff and the index.
2507 */
2508pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire * const Owner,
2509 pkgAcqMetaClearSig * const TransactionManager,
2510 IndexTarget const &Target,
2511 std::string const &indexUsedMirror, std::string const &indexURI,
2512 vector<DiffInfo> const &diffs)
2513 : pkgAcqBaseIndex(Owner, TransactionManager, Target), indexURI(indexURI),
2514 available_patches(diffs)
2515{
2516 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
2517
2518 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2519
2520 Desc.Owner = this;
2521 Description = Target.Description;
2522 Desc.ShortDesc = Target.ShortDesc;
2523
2524 UsedMirror = indexUsedMirror;
2525 if (UsedMirror == "DIRECT")
2526 UsedMirror.clear();
2527 else if (UsedMirror.empty() == false && Description.find(" ") != string::npos)
2528 Description.replace(0, Description.find(" "), UsedMirror);
2529
2530 if(available_patches.empty() == true)
2531 {
2532 // we are done (yeah!), check hashes against the final file
2533 DestFile = GetKeepCompressedFileName(GetFinalFileNameFromURI(Target.URI), Target);
2534 Finish(true);
2535 }
2536 else
2537 {
2538 State = StateFetchDiff;
2539 QueueNextDiff();
2540 }
2541}
2542 /*}}}*/
2543void pkgAcqIndexDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2544{
2545 pkgAcqBaseIndex::Failed(Message,Cnf);
2546 Status = StatDone;
2547
2548 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
2549 if(Debug)
2550 std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
2551 << "Falling back to normal index file acquire " << std::endl;
2552 RenameOnError(PDiffError);
2553 std::string const patchname = GetDiffsPatchFileName(DestFile);
2554 if (RealFileExists(patchname))
2555 Rename(patchname, patchname + ".FAILED");
2556 std::string const UnpatchedFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2557 if (UnpatchedFile.empty() == false && FileExists(UnpatchedFile))
2558 Rename(UnpatchedFile, UnpatchedFile + ".FAILED");
2559 new pkgAcqIndex(Owner, TransactionManager, Target);
2560 Finish();
2561}
2562 /*}}}*/
2563// Finish - helper that cleans the item out of the fetcher queue /*{{{*/
2564void pkgAcqIndexDiffs::Finish(bool allDone)
2565{
2566 if(Debug)
2567 std::clog << "pkgAcqIndexDiffs::Finish(): "
2568 << allDone << " "
2569 << Desc.URI << std::endl;
2570
2571 // we restore the original name; this is required as otherwise
2572 // the file will be cleaned up
2573 if(allDone)
2574 {
2575 std::string const Final = GetKeepCompressedFileName(GetFinalFilename(), Target);
2576 TransactionManager->TransactionStageCopy(this, DestFile, Final);
2577
2578 // this is for the "real" finish
2579 Complete = true;
2580 Status = StatDone;
2581 Dequeue();
2582 if(Debug)
2583 std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl;
2584 return;
2585 }
2586 else
2587 DestFile.clear();
2588
2589 if(Debug)
2590 std::clog << "Finishing: " << Desc.URI << std::endl;
2591 Complete = false;
2592 Status = StatDone;
2593 Dequeue();
2594 return;
2595}
2596 /*}}}*/
2597bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
2598{
2599 // calc the hashes of the just patched file
2600 std::string const PartialFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2601 if(unlikely(PartialFile.empty()))
2602 {
2603 Failed("Message: The file " + GetPartialFileNameFromURI(Target.URI) + " isn't available", NULL);
2604 return false;
2605 }
2606
2607 FileFd fd(PartialFile, FileFd::ReadOnly, FileFd::Extension);
2608 Hashes LocalHashesCalc;
2609 LocalHashesCalc.AddFD(fd);
2610 HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
2611
2612 if(Debug)
2613 std::clog << "QueueNextDiff: " << PartialFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl;
2614
2615 HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey);
2616 if (unlikely(LocalHashes.usable() == false || TargetFileHashes.usable() == false))
2617 {
2618 Failed("Local/Expected hashes are not usable for " + PartialFile, NULL);
2619 return false;
2620 }
2621
2622 // final file reached before all patches are applied
2623 if(LocalHashes == TargetFileHashes)
2624 {
2625 Finish(true);
2626 return true;
2627 }
2628
2629 // remove all patches until the next matching patch is found
2630 // this requires the Index file to be ordered
2631 available_patches.erase(available_patches.begin(),
2632 std::find_if(available_patches.begin(), available_patches.end(), [&](DiffInfo const &I) {
2633 return I.result_hashes == LocalHashes;
2634 }));
2635
2636 // error checking and falling back if no patch was found
2637 if(available_patches.empty() == true)
2638 {
2639 Failed("No patches left to reach target for " + PartialFile, NULL);
2640 return false;
2641 }
2642
2643 // queue the right diff
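// e.g. (hypothetical) indexURI …/Packages plus patch 2024-01-01-0800.32 yields
// Desc.URI …/Packages.diff/2024-01-01-0800.32.gz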
2644 Desc.URI = indexURI + ".diff/" + available_patches[0].file + ".gz";
2645 Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
2646 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI + ".diff/" + available_patches[0].file), Target);
2647
2648 if(Debug)
2649 std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl;
2650
2651 QueueURI(Desc);
2652
2653 return true;
2654}
2655 /*}}}*/
2656void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/
2657 pkgAcquire::MethodConfig const * const Cnf)
2658{
2659 if (Debug)
2660 std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl;
2661
2662 Item::Done(Message, Hashes, Cnf);
2663
2664 std::string const UncompressedUnpatchedFile = GetPartialFileNameFromURI(Target.URI);
2665 std::string const UnpatchedFile = GetExistingFilename(UncompressedUnpatchedFile);
2666 std::string const PatchFile = GetDiffsPatchFileName(UnpatchedFile);
2667 std::string const PatchedFile = GetKeepCompressedFileName(UncompressedUnpatchedFile, Target);
2668
2669 switch (State)
2670 {
2671 // success in downloading a diff, enter ApplyDiff state
2672 case StateFetchDiff:
2673 Rename(DestFile, PatchFile);
2674 DestFile = GetKeepCompressedFileName(UncompressedUnpatchedFile + "-patched", Target);
2675 if(Debug)
2676 std::clog << "Sending to rred method: " << UnpatchedFile << std::endl;
2677 State = StateApplyDiff;
2678 Local = true;
2679 Desc.URI = "rred:" + UnpatchedFile;
2680 QueueURI(Desc);
2681 SetActiveSubprocess("rred");
2682 return;
2683 // success in download/apply a diff, queue next (if needed)
2684 case StateApplyDiff:
2685 // remove the just applied patch and base file
2686 available_patches.erase(available_patches.begin());
2687 RemoveFile("pkgAcqIndexDiffs::Done", PatchFile);
2688 RemoveFile("pkgAcqIndexDiffs::Done", UnpatchedFile);
2689 if(Debug)
2690 std::clog << "Moving patched file in place: " << std::endl
2691 << DestFile << " -> " << PatchedFile << std::endl;
2692 Rename(DestFile, PatchedFile);
2693
2694 // see if there is more to download
2695 if(available_patches.empty() == false)
2696 {
2697 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, UsedMirror, indexURI, available_patches);
2698 Finish();
2699 } else {
2700 DestFile = PatchedFile;
2701 Finish(true);
2702 }
2703 return;
2704 }
2705}
2706 /*}}}*/
2707std::string pkgAcqIndexDiffs::Custom600Headers() const /*{{{*/
2708{
2709 if(State != StateApplyDiff)
2710 return pkgAcqBaseIndex::Custom600Headers();
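// tell the rred call below which hashes to expect, e.g. (values hypothetical)
//   Start-SHA256-Hash: <hash of the index before patching>
//   Patch-0-SHA256-Hash: <hash of the uncompressed patch>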
2711 std::ostringstream patchhashes;
2712 for (auto && hs : available_patches[0].result_hashes)
2713 patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
2714 for (auto && hs : available_patches[0].patch_hashes)
2715 patchhashes << "\nPatch-0-" << hs.HashType() << "-Hash: " << hs.HashValue();
2716 patchhashes << pkgAcqBaseIndex::Custom600Headers();
2717 return patchhashes.str();
2718}
2719 /*}}}*/
2720pkgAcqIndexDiffs::~pkgAcqIndexDiffs() {}
2721
2722// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
2723pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire * const Owner,
2724 pkgAcqMetaClearSig * const TransactionManager,
2725 IndexTarget const &Target,
2726 std::string const &indexUsedMirror, std::string const &indexURI,
2727 DiffInfo const &patch,
2728 std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
2729 : pkgAcqBaseIndex(Owner, TransactionManager, Target), indexURI(indexURI),
2730 patch(patch), allPatches(allPatches), State(StateFetchDiff)
2731{
2732 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2733
2734 Description = Target.Description;
2735 UsedMirror = indexUsedMirror;
2736 if (UsedMirror == "DIRECT")
2737 UsedMirror.clear();
2738 else if (UsedMirror.empty() == false && Description.find(" ") != string::npos)
2739 Description.replace(0, Description.find(" "), UsedMirror);
2740
2741 Desc.Owner = this;
2742 Desc.ShortDesc = Target.ShortDesc;
2743 Desc.URI = indexURI + ".diff/" + patch.file + ".gz";
2744 Desc.Description = Description + " " + patch.file + ".pdiff";
2745 DestFile = GetPartialFileNameFromURI(Target.URI + ".diff/" + patch.file + ".gz");
2746
2747 if(Debug)
2748 std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
2749
2750 QueueURI(Desc);
2751}
2752 /*}}}*/
2753void pkgAcqIndexMergeDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2754{
2755 if(Debug)
2756 std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
2757
2758 pkgAcqBaseIndex::Failed(Message,Cnf);
2759 Status = StatDone;
2760
2761 // check if we are the first to fail, otherwise we are done here
2762 State = StateDoneDiff;
2763 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2764 I != allPatches->end(); ++I)
2765 if ((*I)->State == StateErrorDiff)
2766 {
2767 State = StateErrorDiff;
2768 return;
2769 }
2770
2771 // first failure means we should fall back
2772 State = StateErrorDiff;
2773 if (Debug)
2774 std::clog << "Falling back to normal index file acquire" << std::endl;
2775 RenameOnError(PDiffError);
2776 if (RealFileExists(DestFile))
2777 Rename(DestFile, DestFile + ".FAILED");
2778 std::string const UnpatchedFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2779 if (UnpatchedFile.empty() == false && FileExists(UnpatchedFile))
2780 Rename(UnpatchedFile, UnpatchedFile + ".FAILED");
2781 DestFile.clear();
2782 new pkgAcqIndex(Owner, TransactionManager, Target);
2783}
2784 /*}}}*/
2785void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/
2786 pkgAcquire::MethodConfig const * const Cnf)
2787{
2788 if(Debug)
2789 std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
2790
2791 Item::Done(Message, Hashes, Cnf);
2792
2793 if (std::any_of(allPatches->begin(), allPatches->end(),
2794 [](pkgAcqIndexMergeDiffs const * const P) { return P->State == StateErrorDiff; }))
2795 {
2796 if(Debug)
2797 std::clog << "Another patch failed already, no point in processing this one." << std::endl;
2798 State = StateErrorDiff;
2799 return;
2800 }
2801
2802 std::string const UncompressedUnpatchedFile = GetPartialFileNameFromURI(Target.URI);
2803 std::string const UnpatchedFile = GetExistingFilename(UncompressedUnpatchedFile);
2804 if (UnpatchedFile.empty())
2805 {
2806 _error->Fatal("Unpatched file %s doesn't exist (anymore)!", UncompressedUnpatchedFile.c_str());
2807 State = StateErrorDiff;
2808 return;
2809 }
2810 std::string const PatchFile = GetMergeDiffsPatchFileName(UnpatchedFile, patch.file);
2811 std::string const PatchedFile = GetKeepCompressedFileName(UncompressedUnpatchedFile, Target);
2812
2813 switch (State)
2814 {
2815 case StateFetchDiff:
2816 Rename(DestFile, PatchFile);
2817
2818 // check if this is the last completed diff
2819 State = StateDoneDiff;
2820 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2821 I != allPatches->end(); ++I)
2822 if ((*I)->State != StateDoneDiff)
2823 {
2824 if(Debug)
2825 std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl;
2826 return;
2827 }
2828 // this is the last completed diff, so we are ready to apply now
2829 DestFile = GetKeepCompressedFileName(UncompressedUnpatchedFile + "-patched", Target);
2830 if(Debug)
2831 std::clog << "Sending to rred method: " << UnpatchedFile << std::endl;
2832 State = StateApplyDiff;
2833 Local = true;
2834 Desc.URI = "rred:" + UnpatchedFile;
2835 QueueURI(Desc);
2836 SetActiveSubprocess("rred");
2837 return;
2838 case StateApplyDiff:
2839 // success in downloading & applying all diffs, finalize and clean up
2840 if(Debug)
2841 std::clog << "Queue patched file in place: " << std::endl
2842 << DestFile << " -> " << PatchedFile << std::endl;
2843
2844 // queue for copy by the transaction manager
2845 TransactionManager->TransactionStageCopy(this, DestFile, GetKeepCompressedFileName(GetFinalFilename(), Target));
2846
2847 // ensure the ed patches are gone regardless of list-cleanup
2848 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2849 I != allPatches->end(); ++I)
2850 RemoveFile("pkgAcqIndexMergeDiffs::Done", GetMergeDiffsPatchFileName(UnpatchedFile, (*I)->patch.file));
2851 RemoveFile("pkgAcqIndexMergeDiffs::Done", UnpatchedFile);
2852
2853 // all set and done
2854 Complete = true;
2855 if(Debug)
2856 std::clog << "allDone: " << DestFile << "\n" << std::endl;
2857 return;
2858 case StateDoneDiff: _error->Fatal("Done called for %s which is in an invalid Done state", PatchFile.c_str()); break;
2859 case StateErrorDiff: _error->Fatal("Done called for %s which is in an invalid Error state", PatchFile.c_str()); break;
2860 }
2861}
2862 /*}}}*/
2863std::string pkgAcqIndexMergeDiffs::Custom600Headers() const /*{{{*/
2864{
2865 if(State != StateApplyDiff)
2866 return pkgAcqBaseIndex::Custom600Headers();
2867 std::ostringstream patchhashes;
2868 unsigned int seen_patches = 0;
2869 for (auto && hs : (*allPatches)[0]->patch.result_hashes)
2870 patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
2871 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2872 I != allPatches->end(); ++I)
2873 {
2874 HashStringList const ExpectedHashes = (*I)->patch.patch_hashes;
2875 for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
2876 patchhashes << "\nPatch-" << std::to_string(seen_patches) << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
2877 ++seen_patches;
2878 }
2879 patchhashes << pkgAcqBaseIndex::Custom600Headers();
2880 return patchhashes.str();
2881}
2882 /*}}}*/
2883pkgAcqIndexMergeDiffs::~pkgAcqIndexMergeDiffs() {}
2884
2885// AcqIndex::AcqIndex - Constructor /*{{{*/
2886pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner,
2887 pkgAcqMetaClearSig * const TransactionManager,
2888 IndexTarget const &Target, bool const Derived)
2889 : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL), Stage(STAGE_DOWNLOAD),
2890 CompressionExtensions(Target.Option(IndexTarget::COMPRESSIONTYPES))
2891{
2892 if (Derived)
2893 return;
2894 Init(Target.URI, Target.Description, Target.ShortDesc);
2895
2896 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
2897 std::clog << "New pkgIndex with TransactionManager "
2898 << TransactionManager << std::endl;
2899}
2900 /*}}}*/
2901// AcqIndex::Init - deferred Constructor /*{{{*/
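// NextCompressionExtension pops the first entry off the space separated list,
// e.g. (hypothetical) "xz bz2 gz uncompressed" yields "xz" and leaves
// "bz2 gz uncompressed" behind; with preview == true the list stays untouched.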
2902static void NextCompressionExtension(std::string &CurrentCompressionExtension, std::string &CompressionExtensions, bool const preview)
2903{
2904 size_t const nextExt = CompressionExtensions.find(' ');
2905 if (nextExt == std::string::npos)
2906 {
2907 CurrentCompressionExtension = CompressionExtensions;
2908 if (preview == false)
2909 CompressionExtensions.clear();
2910 }
2911 else
2912 {
2913 CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt);
2914 if (preview == false)
2915 CompressionExtensions = CompressionExtensions.substr(nextExt+1);
2916 }
2917}
2918void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
2919 string const &ShortDesc)
2920{
2921 Stage = STAGE_DOWNLOAD;
2922
2923 DestFile = GetPartialFileNameFromURI(URI);
2924 NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false);
2925
2926 if (CurrentCompressionExtension == "uncompressed")
2927 {
2928 Desc.URI = URI;
2929 }
2930 else if (CurrentCompressionExtension == "by-hash")
2931 {
2932 NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, true);
2933 if(unlikely(CurrentCompressionExtension.empty()))
2934 return;
2935 if (CurrentCompressionExtension != "uncompressed")
2936 {
2937 Desc.URI = URI + '.' + CurrentCompressionExtension;
2938 DestFile = DestFile + '.' + CurrentCompressionExtension;
2939 }
2940 else
2941 Desc.URI = URI;
2942
2943 HashStringList const Hashes = GetExpectedHashes();
2944 HashString const * const TargetHash = Hashes.find(NULL);
2945 if (unlikely(TargetHash == nullptr))
2946 return;
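	 // rewrite the last path component into the by-hash form, e.g. (hypothetical)
	 // …/binary-amd64/Packages.xz becomes …/binary-amd64/by-hash/SHA256/<hex digest>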
2947 std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
2948 size_t const trailing_slash = Desc.URI.find_last_of("/");
2949 if (unlikely(trailing_slash == std::string::npos))
2950 return;
2951 Desc.URI = Desc.URI.replace(
2952 trailing_slash,
2953 Desc.URI.substr(trailing_slash+1).size()+1,
2954 ByHash);
2955 }
2956 else if (unlikely(CurrentCompressionExtension.empty()))
2957 return;
2958 else
2959 {
2960 Desc.URI = URI + '.' + CurrentCompressionExtension;
2961 DestFile = DestFile + '.' + CurrentCompressionExtension;
2962 }
2963
2964 // store file size of the download to ensure the fetcher gives
2965 // accurate progress reporting
2966 FileSize = GetExpectedHashes().FileSize();
2967
2968 Desc.Description = URIDesc;
2969 Desc.Owner = this;
2970 Desc.ShortDesc = ShortDesc;
2971
2972 QueueURI(Desc);
2973}
2974 /*}}}*/
2975// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
2976// ---------------------------------------------------------------------
2977 /* Besides a few flag headers the main one we use is the last-modified header. */
2978string pkgAcqIndex::Custom600Headers() const
2979{
2980
2981 string msg = "\nIndex-File: true";
2982
2983 if (TransactionManager->LastMetaIndexParser == NULL)
2984 {
2985 std::string const Final = GetFinalFilename();
2986
2987 struct stat Buf;
2988 if (stat(Final.c_str(),&Buf) == 0)
2989 msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
2990 }
2991
2992 if(Target.IsOptional)
2993 msg += "\nFail-Ignore: true";
2994
2995 return msg;
2996}
2997 /*}}}*/
2998// AcqIndex::Failed - getting the indexfile failed /*{{{*/
2999bool pkgAcqIndex::CommonFailed(std::string const &TargetURI, std::string const TargetDesc,
3000 std::string const &Message, pkgAcquire::MethodConfig const * const Cnf)
3001{
3002 pkgAcqBaseIndex::Failed(Message,Cnf);
3003
3004 if (UsedMirror.empty() == false && UsedMirror != "DIRECT" &&
3005 LookupTag(Message, "FailReason") == "HttpError404")
3006 {
3007 UsedMirror = "DIRECT";
3008 if (Desc.URI.find("/by-hash/") != std::string::npos)
3009 CompressionExtensions = "by-hash " + CompressionExtensions;
3010 else
3011 CompressionExtensions = CurrentCompressionExtension + ' ' + CompressionExtensions;
3012 Init(TargetURI, TargetDesc, Desc.ShortDesc);
3013 Status = StatIdle;
3014 return true;
3015 }
3016
3017 // authorisation failures will not be fixed by trying other compression types
3018 if (Status != StatAuthError)
3019 {
3020 if (CompressionExtensions.empty() == false)
3021 {
3022 Init(TargetURI, Desc.Description, Desc.ShortDesc);
3023 Status = StatIdle;
3024 return true;
3025 }
3026 }
3027 return false;
3028}
3029void pkgAcqIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
3030{
3031 if (CommonFailed(Target.URI, Target.Description, Message, Cnf))
3032 return;
3033
3034 if(Target.IsOptional && GetExpectedHashes().empty() && Stage == STAGE_DOWNLOAD)
3035 Status = StatDone;
3036 else
3037 TransactionManager->AbortTransaction();
3038}
3039 /*}}}*/
3040// AcqIndex::Done - Finished a fetch /*{{{*/
3041// ---------------------------------------------------------------------
3042 /* This goes through a number of states. On the initial fetch the
3043 method could possibly return an alternate filename which points
3044 to the uncompressed version of the file. If this is so the file
3045 is copied into the partial directory. In all other cases the file
3046 is decompressed from its compressed URI. */
3047void pkgAcqIndex::Done(string const &Message,
3048 HashStringList const &Hashes,
3049 pkgAcquire::MethodConfig const * const Cfg)
3050{
3051 Item::Done(Message,Hashes,Cfg);
3052
3053 switch(Stage)
3054 {
3055 case STAGE_DOWNLOAD:
3056 StageDownloadDone(Message);
3057 break;
3058 case STAGE_DECOMPRESS_AND_VERIFY:
3059 StageDecompressDone();
3060 break;
3061 }
3062}
3063 /*}}}*/
3064// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/
3065void pkgAcqIndex::StageDownloadDone(string const &Message)
3066{
3067 Local = true;
3068 Complete = true;
3069
3070 std::string const AltFilename = LookupTag(Message,"Alt-Filename");
3071 std::string Filename = LookupTag(Message,"Filename");
3072
3073 // we need to verify the file against the current Release file again
3074 // on an if-modified-since hit to avoid a stale attack against us
3075 if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
3076 {
3077 // copy FinalFile into partial/ so that we check the hash again
3078 string const FinalFile = GetExistingFilename(GetFinalFileNameFromURI(Target.URI));
3079 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
3080 unlink(DestFile.c_str());
3081 if (symlink(FinalFile.c_str(), DestFile.c_str()) != 0)
3082 _error->WarningE("pkgAcqIndex::StageDownloadDone", "Symlinking final file %s back to %s failed", FinalFile.c_str(), DestFile.c_str());
3083 else
3084 {
3085 EraseFileName = DestFile;
3086 Filename = DestFile;
3087 }
3088 Stage = STAGE_DECOMPRESS_AND_VERIFY;
3089 if (Filename != DestFile && flExtension(Filename) == flExtension(DestFile))
3090 Desc.URI = "copy:" + Filename;
3091 else
3092 Desc.URI = "store:" + Filename;
3093 QueueURI(Desc);
3094 SetActiveSubprocess(::URI(Desc.URI).Access);
3095 return;
3096 }
3097 // methods like file:// give us an alternative (uncompressed) file
3098 else if (Target.KeepCompressed == false && AltFilename.empty() == false)
3099 {
3100 Filename = AltFilename;
3101 EraseFileName.clear();
3102 }
3103 // Methods like "file:" will give us a (compressed) FileName that is
3104 // not the "DestFile" we set; in this case we uncompress from the local file
3105 else if (Filename != DestFile && RealFileExists(DestFile) == false)
3106 {
3107 // symlinking ensures that the filename can be used for compression detection,
3108 // which is e.g. needed for by-hash files as they carry no file extension
3109 if (symlink(Filename.c_str(),DestFile.c_str()) != 0)
3110 _error->WarningE("pkgAcqIndex::StageDownloadDone", "Symlinking file %s to %s failed", Filename.c_str(), DestFile.c_str());
3111 else
3112 {
3113 EraseFileName = DestFile;
3114 Filename = DestFile;
3115 }
3116 }
3117
3118 Stage = STAGE_DECOMPRESS_AND_VERIFY;
3119 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
3120 if (Filename != DestFile && flExtension(Filename) == flExtension(DestFile))
3121 Desc.URI = "copy:" + Filename;
3122 else
3123 Desc.URI = "store:" + Filename;
3124 if (DestFile == Filename)
3125 {
3126 if (CurrentCompressionExtension == "uncompressed")
3127 return StageDecompressDone();
3128 DestFile = "/dev/null";
3129 }
3130
3131 if (EraseFileName.empty() && Filename != AltFilename)
3132 EraseFileName = Filename;
3133
3134 // queue uri for the next stage
3135 QueueURI(Desc);
3136 SetActiveSubprocess(::URI(Desc.URI).Access);
3137}
3138 /*}}}*/
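// Editorial note (illustrative sketch): the second stage re-queues the
// just-fetched file through a local method.  "copy:" is used when source and
// destination share the same file extension (nothing has to be unpacked),
// "store:" when the compression has to change, e.g.
//
//    extensions match   (foo.xz -> bar.xz, kept compressed)  -> copy:
//    extensions differ  (foo.xz -> foo, uncompress)          -> store:
//
// A DestFile of "/dev/null" means the data is only streamed through the
// decompressor for hash verification while the compressed file itself is kept.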
3139// AcqIndex::StageDecompressDone - Final verification /*{{{*/
3140void pkgAcqIndex::StageDecompressDone()
3141{
3142 if (DestFile == "/dev/null")
3143 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
3144
3145 // Done; queue for rename once the transaction has finished
3146 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
3147}
3148 /*}}}*/
3149pkgAcqIndex::~pkgAcqIndex() {}
3150
3151
3152// AcqArchive::AcqArchive - Constructor /*{{{*/
3153// ---------------------------------------------------------------------
3154/* This just sets up the initial fetch environment and queues the first
3155 possibility */
3156pkgAcqArchive::pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sources,
3157 pkgRecords * const Recs,pkgCache::VerIterator const &Version,
3158 string &StoreFilename) :
3159 Item(Owner), d(NULL), LocalSource(false), Version(Version), Sources(Sources), Recs(Recs),
3160 StoreFilename(StoreFilename), Vf(Version.FileList()),
3161 Trusted(false)
3162{
3163 Retries = _config->FindI("Acquire::Retries",0);
3164
3165 if (Version.Arch() == 0)
3166 {
3167 _error->Error(_("I wasn't able to locate a file for the %s package. "
3168 "This might mean you need to manually fix this package. "
3169 "(due to missing arch)"),
3170 Version.ParentPkg().FullName().c_str());
3171 return;
3172 }
3173
3174 /* We need to find a filename to determine the extension. We make the
3175 assumption here that all the available sources for this version share
3176 the same extension. */
3177 // Skip non-source entries; they do not have file fields.
3178 for (; Vf.end() == false; ++Vf)
3179 {
3180 if (Vf.File().Flagged(pkgCache::Flag::NotSource))
3181 continue;
3182 break;
3183 }
3184
3185 // If none was found it does not really matter here; we are going to fail out below
3186 if (Vf.end() != true)
3187 {
3188 // If this fails to get a file name we will bomb out below.
3189 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
3190 if (_error->PendingError() == true)
3191 return;
3192
3193 // Generate the final file name as: package_version_arch.foo
3194 StoreFilename = QuoteString(Version.ParentPkg().Name(),"_:") + '_' +
3195 QuoteString(Version.VerStr(),"_:") + '_' +
3196 QuoteString(Version.Arch(),"_:.") +
3197 "." + flExtension(Parse.FileName());
3198 }
3199
3200 // Check if we have at least one trusted source for the package. If so, switch
3201 // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode
3202 bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false);
3203 bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false);
3204 bool seenUntrusted = false;
3205 for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
3206 {
3207 pkgIndexFile *Index;
3208 if (Sources->FindIndex(i.File(),Index) == false)
3209 continue;
3210
3211 if (debugAuth == true)
3212 std::cerr << "Checking index: " << Index->Describe()
3213 << "(Trusted=" << Index->IsTrusted() << ")" << std::endl;
3214
3215 if (Index->IsTrusted() == true)
3216 {
3217 Trusted = true;
3218 if (allowUnauth == false)
3219 break;
3220 }
3221 else
3222 seenUntrusted = true;
3223 }
3224
3225 // "allow-unauthenticated" restores apts old fetching behaviour
3226 // that means that e.g. unauthenticated file:// uris are higher
3227 // priority than authenticated http:// uris
3228 if (allowUnauth == true && seenUntrusted == true)
3229 Trusted = false;
3230
3231 // Select a source
3232 if (QueueNext() == false && _error->PendingError() == false)
3233 _error->Error(_("Can't find a source to download version '%s' of '%s'"),
3234 Version.VerStr(), Version.ParentPkg().FullName(false).c_str());
3235}
3236 /*}}}*/
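// Illustrative usage sketch (editorial addition): roughly how a frontend
// (e.g. via pkgPackageManager::GetArchives) ends up driving this class;
// "Fetcher", "List", "Recs" and "Ver" are assumed to be set up by the caller.
//
//    pkgAcquire Fetcher;
//    std::string StoreFilename;
//    new pkgAcqArchive(&Fetcher, &List, &Recs, Ver, StoreFilename);
//    if (Fetcher.Run() == pkgAcquire::Continue)
//       std::cout << "fetched to " << StoreFilename << std::endl;
//       // e.g. /var/cache/apt/archives/apt_1.1_amd64.deb,
//       //      i.e. package_version_arch.<ext> with ':' and '_' quoted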
3237// AcqArchive::QueueNext - Queue the next file source /*{{{*/
3238// ---------------------------------------------------------------------
3239/* This queues the next available file version for download. It checks if
3240 the archive is already available in the cache and stashes the expected
3241 hashes for checking later. */
3242bool pkgAcqArchive::QueueNext()
3243{
3244 for (; Vf.end() == false; ++Vf)
3245 {
3246 pkgCache::PkgFileIterator const PkgF = Vf.File();
3247 // Ignore non-source entries
3248 if (PkgF.Flagged(pkgCache::Flag::NotSource))
3249 continue;
3250
3251 // Try to cross match against the source list
3252 pkgIndexFile *Index;
3253 if (Sources->FindIndex(PkgF, Index) == false)
3254 continue;
3255 LocalSource = PkgF.Flagged(pkgCache::Flag::LocalSource);
3256
3257 // only try to get a trusted package from another source if that source
3258 // is also trusted
3259 if(Trusted && !Index->IsTrusted())
3260 continue;
3261
3262 // Grab the text package record
3263 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
3264 if (_error->PendingError() == true)
3265 return false;
3266
3267 string PkgFile = Parse.FileName();
3268 ExpectedHashes = Parse.Hashes();
3269
3270 if (PkgFile.empty() == true)
3271 return _error->Error(_("The package index files are corrupted. No Filename: "
3272 "field for package %s."),
3273 Version.ParentPkg().Name());
3274
3275 Desc.URI = Index->ArchiveURI(PkgFile);
3276 Desc.Description = Index->ArchiveInfo(Version);
3277 Desc.Owner = this;
3278 Desc.ShortDesc = Version.ParentPkg().FullName(true);
3279
3280 // See if we already have the file. (Legacy filenames)
3281 FileSize = Version->Size;
3282 string FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(PkgFile);
3283 struct stat Buf;
3284 if (stat(FinalFile.c_str(),&Buf) == 0)
3285 {
3286 // Make sure the size matches
3287 if ((unsigned long long)Buf.st_size == Version->Size)
3288 {
3289 Complete = true;
3290 Local = true;
3291 Status = StatDone;
3292 StoreFilename = DestFile = FinalFile;
3293 return true;
3294 }
3295
3296 /* Hmm, we have a file and its size does not match; this means it is
3297 an old-style filename from a different architecture */
3298 RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
3299 }
3300
3301 // Check it again using the new style output filenames
3302 FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename);
3303 if (stat(FinalFile.c_str(),&Buf) == 0)
3304 {
3305 // Make sure the size matches
3306 if ((unsigned long long)Buf.st_size == Version->Size)
3307 {
3308 Complete = true;
3309 Local = true;
3310 Status = StatDone;
3311 StoreFilename = DestFile = FinalFile;
3312 return true;
3313 }
3314
3315 /* Hmm, we have a file and its size does not match; this shouldn't
3316 happen. */
3317 RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
3318 }
3319
3320 DestFile = _config->FindDir("Dir::Cache::Archives") + "partial/" + flNotDir(StoreFilename);
3321
3322 // Check the destination file
3323 if (stat(DestFile.c_str(),&Buf) == 0)
3324 {
3325 // Hmm, the partial file is too big, erase it
3326 if ((unsigned long long)Buf.st_size > Version->Size)
3327 RemoveFile("pkgAcqArchive::QueueNext", DestFile);
3328 else
3329 PartialSize = Buf.st_size;
3330 }
3331
3332 // Disables download of archives - useful if no real installation follows,
3333 // e.g. if we are just interested in proposed installation order
3334 if (_config->FindB("Debug::pkgAcqArchive::NoQueue", false) == true)
3335 {
3336 Complete = true;
3337 Local = true;
3338 Status = StatDone;
3339 StoreFilename = DestFile = FinalFile;
3340 return true;
3341 }
3342
3343 // Create the item
3344 Local = false;
3345 ++Vf;
3346 QueueURI(Desc);
3347 return true;
3348 }
3349 return false;
3350}
3351 /*}}}*/
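// Illustrative configuration sketch (editorial addition): the knobs consulted
// by pkgAcqArchive above can be set in apt.conf, for example
//
//    Acquire::Retries "3";                    // retry transient failures
//    Debug::pkgAcqArchive::NoQueue "true";    // skip the actual downloads
//    Debug::pkgAcquire::Auth "true";          // trace the trust decisions
//    APT::Get::AllowUnauthenticated "true";   // legacy unauthenticated mode
//
// (the values shown are examples, not the defaults)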
3352// AcqArchive::Done - Finished fetching /*{{{*/
3353// ---------------------------------------------------------------------
3354/* */
3355void pkgAcqArchive::Done(string const &Message, HashStringList const &Hashes,
3356 pkgAcquire::MethodConfig const * const Cfg)
3357{
3358 Item::Done(Message, Hashes, Cfg);
3359
3360 // Grab the output filename
3361 std::string const FileName = LookupTag(Message,"Filename");
3362 if (DestFile != FileName && RealFileExists(DestFile) == false)
3363 {
3364 StoreFilename = DestFile = FileName;
3365 Local = true;
3366 Complete = true;
3367 return;
3368 }
3369
3370 // Done, move it into position
3371 string const FinalFile = GetFinalFilename();
3372 Rename(DestFile,FinalFile);
3373 StoreFilename = DestFile = FinalFile;
3374 Complete = true;
3375}
3376 /*}}}*/
3377// AcqArchive::Failed - Failure handler /*{{{*/
3378// ---------------------------------------------------------------------
3379/* Here we try other sources */
3380void pkgAcqArchive::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
3381{
3382 Item::Failed(Message,Cnf);
3383
3384 /* We don't really want to retry on failed media swaps; this prevents
3385 that. An interesting observation is that permanent failures are not
3386 recorded. */
3387 if (Cnf->Removable == true &&
3388 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3389 {
3390 // Vf = Version.FileList();
3391 while (Vf.end() == false) ++Vf;
3392 StoreFilename = string();
3393 return;
3394 }
3395
3396 Status = StatIdle;
3397 if (QueueNext() == false)
3398 {
3399 // This is the retry counter
3400 if (Retries != 0 &&
3401 Cnf->LocalOnly == false &&
3402 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3403 {
3404 Retries--;
3405 Vf = Version.FileList();
3406 if (QueueNext() == true)
3407 return;
3408 }
3409
3410 StoreFilename = string();
3411 Status = StatError;
3412 }
3413}
3414 /*}}}*/
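// Editorial note (illustrative sketch): the Message parsed above is an
// RFC822-style status report from the transport method; a transient failure
// might look roughly like
//
//    400 URI Failure
//    URI: http://deb.example.org/pool/main/a/apt/apt_1.1_amd64.deb
//    Message: Connection timed out
//    Transient-Failure: true
//
// ("deb.example.org" is a placeholder; the exact fields depend on the method)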
3415APT_PURE bool pkgAcqArchive::IsTrusted() const /*{{{*/
3416{
3417 return Trusted;
3418}
3419 /*}}}*/
3420void pkgAcqArchive::Finished() /*{{{*/
3421{
3422 if (Status == pkgAcquire::Item::StatDone &&
3423 Complete == true)
3424 return;
3425 StoreFilename = string();
3426}
3427 /*}}}*/
3428std::string pkgAcqArchive::DescURI() const /*{{{*/
3429{
3430 return Desc.URI;
3431}
3432 /*}}}*/
3433std::string pkgAcqArchive::ShortDesc() const /*{{{*/
3434{
3435 return Desc.ShortDesc;
3436}
3437 /*}}}*/
3438pkgAcqArchive::~pkgAcqArchive() {}
3439
3440// AcqChangelog::pkgAcqChangelog - Constructors /*{{{*/
3441class pkgAcqChangelog::Private
3442{
3443 public:
3444 std::string FinalFile;
3445};
3446pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::VerIterator const &Ver,
3447 std::string const &DestDir, std::string const &DestFilename) :
3448 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(Ver.SourcePkgName()), SrcVersion(Ver.SourceVerStr())
3449{
3450 Desc.URI = URI(Ver);
3451 Init(DestDir, DestFilename);
3452}
3453// some parameters are char* here as they likely come from char* interfaces, which can also return NULL
3454pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::RlsFileIterator const &RlsFile,
3455 char const * const Component, char const * const SrcName, char const * const SrcVersion,
3456 const string &DestDir, const string &DestFilename) :
3457 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(SrcName), SrcVersion(SrcVersion)
3458{
3459 Desc.URI = URI(RlsFile, Component, SrcName, SrcVersion);
3460 Init(DestDir, DestFilename);
3461}
3462pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner,
3463 std::string const &URI, char const * const SrcName, char const * const SrcVersion,
3464 const string &DestDir, const string &DestFilename) :
3465 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(SrcName), SrcVersion(SrcVersion)
3466{
3467 Desc.URI = URI;
3468 Init(DestDir, DestFilename);
3469}
3470void pkgAcqChangelog::Init(std::string const &DestDir, std::string const &DestFilename)
3471{
3472 if (Desc.URI.empty())
3473 {
3474 Status = StatError;
3475 // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
3476 strprintf(ErrorText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
3477 // Let the error message print something sensible rather than "Failed to fetch /"
3478 if (DestFilename.empty())
3479 DestFile = SrcName + ".changelog";
3480 else
3481 DestFile = DestFilename;
3482 Desc.URI = "changelog:/" + DestFile;
3483 return;
3484 }
3485
3486 std::string DestFileName;
3487 if (DestFilename.empty())
3488 DestFileName = flCombine(DestFile, SrcName + ".changelog");
3489 else
3490 DestFileName = flCombine(DestFile, DestFilename);
3491
3492 std::string const SandboxUser = _config->Find("APT::Sandbox::User");
3493 std::string const systemTemp = GetTempDir(SandboxUser);
3494 char tmpname[1000];
3495 snprintf(tmpname, sizeof(tmpname), "%s/apt-changelog-XXXXXX", systemTemp.c_str());
3496 if (NULL == mkdtemp(tmpname))
3497 {
3498 _error->Errno("mkdtemp", "mkdtemp failed in changelog acquire of %s %s", SrcName.c_str(), SrcVersion.c_str());
3499 Status = StatError;
3500 return;
3501 }
3502 TemporaryDirectory = tmpname;
3503
3504 ChangeOwnerAndPermissionOfFile("Item::QueueURI", TemporaryDirectory.c_str(),
3505 SandboxUser.c_str(), ROOT_GROUP, 0700);
3506
3507 DestFile = flCombine(TemporaryDirectory, DestFileName);
3508 if (DestDir.empty() == false)
3509 {
3510 d->FinalFile = flCombine(DestDir, DestFileName);
3511 if (RealFileExists(d->FinalFile))
3512 {
3513 FileFd file1, file2;
3514 if (file1.Open(DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Exclusive) &&
3515 file2.Open(d->FinalFile, FileFd::ReadOnly) && CopyFile(file2, file1))
3516 {
3517 struct timeval times[2];
3518 times[0].tv_sec = times[1].tv_sec = file2.ModificationTime();
3519 times[0].tv_usec = times[1].tv_usec = 0;
3520 utimes(DestFile.c_str(), times);
3521 }
3522 }
3523 }
3524
3525 Desc.ShortDesc = "Changelog";
3526 strprintf(Desc.Description, "%s %s %s Changelog", URI::SiteOnly(Desc.URI).c_str(), SrcName.c_str(), SrcVersion.c_str());
3527 Desc.Owner = this;
3528 QueueURI(Desc);
3529}
3530 /*}}}*/
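// Illustrative usage sketch (editorial addition; "Fetcher" and "Ver" are
// assumed to exist in the caller, and the DestFilename parameter is assumed
// to default to an empty string):
//
//    pkgAcquire Fetcher;
//    new pkgAcqChangelog(&Fetcher, Ver, "/tmp");   // keep the file in /tmp
//    Fetcher.Run();
//
// Without a DestDir/DestFilename the changelog is written to a private
// temporary directory and removed again by the destructor, so callers that
// want to keep the file must pass a destination.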
3531std::string pkgAcqChangelog::URI(pkgCache::VerIterator const &Ver) /*{{{*/
3532{
3533 std::string const confOnline = "Acquire::Changelogs::AlwaysOnline";
3534 bool AlwaysOnline = _config->FindB(confOnline, false);
3535 if (AlwaysOnline == false)
3536 for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
3537 {
3538 pkgCache::PkgFileIterator const PF = VF.File();
3539 if (PF.Flagged(pkgCache::Flag::NotSource) || PF->Release == 0)
3540 continue;
3541 pkgCache::RlsFileIterator const RF = PF.ReleaseFile();
3542 if (RF->Origin != 0 && _config->FindB(confOnline + "::Origin::" + RF.Origin(), false))
3543 {
3544 AlwaysOnline = true;
3545 break;
3546 }
3547 }
3548 if (AlwaysOnline == false)
3549 {
3550 pkgCache::PkgIterator const Pkg = Ver.ParentPkg();
3551 if (Pkg->CurrentVer != 0 && Pkg.CurrentVer() == Ver)
3552 {
3553 std::string const root = _config->FindDir("Dir");
3554 std::string const basename = root + std::string("usr/share/doc/") + Pkg.Name() + "/changelog";
3555 std::string const debianname = basename + ".Debian";
3556 if (FileExists(debianname))
3557 return "copy://" + debianname;
3558 else if (FileExists(debianname + ".gz"))
3559 return "gzip://" + debianname + ".gz";
3560 else if (FileExists(basename))
3561 return "copy://" + basename;
3562 else if (FileExists(basename + ".gz"))
3563 return "gzip://" + basename + ".gz";
3564 }
3565 }
3566
3567 char const * const SrcName = Ver.SourcePkgName();
3568 char const * const SrcVersion = Ver.SourceVerStr();
3569 // find the first source for this version which promises a changelog
3570 for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
3571 {
3572 pkgCache::PkgFileIterator const PF = VF.File();
3573 if (PF.Flagged(pkgCache::Flag::NotSource) || PF->Release == 0)
3574 continue;
3575 pkgCache::RlsFileIterator const RF = PF.ReleaseFile();
3576 std::string const uri = URI(RF, PF.Component(), SrcName, SrcVersion);
3577 if (uri.empty())
3578 continue;
3579 return uri;
3580 }
3581 return "";
3582}
3583std::string pkgAcqChangelog::URITemplate(pkgCache::RlsFileIterator const &Rls)
3584{
3585 if (Rls.end() == true || (Rls->Label == 0 && Rls->Origin == 0))
3586 return "";
3587 std::string const serverConfig = "Acquire::Changelogs::URI";
3588 std::string server;
3589#define APT_EMPTY_SERVER \
3590 if (server.empty() == false) \
3591 { \
3592 if (server != "no") \
3593 return server; \
3594 return ""; \
3595 }
3596#define APT_CHECK_SERVER(X, Y) \
3597 if (Rls->X != 0) \
3598 { \
3599 std::string const specialServerConfig = serverConfig + "::" + Y + #X + "::" + Rls.X(); \
3600 server = _config->Find(specialServerConfig); \
3601 APT_EMPTY_SERVER \
3602 }
3603 // this way e.g. Debian-Security can fall back to Debian
3604 APT_CHECK_SERVER(Label, "Override::")
3605 APT_CHECK_SERVER(Origin, "Override::")
3606
3607 if (RealFileExists(Rls.FileName()))
3608 {
3609 _error->PushToStack();
3610 FileFd rf;
3611 /* This can be costly. A caller wanting to get millions of URIs might
3612 want to do this on its own once and use Override settings.
3613 We don't do this here as Origin/Label are not as unique as they
3614 should be, so this could produce request-order-dependent anomalies */
3615 if (OpenMaybeClearSignedFile(Rls.FileName(), rf) == true)
3616 {
3617 pkgTagFile TagFile(&rf);
3618 pkgTagSection Section;
3619 if (TagFile.Step(Section) == true)
3620 server = Section.FindS("Changelogs");
3621 }
3622 _error->RevertToStack();
3623 APT_EMPTY_SERVER
3624 }
3625
3626 APT_CHECK_SERVER(Label, "")
3627 APT_CHECK_SERVER(Origin, "")
3628#undef APT_CHECK_SERVER
3629#undef APT_EMPTY_SERVER
3630 return "";
3631}
3632std::string pkgAcqChangelog::URI(pkgCache::RlsFileIterator const &Rls,
3633 char const * const Component, char const * const SrcName,
3634 char const * const SrcVersion)
3635{
3636 return URI(URITemplate(Rls), Component, SrcName, SrcVersion);
3637}
3638std::string pkgAcqChangelog::URI(std::string const &Template,
3639 char const * const Component, char const * const SrcName,
3640 char const * const SrcVersion)
3641{
3642 if (Template.find("@CHANGEPATH@") == std::string::npos)
3643 return "";
3644
3645 // the path is: COMPONENT/PREFIX/SRCNAME/SRCNAME_SRCVER, e.g. main/a/apt/1.1 or contrib/liba/libapt/2.0 (PREFIX is the first letter of the source name, or the first four characters for lib* sources)
3646 std::string Src = SrcName;
3647 std::string path = APT::String::Startswith(SrcName, "lib") ? Src.substr(0, 4) : Src.substr(0,1);
3648 path.append("/").append(Src).append("/");
3649 path.append(Src).append("_").append(StripEpoch(SrcVersion));
3650 // we omit component for releases without one (= flat-style repositories)
3651 if (Component != NULL && strlen(Component) != 0)
3652 path = std::string(Component) + "/" + path;
3653
3654 return SubstVar(Template, "@CHANGEPATH@", path);
3655}
3656 /*}}}*/
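// Worked example (editorial addition): with a template such as
//
//    https://changelogs.example.org/@CHANGEPATH@_changelog
//
// (the real template comes from the "Changelogs" field of the Release file
// or from Acquire::Changelogs::URI::* configuration), a request for
// SrcName "libapt-pkg", SrcVersion "2:2.0-1" and Component "main" yields the
// path main/liba/libapt-pkg/libapt-pkg_2.0-1 (the epoch is stripped and
// lib* sources get a four-character prefix), so the resulting URI is
//
//    https://changelogs.example.org/main/liba/libapt-pkg/libapt-pkg_2.0-1_changelog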
3657// AcqChangelog::Failed - Failure handler /*{{{*/
3658void pkgAcqChangelog::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf)
3659{
3660 Item::Failed(Message,Cnf);
3661
3662 std::string errText;
3663 // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
3664 strprintf(errText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
3665
3666 // The error is probably something technical like 404 Not Found
3667 if (ErrorText.empty())
3668 ErrorText = errText;
3669 else
3670 ErrorText = errText + " (" + ErrorText + ")";
3671}
3672 /*}}}*/
3673// AcqChangelog::Done - Item downloaded OK /*{{{*/
3674void pkgAcqChangelog::Done(string const &Message,HashStringList const &CalcHashes,
3675 pkgAcquire::MethodConfig const * const Cnf)
3676{
3677 Item::Done(Message,CalcHashes,Cnf);
3678 if (d->FinalFile.empty() == false)
3679 {
3680 if (RemoveFile("pkgAcqChangelog::Done", d->FinalFile) == false ||
3681 Rename(DestFile, d->FinalFile) == false)
3682 Status = StatError;
3683 }
3684
3685 Complete = true;
3686}
3687 /*}}}*/
3688pkgAcqChangelog::~pkgAcqChangelog() /*{{{*/
3689{
3690 if (TemporaryDirectory.empty() == false)
3691 {
3692 RemoveFile("~pkgAcqChangelog", DestFile);
3693 rmdir(TemporaryDirectory.c_str());
3694 }
3695 delete d;
3696}
3697 /*}}}*/
3698
3699// AcqFile::pkgAcqFile - Constructor /*{{{*/
3700pkgAcqFile::pkgAcqFile(pkgAcquire * const Owner,string const &URI, HashStringList const &Hashes,
3701 unsigned long long const Size,string const &Dsc,string const &ShortDesc,
3702 const string &DestDir, const string &DestFilename,
3703 bool const IsIndexFile) :
3704 Item(Owner), d(NULL), IsIndexFile(IsIndexFile), ExpectedHashes(Hashes)
3705{
3706 Retries = _config->FindI("Acquire::Retries",0);
3707
3708 if(!DestFilename.empty())
3709 DestFile = DestFilename;
3710 else if(!DestDir.empty())
3711 DestFile = DestDir + "/" + flNotDir(URI);
3712 else
3713 DestFile = flNotDir(URI);
3714
3715 // Create the item
3716 Desc.URI = URI;
3717 Desc.Description = Dsc;
3718 Desc.Owner = this;
3719
3720 // Set the short description to the archive component
3721 Desc.ShortDesc = ShortDesc;
3722
3723 // Get the transfer sizes
3724 FileSize = Size;
3725 struct stat Buf;
3726 if (stat(DestFile.c_str(),&Buf) == 0)
3727 {
3728 // Hmm, the partial file is too big, erase it
3729 if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
3730 RemoveFile("pkgAcqFile", DestFile);
3731 else
3732 PartialSize = Buf.st_size;
3733 }
3734
3735 QueueURI(Desc);
3736}
3737 /*}}}*/
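// Illustrative usage sketch (editorial addition; URL and paths are
// placeholders, and an empty HashStringList disables hash verification):
//
//    pkgAcquire Fetcher;
//    HashStringList Hashes;                   // empty: nothing to verify
//    new pkgAcqFile(&Fetcher, "https://deb.example.org/pool/main/a/apt/apt_1.1.dsc",
//                   Hashes, 0 /* size unknown */, "apt_1.1.dsc", "apt",
//                   "", "/tmp/apt_1.1.dsc");
//    if (Fetcher.Run() != pkgAcquire::Continue)
//       ; // handle the failure, e.g. by dumping _error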
3738// AcqFile::Done - Item downloaded OK /*{{{*/
3739void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes,
3740 pkgAcquire::MethodConfig const * const Cnf)
3741{
3742 Item::Done(Message,CalcHashes,Cnf);
3743
3744 std::string const FileName = LookupTag(Message,"Filename");
3745 Complete = true;
3746
3747 // The file's timestamp matches
3748 if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
3749 return;
3750
3751 // We have to copy it into place
3752 if (RealFileExists(DestFile.c_str()) == false)
3753 {
3754 Local = true;
3755 if (_config->FindB("Acquire::Source-Symlinks",true) == false ||
3756 Cnf->Removable == true)
3757 {
3758 Desc.URI = "copy:" + FileName;
3759 QueueURI(Desc);
3760 return;
3761 }
3762
3763 // Erase the file if it is a symlink so we can overwrite it
3764 struct stat St;
3765 if (lstat(DestFile.c_str(),&St) == 0)
3766 {
3767 if (S_ISLNK(St.st_mode) != 0)
3768 RemoveFile("pkgAcqFile::Done", DestFile);
3769 }
3770
3771 // Symlink the file
3772 if (symlink(FileName.c_str(),DestFile.c_str()) != 0)
3773 {
3774 _error->PushToStack();
3775 _error->Errno("pkgAcqFile::Done", "Symlinking file %s failed", DestFile.c_str());
3776 std::stringstream msg;
3777 _error->DumpErrors(msg, GlobalError::DEBUG, false);
3778 _error->RevertToStack();
3779 ErrorText = msg.str();
3780 Status = StatError;
3781 Complete = false;
3782 }
3783 }
3784}
3785 /*}}}*/
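// Editorial note (illustrative): the symlink-vs-copy decision above is
// controlled via apt.conf, e.g.
//
//    Acquire::Source-Symlinks "false";   // always copy local files into
//                                        // place instead of symlinking
//
// Files on removable media are always copied regardless of this setting.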
3786// AcqFile::Failed - Failure handler /*{{{*/
3787// ---------------------------------------------------------------------
3788/* Here we try other sources */
3789void pkgAcqFile::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf)
3790{
3791 Item::Failed(Message,Cnf);
3792
3793 // This is the retry counter
3794 if (Retries != 0 &&
3795 Cnf->LocalOnly == false &&
3796 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3797 {
3798 --Retries;
3799 QueueURI(Desc);
3800 Status = StatIdle;
3801 return;
3802 }
3803
3804}
3805 /*}}}*/
3806string pkgAcqFile::Custom600Headers() const /*{{{*/
3807{
3808 if (IsIndexFile)
3809 return "\nIndex-File: true";
3810 return "";
3811}
3812 /*}}}*/
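// Editorial note (illustrative sketch): the string returned above is appended
// to the "600 URI Acquire" message pkgAcquire sends to the transport method,
// so a fetch flagged as an index file looks roughly like
//
//    600 URI Acquire
//    URI: https://deb.example.org/some/index
//    Filename: /path/to/partial/destination
//    Index-File: true
//
// (host and paths are placeholders; methods may use Index-File to adjust
// their caching behaviour)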
3813pkgAcqFile::~pkgAcqFile() {}