1// -*- mode: cpp; mode: fold -*-
2// Description /*{{{*/
3// $Id: acquire-item.cc,v 1.46.2.9 2004/01/16 18:51:11 mdz Exp $
4/* ######################################################################
5
6 Acquire Item - Item to acquire
7
8 Each item can download to exactly one file at a time. This means you
9 cannot create an item that fetches two uri's to two files at the same
10 time. The pkgAcqIndex class creates a second class upon instantiation
11 to fetch the other index files because of this.
12
13 ##################################################################### */
14 /*}}}*/
15// Include Files /*{{{*/
16#include <config.h>
17
18#include <apt-pkg/acquire-item.h>
19#include <apt-pkg/configuration.h>
20#include <apt-pkg/aptconfiguration.h>
21#include <apt-pkg/sourcelist.h>
22#include <apt-pkg/error.h>
23#include <apt-pkg/strutl.h>
24#include <apt-pkg/fileutl.h>
25#include <apt-pkg/tagfile.h>
26#include <apt-pkg/metaindex.h>
27#include <apt-pkg/acquire.h>
28#include <apt-pkg/hashes.h>
29#include <apt-pkg/indexfile.h>
30#include <apt-pkg/pkgcache.h>
31#include <apt-pkg/cacheiterators.h>
32#include <apt-pkg/pkgrecords.h>
33#include <apt-pkg/gpgv.h>
34
35#include <algorithm>
36#include <stddef.h>
37#include <stdlib.h>
38#include <string.h>
39#include <iostream>
40#include <vector>
41#include <sys/stat.h>
42#include <unistd.h>
43#include <errno.h>
44#include <string>
45#include <stdio.h>
46#include <ctime>
47#include <sstream>
48#include <numeric>
49
50#include <apti18n.h>
51 /*}}}*/
52
53using namespace std;
54
55static void printHashSumComparison(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/
56{
57 if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false)
58 return;
59 std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl;
60 for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs)
61 std::cerr << "\t- " << hs->toStr() << std::endl;
62 std::cerr << " Actual Hash: " << std::endl;
63 for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs)
64 std::cerr << "\t- " << hs->toStr() << std::endl;
65}
66 /*}}}*/
67static std::string GetPartialFileName(std::string const &file) /*{{{*/
68{
69 std::string DestFile = _config->FindDir("Dir::State::lists") + "partial/";
70 DestFile += file;
71 return DestFile;
72}
73 /*}}}*/
74static std::string GetPartialFileNameFromURI(std::string const &uri) /*{{{*/
75{
76 return GetPartialFileName(URItoFileName(uri));
77}
78 /*}}}*/
79static std::string GetFinalFileNameFromURI(std::string const &uri) /*{{{*/
80{
81 return _config->FindDir("Dir::State::lists") + URItoFileName(uri);
82}
83 /*}}}*/
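// As a rough example of the naming scheme above (hypothetical URI, default
// Dir::State::lists): "http://deb.debian.org/debian/dists/stable/InRelease" is
// flattened by URItoFileName() to "deb.debian.org_debian_dists_stable_InRelease",
// so it lives under lists/partial/ while being fetched and directly under lists/
// once it becomes final.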
84static std::string GetKeepCompressedFileName(std::string file, IndexTarget const &Target)/*{{{*/
85{
86 if (Target.KeepCompressed == false)
87 return file;
88
89 std::string const KeepCompressedAs = Target.Option(IndexTarget::KEEPCOMPRESSEDAS);
90 if (KeepCompressedAs.empty() == false)
91 {
92 std::string const ext = KeepCompressedAs.substr(0, KeepCompressedAs.find(' '));
93 if (ext != "uncompressed")
94 file.append(".").append(ext);
95 }
96 return file;
97}
98 /*}}}*/
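// As a rough example (hypothetical option value): with Target.KeepCompressed set and
// KEEPCOMPRESSEDAS being "xz gz uncompressed", only the first token is used, so a
// file named "Contents-amd64" becomes "Contents-amd64.xz"; a first token of
// "uncompressed" leaves the name as-is.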
99static std::string GetMergeDiffsPatchFileName(std::string const &Final, std::string const &Patch)/*{{{*/
100{
101 // rred expects the patch as $FinalFile.ed.$patchname.gz
102 return Final + ".ed." + Patch + ".gz";
103}
104 /*}}}*/
105static std::string GetDiffsPatchFileName(std::string const &Final) /*{{{*/
106{
107 // rred expects the patch as $FinalFile.ed
108 return Final + ".ed";
109}
110 /*}}}*/
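// As a rough example (hypothetical names): for a final file "Packages" and a patch
// called "2024-01-01-0000.00", the merged-diff helper above produces
// "Packages.ed.2024-01-01-0000.00.gz" and the single-diff helper produces
// "Packages.ed", matching the layout the rred method expects.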
111static std::string GetExistingFilename(std::string const &File) /*{{{*/
112{
113 if (RealFileExists(File))
114 return File;
115 for (auto const &type : APT::Configuration::getCompressorExtensions())
116 {
117 std::string const Final = File + type;
118 if (RealFileExists(Final))
119 return Final;
120 }
121 return "";
122}
123 /*}}}*/
124static std::string GetDiffIndexFileName(std::string const &Name) /*{{{*/
125{
126 return Name + ".diff/Index";
127}
128 /*}}}*/
129static std::string GetDiffIndexURI(IndexTarget const &Target) /*{{{*/
130{
131 return Target.URI + ".diff/Index";
132}
133 /*}}}*/
134
135static void ReportMirrorFailureToCentral(pkgAcquire::Item const &I, std::string const &FailCode, std::string const &Details)/*{{{*/
136{
137 // we only act if a mirror was used at all
138 if(I.UsedMirror.empty())
139 return;
140#if 0
141 std::cerr << "\nReportMirrorFailure: "
142 << UsedMirror
143 << " Uri: " << DescURI()
144 << " FailCode: "
145 << FailCode << std::endl;
146#endif
147 string const report = _config->Find("Methods::Mirror::ProblemReporting",
148 "/usr/lib/apt/apt-report-mirror-failure");
149 if(!FileExists(report))
150 return;
151
152 std::vector<char const*> const Args = {
153 report.c_str(),
154 I.UsedMirror.c_str(),
155 I.DescURI().c_str(),
156 FailCode.c_str(),
157 Details.c_str(),
158 NULL
159 };
160
161 pid_t pid = ExecFork();
162 if(pid < 0)
163 {
164 _error->Error("ReportMirrorFailure Fork failed");
165 return;
166 }
167 else if(pid == 0)
168 {
169 execvp(Args[0], (char**)Args.data());
170 std::cerr << "Could not exec " << Args[0] << std::endl;
171 _exit(100);
172 }
173 if(!ExecWait(pid, "report-mirror-failure"))
174 _error->Warning("Couldn't report problem to '%s'", report.c_str());
175}
176 /*}}}*/
177
178static APT_NONNULL(2) bool MessageInsecureRepository(bool const isError, char const * const msg, std::string const &repo)/*{{{*/
179{
180 std::string m;
181 strprintf(m, msg, repo.c_str());
182 if (isError)
183 {
184 _error->Error("%s", m.c_str());
185 _error->Notice("%s", _("Updating from such a repository can't be done securely, and is therefore disabled by default."));
186 }
187 else
188 {
189 _error->Warning("%s", m.c_str());
190 _error->Notice("%s", _("Data from such a repository can't be authenticated and is therefore potentially dangerous to use."));
191 }
192 _error->Notice("%s", _("See apt-secure(8) manpage for repository creation and user configuration details."));
193 return false;
194}
195 /*}}}*/
196// AllowInsecureRepositories /*{{{*/
197enum class InsecureType { UNSIGNED, WEAK, NORELEASE };
198static bool TargetIsAllowedToBe(IndexTarget const &Target, InsecureType const type)
199{
200 if (_config->FindB("Acquire::AllowInsecureRepositories"))
201 return true;
202
203 if (Target.OptionBool(IndexTarget::ALLOW_INSECURE))
204 return true;
205
206 switch (type)
207 {
208 case InsecureType::UNSIGNED: break;
209 case InsecureType::NORELEASE: break;
210 case InsecureType::WEAK:
211 if (_config->FindB("Acquire::AllowWeakRepositories"))
212 return true;
213 if (Target.OptionBool(IndexTarget::ALLOW_WEAK))
214 return true;
215 break;
216 }
217 return false;
218}
219static bool APT_NONNULL(3, 4, 5) AllowInsecureRepositories(InsecureType const msg, std::string const &repo,
220 metaIndex const * const MetaIndexParser, pkgAcqMetaClearSig * const TransactionManager, pkgAcquire::Item * const I)
221{
222 // we skip weak downgrades as it's unlikely that a repository gets really weaker –
223 // it's more realistic that apt got pickier in a newer version
224 if (msg != InsecureType::WEAK)
225 {
226 std::string const FinalInRelease = TransactionManager->GetFinalFilename();
227 std::string const FinalReleasegpg = FinalInRelease.substr(0, FinalInRelease.length() - strlen("InRelease")) + "Release.gpg";
228 if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease))
229 {
230 char const * msgstr = nullptr;
231 switch (msg)
232 {
233 case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is no longer signed."); break;
234 case InsecureType::NORELEASE: msgstr = _("The repository '%s' no longer has a Release file."); break;
235 case InsecureType::WEAK: /* unreachable */ break;
236 }
237 if (_config->FindB("Acquire::AllowDowngradeToInsecureRepositories") ||
238 TransactionManager->Target.OptionBool(IndexTarget::ALLOW_DOWNGRADE_TO_INSECURE))
239 {
240 // meh, the user wants to take risks (we still mark the packages
241 // from this repository as unauthenticated)
242 _error->Warning(msgstr, repo.c_str());
243 _error->Warning(_("This is normally not allowed, but the option "
244 "Acquire::AllowDowngradeToInsecureRepositories was "
245 "given to override it."));
246 } else {
247 MessageInsecureRepository(true, msgstr, repo);
248 TransactionManager->AbortTransaction();
249 I->Status = pkgAcquire::Item::StatError;
250 return false;
251 }
252 }
253 }
254
255 if(MetaIndexParser->GetTrusted() == metaIndex::TRI_YES)
256 return true;
257
258 char const * msgstr = nullptr;
259 switch (msg)
260 {
261 case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is not signed."); break;
262 case InsecureType::NORELEASE: msgstr = _("The repository '%s' does not have a Release file."); break;
263 case InsecureType::WEAK: msgstr = _("The repository '%s' provides only weak security information."); break;
264 }
265
266 if (TargetIsAllowedToBe(TransactionManager->Target, msg) == true)
267 {
268 MessageInsecureRepository(false, msgstr, repo);
269 return true;
270 }
271
272 MessageInsecureRepository(true, msgstr, repo);
273 TransactionManager->AbortTransaction();
274 I->Status = pkgAcquire::Item::StatError;
275 return false;
276}
277 /*}}}*/
278static HashStringList GetExpectedHashesFromFor(metaIndex * const Parser, std::string const &MetaKey)/*{{{*/
279{
280 if (Parser == NULL)
281 return HashStringList();
282 metaIndex::checkSum * const R = Parser->Lookup(MetaKey);
283 if (R == NULL)
284 return HashStringList();
285 return R->Hashes;
286}
287 /*}}}*/
288
289// all ::HashesRequired and ::GetExpectedHashes implementations /*{{{*/
290/* ::GetExpectedHashes is abstract and has to be implemented by all subclasses.
291 It is best to implement it as broadly as possible, while ::HashesRequired defaults
292 to true and should be as restrictive as possible for false cases. Note that if
293 a hash is returned by ::GetExpectedHashes it must match. Only if it doesn't return
294 one is ::HashesRequired called to evaluate if it's okay to have no hashes. */
295APT_CONST bool pkgAcqTransactionItem::HashesRequired() const
296{
297 /* signed repositories obviously have a parser and good hashes.
298 unsigned repositories, too, as even if we can't trust them for security,
299 we can at least trust them for integrity of the download itself.
300 Only repositories without a Release file can (obviously) not have
301 hashes – and they are very uncommon and strongly discouraged */
302 if (TransactionManager->MetaIndexParser->GetLoadedSuccessfully() != metaIndex::TRI_YES)
303 return false;
304 if (TargetIsAllowedToBe(Target, InsecureType::WEAK))
305 {
306 /* If we allow weak hashes, we check that we have some (weak) and then
307 declare hashes not needed. That will tip us in the right direction
308 as if hashes exist, they will be used, even if not required */
309 auto const hsl = GetExpectedHashes();
310 if (hsl.usable())
311 return true;
312 if (hsl.empty() == false)
313 return false;
314 }
315 return true;
316}
317HashStringList pkgAcqTransactionItem::GetExpectedHashes() const
318{
319 return GetExpectedHashesFor(GetMetaKey());
320}
321
322APT_CONST bool pkgAcqMetaBase::HashesRequired() const
323{
324 // Release and co have no hashes 'by design'.
325 return false;
326}
327HashStringList pkgAcqMetaBase::GetExpectedHashes() const
328{
329 return HashStringList();
330}
331
332APT_CONST bool pkgAcqIndexDiffs::HashesRequired() const
333{
334 /* We can't check hashes of rred result as we don't know what the
335 hash of the file will be. We just know the hash of the patch(es),
336 the hash of the file they will apply on and the hash of the resulting
337 file. */
338 if (State == StateFetchDiff)
339 return true;
340 return false;
341}
342HashStringList pkgAcqIndexDiffs::GetExpectedHashes() const
343{
344 if (State == StateFetchDiff)
345 return available_patches[0].download_hashes;
346 return HashStringList();
347}
348
349APT_CONST bool pkgAcqIndexMergeDiffs::HashesRequired() const
350{
351 /* @see #pkgAcqIndexDiffs::HashesRequired, with the difference that
352 we can check the rred result after all patches are applied as
353 we know the expected result rather than potentially applying more patches */
354 if (State == StateFetchDiff)
355 return true;
356 return State == StateApplyDiff;
357}
358HashStringList pkgAcqIndexMergeDiffs::GetExpectedHashes() const
359{
360 if (State == StateFetchDiff)
361 return patch.download_hashes;
362 else if (State == StateApplyDiff)
363 return GetExpectedHashesFor(Target.MetaKey);
364 return HashStringList();
365}
366
367APT_CONST bool pkgAcqArchive::HashesRequired() const
368{
369 return LocalSource == false;
370}
371HashStringList pkgAcqArchive::GetExpectedHashes() const
372{
373 // figured out while parsing the records
374 return ExpectedHashes;
375}
376
377APT_CONST bool pkgAcqFile::HashesRequired() const
378{
379 // supplied as parameter at creation time, so the caller decides
380 return ExpectedHashes.usable();
381}
382HashStringList pkgAcqFile::GetExpectedHashes() const
383{
384 return ExpectedHashes;
385}
386 /*}}}*/
387// Acquire::Item::QueueURI and specialisations from child classes /*{{{*/
388bool pkgAcquire::Item::QueueURI(pkgAcquire::ItemDesc &Item)
389{
390 Owner->Enqueue(Item);
391 return true;
392}
393/* The idea here is that an item isn't queued if it exists on disk and the
394 transaction manager was a hit, as this means that the files it contains
395 the checksums for can't have been updated either (or they have been, and we are asking
396 for a hashsum mismatch to happen, which helps nobody) */
397bool pkgAcqTransactionItem::QueueURI(pkgAcquire::ItemDesc &Item)
398{
399 if (TransactionManager->State != TransactionStarted)
400 {
401 if (_config->FindB("Debug::Acquire::Transaction", false))
402 std::clog << "Skip " << Target.URI << " as transaction was already dealt with!" << std::endl;
403 return false;
404 }
405 std::string const FinalFile = GetFinalFilename();
406 if (TransactionManager->IMSHit == true && FileExists(FinalFile) == true)
407 {
408 PartialFile = DestFile = FinalFile;
409 Status = StatDone;
410 return false;
411 }
412 // If we got the InRelease file via a mirror, pick all indexes directly from this mirror, too
413 if (TransactionManager->BaseURI.empty() == false && UsedMirror.empty() &&
414 URI::SiteOnly(Item.URI) != URI::SiteOnly(TransactionManager->BaseURI))
415 {
416 // this ensures we rewrite only once and only the first step
417 auto const OldBaseURI = Target.Option(IndexTarget::BASE_URI);
418 if (OldBaseURI.empty() == false && APT::String::Startswith(Item.URI, OldBaseURI))
419 {
420 auto const ExtraPath = Item.URI.substr(OldBaseURI.length());
421 Item.URI = flCombine(TransactionManager->BaseURI, ExtraPath);
422 UsedMirror = TransactionManager->UsedMirror;
423 if (Item.Description.find(" ") != string::npos)
424 Item.Description.replace(0, Item.Description.find(" "), UsedMirror);
425 }
426 }
427 return pkgAcquire::Item::QueueURI(Item);
428}
429/* The transaction manager InRelease itself (or its older sisters-in-law
430 Release & Release.gpg) is always queued as this allows us to rerun gpgv
431 on it to verify that we aren't stalled with old files */
432bool pkgAcqMetaBase::QueueURI(pkgAcquire::ItemDesc &Item)
433{
434 return pkgAcquire::Item::QueueURI(Item);
435}
436/* the Diff/Index also needs to queue the up-to-date complete index file
437 to ensure that the list cleaner isn't eating it */
438bool pkgAcqDiffIndex::QueueURI(pkgAcquire::ItemDesc &Item)
439{
440 if (pkgAcqTransactionItem::QueueURI(Item) == true)
441 return true;
442 QueueOnIMSHit();
443 return false;
444}
445 /*}}}*/
446// Acquire::Item::GetFinalFilename and specialisations for child classes /*{{{*/
447std::string pkgAcquire::Item::GetFinalFilename() const
448{
449 // Beware: Desc.URI is modified by redirections
450 return GetFinalFileNameFromURI(Desc.URI);
451}
452std::string pkgAcqDiffIndex::GetFinalFilename() const
453{
454 std::string const FinalFile = GetFinalFileNameFromURI(GetDiffIndexURI(Target));
455 // we don't want to recompress, so let's keep whatever we got
456 if (CurrentCompressionExtension == "uncompressed")
457 return FinalFile;
458 return FinalFile + "." + CurrentCompressionExtension;
459}
460std::string pkgAcqIndex::GetFinalFilename() const
461{
462 std::string const FinalFile = GetFinalFileNameFromURI(Target.URI);
463 return GetKeepCompressedFileName(FinalFile, Target);
464}
465std::string pkgAcqMetaSig::GetFinalFilename() const
466{
467 return GetFinalFileNameFromURI(Target.URI);
468}
469std::string pkgAcqBaseIndex::GetFinalFilename() const
470{
471 return GetFinalFileNameFromURI(Target.URI);
472}
473std::string pkgAcqMetaBase::GetFinalFilename() const
474{
475 return GetFinalFileNameFromURI(Target.URI);
476}
477std::string pkgAcqArchive::GetFinalFilename() const
478{
479 return _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename);
480}
481 /*}}}*/
482// pkgAcqTransactionItem::GetMetaKey and specialisations for child classes /*{{{*/
483std::string pkgAcqTransactionItem::GetMetaKey() const
484{
485 return Target.MetaKey;
486}
487std::string pkgAcqIndex::GetMetaKey() const
488{
489 if (Stage == STAGE_DECOMPRESS_AND_VERIFY || CurrentCompressionExtension == "uncompressed")
490 return Target.MetaKey;
491 return Target.MetaKey + "." + CurrentCompressionExtension;
492}
493std::string pkgAcqDiffIndex::GetMetaKey() const
494{
495 auto const metakey = GetDiffIndexFileName(Target.MetaKey);
496 if (CurrentCompressionExtension == "uncompressed")
497 return metakey;
498 return metakey + "." + CurrentCompressionExtension;
499}
500 /*}}}*/
501//pkgAcqTransactionItem::TransactionState and specialisations for child classes /*{{{*/
502bool pkgAcqTransactionItem::TransactionState(TransactionStates const state)
503{
504 bool const Debug = _config->FindB("Debug::Acquire::Transaction", false);
505 switch(state)
506 {
507 case TransactionStarted: _error->Fatal("Item %s changed to invalid transaction start state!", Target.URI.c_str()); break;
508 case TransactionAbort:
509 if(Debug == true)
510 std::clog << " Cancel: " << DestFile << std::endl;
511 if (Status == pkgAcquire::Item::StatIdle)
512 {
513 Status = pkgAcquire::Item::StatDone;
514 Dequeue();
515 }
516 break;
517 case TransactionCommit:
518 if(PartialFile.empty() == false)
519 {
520 bool sameFile = (PartialFile == DestFile);
521 // we use symlinks on IMS-Hit to avoid copies
522 if (RealFileExists(DestFile))
523 {
524 struct stat Buf;
525 if (lstat(PartialFile.c_str(), &Buf) != -1)
526 {
527 if (S_ISLNK(Buf.st_mode) && Buf.st_size > 0)
528 {
529 char partial[Buf.st_size + 1];
530 ssize_t const sp = readlink(PartialFile.c_str(), partial, Buf.st_size);
531 if (sp == -1)
532 _error->Errno("pkgAcqTransactionItem::TransactionState-sp", _("Failed to readlink %s"), PartialFile.c_str());
533 else
534 {
535 partial[sp] = '\0';
536 sameFile = (DestFile == partial);
537 }
538 }
539 }
540 else
541 _error->Errno("pkgAcqTransactionItem::TransactionState-stat", _("Failed to stat %s"), PartialFile.c_str());
542 }
543 if (sameFile == false)
544 {
545 // ensure that even without lists-cleanup all compressions are nuked
546 std::string FinalFile = GetFinalFileNameFromURI(Target.URI);
547 if (FileExists(FinalFile))
548 {
549 if(Debug == true)
550 std::clog << "rm " << FinalFile << " # " << DescURI() << std::endl;
551 if (RemoveFile("TransactionStates-Cleanup", FinalFile) == false)
552 return false;
553 }
554 for (auto const &ext: APT::Configuration::getCompressorExtensions())
555 {
556 auto const Final = FinalFile + ext;
557 if (FileExists(Final))
558 {
559 if(Debug == true)
560 std::clog << "rm " << Final << " # " << DescURI() << std::endl;
561 if (RemoveFile("TransactionStates-Cleanup", Final) == false)
562 return false;
563 }
564 }
565 if(Debug == true)
566 std::clog << "mv " << PartialFile << " -> "<< DestFile << " # " << DescURI() << std::endl;
567 if (Rename(PartialFile, DestFile) == false)
568 return false;
569 }
570 else if(Debug == true)
571 std::clog << "keep " << PartialFile << " # " << DescURI() << std::endl;
572
573 } else {
574 if(Debug == true)
575 std::clog << "rm " << DestFile << " # " << DescURI() << std::endl;
576 if (RemoveFile("TransItem::TransactionCommit", DestFile) == false)
577 return false;
578 }
579 break;
580 }
581 return true;
582}
583bool pkgAcqMetaBase::TransactionState(TransactionStates const state)
584{
585 // Do not remove InRelease on IMSHit of Release.gpg [yes, this is very edgecasey]
586 if (TransactionManager->IMSHit == false)
587 return pkgAcqTransactionItem::TransactionState(state);
588 return true;
589}
590bool pkgAcqIndex::TransactionState(TransactionStates const state)
591{
592 if (pkgAcqTransactionItem::TransactionState(state) == false)
593 return false;
594
595 switch (state)
596 {
597 case TransactionStarted: _error->Fatal("AcqIndex %s changed to invalid transaction start state!", Target.URI.c_str()); break;
598 case TransactionAbort:
599 if (Stage == STAGE_DECOMPRESS_AND_VERIFY)
600 {
601 // keep the compressed file, but drop the decompressed
602 EraseFileName.clear();
603 if (PartialFile.empty() == false && flExtension(PartialFile) != CurrentCompressionExtension)
604 RemoveFile("TransactionAbort", PartialFile);
605 }
606 break;
607 case TransactionCommit:
608 if (EraseFileName.empty() == false)
609 RemoveFile("AcqIndex::TransactionCommit", EraseFileName);
610 break;
611 }
612 return true;
613}
614bool pkgAcqDiffIndex::TransactionState(TransactionStates const state)
615{
616 if (pkgAcqTransactionItem::TransactionState(state) == false)
617 return false;
618
619 switch (state)
620 {
621 case TransactionStarted: _error->Fatal("Item %s changed to invalid transaction start state!", Target.URI.c_str()); break;
622 case TransactionCommit:
623 break;
624 case TransactionAbort:
625 std::string const Partial = GetPartialFileNameFromURI(Target.URI);
626 RemoveFile("TransactionAbort", Partial);
627 break;
628 }
629
630 return true;
631}
632 /*}}}*/
633
634class APT_HIDDEN NoActionItem : public pkgAcquire::Item /*{{{*/
635/* The sole purpose of this class is having an item which does nothing but
636 reach its done state, to prevent cleanup from deleting the mentioned file.
637 Handy in cases in which we know we have the file already, like IMS-Hits. */
638{
639 IndexTarget const Target;
640 public:
641 virtual std::string DescURI() const APT_OVERRIDE {return Target.URI;};
642 virtual HashStringList GetExpectedHashes() const APT_OVERRIDE {return HashStringList();};
643
644 NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target) :
645 pkgAcquire::Item(Owner), Target(Target)
646 {
647 Status = StatDone;
648 DestFile = GetFinalFileNameFromURI(Target.URI);
649 }
650 NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target, std::string const &FinalFile) :
651 pkgAcquire::Item(Owner), Target(Target)
652 {
653 Status = StatDone;
654 DestFile = FinalFile;
655 }
656};
657 /*}}}*/
658class APT_HIDDEN CleanupItem : public pkgAcqTransactionItem /*{{{*/
659/* This class ensures that a file which was configured but isn't downloaded
660 for various reasons isn't kept in an old version in the lists directory.
661 In a way it's the reverse of NoActionItem, as it helps with removing files
662 even if the lists-cleanup is deactivated. */
663{
664 public:
665 virtual std::string DescURI() const APT_OVERRIDE {return Target.URI;};
666 virtual HashStringList GetExpectedHashes() const APT_OVERRIDE {return HashStringList();};
667
668 CleanupItem(pkgAcquire * const Owner, pkgAcqMetaClearSig * const TransactionManager, IndexTarget const &Target) :
669 pkgAcqTransactionItem(Owner, TransactionManager, Target)
670 {
671 Status = StatDone;
672 DestFile = GetFinalFileNameFromURI(Target.URI);
673 }
674 bool TransactionState(TransactionStates const state) APT_OVERRIDE
675 {
676 switch (state)
677 {
678 case TransactionStarted:
679 break;
680 case TransactionAbort:
681 break;
682 case TransactionCommit:
683 if (_config->FindB("Debug::Acquire::Transaction", false) == true)
684 std::clog << "rm " << DestFile << " # " << DescURI() << std::endl;
685 if (RemoveFile("TransItem::TransactionCommit", DestFile) == false)
686 return false;
687 break;
688 }
689 return true;
690 }
691};
692 /*}}}*/
693
694// Acquire::Item::Item - Constructor /*{{{*/
695class pkgAcquire::Item::Private
696{
697public:
698 std::vector<std::string> PastRedirections;
699};
700APT_IGNORE_DEPRECATED_PUSH
701pkgAcquire::Item::Item(pkgAcquire * const owner) :
702 FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), Local(false),
703 QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(new Private())
704{
705 Owner->Add(this);
706 Status = StatIdle;
707}
708APT_IGNORE_DEPRECATED_POP
709 /*}}}*/
710// Acquire::Item::~Item - Destructor /*{{{*/
711pkgAcquire::Item::~Item()
712{
713 Owner->Remove(this);
714 delete d;
715}
716 /*}}}*/
717std::string pkgAcquire::Item::Custom600Headers() const /*{{{*/
718{
719 return std::string();
720}
721 /*}}}*/
722std::string pkgAcquire::Item::ShortDesc() const /*{{{*/
723{
724 return DescURI();
725}
726 /*}}}*/
727APT_CONST void pkgAcquire::Item::Finished() /*{{{*/
728{
729}
730 /*}}}*/
731APT_PURE pkgAcquire * pkgAcquire::Item::GetOwner() const /*{{{*/
732{
733 return Owner;
734}
735 /*}}}*/
736APT_CONST pkgAcquire::ItemDesc &pkgAcquire::Item::GetItemDesc() /*{{{*/
737{
738 return Desc;
739}
740 /*}}}*/
741APT_CONST bool pkgAcquire::Item::IsTrusted() const /*{{{*/
742{
743 return false;
744}
745 /*}}}*/
746// Acquire::Item::Failed - Item failed to download /*{{{*/
747// ---------------------------------------------------------------------
748/* We return to an idle state if there are still other queues that could
749 fetch this object */
750void pkgAcquire::Item::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
751{
752 if (QueueCounter <= 1)
753 {
754 /* This indicates that the file is not available right now but might
755 be sometime later. If we do a retry cycle then this should be
756 retried [CDROMs] */
757 if (Cnf != NULL && Cnf->LocalOnly == true &&
758 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
759 {
760 Status = StatIdle;
761 Dequeue();
762 return;
763 }
764
765 switch (Status)
766 {
767 case StatIdle:
768 case StatFetching:
769 case StatDone:
770 Status = StatError;
771 break;
772 case StatAuthError:
773 case StatError:
774 case StatTransientNetworkError:
775 break;
776 }
777 Complete = false;
778 Dequeue();
779 }
780
781 string const FailReason = LookupTag(Message, "FailReason");
782 enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, WEAK_HASHSUMS, REDIRECTION_LOOP, OTHER } failreason = OTHER;
783 if ( FailReason == "MaximumSizeExceeded")
784 failreason = MAXIMUM_SIZE_EXCEEDED;
785 else if ( FailReason == "WeakHashSums")
786 failreason = WEAK_HASHSUMS;
787 else if (FailReason == "RedirectionLoop")
788 failreason = REDIRECTION_LOOP;
789 else if (Status == StatAuthError)
790 failreason = HASHSUM_MISMATCH;
791
792 if(ErrorText.empty())
793 {
794 std::ostringstream out;
795 switch (failreason)
796 {
797 case HASHSUM_MISMATCH:
798 out << _("Hash Sum mismatch") << std::endl;
799 break;
800 case WEAK_HASHSUMS:
801 out << _("Insufficient information available to perform this download securely") << std::endl;
802 break;
803 case REDIRECTION_LOOP:
804 out << "Redirection loop encountered" << std::endl;
805 break;
806 case MAXIMUM_SIZE_EXCEEDED:
807 out << LookupTag(Message, "Message") << std::endl;
808 break;
809 case OTHER:
810 out << LookupTag(Message, "Message");
811 break;
812 }
813
814 if (Status == StatAuthError)
815 {
816 auto const ExpectedHashes = GetExpectedHashes();
817 if (ExpectedHashes.empty() == false)
818 {
819 out << "Hashes of expected file:" << std::endl;
820 for (auto const &hs: ExpectedHashes)
821 {
822 out << " - " << hs.toStr();
823 if (hs.usable() == false)
824 out << " [weak]";
825 out << std::endl;
826 }
827 }
828 if (failreason == HASHSUM_MISMATCH)
829 {
830 out << "Hashes of received file:" << std::endl;
831 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
832 {
833 std::string const tagname = std::string(*type) + "-Hash";
834 std::string const hashsum = LookupTag(Message, tagname.c_str());
835 if (hashsum.empty() == false)
836 {
837 auto const hs = HashString(*type, hashsum);
838 out << " - " << hs.toStr();
839 if (hs.usable() == false)
840 out << " [weak]";
841 out << std::endl;
842 }
843 }
844 out << "Last modification reported: " << LookupTag(Message, "Last-Modified", "<none>") << std::endl;
845 }
846 }
847 ErrorText = out.str();
848 }
849
850 switch (failreason)
851 {
852 case MAXIMUM_SIZE_EXCEEDED: RenameOnError(MaximumSizeExceeded); break;
853 case HASHSUM_MISMATCH: RenameOnError(HashSumMismatch); break;
854 case WEAK_HASHSUMS: break;
855 case REDIRECTION_LOOP: break;
856 case OTHER: break;
857 }
858
859 if (FailReason.empty() == false)
860 ReportMirrorFailureToCentral(*this, FailReason, ErrorText);
861 else
862 ReportMirrorFailureToCentral(*this, ErrorText, ErrorText);
863
864 if (QueueCounter > 1)
865 Status = StatIdle;
866}
867 /*}}}*/
868// Acquire::Item::Start - Item has begun to download /*{{{*/
869// ---------------------------------------------------------------------
870/* Stash status and the file size. Note that setting Complete means
871 sub-phases of the acquire process such as decompression are operating */
872void pkgAcquire::Item::Start(string const &/*Message*/, unsigned long long const Size)
873{
874 Status = StatFetching;
875 ErrorText.clear();
876 if (FileSize == 0 && Complete == false)
877 FileSize = Size;
878}
879 /*}}}*/
880// Acquire::Item::VerifyDone - check if Item was downloaded OK /*{{{*/
881/* Note that hash-verification is 'hardcoded' in acquire-worker and has
882 * already passed if this method is called. */
883bool pkgAcquire::Item::VerifyDone(std::string const &Message,
884 pkgAcquire::MethodConfig const * const /*Cnf*/)
885{
886 std::string const FileName = LookupTag(Message,"Filename");
887 if (FileName.empty() == true)
888 {
889 Status = StatError;
890 ErrorText = "Method gave a blank filename";
891 return false;
892 }
893
894 return true;
895}
896 /*}}}*/
897// Acquire::Item::Done - Item downloaded OK /*{{{*/
898void pkgAcquire::Item::Done(string const &/*Message*/, HashStringList const &Hashes,
899 pkgAcquire::MethodConfig const * const /*Cnf*/)
900{
901 // We just downloaded something..
902 if (FileSize == 0)
903 {
904 unsigned long long const downloadedSize = Hashes.FileSize();
905 if (downloadedSize != 0)
906 {
907 FileSize = downloadedSize;
908 }
909 }
910 Status = StatDone;
911 ErrorText = string();
912 Owner->Dequeue(this);
913}
914 /*}}}*/
915// Acquire::Item::Rename - Rename a file /*{{{*/
916// ---------------------------------------------------------------------
917/* This helper function is used by a lot of item methods as their final
918 step */
919bool pkgAcquire::Item::Rename(string const &From,string const &To)
920{
921 if (From == To || rename(From.c_str(),To.c_str()) == 0)
922 return true;
923
924 std::string S;
925 strprintf(S, _("rename failed, %s (%s -> %s)."), strerror(errno),
926 From.c_str(),To.c_str());
927 Status = StatError;
928 if (ErrorText.empty())
929 ErrorText = S;
930 else
931 ErrorText = ErrorText + ": " + S;
932 return false;
933}
934 /*}}}*/
935void pkgAcquire::Item::Dequeue() /*{{{*/
936{
937 Owner->Dequeue(this);
938}
939 /*}}}*/
940bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
941{
942 if (RealFileExists(DestFile))
943 Rename(DestFile, DestFile + ".FAILED");
944
945 std::string errtext;
946 switch (error)
947 {
948 case HashSumMismatch:
949 errtext = _("Hash Sum mismatch");
950 break;
951 case SizeMismatch:
952 errtext = _("Size mismatch");
953 Status = StatAuthError;
954 break;
955 case InvalidFormat:
956 errtext = _("Invalid file format");
957 Status = StatError;
958 // do not report as usually it's not the mirror's fault, but Portal/Proxy
959 break;
960 case SignatureError:
961 errtext = _("Signature error");
962 Status = StatError;
963 break;
964 case NotClearsigned:
965 strprintf(errtext, _("Clearsigned file isn't valid, got '%s' (does the network require authentication?)"), "NOSPLIT");
966 Status = StatAuthError;
967 break;
968 case MaximumSizeExceeded:
969 // the method is expected to report a good error for this
970 break;
971 case PDiffError:
972 // no handling here, done by callers
973 break;
974 }
975 if (ErrorText.empty())
976 ErrorText = errtext;
977 return false;
978}
979 /*}}}*/
980void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess)/*{{{*/
981{
982 ActiveSubprocess = subprocess;
983 APT_IGNORE_DEPRECATED(Mode = ActiveSubprocess.c_str();)
984}
985 /*}}}*/
986// Acquire::Item::ReportMirrorFailure /*{{{*/
987void pkgAcquire::Item::ReportMirrorFailure(std::string const &FailCode)
988{
989 ReportMirrorFailureToCentral(*this, FailCode, FailCode);
990}
991 /*}}}*/
992std::string pkgAcquire::Item::HashSum() const /*{{{*/
993{
994 HashStringList const hashes = GetExpectedHashes();
995 HashString const * const hs = hashes.find(NULL);
996 return hs != NULL ? hs->toStr() : "";
997}
998 /*}}}*/
999bool pkgAcquire::Item::IsRedirectionLoop(std::string const &NewURI) /*{{{*/
1000{
1001 // store can fail due to permission errors and the item will "loop" then
1002 if (APT::String::Startswith(NewURI, "store:"))
1003 return false;
1004 if (d->PastRedirections.empty())
1005 {
1006 d->PastRedirections.push_back(NewURI);
1007 return false;
1008 }
1009 auto const LastURI = std::prev(d->PastRedirections.end());
1010 // redirections to the same file are a way of restarting/rescheduling,
1011 // individual methods will have to make sure that they aren't looping this way
1012 if (*LastURI == NewURI)
1013 return false;
1014 if (std::find(d->PastRedirections.begin(), LastURI, NewURI) != LastURI)
1015 return true;
1016 d->PastRedirections.push_back(NewURI);
1017 return false;
1018}
1019 /*}}}*/
1020
1021pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/
1022 pkgAcqMetaClearSig * const transactionManager, IndexTarget const &target) :
1023 pkgAcquire::Item(Owner), d(NULL), Target(target), TransactionManager(transactionManager)
1024{
1025 if (TransactionManager != this)
1026 TransactionManager->Add(this);
1027}
1028 /*}}}*/
1029pkgAcqTransactionItem::~pkgAcqTransactionItem() /*{{{*/
1030{
1031}
1032 /*}}}*/
1033HashStringList pkgAcqTransactionItem::GetExpectedHashesFor(std::string const &MetaKey) const /*{{{*/
1034{
1035 return GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, MetaKey);
1036}
1037 /*}}}*/
1038
1039static void LoadLastMetaIndexParser(pkgAcqMetaClearSig * const TransactionManager, std::string const &FinalRelease, std::string const &FinalInRelease)/*{{{*/
1040{
1041 if (TransactionManager->IMSHit == true)
1042 return;
1043 if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease))
1044 {
1045 TransactionManager->LastMetaIndexParser = TransactionManager->MetaIndexParser->UnloadedClone();
1046 if (TransactionManager->LastMetaIndexParser != NULL)
1047 {
1048 _error->PushToStack();
1049 if (RealFileExists(FinalInRelease))
1050 TransactionManager->LastMetaIndexParser->Load(FinalInRelease, NULL);
1051 else
1052 TransactionManager->LastMetaIndexParser->Load(FinalRelease, NULL);
1053 // it's unlikely to happen, but if what we have is bad, ignore it
1054 if (_error->PendingError())
1055 {
1056 delete TransactionManager->LastMetaIndexParser;
1057 TransactionManager->LastMetaIndexParser = NULL;
1058 }
1059 _error->RevertToStack();
1060 }
1061 }
1062}
1063 /*}}}*/
1064
1065// AcqMetaBase - Constructor /*{{{*/
1066pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire * const Owner,
1067 pkgAcqMetaClearSig * const TransactionManager,
1068 IndexTarget const &DataTarget)
1069: pkgAcqTransactionItem(Owner, TransactionManager, DataTarget), d(NULL),
1070 AuthPass(false), IMSHit(false), State(TransactionStarted)
1071{
1072}
1073 /*}}}*/
1074// AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/
1075void pkgAcqMetaBase::Add(pkgAcqTransactionItem * const I)
1076{
1077 Transaction.push_back(I);
1078}
1079 /*}}}*/
1080// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/
1081void pkgAcqMetaBase::AbortTransaction()
1082{
1083 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1084 std::clog << "AbortTransaction: " << TransactionManager << std::endl;
1085
1086 switch (TransactionManager->State)
1087 {
1088 case TransactionStarted: break;
1089 case TransactionAbort: _error->Fatal("Transaction %s was already aborted and is aborted again", TransactionManager->Target.URI.c_str()); return;
1090 case TransactionCommit: _error->Fatal("Transaction %s was already aborted and is now committed", TransactionManager->Target.URI.c_str()); return;
1091 }
1092 TransactionManager->State = TransactionAbort;
1093
1094 // ensure the toplevel is in error state too
1095 for (std::vector<pkgAcqTransactionItem*>::iterator I = Transaction.begin();
1096 I != Transaction.end(); ++I)
1097 {
1098 if ((*I)->Status != pkgAcquire::Item::StatFetching)
1099 Owner->Dequeue(*I);
1100 (*I)->TransactionState(TransactionAbort);
1101 }
1102 Transaction.clear();
1103}
1104 /*}}}*/
1105// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/
1106APT_PURE bool pkgAcqMetaBase::TransactionHasError() const
1107{
1108 for (std::vector<pkgAcqTransactionItem*>::const_iterator I = Transaction.begin();
1109 I != Transaction.end(); ++I)
1110 {
1111 switch((*I)->Status) {
1112 case StatDone: break;
1113 case StatIdle: break;
1114 case StatAuthError: return true;
1115 case StatError: return true;
1116 case StatTransientNetworkError: return true;
1117 case StatFetching: break;
1118 }
1119 }
1120 return false;
1121}
1122 /*}}}*/
1123// AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/
1124void pkgAcqMetaBase::CommitTransaction()
1125{
1126 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1127 std::clog << "CommitTransaction: " << this << std::endl;
1128
1129 switch (TransactionManager->State)
1130 {
1131 case TransactionStarted: break;
1132 case TransactionAbort: _error->Fatal("Transaction %s was already committed and is now aborted", TransactionManager->Target.URI.c_str()); return;
1133 case TransactionCommit: _error->Fatal("Transaction %s was already committed and is again committed", TransactionManager->Target.URI.c_str()); return;
1134 }
1135 TransactionManager->State = TransactionCommit;
1136
1137 // move new files into place *and* remove files that are not
1138 // part of the transaction but are still on disk
1139 for (std::vector<pkgAcqTransactionItem*>::iterator I = Transaction.begin();
1140 I != Transaction.end(); ++I)
1141 {
1142 (*I)->TransactionState(TransactionCommit);
1143 }
1144 Transaction.clear();
1145}
1146 /*}}}*/
1147// AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/
1148void pkgAcqMetaBase::TransactionStageCopy(pkgAcqTransactionItem * const I,
1149 const std::string &From,
1150 const std::string &To)
1151{
1152 I->PartialFile = From;
1153 I->DestFile = To;
1154}
1155 /*}}}*/
1156// AcqMetaBase::TransactionStageRemoval - Stage a file for removal /*{{{*/
1157void pkgAcqMetaBase::TransactionStageRemoval(pkgAcqTransactionItem * const I,
1158 const std::string &FinalFile)
1159{
1160 I->PartialFile = "";
1161 I->DestFile = FinalFile;
1162}
1163 /*}}}*/
1164// AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/
1165/* This method is called from ::Failed handlers. If it returns true,
1166 no fallback to other files or modes is performed */
1167bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message)
1168{
1169 string const Final = I->GetFinalFilename();
1170 std::string const GPGError = LookupTag(Message, "Message");
1171 if (FileExists(Final))
1172 {
1173 I->Status = StatTransientNetworkError;
1174 _error->Warning(_("An error occurred during the signature verification. "
1175 "The repository is not updated and the previous index files will be used. "
1176 "GPG error: %s: %s"),
1177 Desc.Description.c_str(),
1178 GPGError.c_str());
1179 RunScripts("APT::Update::Auth-Failure");
1180 return true;
1181 } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) {
1182 /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */
1183 _error->Error(_("GPG error: %s: %s"),
1184 Desc.Description.c_str(),
1185 GPGError.c_str());
1186 I->Status = StatAuthError;
1187 return true;
1188 } else {
1189 _error->Warning(_("GPG error: %s: %s"),
1190 Desc.Description.c_str(),
1191 GPGError.c_str());
1192 }
1193 // gpgv method failed
1194 ReportMirrorFailureToCentral(*this, "GPGFailure", GPGError);
1195 return false;
1196}
1197 /*}}}*/
1198// AcqMetaBase::Custom600Headers - Get header for AcqMetaBase /*{{{*/
1199// ---------------------------------------------------------------------
1200string pkgAcqMetaBase::Custom600Headers() const
1201{
1202 std::string Header = "\nIndex-File: true";
1203 std::string MaximumSize;
1204 strprintf(MaximumSize, "\nMaximum-Size: %i",
1205 _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000));
1206 Header += MaximumSize;
1207
1208 string const FinalFile = GetFinalFilename();
1209 struct stat Buf;
1210 if (stat(FinalFile.c_str(),&Buf) == 0)
1211 Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
1212
1213 return Header;
1214}
1215 /*}}}*/
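// As a rough example (assuming the default Acquire::MaxReleaseFileSize and an already
// fetched Release file), the header block built above looks like
// "\nIndex-File: true\nMaximum-Size: 10000000\nLast-Modified: <date in RFC1123 format>",
// which the acquire worker passes on to the download method as extra request fields.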
1216// AcqMetaBase::QueueForSignatureVerify /*{{{*/
1217void pkgAcqMetaBase::QueueForSignatureVerify(pkgAcqTransactionItem * const I, std::string const &File, std::string const &Signature)
1218{
1219 AuthPass = true;
1220 I->Desc.URI = "gpgv:" + Signature;
1221 I->DestFile = File;
1222 QueueURI(I->Desc);
1223 I->SetActiveSubprocess("gpgv");
1224}
1225 /*}}}*/
1226// AcqMetaBase::CheckDownloadDone /*{{{*/
1227bool pkgAcqMetaBase::CheckDownloadDone(pkgAcqTransactionItem * const I, const std::string &Message, HashStringList const &Hashes) const
1228{
1229 // We have just finished downloading a Release file (it is not
1230 // verified yet)
1231
1232 // Save the final base URI we got this Release file from
1233 if (I->UsedMirror.empty() == false && _config->FindB("Acquire::SameMirrorForAllIndexes", true))
1234 {
1235 if (APT::String::Endswith(I->Desc.URI, "InRelease"))
1236 {
1237 TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("InRelease"));
1238 TransactionManager->UsedMirror = I->UsedMirror;
1239 }
1240 else if (APT::String::Endswith(I->Desc.URI, "Release"))
1241 {
1242 TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("Release"));
1243 TransactionManager->UsedMirror = I->UsedMirror;
1244 }
1245 }
1246
1247 std::string const FileName = LookupTag(Message,"Filename");
1248 if (FileName != I->DestFile && RealFileExists(I->DestFile) == false)
1249 {
1250 I->Local = true;
1251 I->Desc.URI = "copy:" + FileName;
1252 I->QueueURI(I->Desc);
1253 return false;
1254 }
1255
1256 // make sure to verify against the right file on I-M-S hit
1257 bool IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"), false);
1258 if (IMSHit == false && Hashes.usable())
1259 {
1260 // detect IMS-Hits the server hasn't detected via hash comparison
1261 std::string const FinalFile = I->GetFinalFilename();
1262 if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true)
1263 {
1264 IMSHit = true;
1265 RemoveFile("CheckDownloadDone", I->DestFile);
1266 }
1267 }
1268
1269 if(IMSHit == true)
1270 {
1271 // for simplicity, the transaction manager is always InRelease
1272 // even if it doesn't exist.
1273 TransactionManager->IMSHit = true;
1274 I->PartialFile = I->DestFile = I->GetFinalFilename();
1275 }
1276
1277 // set Item to complete as the remaining work is all local (verify etc)
1278 I->Complete = true;
1279
1280 return true;
1281}
1282 /*}}}*/
1283bool pkgAcqMetaBase::CheckAuthDone(string const &Message) /*{{{*/
1284{
1285 // At this point, the gpgv method has succeeded, so there is a
1286 // valid signature from a key in the trusted keyring. We
1287 // perform additional verification of its contents, and use them
1288 // to verify the indexes we are about to download
1289 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1290 std::cerr << "Signature verification succeeded: " << DestFile << std::endl;
1291
1292 if (TransactionManager->IMSHit == false)
1293 {
1294 // open the last (In)Release if we have it
1295 std::string const FinalFile = GetFinalFilename();
1296 std::string FinalRelease;
1297 std::string FinalInRelease;
1298 if (APT::String::Endswith(FinalFile, "InRelease"))
1299 {
1300 FinalInRelease = FinalFile;
1301 FinalRelease = FinalFile.substr(0, FinalFile.length() - strlen("InRelease")) + "Release";
1302 }
1303 else
1304 {
1305 FinalInRelease = FinalFile.substr(0, FinalFile.length() - strlen("Release")) + "InRelease";
1306 FinalRelease = FinalFile;
1307 }
1308 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1309 }
1310
1311 bool const GoodAuth = TransactionManager->MetaIndexParser->Load(DestFile, &ErrorText);
1312 if (GoodAuth == false && AllowInsecureRepositories(InsecureType::WEAK, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == false)
1313 {
1314 Status = StatAuthError;
1315 return false;
1316 }
1317
1318 if (!VerifyVendor(Message))
1319 {
1320 Status = StatAuthError;
1321 return false;
1322 }
1323
1324 // Download further indexes with verification
1325 TransactionManager->QueueIndexes(GoodAuth);
1326
1327 return GoodAuth;
1328}
1329 /*}}}*/
1330void pkgAcqMetaClearSig::QueueIndexes(bool const verify) /*{{{*/
1331{
1332 // at this point the real Items are loaded in the fetcher
1333 ExpectedAdditionalItems = 0;
1334
1335 std::set<std::string> targetsSeen;
1336 bool const hasReleaseFile = TransactionManager->MetaIndexParser != NULL;
1337 bool const metaBaseSupportsByHash = hasReleaseFile && TransactionManager->MetaIndexParser->GetSupportsAcquireByHash();
1338 bool hasHashes = true;
1339 auto IndexTargets = TransactionManager->MetaIndexParser->GetIndexTargets();
1340 if (hasReleaseFile && verify == false)
1341 hasHashes = std::any_of(IndexTargets.begin(), IndexTargets.end(),
1342 [&](IndexTarget const &Target) { return TransactionManager->MetaIndexParser->Exists(Target.MetaKey); });
1343 for (auto&& Target: IndexTargets)
1344 {
1345 // if we have already seen a target created by the target this one is declared a
1346 // fallback to, we skip acquiring the fallback (but we make sure we clean up)
1347 if (targetsSeen.find(Target.Option(IndexTarget::FALLBACK_OF)) != targetsSeen.end())
1348 {
1349 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1350 new CleanupItem(Owner, TransactionManager, Target);
1351 continue;
1352 }
1353 // 'all' is an implementation detail. Users shouldn't use it as an arch.
1354 // We need this support trickery here as e.g. Debian has binary-all files already,
1355 // but arch:all packages are still in the arch:any files, so we would waste precious
1356 // download time, bandwidth and diskspace for nothing, BUT Debian doesn't feature all
1357 // in the set of supported architectures, so we can filter based on this property rather
1358 // than invent an entirely new flag we would need to carry for all of eternity.
1359 if (hasReleaseFile && Target.Option(IndexTarget::ARCHITECTURE) == "all")
1360 {
1361 if (TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(Target) == false)
1362 {
1363 new CleanupItem(Owner, TransactionManager, Target);
1364 continue;
1365 }
1366 }
1367
1368 bool trypdiff = Target.OptionBool(IndexTarget::PDIFFS);
1369 if (hasReleaseFile == true)
1370 {
1371 if (TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false)
1372 {
1373 // optional targets that we do not have in the Release file are skipped
1374 if (hasHashes == true && Target.IsOptional)
1375 {
1376 new CleanupItem(Owner, TransactionManager, Target);
1377 continue;
1378 }
1379
1380 std::string const &arch = Target.Option(IndexTarget::ARCHITECTURE);
1381 if (arch.empty() == false)
1382 {
1383 if (TransactionManager->MetaIndexParser->IsArchitectureSupported(arch) == false)
1384 {
1385 new CleanupItem(Owner, TransactionManager, Target);
1386 _error->Notice(_("Skipping acquire of configured file '%s' as repository '%s' doesn't support architecture '%s'"),
1387 Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str(), arch.c_str());
1388 continue;
1389 }
1390 // if the architecture is officially supported but currently no packages for it are available,
1391 // ignore silently as this is pretty much the same as just shipping an empty file.
1392 // if we don't know which architectures are supported, we do NOT ignore it, to notify the user about this
1393 if (hasHashes == true && TransactionManager->MetaIndexParser->IsArchitectureSupported("*undefined*") == false)
1394 {
1395 new CleanupItem(Owner, TransactionManager, Target);
1396 continue;
1397 }
1398 }
1399
1400 if (hasHashes == true)
1401 {
1402 Status = StatAuthError;
1403 strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), Target.MetaKey.c_str());
1404 return;
1405 }
1406 else
1407 {
1408 new pkgAcqIndex(Owner, TransactionManager, Target);
1409 continue;
1410 }
1411 }
1412 else if (verify)
1413 {
1414 auto const hashes = GetExpectedHashesFor(Target.MetaKey);
1415 if (hashes.empty() == false)
1416 {
1417 if (hashes.usable() == false && TargetIsAllowedToBe(TransactionManager->Target, InsecureType::WEAK) == false)
1418 {
1419 new CleanupItem(Owner, TransactionManager, Target);
1420 _error->Warning(_("Skipping acquire of configured file '%s' as repository '%s' provides only weak security information for it"),
1421 Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str());
1422 continue;
1423 }
1424 // empty files are skipped as acquiring the very small compressed files is a waste of time
1425 else if (hashes.FileSize() == 0)
1426 {
1427 new CleanupItem(Owner, TransactionManager, Target);
1428 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1429 continue;
1430 }
1431 }
1432 }
1433
1434 // autoselect the compression method
1435 std::vector<std::string> types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
1436 types.erase(std::remove_if(types.begin(), types.end(), [&](std::string const &t) {
1437 if (t == "uncompressed")
1438 return TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false;
1439 std::string const MetaKey = Target.MetaKey + "." + t;
1440 return TransactionManager->MetaIndexParser->Exists(MetaKey) == false;
1441 }), types.end());
1442 if (types.empty() == false)
1443 {
1444 std::ostringstream os;
1445 // add the special compression type "by-hash" first if supported
1446 std::string const useByHashConf = Target.Option(IndexTarget::BY_HASH);
1447 bool useByHash = false;
1448 if(useByHashConf == "force")
1449 useByHash = true;
1450 else
1451 useByHash = StringToBool(useByHashConf) == true && metaBaseSupportsByHash;
1452 if (useByHash == true)
1453 os << "by-hash ";
1454 std::copy(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " "));
1455 os << *types.rbegin();
1456 Target.Options["COMPRESSIONTYPES"] = os.str();
1457 }
1458 else
1459 Target.Options["COMPRESSIONTYPES"].clear();
1460
1461 std::string filename = GetExistingFilename(GetFinalFileNameFromURI(Target.URI));
1462 if (filename.empty() == false)
1463 {
1464 // if the Release file is a hit and we have an index it must be the current one
1465 if (TransactionManager->IMSHit == true)
1466 ;
1467 else if (TransactionManager->LastMetaIndexParser != NULL)
1468 {
1469 // see if the file changed since the last Release file
1470 // we use the uncompressed files as we might compress differently compared to the server,
1471 // so the hashes might not match, even if they contain the same data.
1472 HashStringList const newFile = GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, Target.MetaKey);
1473 HashStringList const oldFile = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey);
1474 if (newFile != oldFile)
1475 filename.clear();
1476 }
1477 else
1478 filename.clear();
1479 }
1480 else
1481 trypdiff = false; // no file to patch
1482
1483 if (filename.empty() == false)
1484 {
1485 new NoActionItem(Owner, Target, filename);
1486 std::string const idxfilename = GetFinalFileNameFromURI(GetDiffIndexURI(Target));
1487 if (FileExists(idxfilename))
1488 new NoActionItem(Owner, Target, idxfilename);
1489 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1490 continue;
1491 }
1492
1493 // check if we have patches available
1494 trypdiff &= TransactionManager->MetaIndexParser->Exists(GetDiffIndexFileName(Target.MetaKey));
1495 }
1496 else
1497 {
1498 // if we have no file to patch, no point in trying
1499 trypdiff &= (GetExistingFilename(GetFinalFileNameFromURI(Target.URI)).empty() == false);
1500 }
1501
1502 // no point in patching from local sources
1503 if (trypdiff)
1504 {
1505 std::string const proto = Target.URI.substr(0, strlen("file:/"));
1506 if (proto == "file:/" || proto == "copy:/" || proto == "cdrom:")
1507 trypdiff = false;
1508 }
1509
1510 // Queue the Index file (Packages, Sources, Translation-$foo, …)
1511 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1512 if (trypdiff)
1513 new pkgAcqDiffIndex(Owner, TransactionManager, Target);
1514 else
1515 new pkgAcqIndex(Owner, TransactionManager, Target);
1516 }
1517}
1518 /*}}}*/
1519bool pkgAcqMetaBase::VerifyVendor(string const &) /*{{{*/
1520{
1521 string Transformed = TransactionManager->MetaIndexParser->GetExpectedDist();
1522
1523 if (Transformed == "../project/experimental")
1524 {
1525 Transformed = "experimental";
1526 }
1527
1528 auto pos = Transformed.rfind('/');
1529 if (pos != string::npos)
1530 {
1531 Transformed = Transformed.substr(0, pos);
1532 }
1533
1534 if (Transformed == ".")
1535 {
1536 Transformed = "";
1537 }
1538
1539 if (TransactionManager->MetaIndexParser->GetValidUntil() > 0)
1540 {
1541 time_t const invalid_since = time(NULL) - TransactionManager->MetaIndexParser->GetValidUntil();
1542 if (invalid_since > 0)
1543 {
1544 std::string errmsg;
1545 strprintf(errmsg,
1546 // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
1547 // the time since then the file is invalid - formatted in the same way as in
1548 // the download progress display (e.g. 7d 3h 42min 1s)
1549 _("Release file for %s is expired (invalid since %s). "
1550 "Updates for this repository will not be applied."),
1551 Target.URI.c_str(), TimeToStr(invalid_since).c_str());
1552 if (ErrorText.empty())
1553 ErrorText = errmsg;
1554 return _error->Error("%s", errmsg.c_str());
1555 }
1556 }
1557
1558 /* Did we get a file older than what we have? This is a last minute IMS hit and doubles
1559 as a prevention of downgrading us to older (still valid) files */
1560 if (TransactionManager->IMSHit == false && TransactionManager->LastMetaIndexParser != NULL &&
1561 TransactionManager->LastMetaIndexParser->GetDate() > TransactionManager->MetaIndexParser->GetDate())
1562 {
1563 TransactionManager->IMSHit = true;
1564 RemoveFile("VerifyVendor", DestFile);
1565 PartialFile = DestFile = GetFinalFilename();
1566 // load the 'old' file into the 'new' one instead of flipping pointers, as
1567 // the new one isn't owned by us while the old one is, so cleanup would be confused.
1568 TransactionManager->MetaIndexParser->swapLoad(TransactionManager->LastMetaIndexParser);
1569 delete TransactionManager->LastMetaIndexParser;
1570 TransactionManager->LastMetaIndexParser = NULL;
1571 }
1572
1573 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1574 {
1575 std::cerr << "Got Codename: " << TransactionManager->MetaIndexParser->GetCodename() << std::endl;
1576 std::cerr << "Expecting Dist: " << TransactionManager->MetaIndexParser->GetExpectedDist() << std::endl;
1577 std::cerr << "Transformed Dist: " << Transformed << std::endl;
1578 }
1579
1580 if (TransactionManager->MetaIndexParser->CheckDist(Transformed) == false)
1581 {
1582 // This might become fatal one day
1583// Status = StatAuthError;
1584// ErrorText = "Conflicting distribution; expected "
1585// + MetaIndexParser->GetExpectedDist() + " but got "
1586// + MetaIndexParser->GetCodename();
1587// return false;
1588 if (!Transformed.empty())
1589 {
1590 _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
1591 Desc.Description.c_str(),
1592 Transformed.c_str(),
1593 TransactionManager->MetaIndexParser->GetCodename().c_str());
1594 }
1595 }
1596
1597 return true;
1598}
1599 /*}}}*/
1600pkgAcqMetaBase::~pkgAcqMetaBase()
1601{
1602}
1603
1604pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire * const Owner, /*{{{*/
1605 IndexTarget const &ClearsignedTarget,
1606 IndexTarget const &DetachedDataTarget, IndexTarget const &DetachedSigTarget,
1607 metaIndex * const MetaIndexParser) :
1608 pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget),
1609 d(NULL), DetachedDataTarget(DetachedDataTarget),
1610 MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL)
1611{
1612 // index targets + (worst case:) Release/Release.gpg
1613 ExpectedAdditionalItems = std::numeric_limits<decltype(ExpectedAdditionalItems)>::max();
1614 TransactionManager->Add(this);
1615}
1616 /*}}}*/
1617pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
1618{
1619 if (LastMetaIndexParser != NULL)
1620 delete LastMetaIndexParser;
1621}
1622 /*}}}*/
1623// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
1624string pkgAcqMetaClearSig::Custom600Headers() const
1625{
1626 string Header = pkgAcqMetaBase::Custom600Headers();
1627 Header += "\nFail-Ignore: true";
1628 std::string const key = TransactionManager->MetaIndexParser->GetSignedBy();
1629 if (key.empty() == false)
1630 Header += "\nSigned-By: " + key;
1631
1632 return Header;
1633}
1634 /*}}}*/
1635void pkgAcqMetaClearSig::Finished() /*{{{*/
1636{
1637 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1638 std::clog << "Finished: " << DestFile <<std::endl;
1639 if(TransactionManager->State == TransactionStarted &&
1640 TransactionManager->TransactionHasError() == false)
1641 TransactionManager->CommitTransaction();
1642}
1643 /*}}}*/
1644bool pkgAcqMetaClearSig::VerifyDone(std::string const &Message, /*{{{*/
1645 pkgAcquire::MethodConfig const * const Cnf)
1646{
1647 Item::VerifyDone(Message, Cnf);
1648
1649 if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile))
1650 return RenameOnError(NotClearsigned);
1651
1652 return true;
1653}
1654 /*}}}*/
1655// pkgAcqMetaClearSig::Done - We got a file /*{{{*/
1656void pkgAcqMetaClearSig::Done(std::string const &Message,
1657 HashStringList const &Hashes,
1658 pkgAcquire::MethodConfig const * const Cnf)
1659{
1660 Item::Done(Message, Hashes, Cnf);
1661
1662 if(AuthPass == false)
1663 {
1664 if(CheckDownloadDone(this, Message, Hashes) == true)
1665 QueueForSignatureVerify(this, DestFile, DestFile);
1666 return;
1667 }
1668 else if(CheckAuthDone(Message) == true)
1669 {
1670 if (TransactionManager->IMSHit == false)
1671 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
1672 else if (RealFileExists(GetFinalFilename()) == false)
1673 {
1674 // We got an IMS hit for the InRelease file, but we don't have one on disk, which means
1675 // a valid Release/Release.gpg combo stepped in instead, which we have
1676 // to 'acquire' now to ensure list cleanup isn't removing them
1677 new NoActionItem(Owner, DetachedDataTarget);
1678 new NoActionItem(Owner, DetachedSigTarget);
1679 }
1680 }
1681 else if (Status != StatAuthError)
1682 {
1683 string const FinalFile = GetFinalFileNameFromURI(DetachedDataTarget.URI);
1684 string const OldFile = GetFinalFilename();
1685 if (TransactionManager->IMSHit == false)
1686 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
1687 else if (RealFileExists(OldFile) == false)
1688 new NoActionItem(Owner, DetachedDataTarget);
1689 else
1690 TransactionManager->TransactionStageCopy(this, OldFile, FinalFile);
1691 }
1692}
1693 /*}}}*/
1694void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) /*{{{*/
1695{
1696 Item::Failed(Message, Cnf);
1697
1698 if (AuthPass == false)
1699 {
1700 if (Status == StatAuthError || Status == StatTransientNetworkError)
1701 {
1702 // if we expected a ClearTextSignature (InRelease) but got a network
1703 // error or got a file that wasn't valid, we end up here (see VerifyDone).
1704 // As these are usually caused by web portals we do not try Release/Release.gpg
1705 // as that is going to fail anyway; instead we abort our attempt (LP#346386)
1706 TransactionManager->AbortTransaction();
1707 return;
1708 }
1709
1710 // Queue the 'old' InRelease file for removal if we try Release.gpg
1711 // as otherwise the file will stay around and give a false-auth
1712 // impression (CVE-2012-0214)
1713 TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
1714 Status = StatDone;
1715
1716 new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget);
1717 }
1718 else
1719 {
1720 if(CheckStopAuthentication(this, Message))
1721 return;
1722
1723 if(AllowInsecureRepositories(InsecureType::UNSIGNED, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1724 {
1725 Status = StatDone;
1726
1727 /* InRelease files become Release files, otherwise
1728 * they would be considered as trusted later on */
1729 string const FinalRelease = GetFinalFileNameFromURI(DetachedDataTarget.URI);
1730 string const PartialRelease = GetPartialFileNameFromURI(DetachedDataTarget.URI);
1731 string const FinalReleasegpg = GetFinalFileNameFromURI(DetachedSigTarget.URI);
1732 string const FinalInRelease = GetFinalFilename();
1733 Rename(DestFile, PartialRelease);
1734 TransactionManager->TransactionStageCopy(this, PartialRelease, FinalRelease);
1735 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1736
1737 // we parse the indexes here because at this point the user wanted
1738 // a repository that may potentially harm him
1739 if (TransactionManager->MetaIndexParser->Load(PartialRelease, &ErrorText) == false || VerifyVendor(Message) == false)
1740 /* expired Release files are still a problem you need extra force for */;
1741 else
1742 TransactionManager->QueueIndexes(true);
1743 }
1744 }
1745}
1746 /*}}}*/
1747
1748pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner, /*{{{*/
1749 pkgAcqMetaClearSig * const TransactionManager,
1750 IndexTarget const &DataTarget,
1751 IndexTarget const &DetachedSigTarget) :
1752 pkgAcqMetaBase(Owner, TransactionManager, DataTarget), d(NULL),
1753 DetachedSigTarget(DetachedSigTarget)
1754{
1755 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1756 std::clog << "New pkgAcqMetaIndex with TransactionManager "
1757 << this->TransactionManager << std::endl;
1758
1759 DestFile = GetPartialFileNameFromURI(DataTarget.URI);
1760
1761 // Create the item
1762 Desc.Description = DataTarget.Description;
1763 Desc.Owner = this;
1764 Desc.ShortDesc = DataTarget.ShortDesc;
1765 Desc.URI = DataTarget.URI;
1766 QueueURI(Desc);
1767}
1768 /*}}}*/
1769void pkgAcqMetaIndex::Done(string const &Message, /*{{{*/
1770 HashStringList const &Hashes,
1771 pkgAcquire::MethodConfig const * const Cfg)
1772{
1773 Item::Done(Message,Hashes,Cfg);
1774
1775 if(CheckDownloadDone(this, Message, Hashes))
1776 {
1777 // we have a Release file, now download the Signature, all further
1778 // verify/queue for additional downloads will be done in the
1779 // pkgAcqMetaSig::Done() code
1780 new pkgAcqMetaSig(Owner, TransactionManager, DetachedSigTarget, this);
1781 }
1782}
1783 /*}}}*/
1784// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/
1785void pkgAcqMetaIndex::Failed(string const &Message,
1786 pkgAcquire::MethodConfig const * const Cnf)
1787{
1788 pkgAcquire::Item::Failed(Message, Cnf);
1789 Status = StatDone;
1790
1791 // No Release file was present, so fall
1792 // back to queueing Packages files without verification;
1793 // only allow going further if the user explicitly wants it
1794 if(AllowInsecureRepositories(InsecureType::NORELEASE, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1795 {
1796 // ensure old Release files are removed
1797 TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
1798
1799 // queue without any kind of hashsum support
1800 TransactionManager->QueueIndexes(false);
1801 }
1802}
1803 /*}}}*/
1804std::string pkgAcqMetaIndex::DescURI() const /*{{{*/
1805{
1806 return Target.URI;
1807}
1808 /*}}}*/
1809pkgAcqMetaIndex::~pkgAcqMetaIndex() {}
1810
1811// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/
1812pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire * const Owner,
1813 pkgAcqMetaClearSig * const TransactionManager,
1814 IndexTarget const &Target,
1815 pkgAcqMetaIndex * const MetaIndex) :
1816 pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL), MetaIndex(MetaIndex)
1817{
1818 DestFile = GetPartialFileNameFromURI(Target.URI);
1819
1820 // remove any partially downloaded sig-file in partial/.
1821 // It may confuse proxies and is too small to warrant a
1822 // partial download anyway
1823 RemoveFile("pkgAcqMetaSig", DestFile);
1824
1825 // set the TransactionManager
1826 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1827 std::clog << "New pkgAcqMetaSig with TransactionManager "
1828 << TransactionManager << std::endl;
1829
1830 // Create the item
1831 Desc.Description = Target.Description;
1832 Desc.Owner = this;
1833 Desc.ShortDesc = Target.ShortDesc;
1834 Desc.URI = Target.URI;
1835
1836 // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors),
1837 // so we skip the download step and go instantly to verification
1838 if (TransactionManager->IMSHit == true && RealFileExists(GetFinalFilename()))
1839 {
1840 Complete = true;
1841 Status = StatDone;
1842 PartialFile = DestFile = GetFinalFilename();
1843 MetaIndexFileSignature = DestFile;
1844 MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile);
1845 }
1846 else
1847 QueueURI(Desc);
1848}
1849 /*}}}*/
1850pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
1851{
1852}
1853 /*}}}*/
1854// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
1855std::string pkgAcqMetaSig::Custom600Headers() const
1856{
1857 std::string Header = pkgAcqTransactionItem::Custom600Headers();
1858 std::string const key = TransactionManager->MetaIndexParser->GetSignedBy();
1859 if (key.empty() == false)
1860 Header += "\nSigned-By: " + key;
1861 return Header;
1862}
1863 /*}}}*/
1864// AcqMetaSig::Done - The signature was downloaded/verified /*{{{*/
1865void pkgAcqMetaSig::Done(string const &Message, HashStringList const &Hashes,
1866 pkgAcquire::MethodConfig const * const Cfg)
1867{
1868 if (MetaIndexFileSignature.empty() == false)
1869 {
1870 DestFile = MetaIndexFileSignature;
1871 MetaIndexFileSignature.clear();
1872 }
1873 Item::Done(Message, Hashes, Cfg);
1874
1875 if(MetaIndex->AuthPass == false)
1876 {
1877 if(MetaIndex->CheckDownloadDone(this, Message, Hashes) == true)
1878 {
1879 // destfile will be modified to point to MetaIndexFile for the
1880 // gpgv method, so we need to save it here
1881 MetaIndexFileSignature = DestFile;
1882 MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile);
1883 }
1884 return;
1885 }
1886 else if(MetaIndex->CheckAuthDone(Message) == true)
1887 {
1888 if (TransactionManager->IMSHit == false)
1889 {
1890 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
1891 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename());
1892 }
1893 }
1894 else if (MetaIndex->Status != StatAuthError)
1895 {
1896 std::string const FinalFile = MetaIndex->GetFinalFilename();
1897 if (TransactionManager->IMSHit == false)
1898 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalFile);
1899 else
1900 TransactionManager->TransactionStageCopy(MetaIndex, FinalFile, FinalFile);
1901 }
1902}
1903 /*}}}*/
1904void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
1905{
1906 Item::Failed(Message,Cnf);
1907
1908 // check if we need to fail at this point
1909 if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message))
1910 return;
1911
1912 // ensures that a Release.gpg file in the lists/ is removed by the transaction
1913 TransactionManager->TransactionStageRemoval(this, DestFile);
1914
1915 // only allow going further if the user explicitly wants it
1916 if (AllowInsecureRepositories(InsecureType::UNSIGNED, MetaIndex->Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1917 {
1918 string const FinalRelease = MetaIndex->GetFinalFilename();
1919 string const FinalInRelease = TransactionManager->GetFinalFilename();
1920 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1921
1922 // we parse the indexes here because at this point the user wanted
1923 // a repository that may potentially harm him
1924 bool const GoodLoad = TransactionManager->MetaIndexParser->Load(MetaIndex->DestFile, &ErrorText);
1925 if (MetaIndex->VerifyVendor(Message) == false)
1926 /* expired Release files are still a problem you need extra force for */;
1927 else
1928 TransactionManager->QueueIndexes(GoodLoad);
1929
1930 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalRelease);
1931 }
1932 else if (TransactionManager->IMSHit == false)
1933 Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED");
1934
1935 // FIXME: this is used often (e.g. in pkgAcqIndexTrans) so refactor
1936 if (Cnf->LocalOnly == true ||
1937 StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
1938 {
1939 // Ignore this
1940 Status = StatDone;
1941 }
1942}
1943 /*}}}*/
1944
1945
1946// AcqBaseIndex - Constructor /*{{{*/
1947pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire * const Owner,
1948 pkgAcqMetaClearSig * const TransactionManager,
1949 IndexTarget const &Target)
1950: pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL)
1951{
1952}
1953 /*}}}*/
1954void pkgAcqBaseIndex::Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
1955{
1956 pkgAcquire::Item::Failed(Message, Cnf);
1957 if (Status != StatAuthError)
1958 return;
1959
1960 ErrorText.append("Release file created at: ");
1961 auto const timespec = TransactionManager->MetaIndexParser->GetDate();
1962 if (timespec == 0)
1963 ErrorText.append("<unknown>");
1964 else
1965 ErrorText.append(TimeRFC1123(timespec, true));
1966 ErrorText.append("\n");
1967}
1968 /*}}}*/
1969pkgAcqBaseIndex::~pkgAcqBaseIndex() {}
1970
1971// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
1972// ---------------------------------------------------------------------
1973/* Get the DiffIndex file first and see if there are patches available
1974 * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
1975 * patches. If anything goes wrong in that process, it will fall back to
1976 * the original packages file
1977 */
1978pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner,
1979 pkgAcqMetaClearSig * const TransactionManager,
1980 IndexTarget const &Target)
1981 : pkgAcqIndex(Owner, TransactionManager, Target, true), d(NULL), diffs(NULL)
1982{
1983 // FIXME: Magic number as an upper bound on pdiffs we will reasonably acquire
1984 ExpectedAdditionalItems = 40;
1985 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
1986
1987 CompressionExtensions.clear();
1988 {
1989 std::vector<std::string> types = APT::Configuration::getCompressionTypes();
1990 if (types.empty() == false)
1991 {
1992 std::ostringstream os;
1993 std::copy_if(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " "), [&](std::string const type) {
1994 if (type == "uncompressed")
1995 return true;
1996 return TransactionManager->MetaIndexParser->Exists(GetDiffIndexFileName(Target.MetaKey) + '.' + type);
1997 });
1998 os << *types.rbegin();
1999 CompressionExtensions = os.str();
2000 }
2001 }
2002 if (Target.Option(IndexTarget::COMPRESSIONTYPES).find("by-hash") != std::string::npos)
2003 CompressionExtensions = "by-hash " + CompressionExtensions;
2004 Init(GetDiffIndexURI(Target), GetDiffIndexFileName(Target.Description), Target.ShortDesc);
2005
2006 if(Debug)
2007 std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
2008}
2009 /*}}}*/
2010void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/
2011{
2012 // list cleanup needs to know that this file as well as the already
2013 // present index is ours, so we create an empty diff to save it for us
2014 new pkgAcqIndexDiffs(Owner, TransactionManager, Target);
2015}
2016 /*}}}*/
2017static bool RemoveFileForBootstrapLinking(bool const Debug, std::string const &For, std::string const &Boot)/*{{{*/
2018{
2019 if (FileExists(Boot) && RemoveFile("Bootstrap-linking", Boot) == false)
2020 {
2021 if (Debug)
2022 std::clog << "Bootstrap-linking for patching " << For
2023 << " by removing stale " << Boot << " failed!" << std::endl;
2024 return false;
2025 }
2026 return true;
2027}
2028 /*}}}*/
2029bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
2030{
2031 ExpectedAdditionalItems = 0;
2032 // failing here is fine: our caller will take care of trying to
2033 // get the complete file if patching fails
2034 if(Debug)
2035 std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
2036 << std::endl;
2037
2038 FileFd Fd(IndexDiffFile, FileFd::ReadOnly, FileFd::Extension);
2039 pkgTagFile TF(&Fd);
2040 if (Fd.IsOpen() == false || Fd.Failed())
2041 return false;
2042
2043 pkgTagSection Tags;
2044 if(unlikely(TF.Step(Tags) == false))
2045 return false;
2046
2047 HashStringList ServerHashes;
2048 unsigned long long ServerSize = 0;
2049
2050 auto const &posix = std::locale::classic();
2051 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
2052 {
2053 std::string tagname = *type;
2054 tagname.append("-Current");
2055 std::string const tmp = Tags.FindS(tagname.c_str());
2056 if (tmp.empty() == true)
2057 continue;
2058
2059 string hash;
2060 unsigned long long size;
2061 std::stringstream ss(tmp);
2062 ss.imbue(posix);
2063 ss >> hash >> size;
2064 if (unlikely(hash.empty() == true))
2065 continue;
2066 if (unlikely(ServerSize != 0 && ServerSize != size))
2067 continue;
2068 ServerHashes.push_back(HashString(*type, hash));
2069 ServerSize = size;
2070 }
2071
2072 if (ServerHashes.usable() == false)
2073 {
2074 if (Debug == true)
2075 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl;
2076 return false;
2077 }
2078
2079 std::string const CurrentPackagesFile = GetFinalFileNameFromURI(Target.URI);
2080 HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey);
2081 if (TargetFileHashes.usable() == false || ServerHashes != TargetFileHashes)
2082 {
2083 if (Debug == true)
2084 {
2085 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl;
2086 printHashSumComparison(CurrentPackagesFile, ServerHashes, TargetFileHashes);
2087 }
2088 return false;
2089 }
2090
2091 HashStringList LocalHashes;
2092 // try to avoid calculating the hash here as this is costly
2093 if (TransactionManager->LastMetaIndexParser != NULL)
2094 LocalHashes = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey);
2095 if (LocalHashes.usable() == false)
2096 {
2097 FileFd fd(CurrentPackagesFile, FileFd::ReadOnly, FileFd::Auto);
2098 Hashes LocalHashesCalc(ServerHashes);
2099 LocalHashesCalc.AddFD(fd);
2100 LocalHashes = LocalHashesCalc.GetHashStringList();
2101 }
2102
2103 if (ServerHashes == LocalHashes)
2104 {
2105 // we have the same hashes as the server so we are done here
2106 if(Debug)
2107 std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl;
2108 QueueOnIMSHit();
2109 return true;
2110 }
2111
2112 if(Debug)
2113 std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at "
2114 << CurrentPackagesFile << " " << LocalHashes.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl;
2115
2116 // historically, older hashes have more info than newer ones, so start
2117 // collecting with older ones first to avoid implementing complicated
2118 // information merging techniques… a failure is after all always
2119 // recoverable with a complete file and hashes aren't changed that often.
2120 std::vector<char const *> types;
2121 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
2122 types.push_back(*type);
2123
2124 // parse all of (provided) history
2125 vector<DiffInfo> available_patches;
2126 bool firstAcceptedHashes = true;
2127 for (auto type = types.crbegin(); type != types.crend(); ++type)
2128 {
2129 if (LocalHashes.find(*type) == NULL)
2130 continue;
2131
2132 std::string tagname = *type;
2133 tagname.append("-History");
2134 std::string const tmp = Tags.FindS(tagname.c_str());
2135 if (tmp.empty() == true)
2136 continue;
2137
2138 string hash, filename;
2139 unsigned long long size;
2140 std::stringstream ss(tmp);
2141 ss.imbue(posix);
2142
2143 while (ss >> hash >> size >> filename)
2144 {
2145 if (unlikely(hash.empty() == true || filename.empty() == true))
2146 continue;
2147
2148 // see if we have a record for this file already
2149 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2150 for (; cur != available_patches.end(); ++cur)
2151 {
2152 if (cur->file != filename)
2153 continue;
2154 cur->result_hashes.push_back(HashString(*type, hash));
2155 break;
2156 }
2157 if (cur != available_patches.end())
2158 continue;
2159 if (firstAcceptedHashes == true)
2160 {
2161 DiffInfo next;
2162 next.file = filename;
2163 next.result_hashes.push_back(HashString(*type, hash));
2164 next.result_hashes.FileSize(size);
2165 available_patches.push_back(next);
2166 }
2167 else
2168 {
2169 if (Debug == true)
2170 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2171 << " wasn't in the list for the first parsed hash! (history)" << std::endl;
2172 break;
2173 }
2174 }
2175 firstAcceptedHashes = false;
2176 }
2177
2178 if (unlikely(available_patches.empty() == true))
2179 {
2180 if (Debug)
2181 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
2182 << "Couldn't find any patches for the patch series." << std::endl;
2183 return false;
2184 }
2185
2186 for (auto type = types.crbegin(); type != types.crend(); ++type)
2187 {
2188 if (LocalHashes.find(*type) == NULL)
2189 continue;
2190
2191 std::string tagname = *type;
2192 tagname.append("-Patches");
2193 std::string const tmp = Tags.FindS(tagname.c_str());
2194 if (tmp.empty() == true)
2195 continue;
2196
2197 string hash, filename;
2198 unsigned long long size;
2199 std::stringstream ss(tmp);
2200 ss.imbue(posix);
2201
2202 while (ss >> hash >> size >> filename)
2203 {
2204 if (unlikely(hash.empty() == true || filename.empty() == true))
2205 continue;
2206
2207 // see if we have a record for this file already
2208 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2209 for (; cur != available_patches.end(); ++cur)
2210 {
2211 if (cur->file != filename)
2212 continue;
2213 if (cur->patch_hashes.empty())
2214 cur->patch_hashes.FileSize(size);
2215 cur->patch_hashes.push_back(HashString(*type, hash));
2216 break;
2217 }
2218 if (cur != available_patches.end())
2219 continue;
2220 if (Debug == true)
2221 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2222 << " wasn't in the list for the first parsed hash! (patches)" << std::endl;
2223 break;
2224 }
2225 }
2226
2227 for (auto type = types.crbegin(); type != types.crend(); ++type)
2228 {
2229 std::string tagname = *type;
2230 tagname.append("-Download");
2231 std::string const tmp = Tags.FindS(tagname.c_str());
2232 if (tmp.empty() == true)
2233 continue;
2234
2235 string hash, filename;
2236 unsigned long long size;
2237 std::stringstream ss(tmp);
2238 ss.imbue(posix);
2239
2240 // FIXME: the pdiff implementation only supports .gz compressed patches
2241 while (ss >> hash >> size >> filename)
2242 {
2243 if (unlikely(hash.empty() == true || filename.empty() == true))
2244 continue;
2245 if (unlikely(APT::String::Endswith(filename, ".gz") == false))
2246 continue;
2247 filename.erase(filename.length() - 3);
2248
2249 // see if we have a record for this file already
2250 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2251 for (; cur != available_patches.end(); ++cur)
2252 {
2253 if (cur->file != filename)
2254 continue;
2255 if (cur->download_hashes.empty())
2256 cur->download_hashes.FileSize(size);
2257 cur->download_hashes.push_back(HashString(*type, hash));
2258 break;
2259 }
2260 if (cur != available_patches.end())
2261 continue;
2262 if (Debug == true)
2263 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2264 << " wasn't in the list for the first parsed hash! (download)" << std::endl;
2265 break;
2266 }
2267 }
2268
2269
2270 bool foundStart = false;
2271 for (std::vector<DiffInfo>::iterator cur = available_patches.begin();
2272 cur != available_patches.end(); ++cur)
2273 {
2274 if (LocalHashes != cur->result_hashes)
2275 continue;
2276
2277 available_patches.erase(available_patches.begin(), cur);
2278 foundStart = true;
2279 break;
2280 }
2281
2282 if (foundStart == false || unlikely(available_patches.empty() == true))
2283 {
2284 if (Debug)
2285 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
2286 << "Couldn't find the start of the patch series." << std::endl;
2287 return false;
2288 }
2289
2290 for (auto const &patch: available_patches)
2291 if (patch.result_hashes.usable() == false ||
2292 patch.patch_hashes.usable() == false ||
2293 patch.download_hashes.usable() == false)
2294 {
2295 if (Debug)
2296 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": provides no usable hashes for " << patch.file
2297 << " so fallback to complete download" << std::endl;
2298 return false;
2299 }
2300
2301 // patching with too many files is rather slow compared to a fast download
2302 unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
2303 if (fileLimit != 0 && fileLimit < available_patches.size())
2304 {
2305 if (Debug)
2306 std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
2307 << ") so fallback to complete download" << std::endl;
2308 return false;
2309 }
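   // Illustrative configuration: Acquire::PDiffs::FileLimit "20"; would trigger this
   // fallback to a complete download whenever more than 20 patches were needed,
   // while the default of 0 leaves the number of patches unlimited.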
2310
2311 // calculate the size of all patches we have to get
2312 unsigned short const sizeLimitPercent = _config->FindI("Acquire::PDiffs::SizeLimit", 100);
2313 if (sizeLimitPercent > 0)
2314 {
2315 unsigned long long downloadSize = std::accumulate(available_patches.begin(),
2316 available_patches.end(), 0llu, [](unsigned long long const T, DiffInfo const &I) {
2317 return T + I.download_hashes.FileSize();
2318 });
2319 if (downloadSize != 0)
2320 {
2321 unsigned long long downloadSizeIdx = 0;
2322 auto const types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
2323 for (auto const &t : types)
2324 {
2325 std::string MetaKey = Target.MetaKey;
2326 if (t != "uncompressed")
2327 MetaKey += '.' + t;
2328 HashStringList const hsl = GetExpectedHashesFor(MetaKey);
2329 if (unlikely(hsl.usable() == false))
2330 continue;
2331 downloadSizeIdx = hsl.FileSize();
2332 break;
2333 }
2334 unsigned long long const sizeLimit = downloadSizeIdx * sizeLimitPercent;
2335 if ((sizeLimit/100) < downloadSize)
2336 {
2337 if (Debug)
2338 std::clog << "Need " << downloadSize << " compressed bytes (Limit is " << (sizeLimit/100) << ", "
2339 << "original is " << downloadSizeIdx << ") so fallback to complete download" << std::endl;
2340 return false;
2341 }
2342 }
2343 }
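   // Worked example (numbers assumed): with the default SizeLimit of 100 and a 40 MB
   // compressed index, sizeLimit/100 equals 40 MB, so a patch set whose compressed
   // download size exceeds the size of the full index falls back to a complete download.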
2344
2345 // we have something, queue the diffs
2346 string::size_type const last_space = Description.rfind(" ");
2347 if(last_space != string::npos)
2348 Description.erase(last_space, Description.size()-last_space);
2349
2350 /* decide if we should download patches one by one or in one go:
2351 The first is good if the server merges patches, but many don't, so client-
2352 based merging can be attempted, in which case the second is better.
2353 "bad things" will happen if patches are merged on the server,
2354 but client-side merging is attempted as well */
2355 bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
2356 if (pdiff_merge == true)
2357 {
2358 // reprepro adds this flag if it has merged patches on the server
2359 std::string const precedence = Tags.FindS("X-Patch-Precedence");
2360 pdiff_merge = (precedence != "merged");
2361 }
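   // Illustrative configuration: Acquire::PDiffs::Merge "false"; selects the serial
   // pkgAcqIndexDiffs path below even if the server never sets "X-Patch-Precedence: merged".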
2362
2363 // clean the plate
2364 {
2365 std::string const Final = GetExistingFilename(CurrentPackagesFile);
2366 if (unlikely(Final.empty())) // because we wouldn't be called in such a case
2367 return false;
2368 std::string const PartialFile = GetPartialFileNameFromURI(Target.URI);
2369 std::string const PatchedFile = GetKeepCompressedFileName(PartialFile + "-patched", Target);
2370 if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile) == false ||
2371 RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile) == false)
2372 return false;
2373 for (auto const &ext : APT::Configuration::getCompressorExtensions())
2374 {
2375 if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile + ext) == false ||
2376 RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile + ext) == false)
2377 return false;
2378 }
2379 std::string const Ext = Final.substr(CurrentPackagesFile.length());
2380 std::string const Partial = PartialFile + Ext;
2381 if (symlink(Final.c_str(), Partial.c_str()) != 0)
2382 {
2383 if (Debug)
2384 std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
2385 << " by linking " << Final << " to " << Partial << " failed!" << std::endl;
2386 return false;
2387 }
2388 }
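   // Illustrative outcome of the bootstrap linking above: an existing lists/..._Packages.xz
   // is symlinked to partial/..._Packages.xz (after stale partial and "-patched" leftovers
   // were removed) so the rred method has a local base file to apply the patches to.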
2389
2390 if (pdiff_merge == false)
2391 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, available_patches);
2392 else
2393 {
2394 diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
2395 for(size_t i = 0; i < available_patches.size(); ++i)
2396 (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager,
2397 Target,
2398 available_patches[i],
2399 diffs);
2400 }
2401
2402 Complete = false;
2403 Status = StatDone;
2404 Dequeue();
2405 return true;
2406}
2407 /*}}}*/
2408void pkgAcqDiffIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2409{
2410 if (CommonFailed(GetDiffIndexURI(Target), GetDiffIndexFileName(Target.Description), Message, Cnf))
2411 return;
2412
2413 Status = StatDone;
2414 ExpectedAdditionalItems = 0;
2415
2416 if(Debug)
2417 std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
2418 << "Falling back to normal index file acquire" << std::endl;
2419
2420 new pkgAcqIndex(Owner, TransactionManager, Target);
2421}
2422 /*}}}*/
2423void pkgAcqDiffIndex::Done(string const &Message,HashStringList const &Hashes, /*{{{*/
2424 pkgAcquire::MethodConfig const * const Cnf)
2425{
2426 if(Debug)
2427 std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl;
2428
2429 Item::Done(Message, Hashes, Cnf);
2430
2431 string const FinalFile = GetFinalFilename();
2432 if(StringToBool(LookupTag(Message,"IMS-Hit"),false))
2433 DestFile = FinalFile;
2434
2435 if(ParseDiffIndex(DestFile) == false)
2436 {
2437 Failed("Message: Couldn't parse pdiff index", Cnf);
2438 // queue for final move - this should happen even if we fail
2439 // while parsing (e.g. on sizelimit) and download the complete file.
2440 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
2441 return;
2442 }
2443
2444 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
2445
2446 Complete = true;
2447 Status = StatDone;
2448 Dequeue();
2449
2450 return;
2451}
2452 /*}}}*/
2453pkgAcqDiffIndex::~pkgAcqDiffIndex()
2454{
2455 if (diffs != NULL)
2456 delete diffs;
2457}
2458
2459// AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/
2460// ---------------------------------------------------------------------
2461/* The package diff is added to the queue. One object is constructed
2462 * for each diff and the index
2463 */
2464pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire * const Owner,
2465 pkgAcqMetaClearSig * const TransactionManager,
2466 IndexTarget const &Target,
2467 vector<DiffInfo> const &diffs)
2468 : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL),
2469 available_patches(diffs)
2470{
2471 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
2472
2473 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2474
2475 Desc.Owner = this;
2476 Description = Target.Description;
2477 Desc.ShortDesc = Target.ShortDesc;
2478
2479 if(available_patches.empty() == true)
2480 {
2481 // we are done (yeah!), check hashes against the final file
2482 DestFile = GetKeepCompressedFileName(GetFinalFileNameFromURI(Target.URI), Target);
2483 Finish(true);
2484 }
2485 else
2486 {
2487 State = StateFetchDiff;
2488 QueueNextDiff();
2489 }
2490}
2491 /*}}}*/
2492void pkgAcqIndexDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2493{
2494 pkgAcqBaseIndex::Failed(Message,Cnf);
2495 Status = StatDone;
2496
2497 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
2498 if(Debug)
2499 std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
2500 << "Falling back to normal index file acquire " << std::endl;
2501 RenameOnError(PDiffError);
2502 std::string const patchname = GetDiffsPatchFileName(DestFile);
2503 if (RealFileExists(patchname))
2504 Rename(patchname, patchname + ".FAILED");
2505 std::string const UnpatchedFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2506 if (UnpatchedFile.empty() == false && FileExists(UnpatchedFile))
2507 Rename(UnpatchedFile, UnpatchedFile + ".FAILED");
2508 new pkgAcqIndex(Owner, TransactionManager, Target);
2509 Finish();
2510}
2511 /*}}}*/
2512// Finish - helper that cleans the item out of the fetcher queue /*{{{*/
2513void pkgAcqIndexDiffs::Finish(bool allDone)
2514{
2515 if(Debug)
2516 std::clog << "pkgAcqIndexDiffs::Finish(): "
2517 << allDone << " "
2518 << Desc.URI << std::endl;
2519
2520 // we restore the original name; this is required, otherwise
2521 // the file will be cleaned
2522 if(allDone)
2523 {
2524 std::string const Final = GetKeepCompressedFileName(GetFinalFilename(), Target);
2525 TransactionManager->TransactionStageCopy(this, DestFile, Final);
2526
2527 // this is for the "real" finish
2528 Complete = true;
2529 Status = StatDone;
2530 Dequeue();
2531 if(Debug)
2532 std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl;
2533 return;
2534 }
2535 else
2536 DestFile.clear();
2537
2538 if(Debug)
2539 std::clog << "Finishing: " << Desc.URI << std::endl;
2540 Complete = false;
2541 Status = StatDone;
2542 Dequeue();
2543 return;
2544}
2545 /*}}}*/
2546bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
2547{
2548 // calculate the hashes of the just-patched file
2549 std::string const PartialFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2550 if(unlikely(PartialFile.empty()))
2551 {
2552 Failed("Message: The file " + GetPartialFileNameFromURI(Target.URI) + " isn't available", NULL);
2553 return false;
2554 }
2555
2556 FileFd fd(PartialFile, FileFd::ReadOnly, FileFd::Extension);
2557 Hashes LocalHashesCalc;
2558 LocalHashesCalc.AddFD(fd);
2559 HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
2560
2561 if(Debug)
2562 std::clog << "QueueNextDiff: " << PartialFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl;
2563
2564 HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey);
2565 if (unlikely(LocalHashes.usable() == false || TargetFileHashes.usable() == false))
2566 {
2567 Failed("Local/Expected hashes are not usable for " + PartialFile, NULL);
2568 return false;
2569 }
2570
2571 // final file reached before all patches are applied
2572 if(LocalHashes == TargetFileHashes)
2573 {
2574 Finish(true);
2575 return true;
2576 }
2577
2578 // remove all patches until the next matching patch is found
2579 // this requires the Index file to be ordered
2580 available_patches.erase(available_patches.begin(),
2581 std::find_if(available_patches.begin(), available_patches.end(), [&](DiffInfo const &I) {
2582 return I.result_hashes == LocalHashes;
2583 }));
2584
2585 // error checking and falling back if no patch was found
2586 if(available_patches.empty() == true)
2587 {
2588 Failed("No patches left to reach target for " + PartialFile, NULL);
2589 return false;
2590 }
2591
2592 // queue the right diff
2593 Desc.URI = Target.URI + ".diff/" + available_patches[0].file + ".gz";
2594 Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
2595 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI + ".diff/" + available_patches[0].file), Target);
2596
2597 if(Debug)
2598 std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl;
2599
2600 QueueURI(Desc);
2601
2602 return true;
2603}
2604 /*}}}*/
2605void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/
2606 pkgAcquire::MethodConfig const * const Cnf)
2607{
2608 if (Debug)
2609 std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl;
2610
2611 Item::Done(Message, Hashes, Cnf);
2612
2613 std::string const UncompressedUnpatchedFile = GetPartialFileNameFromURI(Target.URI);
2614 std::string const UnpatchedFile = GetExistingFilename(UncompressedUnpatchedFile);
2615 std::string const PatchFile = GetDiffsPatchFileName(UnpatchedFile);
2616 std::string const PatchedFile = GetKeepCompressedFileName(UncompressedUnpatchedFile, Target);
2617
2618 switch (State)
2619 {
2620 // success in downloading a diff, enter ApplyDiff state
2621 case StateFetchDiff:
2622 Rename(DestFile, PatchFile);
2623 DestFile = GetKeepCompressedFileName(UncompressedUnpatchedFile + "-patched", Target);
2624 if(Debug)
2625 std::clog << "Sending to rred method: " << UnpatchedFile << std::endl;
2626 State = StateApplyDiff;
2627 Local = true;
2628 Desc.URI = "rred:" + UnpatchedFile;
2629 QueueURI(Desc);
2630 SetActiveSubprocess("rred");
2631 return;
2632 // success in downloading/applying a diff, queue the next one (if needed)
2633 case StateApplyDiff:
2634 // remove the just applied patch and base file
2635 available_patches.erase(available_patches.begin());
2636 RemoveFile("pkgAcqIndexDiffs::Done", PatchFile);
2637 RemoveFile("pkgAcqIndexDiffs::Done", UnpatchedFile);
2638 if(Debug)
2639 std::clog << "Moving patched file in place: " << std::endl
2640 << DestFile << " -> " << PatchedFile << std::endl;
2641 Rename(DestFile, PatchedFile);
2642
2643 // see if there is more to download
2644 if(available_patches.empty() == false)
2645 {
2646 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, available_patches);
2647 Finish();
2648 } else {
2649 DestFile = PatchedFile;
2650 Finish(true);
2651 }
2652 return;
2653 }
2654}
2655 /*}}}*/
2656std::string pkgAcqIndexDiffs::Custom600Headers() const /*{{{*/
2657{
2658 if(State != StateApplyDiff)
2659 return pkgAcqBaseIndex::Custom600Headers();
2660 std::ostringstream patchhashes;
2661 for (auto && hs : available_patches[0].result_hashes)
2662 patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
2663 for (auto && hs : available_patches[0].patch_hashes)
2664 patchhashes << "\nPatch-0-" << hs.HashType() << "-Hash: " << hs.HashValue();
2665 patchhashes << pkgAcqBaseIndex::Custom600Headers();
2666 return patchhashes.str();
2667}
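// Illustrative request headers produced here (hash type and values assumed):
//   Start-SHA256-Hash: <hash of the index file to be patched>
//   Patch-0-SHA256-Hash: <hash of the uncompressed patch to apply>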
2668 /*}}}*/
2669pkgAcqIndexDiffs::~pkgAcqIndexDiffs() {}
2670
2671// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
2672pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire * const Owner,
2673 pkgAcqMetaClearSig * const TransactionManager,
2674 IndexTarget const &Target,
2675 DiffInfo const &patch,
2676 std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
2677 : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL),
2678 patch(patch), allPatches(allPatches), State(StateFetchDiff)
2679{
2680 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2681
2682 Desc.Owner = this;
2683 Description = Target.Description;
2684 Desc.ShortDesc = Target.ShortDesc;
2685 Desc.URI = Target.URI + ".diff/" + patch.file + ".gz";
2686 Desc.Description = Description + " " + patch.file + ".pdiff";
2687 DestFile = GetPartialFileNameFromURI(Desc.URI);
2688
2689 if(Debug)
2690 std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
2691
2692 QueueURI(Desc);
2693}
2694 /*}}}*/
2695void pkgAcqIndexMergeDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2696{
2697 if(Debug)
2698 std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
2699
2700 pkgAcqBaseIndex::Failed(Message,Cnf);
2701 Status = StatDone;
2702
2703 // check if we are the first to fail, otherwise we are done here
2704 State = StateDoneDiff;
2705 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2706 I != allPatches->end(); ++I)
2707 if ((*I)->State == StateErrorDiff)
2708 {
2709 State = StateErrorDiff;
2710 return;
2711 }
2712
2713 // first failure means we should fallback
2714 State = StateErrorDiff;
2715 if (Debug)
2716 std::clog << "Falling back to normal index file acquire" << std::endl;
2717 RenameOnError(PDiffError);
2718 if (RealFileExists(DestFile))
2719 Rename(DestFile, DestFile + ".FAILED");
2720 std::string const UnpatchedFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2721 if (UnpatchedFile.empty() == false && FileExists(UnpatchedFile))
2722 Rename(UnpatchedFile, UnpatchedFile + ".FAILED");
2723 DestFile.clear();
2724 new pkgAcqIndex(Owner, TransactionManager, Target);
2725}
2726 /*}}}*/
2727void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/
2728 pkgAcquire::MethodConfig const * const Cnf)
2729{
2730 if(Debug)
2731 std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
2732
2733 Item::Done(Message, Hashes, Cnf);
2734
2735 if (std::any_of(allPatches->begin(), allPatches->end(),
2736 [](pkgAcqIndexMergeDiffs const * const P) { return P->State == StateErrorDiff; }))
2737 {
2738 if(Debug)
2739 std::clog << "Another patch failed already, no point in processing this one." << std::endl;
2740 State = StateErrorDiff;
2741 return;
2742 }
2743
2744 std::string const UncompressedUnpatchedFile = GetPartialFileNameFromURI(Target.URI);
2745 std::string const UnpatchedFile = GetExistingFilename(UncompressedUnpatchedFile);
2746 if (UnpatchedFile.empty())
2747 {
2748 _error->Fatal("Unpatched file %s doesn't exist (anymore)!", UncompressedUnpatchedFile.c_str());
2749 State = StateErrorDiff;
2750 return;
2751 }
2752 std::string const PatchFile = GetMergeDiffsPatchFileName(UnpatchedFile, patch.file);
2753 std::string const PatchedFile = GetKeepCompressedFileName(UncompressedUnpatchedFile, Target);
2754
2755 switch (State)
2756 {
2757 case StateFetchDiff:
2758 Rename(DestFile, PatchFile);
2759
2760 // check if this is the last completed diff
2761 State = StateDoneDiff;
2762 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2763 I != allPatches->end(); ++I)
2764 if ((*I)->State != StateDoneDiff)
2765 {
2766 if(Debug)
2767 std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl;
2768 return;
2769 }
2770 // this is the last completed diff, so we are ready to apply now
2771 DestFile = GetKeepCompressedFileName(UncompressedUnpatchedFile + "-patched", Target);
2772 if(Debug)
2773 std::clog << "Sending to rred method: " << UnpatchedFile << std::endl;
2774 State = StateApplyDiff;
2775 Local = true;
2776 Desc.URI = "rred:" + UnpatchedFile;
2777 QueueURI(Desc);
2778 SetActiveSubprocess("rred");
2779 return;
2780 case StateApplyDiff:
2781 // success in downloading & applying all diffs, finalize and clean up
2782 if(Debug)
2783 std::clog << "Queue patched file in place: " << std::endl
2784 << DestFile << " -> " << PatchedFile << std::endl;
2785
2786 // queue for copy by the transaction manager
2787 TransactionManager->TransactionStageCopy(this, DestFile, GetKeepCompressedFileName(GetFinalFilename(), Target));
2788
2789 // ensure the ed patches are gone regardless of list-cleanup
2790 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2791 I != allPatches->end(); ++I)
2792 RemoveFile("pkgAcqIndexMergeDiffs::Done", GetMergeDiffsPatchFileName(UnpatchedFile, (*I)->patch.file));
2793 RemoveFile("pkgAcqIndexMergeDiffs::Done", UnpatchedFile);
2794
2795 // all set and done
2796 Complete = true;
2797 if(Debug)
2798 std::clog << "allDone: " << DestFile << "\n" << std::endl;
2799 return;
2800 case StateDoneDiff: _error->Fatal("Done called for %s which is in an invalid Done state", PatchFile.c_str()); break;
2801 case StateErrorDiff: _error->Fatal("Done called for %s which is in an invalid Error state", PatchFile.c_str()); break;
2802 }
2803}
2804 /*}}}*/
2805std::string pkgAcqIndexMergeDiffs::Custom600Headers() const /*{{{*/
2806{
2807 if(State != StateApplyDiff)
2808 return pkgAcqBaseIndex::Custom600Headers();
2809 std::ostringstream patchhashes;
2810 unsigned int seen_patches = 0;
2811 for (auto && hs : (*allPatches)[0]->patch.result_hashes)
2812 patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
2813 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2814 I != allPatches->end(); ++I)
2815 {
2816 HashStringList const ExpectedHashes = (*I)->patch.patch_hashes;
2817 for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
2818 patchhashes << "\nPatch-" << std::to_string(seen_patches) << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
2819 ++seen_patches;
2820 }
2821 patchhashes << pkgAcqBaseIndex::Custom600Headers();
2822 return patchhashes.str();
2823}
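// Illustrative request headers for a batch of three merge diffs (values assumed):
//   Start-SHA256-Hash: <hash of the current index>
//   Patch-0-SHA256-Hash: ..., Patch-1-SHA256-Hash: ..., Patch-2-SHA256-Hash: ...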
2824 /*}}}*/
2825pkgAcqIndexMergeDiffs::~pkgAcqIndexMergeDiffs() {}
2826
2827// AcqIndex::AcqIndex - Constructor /*{{{*/
2828pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner,
2829 pkgAcqMetaClearSig * const TransactionManager,
2830 IndexTarget const &Target, bool const Derived)
2831 : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL), Stage(STAGE_DOWNLOAD),
2832 CompressionExtensions(Target.Option(IndexTarget::COMPRESSIONTYPES))
2833{
2834 if (Derived)
2835 return;
2836 Init(Target.URI, Target.Description, Target.ShortDesc);
2837
2838 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
2839 std::clog << "New pkgIndex with TransactionManager "
2840 << TransactionManager << std::endl;
2841}
2842 /*}}}*/
2843// AcqIndex::Init - deferred Constructor	/*{{{*/
2844static void NextCompressionExtension(std::string &CurrentCompressionExtension, std::string &CompressionExtensions, bool const preview)
2845{
2846 size_t const nextExt = CompressionExtensions.find(' ');
2847 if (nextExt == std::string::npos)
2848 {
2849 CurrentCompressionExtension = CompressionExtensions;
2850 if (preview == false)
2851 CompressionExtensions.clear();
2852 }
2853 else
2854 {
2855 CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt);
2856 if (preview == false)
2857 CompressionExtensions = CompressionExtensions.substr(nextExt+1);
2858 }
2859}
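// Illustrative behaviour (input assumed): with CompressionExtensions set to
// "xz bz2 gz uncompressed", a call with preview == false yields "xz" as the
// CurrentCompressionExtension and leaves "bz2 gz uncompressed" behind for later retries.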
2860void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
2861 string const &ShortDesc)
2862{
2863 Stage = STAGE_DOWNLOAD;
2864
2865 DestFile = GetPartialFileNameFromURI(URI);
2866 NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false);
2867
2868 if (CurrentCompressionExtension == "uncompressed")
2869 {
2870 Desc.URI = URI;
2871 }
2872 else if (CurrentCompressionExtension == "by-hash")
2873 {
2874 NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, true);
2875 if(unlikely(CurrentCompressionExtension.empty()))
2876 return;
2877 if (CurrentCompressionExtension != "uncompressed")
2878 {
2879 Desc.URI = URI + '.' + CurrentCompressionExtension;
2880 DestFile = DestFile + '.' + CurrentCompressionExtension;
2881 }
2882 else
2883 Desc.URI = URI;
2884
2885 HashStringList const Hashes = GetExpectedHashes();
2886 HashString const * const TargetHash = Hashes.find(NULL);
2887 if (unlikely(TargetHash == nullptr))
2888 return;
2889 std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
2890 size_t const trailing_slash = Desc.URI.find_last_of("/");
2891 if (unlikely(trailing_slash == std::string::npos))
2892 return;
2893 Desc.URI = Desc.URI.replace(
2894 trailing_slash,
2895 Desc.URI.substr(trailing_slash+1).size()+1,
2896 ByHash);
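      // Illustrative result (URI assumed): ".../main/binary-amd64/Packages.xz" is
      // rewritten to ".../main/binary-amd64/by-hash/SHA256/<expected hash value>".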
2897 }
2898 else if (unlikely(CurrentCompressionExtension.empty()))
2899 return;
2900 else
2901 {
2902 Desc.URI = URI + '.' + CurrentCompressionExtension;
2903 DestFile = DestFile + '.' + CurrentCompressionExtension;
2904 }
2905
2906 // store the file size of the download to ensure the fetcher gives
2907 // accurate progress reporting
2908 FileSize = GetExpectedHashes().FileSize();
2909
2910 Desc.Description = URIDesc;
2911 Desc.Owner = this;
2912 Desc.ShortDesc = ShortDesc;
2913
2914 QueueURI(Desc);
2915}
2916 /*}}}*/
2917// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
2918// ---------------------------------------------------------------------
2919/* Besides Index-File and Fail-Ignore, the only header we use is the Last-Modified header. */
2920string pkgAcqIndex::Custom600Headers() const
2921{
2922
2923 string msg = "\nIndex-File: true";
2924
2925 if (TransactionManager->LastMetaIndexParser == NULL)
2926 {
2927 std::string const Final = GetFinalFilename();
2928
2929 struct stat Buf;
2930 if (stat(Final.c_str(),&Buf) == 0)
2931 msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
2932 }
2933
2934 if(Target.IsOptional)
2935 msg += "\nFail-Ignore: true";
2936
2937 return msg;
2938}
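// Illustrative headers resulting from the method above (values assumed):
//   Index-File: true
//   Last-Modified: Sat, 01 Jan 2022 00:00:00 GMT   (only if no previous Release file is known)
//   Fail-Ignore: true                               (only for optional targets)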
2939 /*}}}*/
2940// AcqIndex::Failed - getting the indexfile failed /*{{{*/
2941bool pkgAcqIndex::CommonFailed(std::string const &TargetURI, std::string const TargetDesc,
2942 std::string const &Message, pkgAcquire::MethodConfig const * const Cnf)
2943{
2944 pkgAcqBaseIndex::Failed(Message,Cnf);
2945
2946 if (UsedMirror.empty() == false && UsedMirror != "DIRECT" &&
2947 LookupTag(Message, "FailReason") == "HttpError404")
2948 {
2949 UsedMirror = "DIRECT";
2950 if (Desc.URI.find("/by-hash/") != std::string::npos)
2951 CompressionExtensions = "by-hash " + CompressionExtensions;
2952 else
2953 CompressionExtensions = CurrentCompressionExtension + ' ' + CompressionExtensions;
2954 Init(TargetURI, TargetDesc, Desc.ShortDesc);
2955 Status = StatIdle;
2956 return true;
2957 }
2958
2959 // authorisation failures will not be fixed by other compression types
2960 if (Status != StatAuthError)
2961 {
2962 if (CompressionExtensions.empty() == false)
2963 {
2964 Init(TargetURI, Desc.Description, Desc.ShortDesc);
2965 Status = StatIdle;
2966 return true;
2967 }
2968 }
2969 return false;
2970}
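// Illustrative fallback sequence (scenario assumed): a mirror answering 404 for Packages.xz
// is first retried DIRECT against the configured archive; if that fails as well without an
// authentication error, the next configured compression type (e.g. ".gz") is tried.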
2971void pkgAcqIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
2972{
2973 if (CommonFailed(Target.URI, Target.Description, Message, Cnf))
2974 return;
2975
2976 if(Target.IsOptional && GetExpectedHashes().empty() && Stage == STAGE_DOWNLOAD)
2977 Status = StatDone;
2978 else
2979 TransactionManager->AbortTransaction();
2980}
2981 /*}}}*/
2982// AcqIndex::Done - Finished a fetch /*{{{*/
2983// ---------------------------------------------------------------------
2984/* This goes through a number of states. On the initial fetch the
2985 method could possibly return an alternate filename which points
2986 to the uncompressed version of the file. If so, the file
2987 is copied into the partial directory. In all other cases the file
2988 is decompressed from a compressed URI. */
2989void pkgAcqIndex::Done(string const &Message,
2990 HashStringList const &Hashes,
2991 pkgAcquire::MethodConfig const * const Cfg)
2992{
2993 Item::Done(Message,Hashes,Cfg);
2994
2995 switch(Stage)
2996 {
2997 case STAGE_DOWNLOAD:
2998 StageDownloadDone(Message);
2999 break;
3000 case STAGE_DECOMPRESS_AND_VERIFY:
3001 StageDecompressDone();
3002 break;
3003 }
3004}
3005 /*}}}*/
3006// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/
3007void pkgAcqIndex::StageDownloadDone(string const &Message)
3008{
3009 Local = true;
3010 Complete = true;
3011
3012 std::string const AltFilename = LookupTag(Message,"Alt-Filename");
3013 std::string Filename = LookupTag(Message,"Filename");
3014
3015 // we need to verify the file against the current Release file again
3016 // on an if-modified-since hit to avoid a stale attack against us
3017 if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
3018 {
3019 // copy FinalFile into partial/ so that we check the hash again
3020 string const FinalFile = GetExistingFilename(GetFinalFileNameFromURI(Target.URI));
3021 if (symlink(FinalFile.c_str(), DestFile.c_str()) != 0)
3022 _error->WarningE("pkgAcqIndex::StageDownloadDone", "Symlinking final file %s back to %s failed", FinalFile.c_str(), DestFile.c_str());
3023 else
3024 {
3025 EraseFileName = DestFile;
3026 Filename = DestFile;
3027 }
3028 Stage = STAGE_DECOMPRESS_AND_VERIFY;
3029 Desc.URI = "store:" + Filename;
3030 QueueURI(Desc);
3031 SetActiveSubprocess(::URI(Desc.URI).Access);
3032 return;
3033 }
3034 // methods like file:// give us an alternative (uncompressed) file
3035 else if (Target.KeepCompressed == false && AltFilename.empty() == false)
3036 {
3037 Filename = AltFilename;
3038 EraseFileName.clear();
3039 }
3040 // Methods like e.g. "file:" will give us a (compressed) FileName that is
3041 // not the "DestFile" we set; in this case we uncompress from the local file
3042 else if (Filename != DestFile && RealFileExists(DestFile) == false)
3043 {
3044 // symlinking ensures that the filename can be used for compression detection
3045 // which is e.g. needed for by-hash downloads whose filename carries no extension
3046 if (symlink(Filename.c_str(),DestFile.c_str()) != 0)
3047 _error->WarningE("pkgAcqIndex::StageDownloadDone", "Symlinking file %s to %s failed", Filename.c_str(), DestFile.c_str());
3048 else
3049 {
3050 EraseFileName = DestFile;
3051 Filename = DestFile;
3052 }
3053 }
3054
3055 Stage = STAGE_DECOMPRESS_AND_VERIFY;
3056 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
3057 if (Filename != DestFile && flExtension(Filename) == flExtension(DestFile))
3058 Desc.URI = "copy:" + Filename;
3059 else
3060 Desc.URI = "store:" + Filename;
3061 if (DestFile == Filename)
3062 {
3063 if (CurrentCompressionExtension == "uncompressed")
3064 return StageDecompressDone();
3065 DestFile = "/dev/null";
3066 }
3067
3068 if (EraseFileName.empty() && Filename != AltFilename)
3069 EraseFileName = Filename;
3070
3071 // queue uri for the next stage
3072 QueueURI(Desc);
3073 SetActiveSubprocess(::URI(Desc.URI).Access);
3074}
3075 /*}}}*/
3076// AcqIndex::StageDecompressDone - Final verification /*{{{*/
3077void pkgAcqIndex::StageDecompressDone()
3078{
3079 if (DestFile == "/dev/null")
3080 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
3081
3082 // Done, queue for rename on transaction finished
3083 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
3084}
3085 /*}}}*/
3086pkgAcqIndex::~pkgAcqIndex() {}
3087
3088
3089// AcqArchive::AcqArchive - Constructor /*{{{*/
3090// ---------------------------------------------------------------------
3091/* This just sets up the initial fetch environment and queues the first
3092 possibility */
3093pkgAcqArchive::pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sources,
3094 pkgRecords * const Recs,pkgCache::VerIterator const &Version,
3095 string &StoreFilename) :
3096 Item(Owner), d(NULL), LocalSource(false), Version(Version), Sources(Sources), Recs(Recs),
3097 StoreFilename(StoreFilename), Vf(Version.FileList()),
3098 Trusted(false)
3099{
3100 Retries = _config->FindI("Acquire::Retries",0);
3101
3102 if (Version.Arch() == 0)
3103 {
3104 _error->Error(_("I wasn't able to locate a file for the %s package. "
3105 "This might mean you need to manually fix this package. "
3106 "(due to missing arch)"),
3107 Version.ParentPkg().FullName().c_str());
3108 return;
3109 }
3110
3111 /* We need to find a filename to determine the extension. We make the
3112 assumption here that all the available sources for this version share
3113 the same extension.. */
3114 // Skip sources flagged NotSource, they do not have file fields.
3115 for (; Vf.end() == false; ++Vf)
3116 {
3117 if (Vf.File().Flagged(pkgCache::Flag::NotSource))
3118 continue;
3119 break;
3120 }
3121
3122 // Does not really matter here.. we are going to fail out below
3123 if (Vf.end() != true)
3124 {
3125 // If this fails to get a file name we will bomb out below.
3126 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
3127 if (_error->PendingError() == true)
3128 return;
3129
3130 // Generate the final file name as: package_version_arch.foo
3131 StoreFilename = QuoteString(Version.ParentPkg().Name(),"_:") + '_' +
3132 QuoteString(Version.VerStr(),"_:") + '_' +
3133 QuoteString(Version.Arch(),"_:.") +
3134 "." + flExtension(Parse.FileName());
3135 }
3136
3137 // Check if we have at least one trusted source for the package. If so, switch
3138 // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode
3139 bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false);
3140 bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false);
3141 bool seenUntrusted = false;
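// A package counts as trusted as soon as one of its sources is trusted; with
// AllowUnauthenticated we keep iterating to record whether an untrusted source
// exists as well, so the override below can kick in.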
3142 for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
3143 {
3144 pkgIndexFile *Index;
3145 if (Sources->FindIndex(i.File(),Index) == false)
3146 continue;
3147
3148 if (debugAuth == true)
3149 std::cerr << "Checking index: " << Index->Describe()
3150 << "(Trusted=" << Index->IsTrusted() << ")" << std::endl;
3151
3152 if (Index->IsTrusted() == true)
3153 {
3154 Trusted = true;
3155 if (allowUnauth == false)
3156 break;
3157 }
3158 else
3159 seenUntrusted = true;
3160 }
3161
3162 // "allow-unauthenticated" restores apts old fetching behaviour
3163 // that means that e.g. unauthenticated file:// uris are higher
3164 // priority than authenticated http:// uris
3165 if (allowUnauth == true && seenUntrusted == true)
3166 Trusted = false;
3167
3168 // Select a source
3169 if (QueueNext() == false && _error->PendingError() == false)
3170 _error->Error(_("Can't find a source to download version '%s' of '%s'"),
3171 Version.VerStr(), Version.ParentPkg().FullName(false).c_str());
3172}
3173 /*}}}*/
3174// AcqArchive::QueueNext - Queue the next file source /*{{{*/
3175// ---------------------------------------------------------------------
3176/* This queues the next available file version for download. It checks if
3177 the archive is already available in the cache and stashes the expected
3178 hashes for checking later. */
3179bool pkgAcqArchive::QueueNext()
3180{
3181 for (; Vf.end() == false; ++Vf)
3182 {
3183 pkgCache::PkgFileIterator const PkgF = Vf.File();
3184 // Ignore non-source entries
3185 if (PkgF.Flagged(pkgCache::Flag::NotSource))
3186 continue;
3187
3188 // Try to cross match against the source list
3189 pkgIndexFile *Index;
3190 if (Sources->FindIndex(PkgF, Index) == false)
3191 continue;
3192 LocalSource = PkgF.Flagged(pkgCache::Flag::LocalSource);
3193
3194 // only try to get a trusted package from another source if that source
3195 // is also trusted
3196 if(Trusted && !Index->IsTrusted())
3197 continue;
3198
3199 // Grab the text package record
3200 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
3201 if (_error->PendingError() == true)
3202 return false;
3203
3204 string PkgFile = Parse.FileName();
3205 ExpectedHashes = Parse.Hashes();
3206
3207 if (PkgFile.empty() == true)
3208 return _error->Error(_("The package index files are corrupted. No Filename: "
3209 "field for package %s."),
3210 Version.ParentPkg().Name());
3211
3212 Desc.URI = Index->ArchiveURI(PkgFile);
3213 Desc.Description = Index->ArchiveInfo(Version);
3214 Desc.Owner = this;
3215 Desc.ShortDesc = Version.ParentPkg().FullName(true);
3216
3217 // See if we already have the file. (Legacy filenames)
3218 FileSize = Version->Size;
3219 string FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(PkgFile);
3220 struct stat Buf;
3221 if (stat(FinalFile.c_str(),&Buf) == 0)
3222 {
3223 // Make sure the size matches
3224 if ((unsigned long long)Buf.st_size == Version->Size)
3225 {
3226 Complete = true;
3227 Local = true;
3228 Status = StatDone;
3229 StoreFilename = DestFile = FinalFile;
3230 return true;
3231 }
3232
3233 /* Hmm, we have a file and its size does not match; this means it is
3234 an old-style file for a mismatched arch */
3235 RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
3236 }
3237
3238 // Check it again using the new style output filenames
3239 FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename);
3240 if (stat(FinalFile.c_str(),&Buf) == 0)
3241 {
3242 // Make sure the size matches
3243 if ((unsigned long long)Buf.st_size == Version->Size)
3244 {
3245 Complete = true;
3246 Local = true;
3247 Status = StatDone;
3248 StoreFilename = DestFile = FinalFile;
3249 return true;
3250 }
3251
3252 /* Hmm, we have a file and its size does not match; this shouldn't
3253 happen. */
3254 RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
3255 }
3256
3257 DestFile = _config->FindDir("Dir::Cache::Archives") + "partial/" + flNotDir(StoreFilename);
3258
3259 // Check the destination file
3260 if (stat(DestFile.c_str(),&Buf) == 0)
3261 {
3262 // Hmm, the partial file is too big, erase it
3263 if ((unsigned long long)Buf.st_size > Version->Size)
3264 RemoveFile("pkgAcqArchive::QueueNext", DestFile);
3265 else
3266 PartialSize = Buf.st_size;
3267 }
3268
3269 // Disables download of archives - useful if no real installation follows,
3270 // e.g. if we are just interested in proposed installation order
3271 if (_config->FindB("Debug::pkgAcqArchive::NoQueue", false) == true)
3272 {
3273 Complete = true;
3274 Local = true;
3275 Status = StatDone;
3276 StoreFilename = DestFile = FinalFile;
3277 return true;
3278 }
3279
3280 // Create the item
3281 Local = false;
3282 QueueURI(Desc);
3283
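// advance Vf so that a later call to QueueNext() (e.g. from Failed()) continues
// with the next source instead of requeueing this one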
3284 ++Vf;
3285 return true;
3286 }
3287 return false;
3288}
3289 /*}}}*/
3290// AcqArchive::Done - Finished fetching /*{{{*/
3291// ---------------------------------------------------------------------
3292/* */
3293void pkgAcqArchive::Done(string const &Message, HashStringList const &Hashes,
3294 pkgAcquire::MethodConfig const * const Cfg)
3295{
3296 Item::Done(Message, Hashes, Cfg);
3297
3298 // Grab the output filename
3299 std::string const FileName = LookupTag(Message,"Filename");
3300 if (DestFile != FileName && RealFileExists(DestFile) == false)
3301 {
3302 StoreFilename = DestFile = FileName;
3303 Local = true;
3304 Complete = true;
3305 return;
3306 }
3307
3308 // Done, move it into position
3309 string const FinalFile = GetFinalFilename();
3310 Rename(DestFile,FinalFile);
3311 StoreFilename = DestFile = FinalFile;
3312 Complete = true;
3313}
3314 /*}}}*/
3315// AcqArchive::Failed - Failure handler /*{{{*/
3316// ---------------------------------------------------------------------
3317/* Here we try other sources */
3318void pkgAcqArchive::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
3319{
3320 Item::Failed(Message,Cnf);
3321
3322 /* We don't really want to retry on failed media swaps; this prevents
3323 that. Note that permanent failures are not
3324 recorded. */
3325 if (Cnf->Removable == true &&
3326 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3327 {
3328 // Vf = Version.FileList();
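// exhaust the remaining sources; this item is given up on entirely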
3329 while (Vf.end() == false) ++Vf;
3330 StoreFilename = string();
3331 return;
3332 }
3333
3334 Status = StatIdle;
3335 if (QueueNext() == false)
3336 {
3337 // This is the retry counter
3338 if (Retries != 0 &&
3339 Cnf->LocalOnly == false &&
3340 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3341 {
3342 Retries--;
3343 Vf = Version.FileList();
3344 if (QueueNext() == true)
3345 return;
3346 }
3347
3348 StoreFilename = string();
3349 Status = StatError;
3350 }
3351}
3352 /*}}}*/
3353APT_PURE bool pkgAcqArchive::IsTrusted() const /*{{{*/
3354{
3355 return Trusted;
3356}
3357 /*}}}*/
3358void pkgAcqArchive::Finished() /*{{{*/
3359{
3360 if (Status == pkgAcquire::Item::StatDone &&
3361 Complete == true)
3362 return;
3363 StoreFilename = string();
3364}
3365 /*}}}*/
3366std::string pkgAcqArchive::DescURI() const /*{{{*/
3367{
3368 return Desc.URI;
3369}
3370 /*}}}*/
3371std::string pkgAcqArchive::ShortDesc() const /*{{{*/
3372{
3373 return Desc.ShortDesc;
3374}
3375 /*}}}*/
3376pkgAcqArchive::~pkgAcqArchive() {}
3377
3378// AcqChangelog::pkgAcqChangelog - Constructors /*{{{*/
3379class pkgAcqChangelog::Private
3380{
3381 public:
3382 std::string FinalFile;
3383};
3384pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::VerIterator const &Ver,
3385 std::string const &DestDir, std::string const &DestFilename) :
3386 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(Ver.SourcePkgName()), SrcVersion(Ver.SourceVerStr())
3387{
3388 Desc.URI = URI(Ver);
3389 Init(DestDir, DestFilename);
3390}
3391 // some parameters are char* here as they likely come from char* interfaces, which can also return NULL
3392pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::RlsFileIterator const &RlsFile,
3393 char const * const Component, char const * const SrcName, char const * const SrcVersion,
3394 const string &DestDir, const string &DestFilename) :
3395 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(SrcName), SrcVersion(SrcVersion)
3396{
3397 Desc.URI = URI(RlsFile, Component, SrcName, SrcVersion);
3398 Init(DestDir, DestFilename);
3399}
3400pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner,
3401 std::string const &URI, char const * const SrcName, char const * const SrcVersion,
3402 const string &DestDir, const string &DestFilename) :
3403 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(SrcName), SrcVersion(SrcVersion)
3404{
3405 Desc.URI = URI;
3406 Init(DestDir, DestFilename);
3407}
3408void pkgAcqChangelog::Init(std::string const &DestDir, std::string const &DestFilename)
3409{
3410 if (Desc.URI.empty())
3411 {
3412 Status = StatError;
3413 // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
3414 strprintf(ErrorText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
3415 // Let the error message print something sensible rather than "Failed to fetch /"
3416 if (DestFilename.empty())
3417 DestFile = SrcName + ".changelog";
3418 else
3419 DestFile = DestFilename;
3420 Desc.URI = "changelog:/" + DestFile;
3421 return;
3422 }
3423
3424 std::string DestFileName;
3425 if (DestFilename.empty())
3426 DestFileName = flCombine(DestFile, SrcName + ".changelog");
3427 else
3428 DestFileName = flCombine(DestFile, DestFilename);
3429
3430 std::string const SandboxUser = _config->Find("APT::Sandbox::User");
3431 std::string const systemTemp = GetTempDir(SandboxUser);
3432 char tmpname[1000];
3433 snprintf(tmpname, sizeof(tmpname), "%s/apt-changelog-XXXXXX", systemTemp.c_str());
3434 if (NULL == mkdtemp(tmpname))
3435 {
3436 _error->Errno("mkdtemp", "mkdtemp failed in changelog acquire of %s %s", SrcName.c_str(), SrcVersion.c_str());
3437 Status = StatError;
3438 return;
3439 }
3440 TemporaryDirectory = tmpname;
3441
3442 ChangeOwnerAndPermissionOfFile("Item::QueueURI", TemporaryDirectory.c_str(),
3443 SandboxUser.c_str(), "root", 0700);
3444
3445 DestFile = flCombine(TemporaryDirectory, DestFileName);
3446 if (DestDir.empty() == false)
3447 {
3448 d->FinalFile = flCombine(DestDir, DestFileName);
3449 if (RealFileExists(d->FinalFile))
3450 {
3451 FileFd file1, file2;
3452 if (file1.Open(DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Exclusive) &&
3453 file2.Open(d->FinalFile, FileFd::ReadOnly) && CopyFile(file2, file1))
3454 {
3455 struct timeval times[2];
3456 times[0].tv_sec = times[1].tv_sec = file2.ModificationTime();
3457 times[0].tv_usec = times[1].tv_usec = 0;
3458 utimes(DestFile.c_str(), times);
3459 }
3460 }
3461 }
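// Seeding the temporary file with a copy of the old changelog (mtime preserved via
// utimes) lets the download method send If-Modified-Since and answer with an IMS hit
// if the changelog is unchanged on the server.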
3462
3463 Desc.ShortDesc = "Changelog";
3464 strprintf(Desc.Description, "%s %s %s Changelog", URI::SiteOnly(Desc.URI).c_str(), SrcName.c_str(), SrcVersion.c_str());
3465 Desc.Owner = this;
3466 QueueURI(Desc);
3467}
3468 /*}}}*/
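// A rough usage sketch (editorial illustration, not compiled as part of apt): assuming
// a resolved pkgCache::VerIterator Ver and a configured environment, a caller could
// fetch a changelog roughly like this; error handling is omitted.
//
//    pkgAcquire Fetcher;
//    new pkgAcqChangelog(&Fetcher, Ver, "/tmp", "pkg.changelog"); // item adds itself to Fetcher
//    Fetcher.Run();                                               // Fetcher deletes its items on shutdown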
3469std::string pkgAcqChangelog::URI(pkgCache::VerIterator const &Ver) /*{{{*/
3470{
3471 std::string const confOnline = "Acquire::Changelogs::AlwaysOnline";
3472 bool AlwaysOnline = _config->FindB(confOnline, false);
3473 if (AlwaysOnline == false)
3474 for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
3475 {
3476 pkgCache::PkgFileIterator const PF = VF.File();
3477 if (PF.Flagged(pkgCache::Flag::NotSource) || PF->Release == 0)
3478 continue;
3479 pkgCache::RlsFileIterator const RF = PF.ReleaseFile();
3480 if (RF->Origin != 0 && _config->FindB(confOnline + "::Origin::" + RF.Origin(), false))
3481 {
3482 AlwaysOnline = true;
3483 break;
3484 }
3485 }
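// If the installed version is the one requested we can serve the changelog dpkg
// shipped in /usr/share/doc instead of going online at all.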
3486 if (AlwaysOnline == false)
3487 {
3488 pkgCache::PkgIterator const Pkg = Ver.ParentPkg();
3489 if (Pkg->CurrentVer != 0 && Pkg.CurrentVer() == Ver)
3490 {
3491 std::string const basename = std::string("/usr/share/doc/") + Pkg.Name() + "/changelog";
3492 std::string const debianname = basename + ".Debian";
3493 if (FileExists(debianname))
3494 return "copy://" + debianname;
3495 else if (FileExists(debianname + ".gz"))
3496 return "gzip://" + debianname + ".gz";
3497 else if (FileExists(basename))
3498 return "copy://" + basename;
3499 else if (FileExists(basename + ".gz"))
3500 return "gzip://" + basename + ".gz";
3501 }
3502 }
3503
3504 char const * const SrcName = Ver.SourcePkgName();
3505 char const * const SrcVersion = Ver.SourceVerStr();
3506 // find the first source for this version which promises a changelog
3507 for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
3508 {
3509 pkgCache::PkgFileIterator const PF = VF.File();
3510 if (PF.Flagged(pkgCache::Flag::NotSource) || PF->Release == 0)
3511 continue;
3512 pkgCache::RlsFileIterator const RF = PF.ReleaseFile();
3513 std::string const uri = URI(RF, PF.Component(), SrcName, SrcVersion);
3514 if (uri.empty())
3515 continue;
3516 return uri;
3517 }
3518 return "";
3519}
3520std::string pkgAcqChangelog::URITemplate(pkgCache::RlsFileIterator const &Rls)
3521{
3522 if (Rls.end() == true || (Rls->Label == 0 && Rls->Origin == 0))
3523 return "";
3524 std::string const serverConfig = "Acquire::Changelogs::URI";
3525 std::string server;
3526#define APT_EMPTY_SERVER \
3527 if (server.empty() == false) \
3528 { \
3529 if (server != "no") \
3530 return server; \
3531 return ""; \
3532 }
3533#define APT_CHECK_SERVER(X, Y) \
3534 if (Rls->X != 0) \
3535 { \
3536 std::string const specialServerConfig = serverConfig + "::" + Y + #X + "::" + Rls.X(); \
3537 server = _config->Find(specialServerConfig); \
3538 APT_EMPTY_SERVER \
3539 }
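// Lookup order: explicit Override settings for Label and Origin, then the
// Changelogs field of the Release file itself, then plain Label/Origin defaults.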
3540 // this way e.g. Debian-Security can fall back to Debian
3541 APT_CHECK_SERVER(Label, "Override::")
3542 APT_CHECK_SERVER(Origin, "Override::")
3543
3544 if (RealFileExists(Rls.FileName()))
3545 {
3546 _error->PushToStack();
3547 FileFd rf;
3548 /* This can be costly. A caller wanting to get millions of URIs might
3549 want to do this on its own once and use Override settings.
3550 We don't do this here as Origin/Label are not as unique as they
3551 should be, so this could produce request-order-dependent anomalies */
3552 if (OpenMaybeClearSignedFile(Rls.FileName(), rf) == true)
3553 {
3554 pkgTagFile TagFile(&rf, rf.Size());
3555 pkgTagSection Section;
3556 if (TagFile.Step(Section) == true)
3557 server = Section.FindS("Changelogs");
3558 }
3559 _error->RevertToStack();
3560 APT_EMPTY_SERVER
3561 }
3562
3563 APT_CHECK_SERVER(Label, "")
3564 APT_CHECK_SERVER(Origin, "")
3565#undef APT_CHECK_SERVER
3566#undef APT_EMPTY_SERVER
3567 return "";
3568}
3569std::string pkgAcqChangelog::URI(pkgCache::RlsFileIterator const &Rls,
3570 char const * const Component, char const * const SrcName,
3571 char const * const SrcVersion)
3572{
3573 return URI(URITemplate(Rls), Component, SrcName, SrcVersion);
3574}
3575std::string pkgAcqChangelog::URI(std::string const &Template,
3576 char const * const Component, char const * const SrcName,
3577 char const * const SrcVersion)
3578{
3579 if (Template.find("@CHANGEPATH@") == std::string::npos)
3580 return "";
3581
3582 // the path is: COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER, e.g. main/a/apt/apt_1.1 or contrib/liba/libapt/libapt_2.0
3583 std::string Src = SrcName;
3584 std::string path = APT::String::Startswith(SrcName, "lib") ? Src.substr(0, 4) : Src.substr(0,1);
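// pool-style prefix: first letter of the source name, or the first four
// characters for "lib*" packages, e.g. "apt" -> "a", "libapt-pkg" -> "liba"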
3585 path.append("/").append(Src).append("/");
3586 path.append(Src).append("_").append(StripEpoch(SrcVersion));
3587 // we omit component for releases without one (= flat-style repositories)
3588 if (Component != NULL && strlen(Component) != 0)
3589 path = std::string(Component) + "/" + path;
3590
3591 return SubstVar(Template, "@CHANGEPATH@", path);
3592}
3593 /*}}}*/
3594// AcqChangelog::Failed - Failure handler /*{{{*/
3595void pkgAcqChangelog::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf)
3596{
3597 Item::Failed(Message,Cnf);
3598
3599 std::string errText;
3600 // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
3601 strprintf(errText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
3602
3603 // Error is probably something techy like 404 Not Found
3604 if (ErrorText.empty())
3605 ErrorText = errText;
3606 else
3607 ErrorText = errText + " (" + ErrorText + ")";
3608}
3609 /*}}}*/
3610// AcqChangelog::Done - Item downloaded OK /*{{{*/
3611void pkgAcqChangelog::Done(string const &Message,HashStringList const &CalcHashes,
3612 pkgAcquire::MethodConfig const * const Cnf)
3613{
3614 Item::Done(Message,CalcHashes,Cnf);
3615 if (d->FinalFile.empty() == false)
3616 {
3617 if (RemoveFile("pkgAcqChangelog::Done", d->FinalFile) == false ||
3618 Rename(DestFile, d->FinalFile) == false)
3619 Status = StatError;
3620 }
3621
3622 Complete = true;
3623}
3624 /*}}}*/
3625pkgAcqChangelog::~pkgAcqChangelog() /*{{{*/
3626{
3627 if (TemporaryDirectory.empty() == false)
3628 {
3629 RemoveFile("~pkgAcqChangelog", DestFile);
3630 rmdir(TemporaryDirectory.c_str());
3631 }
3632 delete d;
3633}
3634 /*}}}*/
3635
3636// AcqFile::pkgAcqFile - Constructor /*{{{*/
3637pkgAcqFile::pkgAcqFile(pkgAcquire * const Owner,string const &URI, HashStringList const &Hashes,
3638 unsigned long long const Size,string const &Dsc,string const &ShortDesc,
3639 const string &DestDir, const string &DestFilename,
3640 bool const IsIndexFile) :
3641 Item(Owner), d(NULL), IsIndexFile(IsIndexFile), ExpectedHashes(Hashes)
3642{
3643 Retries = _config->FindI("Acquire::Retries",0);
3644
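// destination precedence: explicit DestFilename, then DestDir plus the basename
// of the URI, then just the basename in the current working directory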
3645 if(!DestFilename.empty())
3646 DestFile = DestFilename;
3647 else if(!DestDir.empty())
3648 DestFile = DestDir + "/" + flNotDir(URI);
3649 else
3650 DestFile = flNotDir(URI);
3651
3652 // Create the item
3653 Desc.URI = URI;
3654 Desc.Description = Dsc;
3655 Desc.Owner = this;
3656
3657 // Set the short description to the archive component
3658 Desc.ShortDesc = ShortDesc;
3659
3660 // Get the transfer sizes
3661 FileSize = Size;
3662 struct stat Buf;
3663 if (stat(DestFile.c_str(),&Buf) == 0)
3664 {
3665 // Hmm, the partial file is too big, erase it
3666 if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
3667 RemoveFile("pkgAcqFile", DestFile);
3668 else
3669 PartialSize = Buf.st_size;
3670 }
3671
3672 QueueURI(Desc);
3673}
3674 /*}}}*/
3675// AcqFile::Done - Item downloaded OK /*{{{*/
3676void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes,
3677 pkgAcquire::MethodConfig const * const Cnf)
3678{
3679 Item::Done(Message,CalcHashes,Cnf);
3680
3681 std::string const FileName = LookupTag(Message,"Filename");
3682 Complete = true;
3683
3684 // The file's timestamp matches
3685 if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
3686 return;
3687
3688 // We have to copy it into place
3689 if (RealFileExists(DestFile.c_str()) == false)
3690 {
3691 Local = true;
3692 if (_config->FindB("Acquire::Source-Symlinks",true) == false ||
3693 Cnf->Removable == true)
3694 {
3695 Desc.URI = "copy:" + FileName;
3696 QueueURI(Desc);
3697 return;
3698 }
3699
3700 // Erase the file if it is a symlink so we can overwrite it
3701 struct stat St;
3702 if (lstat(DestFile.c_str(),&St) == 0)
3703 {
3704 if (S_ISLNK(St.st_mode) != 0)
3705 RemoveFile("pkgAcqFile::Done", DestFile);
3706 }
3707
3708 // Symlink the file
3709 if (symlink(FileName.c_str(),DestFile.c_str()) != 0)
3710 {
3711 _error->PushToStack();
3712 _error->Errno("pkgAcqFile::Done", "Symlinking file %s failed", DestFile.c_str());
3713 std::stringstream msg;
3714 _error->DumpErrors(msg, GlobalError::DEBUG, false);
3715 _error->RevertToStack();
3716 ErrorText = msg.str();
3717 Status = StatError;
3718 Complete = false;
3719 }
3720 }
3721}
3722 /*}}}*/
3723// AcqFile::Failed - Failure handler /*{{{*/
3724// ---------------------------------------------------------------------
3725/* Here we try other sources */
3726void pkgAcqFile::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf)
3727{
3728 Item::Failed(Message,Cnf);
3729
3730 // This is the retry counter
3731 if (Retries != 0 &&
3732 Cnf->LocalOnly == false &&
3733 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3734 {
3735 --Retries;
3736 QueueURI(Desc);
3737 Status = StatIdle;
3738 return;
3739 }
3740
3741}
3742 /*}}}*/
3743string pkgAcqFile::Custom600Headers() const /*{{{*/
3744{
3745 if (IsIndexFile)
3746 return "\nIndex-File: true";
3747 return "";
3748}
3749 /*}}}*/
3750pkgAcqFile::~pkgAcqFile() {}