[apt.git] / apt-pkg / acquire-item.cc
1// -*- mode: cpp; mode: fold -*-
2// Description /*{{{*/
3// $Id: acquire-item.cc,v 1.46.2.9 2004/01/16 18:51:11 mdz Exp $
4/* ######################################################################
5
6 Acquire Item - Item to acquire
7
8 Each item can download to exactly one file at a time. This means you
 9 cannot create an item that fetches two URIs to two files at the same
10 time. The pkgAcqIndex class creates a second class upon instantiation
11 to fetch the other index files because of this.
12
13 ##################################################################### */
14 /*}}}*/
15// Include Files /*{{{*/
16#include <config.h>
17
18#include <apt-pkg/acquire-item.h>
19#include <apt-pkg/configuration.h>
20#include <apt-pkg/aptconfiguration.h>
21#include <apt-pkg/sourcelist.h>
22#include <apt-pkg/error.h>
23#include <apt-pkg/strutl.h>
24#include <apt-pkg/fileutl.h>
25#include <apt-pkg/tagfile.h>
26#include <apt-pkg/metaindex.h>
27#include <apt-pkg/acquire.h>
28#include <apt-pkg/hashes.h>
29#include <apt-pkg/indexfile.h>
30#include <apt-pkg/pkgcache.h>
31#include <apt-pkg/cacheiterators.h>
32#include <apt-pkg/pkgrecords.h>
33#include <apt-pkg/gpgv.h>
34
35#include <algorithm>
36#include <stddef.h>
37#include <stdlib.h>
38#include <string.h>
39#include <iostream>
40#include <vector>
41#include <sys/stat.h>
42#include <unistd.h>
43#include <errno.h>
44#include <string>
45#include <stdio.h>
46#include <ctime>
47#include <sstream>
48#include <numeric>
49
50#include <apti18n.h>
51 /*}}}*/
52
53using namespace std;
54
55static void printHashSumComparison(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/
56{
57 if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false)
58 return;
59 std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl;
60 for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs)
61 std::cerr << "\t- " << hs->toStr() << std::endl;
62 std::cerr << " Actual Hash: " << std::endl;
63 for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs)
64 std::cerr << "\t- " << hs->toStr() << std::endl;
65}
66 /*}}}*/
67static std::string GetPartialFileName(std::string const &file) /*{{{*/
68{
69 std::string DestFile = _config->FindDir("Dir::State::lists") + "partial/";
70 DestFile += file;
71 return DestFile;
72}
73 /*}}}*/
74static std::string GetPartialFileNameFromURI(std::string const &uri) /*{{{*/
75{
76 return GetPartialFileName(URItoFileName(uri));
77}
78 /*}}}*/
79static std::string GetFinalFileNameFromURI(std::string const &uri) /*{{{*/
80{
81 return _config->FindDir("Dir::State::lists") + URItoFileName(uri);
82}
83 /*}}}*/
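// Illustrative note (not part of the original source): KEEPCOMPRESSEDAS is read
// as a space-separated list of which only the first entry matters here. For
// example, with "xz gz uncompressed" a file "Packages" would be returned as
// "Packages.xz", while a list starting with "uncompressed" leaves the name untouched.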
84static std::string GetKeepCompressedFileName(std::string file, IndexTarget const &Target)/*{{{*/
85{
86 if (Target.KeepCompressed == false)
87 return file;
88
89 std::string const KeepCompressedAs = Target.Option(IndexTarget::KEEPCOMPRESSEDAS);
90 if (KeepCompressedAs.empty() == false)
91 {
92 std::string const ext = KeepCompressedAs.substr(0, KeepCompressedAs.find(' '));
93 if (ext != "uncompressed")
94 file.append(".").append(ext);
95 }
96 return file;
97}
98 /*}}}*/
99static std::string GetMergeDiffsPatchFileName(std::string const &Final, std::string const &Patch)/*{{{*/
100{
101 // rred expects the patch as $FinalFile.ed.$patchname.gz
102 return Final + ".ed." + Patch + ".gz";
103}
104 /*}}}*/
105static std::string GetDiffsPatchFileName(std::string const &Final) /*{{{*/
106{
107 // rred expects the patch as $FinalFile.ed
108 return Final + ".ed";
109}
110 /*}}}*/
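// Illustrative note (not in the original source): getCompressorExtensions()
// yields extensions with the leading dot (e.g. ".xz"), so for File = ".../x_Packages"
// this probes ".../x_Packages", then ".../x_Packages.xz", ".../x_Packages.gz", …
// and returns the first variant that exists on disk, or "" if none does.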
111static std::string GetExistingFilename(std::string const &File) /*{{{*/
112{
113 if (RealFileExists(File))
114 return File;
115 for (auto const &type : APT::Configuration::getCompressorExtensions())
116 {
117 std::string const Final = File + type;
118 if (RealFileExists(Final))
119 return Final;
120 }
121 return "";
122}
123 /*}}}*/
124static std::string GetDiffIndexFileName(std::string const &Name) /*{{{*/
125{
126 return Name + ".diff/Index";
127}
128 /*}}}*/
129static std::string GetDiffIndexURI(IndexTarget const &Target) /*{{{*/
130{
131 return Target.URI + ".diff/Index";
132}
133 /*}}}*/
134
135static void ReportMirrorFailureToCentral(pkgAcquire::Item const &I, std::string const &FailCode, std::string const &Details)/*{{{*/
136{
137 // we only act if a mirror was used at all
138 if(I.UsedMirror.empty())
139 return;
140#if 0
141 std::cerr << "\nReportMirrorFailure: "
142 << I.UsedMirror
143 << " Uri: " << I.DescURI()
144 << " FailCode: "
145 << FailCode << std::endl;
146#endif
147 string const report = _config->Find("Methods::Mirror::ProblemReporting",
148 "/usr/lib/apt/apt-report-mirror-failure");
149 if(!FileExists(report))
150 return;
151
152 std::vector<char const*> const Args = {
153 report.c_str(),
154 I.UsedMirror.c_str(),
155 I.DescURI().c_str(),
156 FailCode.c_str(),
157 Details.c_str(),
158 NULL
159 };
160
161 pid_t pid = ExecFork();
162 if(pid < 0)
163 {
164 _error->Error("ReportMirrorFailure Fork failed");
165 return;
166 }
167 else if(pid == 0)
168 {
169 execvp(Args[0], (char**)Args.data());
170 std::cerr << "Could not exec " << Args[0] << std::endl;
171 _exit(100);
172 }
173 if(!ExecWait(pid, "report-mirror-failure"))
174 _error->Warning("Couldn't report problem to '%s'", report.c_str());
175}
176 /*}}}*/
177
178static APT_NONNULL(2) bool MessageInsecureRepository(bool const isError, char const * const msg, std::string const &repo)/*{{{*/
179{
180 std::string m;
181 strprintf(m, msg, repo.c_str());
182 if (isError)
183 {
184 _error->Error("%s", m.c_str());
185 _error->Notice("%s", _("Updating from such a repository can't be done securely, and is therefore disabled by default."));
186 }
187 else
188 {
189 _error->Warning("%s", m.c_str());
190 _error->Notice("%s", _("Data from such a repository can't be authenticated and is therefore potentially dangerous to use."));
191 }
192 _error->Notice("%s", _("See apt-secure(8) manpage for repository creation and user configuration details."));
193 return false;
194}
195 /*}}}*/
196// AllowInsecureRepositories /*{{{*/
197enum class InsecureType { UNSIGNED, WEAK, NORELEASE };
198static bool TargetIsAllowedToBe(IndexTarget const &Target, InsecureType const type)
199{
200 if (_config->FindB("Acquire::AllowInsecureRepositories"))
201 return true;
202
203 if (Target.OptionBool(IndexTarget::ALLOW_INSECURE))
204 return true;
205
206 switch (type)
207 {
208 case InsecureType::UNSIGNED: break;
209 case InsecureType::NORELEASE: break;
210 case InsecureType::WEAK:
211 if (_config->FindB("Acquire::AllowWeakRepositories"))
212 return true;
213 if (Target.OptionBool(IndexTarget::ALLOW_WEAK))
214 return true;
215 break;
216 }
217 return false;
218}
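// For context (hedged, not from the original file): ALLOW_INSECURE and ALLOW_WEAK
// correspond to per-source options in sources.list(5), roughly
//   deb [allow-insecure=yes] http://example.org/debian suite main
// (example.org is a placeholder), while the Acquire::Allow{Insecure,Weak}Repositories
// booleans checked above act globally via apt.conf.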
219static bool APT_NONNULL(3, 4, 5) AllowInsecureRepositories(InsecureType const msg, std::string const &repo,
220 metaIndex const * const MetaIndexParser, pkgAcqMetaClearSig * const TransactionManager, pkgAcquire::Item * const I)
221{
222 // we skip weak downgrades as it's unlikely that a repository gets really weaker –
223 // it's more realistic that apt got pickier in a newer version
224 if (msg != InsecureType::WEAK)
225 {
226 std::string const FinalInRelease = TransactionManager->GetFinalFilename();
227 std::string const FinalReleasegpg = FinalInRelease.substr(0, FinalInRelease.length() - strlen("InRelease")) + "Release.gpg";
228 if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease))
229 {
230 char const * msgstr = nullptr;
231 switch (msg)
232 {
233 case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is no longer signed."); break;
234 case InsecureType::NORELEASE: msgstr = _("The repository '%s' no longer has a Release file."); break;
235 case InsecureType::WEAK: /* unreachable */ break;
236 }
237 if (_config->FindB("Acquire::AllowDowngradeToInsecureRepositories") ||
238 TransactionManager->Target.OptionBool(IndexTarget::ALLOW_DOWNGRADE_TO_INSECURE))
239 {
240 // meh, the user wants to take risks (we still mark the packages
241 // from this repository as unauthenticated)
242 _error->Warning(msgstr, repo.c_str());
243 _error->Warning(_("This is normally not allowed, but the option "
244 "Acquire::AllowDowngradeToInsecureRepositories was "
245 "given to override it."));
246 } else {
247 MessageInsecureRepository(true, msgstr, repo);
248 TransactionManager->AbortTransaction();
249 I->Status = pkgAcquire::Item::StatError;
250 return false;
251 }
252 }
253 }
254
255 if(MetaIndexParser->GetTrusted() == metaIndex::TRI_YES)
256 return true;
257
258 char const * msgstr = nullptr;
259 switch (msg)
260 {
261 case InsecureType::UNSIGNED: msgstr = _("The repository '%s' is not signed."); break;
262 case InsecureType::NORELEASE: msgstr = _("The repository '%s' does not have a Release file."); break;
263 case InsecureType::WEAK: msgstr = _("The repository '%s' provides only weak security information."); break;
264 }
265
266 if (TargetIsAllowedToBe(TransactionManager->Target, msg) == true)
267 {
268 MessageInsecureRepository(false, msgstr, repo);
269 return true;
270 }
271
272 MessageInsecureRepository(true, msgstr, repo);
273 TransactionManager->AbortTransaction();
274 I->Status = pkgAcquire::Item::StatError;
275 return false;
276}
277 /*}}}*/
278static HashStringList GetExpectedHashesFromFor(metaIndex * const Parser, std::string const &MetaKey)/*{{{*/
279{
280 if (Parser == NULL)
281 return HashStringList();
282 metaIndex::checkSum * const R = Parser->Lookup(MetaKey);
283 if (R == NULL)
284 return HashStringList();
285 return R->Hashes;
286}
287 /*}}}*/
288
289// all ::HashesRequired and ::GetExpectedHashes implementations /*{{{*/
290/* ::GetExpectedHashes is abstract and has to be implemented by all subclasses.
291 It is best to implement it as broadly as possible, while ::HashesRequired defaults
292 to true and should be as restrictive as possible for false cases. Note that if
293 a hash is returned by ::GetExpectedHashes it must match. Only if no hashes
294 are returned is ::HashesRequired called to evaluate if it's okay to have no hashes. */
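// A minimal sketch of the contract described above (illustrative only, not the
// actual acquire-worker code):
//   HashStringList const hsl = Item->GetExpectedHashes();
//   if (hsl.usable())                 // hashes known -> the download must match them
//      verify(hsl);
//   else if (Item->HashesRequired())  // no usable hashes, but they are mandatory
//      fail();
//   else                              // e.g. Release files: accept without hash check
//      accept();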
295APT_CONST bool pkgAcqTransactionItem::HashesRequired() const
296{
297 /* signed repositories obviously have a parser and good hashes.
298 unsigned repositories, too, as even if we can't trust them for security,
299 we can at least trust them for integrity of the download itself.
300 Only repositories without a Release file can (obviously) not have
301 hashes – and they are very uncommon and strongly discouraged */
302 if (TransactionManager->MetaIndexParser->GetLoadedSuccessfully() != metaIndex::TRI_YES)
303 return false;
304 if (TargetIsAllowedToBe(Target, InsecureType::WEAK))
305 {
306 /* If we allow weak hashes, we check that we have some (weak) and then
307 declare hashes not needed. That will tip us in the right direction
308 as if hashes exist, they will be used, even if not required */
309 auto const hsl = GetExpectedHashes();
310 if (hsl.usable())
311 return true;
312 if (hsl.empty() == false)
313 return false;
314 }
315 return true;
316}
317HashStringList pkgAcqTransactionItem::GetExpectedHashes() const
318{
319 return GetExpectedHashesFor(GetMetaKey());
320}
321
322APT_CONST bool pkgAcqMetaBase::HashesRequired() const
323{
324 // Release and co have no hashes 'by design'.
325 return false;
326}
327HashStringList pkgAcqMetaBase::GetExpectedHashes() const
328{
329 return HashStringList();
330}
331
332APT_CONST bool pkgAcqIndexDiffs::HashesRequired() const
333{
334 /* We can't check hashes of rred result as we don't know what the
335 hash of the file will be. We just know the hash of the patch(es),
336 the hash of the file they will apply on and the hash of the resulting
337 file. */
338 if (State == StateFetchDiff)
339 return true;
340 return false;
341}
342HashStringList pkgAcqIndexDiffs::GetExpectedHashes() const
343{
344 if (State == StateFetchDiff)
345 return available_patches[0].download_hashes;
346 return HashStringList();
347}
348
349APT_CONST bool pkgAcqIndexMergeDiffs::HashesRequired() const
350{
351 /* @see #pkgAcqIndexDiffs::HashesRequired, with the difference that
352 we can check the rred result after all patches are applied as
353 we know the expected result rather than potentially apply more patches */
354 if (State == StateFetchDiff)
355 return true;
356 return State == StateApplyDiff;
357}
358HashStringList pkgAcqIndexMergeDiffs::GetExpectedHashes() const
359{
360 if (State == StateFetchDiff)
361 return patch.download_hashes;
362 else if (State == StateApplyDiff)
363 return GetExpectedHashesFor(Target.MetaKey);
364 return HashStringList();
365}
366
367APT_CONST bool pkgAcqArchive::HashesRequired() const
368{
369 return LocalSource == false;
370}
371HashStringList pkgAcqArchive::GetExpectedHashes() const
372{
373 // figured out while parsing the records
374 return ExpectedHashes;
375}
376
377APT_CONST bool pkgAcqFile::HashesRequired() const
378{
379 // supplied as parameter at creation time, so the caller decides
380 return ExpectedHashes.usable();
381}
382HashStringList pkgAcqFile::GetExpectedHashes() const
383{
384 return ExpectedHashes;
385}
386 /*}}}*/
387// Acquire::Item::QueueURI and specialisations from child classes /*{{{*/
388bool pkgAcquire::Item::QueueURI(pkgAcquire::ItemDesc &Item)
389{
390 Owner->Enqueue(Item);
391 return true;
392}
393/* The idea here is that an item isn't queued if it already exists on disk and the
394 transaction manager was an IMS hit, as this means that the files it contains
395 the checksums for can't have been updated either (or they have been, and we are
396 asking for a hashsum mismatch to happen, which helps nobody) */
397bool pkgAcqTransactionItem::QueueURI(pkgAcquire::ItemDesc &Item)
398{
399 if (TransactionManager->State != TransactionStarted)
400 {
401 if (_config->FindB("Debug::Acquire::Transaction", false))
402 std::clog << "Skip " << Target.URI << " as transaction was already dealt with!" << std::endl;
403 return false;
404 }
405 std::string const FinalFile = GetFinalFilename();
406 if (TransactionManager->IMSHit == true && FileExists(FinalFile) == true)
407 {
408 PartialFile = DestFile = FinalFile;
409 Status = StatDone;
410 return false;
411 }
412 // If we got the InRelease file via a mirror, pick all indexes directly from this mirror, too
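// Hypothetical example of the rewrite below (URLs are placeholders): with
//   BASE_URI = http://deb.example.org/debian/dists/unstable/
//   BaseURI  = http://mirror.example.net/debian/dists/unstable/
// an Item.URI of BASE_URI + "main/binary-amd64/Packages.xz" is redirected to the
// mirror, and the leading part of the Description (up to the first space) is
// replaced by UsedMirror.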
413 if (TransactionManager->BaseURI.empty() == false &&
414 URI::SiteOnly(Item.URI) != URI::SiteOnly(TransactionManager->BaseURI))
415 {
416 // this ensures we rewrite only once and only the first step
417 auto const OldBaseURI = Target.Option(IndexTarget::BASE_URI);
418 if (OldBaseURI.empty() == false && APT::String::Startswith(Item.URI, OldBaseURI))
419 {
420 auto const ExtraPath = Item.URI.substr(OldBaseURI.length());
421 Item.URI = flCombine(TransactionManager->BaseURI, ExtraPath);
422 UsedMirror = TransactionManager->UsedMirror;
423 if (Item.Description.find(" ") != string::npos)
424 Item.Description.replace(0, Item.Description.find(" "), UsedMirror);
425 }
426 }
427 return pkgAcquire::Item::QueueURI(Item);
428}
429/* The transaction manager InRelease itself (or its older sisters-in-law
430 Release & Release.gpg) is always queued as this allows us to rerun gpgv
431 on it to verify that we aren't stalled with old files */
432bool pkgAcqMetaBase::QueueURI(pkgAcquire::ItemDesc &Item)
433{
434 return pkgAcquire::Item::QueueURI(Item);
435}
436/* the Diff/Index also needs to queue the up-to-date complete index file
437 to ensure that the list cleaner isn't eating it */
438bool pkgAcqDiffIndex::QueueURI(pkgAcquire::ItemDesc &Item)
439{
440 if (pkgAcqTransactionItem::QueueURI(Item) == true)
441 return true;
442 QueueOnIMSHit();
443 return false;
444}
445 /*}}}*/
446// Acquire::Item::GetFinalFilename and specialisations for child classes /*{{{*/
447std::string pkgAcquire::Item::GetFinalFilename() const
448{
449 // Beware: Desc.URI is modified by redirections
450 return GetFinalFileNameFromURI(Desc.URI);
451}
452std::string pkgAcqDiffIndex::GetFinalFilename() const
453{
454 return GetFinalFileNameFromURI(GetDiffIndexURI(Target));
455}
456std::string pkgAcqIndex::GetFinalFilename() const
457{
458 std::string const FinalFile = GetFinalFileNameFromURI(Target.URI);
459 return GetKeepCompressedFileName(FinalFile, Target);
460}
461std::string pkgAcqMetaSig::GetFinalFilename() const
462{
463 return GetFinalFileNameFromURI(Target.URI);
464}
465std::string pkgAcqBaseIndex::GetFinalFilename() const
466{
467 return GetFinalFileNameFromURI(Target.URI);
468}
469std::string pkgAcqMetaBase::GetFinalFilename() const
470{
471 return GetFinalFileNameFromURI(Target.URI);
472}
473std::string pkgAcqArchive::GetFinalFilename() const
474{
475 return _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename);
476}
477 /*}}}*/
478// pkgAcqTransactionItem::GetMetaKey and specialisations for child classes /*{{{*/
479std::string pkgAcqTransactionItem::GetMetaKey() const
480{
481 return Target.MetaKey;
482}
483std::string pkgAcqIndex::GetMetaKey() const
484{
485 if (Stage == STAGE_DECOMPRESS_AND_VERIFY || CurrentCompressionExtension == "uncompressed")
486 return Target.MetaKey;
487 return Target.MetaKey + "." + CurrentCompressionExtension;
488}
489std::string pkgAcqDiffIndex::GetMetaKey() const
490{
491 return GetDiffIndexFileName(Target.MetaKey);
492}
493 /*}}}*/
494//pkgAcqTransactionItem::TransactionState and specialisations for child classes /*{{{*/
495bool pkgAcqTransactionItem::TransactionState(TransactionStates const state)
496{
497 bool const Debug = _config->FindB("Debug::Acquire::Transaction", false);
498 switch(state)
499 {
500 case TransactionStarted: _error->Fatal("Item %s changed to invalid transaction start state!", Target.URI.c_str()); break;
501 case TransactionAbort:
502 if(Debug == true)
503 std::clog << " Cancel: " << DestFile << std::endl;
504 if (Status == pkgAcquire::Item::StatIdle)
505 {
506 Status = pkgAcquire::Item::StatDone;
507 Dequeue();
508 }
509 break;
510 case TransactionCommit:
511 if(PartialFile.empty() == false)
512 {
513 bool sameFile = (PartialFile == DestFile);
514 // we use symlinks on IMS-Hit to avoid copies
515 if (RealFileExists(DestFile))
516 {
517 struct stat Buf;
518 if (lstat(PartialFile.c_str(), &Buf) != -1)
519 {
520 if (S_ISLNK(Buf.st_mode) && Buf.st_size > 0)
521 {
522 char partial[Buf.st_size + 1];
523 ssize_t const sp = readlink(PartialFile.c_str(), partial, Buf.st_size);
524 if (sp == -1)
525 _error->Errno("pkgAcqTransactionItem::TransactionState-sp", _("Failed to readlink %s"), PartialFile.c_str());
526 else
527 {
528 partial[sp] = '\0';
529 sameFile = (DestFile == partial);
530 }
531 }
532 }
533 else
534 _error->Errno("pkgAcqTransactionItem::TransactionState-stat", _("Failed to stat %s"), PartialFile.c_str());
535 }
536 if (sameFile == false)
537 {
538 // ensure that even without lists-cleanup all compressions are nuked
539 std::string FinalFile = GetFinalFileNameFromURI(Target.URI);
540 if (FileExists(FinalFile))
541 {
542 if(Debug == true)
543 std::clog << "rm " << FinalFile << " # " << DescURI() << std::endl;
544 if (RemoveFile("TransactionStates-Cleanup", FinalFile) == false)
545 return false;
546 }
547 for (auto const &ext: APT::Configuration::getCompressorExtensions())
548 {
549 auto const Final = FinalFile + ext;
550 if (FileExists(Final))
551 {
552 if(Debug == true)
553 std::clog << "rm " << Final << " # " << DescURI() << std::endl;
554 if (RemoveFile("TransactionStates-Cleanup", Final) == false)
555 return false;
556 }
557 }
558 if(Debug == true)
559 std::clog << "mv " << PartialFile << " -> "<< DestFile << " # " << DescURI() << std::endl;
560 if (Rename(PartialFile, DestFile) == false)
561 return false;
562 }
563 else if(Debug == true)
564 std::clog << "keep " << PartialFile << " # " << DescURI() << std::endl;
565
566 } else {
567 if(Debug == true)
568 std::clog << "rm " << DestFile << " # " << DescURI() << std::endl;
569 if (RemoveFile("TransItem::TransactionCommit", DestFile) == false)
570 return false;
571 }
572 break;
573 }
574 return true;
575}
576bool pkgAcqMetaBase::TransactionState(TransactionStates const state)
577{
578 // Do not remove InRelease on IMSHit of Release.gpg [yes, this is very edgecasey]
579 if (TransactionManager->IMSHit == false)
580 return pkgAcqTransactionItem::TransactionState(state);
581 return true;
582}
583bool pkgAcqIndex::TransactionState(TransactionStates const state)
584{
585 if (pkgAcqTransactionItem::TransactionState(state) == false)
586 return false;
587
588 switch (state)
589 {
590 case TransactionStarted: _error->Fatal("AcqIndex %s changed to invalid transaction start state!", Target.URI.c_str()); break;
591 case TransactionAbort:
592 if (Stage == STAGE_DECOMPRESS_AND_VERIFY)
593 {
594 // keep the compressed file, but drop the decompressed
595 EraseFileName.clear();
596 if (PartialFile.empty() == false && flExtension(PartialFile) != CurrentCompressionExtension)
597 RemoveFile("TransactionAbort", PartialFile);
598 }
599 break;
600 case TransactionCommit:
601 if (EraseFileName.empty() == false)
602 RemoveFile("AcqIndex::TransactionCommit", EraseFileName);
603 break;
604 }
605 return true;
606}
607bool pkgAcqDiffIndex::TransactionState(TransactionStates const state)
608{
609 if (pkgAcqTransactionItem::TransactionState(state) == false)
610 return false;
611
612 switch (state)
613 {
614 case TransactionStarted: _error->Fatal("Item %s changed to invalid transaction start state!", Target.URI.c_str()); break;
615 case TransactionCommit:
616 break;
617 case TransactionAbort:
618 std::string const Partial = GetPartialFileNameFromURI(Target.URI);
619 RemoveFile("TransactionAbort", Partial);
620 break;
621 }
622
623 return true;
624}
625 /*}}}*/
626
627class APT_HIDDEN NoActionItem : public pkgAcquire::Item /*{{{*/
628/* The sole purpose of this class is to have an item which does nothing to
629 reach its done state, to prevent cleanup from deleting the mentioned file.
630 Handy in cases in which we know we have the file already, like IMS-Hits. */
631{
632 IndexTarget const Target;
633 public:
634 virtual std::string DescURI() const APT_OVERRIDE {return Target.URI;};
635 virtual HashStringList GetExpectedHashes() const APT_OVERRIDE {return HashStringList();};
636
637 NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target) :
638 pkgAcquire::Item(Owner), Target(Target)
639 {
640 Status = StatDone;
641 DestFile = GetFinalFileNameFromURI(Target.URI);
642 }
643 NoActionItem(pkgAcquire * const Owner, IndexTarget const &Target, std::string const &FinalFile) :
644 pkgAcquire::Item(Owner), Target(Target)
645 {
646 Status = StatDone;
647 DestFile = FinalFile;
648 }
649};
650 /*}}}*/
651class APT_HIDDEN CleanupItem : public pkgAcqTransactionItem /*{{{*/
652/* This class ensures that a file which was configured but isn't downloaded
653 for various reasons isn't kept around as an old version in the lists directory.
654 In a way it's the reverse of NoActionItem as it helps with removing files
655 even if the lists-cleanup is deactivated. */
656{
657 public:
658 virtual std::string DescURI() const APT_OVERRIDE {return Target.URI;};
659 virtual HashStringList GetExpectedHashes() const APT_OVERRIDE {return HashStringList();};
660
661 CleanupItem(pkgAcquire * const Owner, pkgAcqMetaClearSig * const TransactionManager, IndexTarget const &Target) :
662 pkgAcqTransactionItem(Owner, TransactionManager, Target)
663 {
664 Status = StatDone;
665 DestFile = GetFinalFileNameFromURI(Target.URI);
666 }
667 bool TransactionState(TransactionStates const state) APT_OVERRIDE
668 {
669 switch (state)
670 {
671 case TransactionStarted:
672 break;
673 case TransactionAbort:
674 break;
675 case TransactionCommit:
676 if (_config->FindB("Debug::Acquire::Transaction", false) == true)
677 std::clog << "rm " << DestFile << " # " << DescURI() << std::endl;
678 if (RemoveFile("TransItem::TransactionCommit", DestFile) == false)
679 return false;
680 break;
681 }
682 return true;
683 }
684};
685 /*}}}*/
686
687// Acquire::Item::Item - Constructor /*{{{*/
688APT_IGNORE_DEPRECATED_PUSH
689pkgAcquire::Item::Item(pkgAcquire * const owner) :
690 FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), Local(false),
691 QueueCounter(0), ExpectedAdditionalItems(0), Owner(owner), d(NULL)
692{
693 Owner->Add(this);
694 Status = StatIdle;
695}
696APT_IGNORE_DEPRECATED_POP
697 /*}}}*/
698// Acquire::Item::~Item - Destructor /*{{{*/
699pkgAcquire::Item::~Item()
700{
701 Owner->Remove(this);
702}
703 /*}}}*/
704std::string pkgAcquire::Item::Custom600Headers() const /*{{{*/
705{
706 return std::string();
707}
708 /*}}}*/
709std::string pkgAcquire::Item::ShortDesc() const /*{{{*/
710{
711 return DescURI();
712}
713 /*}}}*/
714APT_CONST void pkgAcquire::Item::Finished() /*{{{*/
715{
716}
717 /*}}}*/
718APT_PURE pkgAcquire * pkgAcquire::Item::GetOwner() const /*{{{*/
719{
720 return Owner;
721}
722 /*}}}*/
723APT_CONST pkgAcquire::ItemDesc &pkgAcquire::Item::GetItemDesc() /*{{{*/
724{
725 return Desc;
726}
727 /*}}}*/
728APT_CONST bool pkgAcquire::Item::IsTrusted() const /*{{{*/
729{
730 return false;
731}
732 /*}}}*/
733// Acquire::Item::Failed - Item failed to download /*{{{*/
734// ---------------------------------------------------------------------
735/* We return to an idle state if there are still other queues that could
736 fetch this object */
737void pkgAcquire::Item::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
738{
739 if (QueueCounter <= 1)
740 {
741 /* This indicates that the file is not available right now but might
742 be available sometime later. If we do a retry cycle then this should be
743 retried [CDROMs] */
744 if (Cnf != NULL && Cnf->LocalOnly == true &&
745 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
746 {
747 Status = StatIdle;
748 Dequeue();
749 return;
750 }
751
752 switch (Status)
753 {
754 case StatIdle:
755 case StatFetching:
756 case StatDone:
757 Status = StatError;
758 break;
759 case StatAuthError:
760 case StatError:
761 case StatTransientNetworkError:
762 break;
763 }
764 Complete = false;
765 Dequeue();
766 }
767
768 string const FailReason = LookupTag(Message, "FailReason");
769 enum { MAXIMUM_SIZE_EXCEEDED, HASHSUM_MISMATCH, WEAK_HASHSUMS, OTHER } failreason = OTHER;
770 if ( FailReason == "MaximumSizeExceeded")
771 failreason = MAXIMUM_SIZE_EXCEEDED;
772 else if ( FailReason == "WeakHashSums")
773 failreason = WEAK_HASHSUMS;
774 else if (Status == StatAuthError)
775 failreason = HASHSUM_MISMATCH;
776
777 if(ErrorText.empty())
778 {
779 if (Status == StatAuthError)
780 {
781 std::ostringstream out;
782 switch (failreason)
783 {
784 case HASHSUM_MISMATCH:
785 out << _("Hash Sum mismatch") << std::endl;
786 break;
787 case WEAK_HASHSUMS:
788 out << _("Insufficient information available to perform this download securely") << std::endl;
789 break;
790 case MAXIMUM_SIZE_EXCEEDED:
791 case OTHER:
792 out << LookupTag(Message, "Message") << std::endl;
793 break;
794 }
795 auto const ExpectedHashes = GetExpectedHashes();
796 if (ExpectedHashes.empty() == false)
797 {
798 out << "Hashes of expected file:" << std::endl;
799 for (auto const &hs: ExpectedHashes)
800 {
801 out << " - " << hs.toStr();
802 if (hs.usable() == false)
803 out << " [weak]";
804 out << std::endl;
805 }
806 }
807 if (failreason == HASHSUM_MISMATCH)
808 {
809 out << "Hashes of received file:" << std::endl;
810 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
811 {
812 std::string const tagname = std::string(*type) + "-Hash";
813 std::string const hashsum = LookupTag(Message, tagname.c_str());
814 if (hashsum.empty() == false)
815 {
816 auto const hs = HashString(*type, hashsum);
817 out << " - " << hs.toStr();
818 if (hs.usable() == false)
819 out << " [weak]";
820 out << std::endl;
821 }
822 }
823 out << "Last modification reported: " << LookupTag(Message, "Last-Modified", "<none>") << std::endl;
824 }
825 ErrorText = out.str();
826 }
827 else
828 ErrorText = LookupTag(Message,"Message");
829 }
830
831 switch (failreason)
832 {
833 case MAXIMUM_SIZE_EXCEEDED: RenameOnError(MaximumSizeExceeded); break;
834 case HASHSUM_MISMATCH: RenameOnError(HashSumMismatch); break;
835 case WEAK_HASHSUMS: break;
836 case OTHER: break;
837 }
838
839 if (FailReason.empty() == false)
840 ReportMirrorFailureToCentral(*this, FailReason, ErrorText);
841 else
842 ReportMirrorFailureToCentral(*this, ErrorText, ErrorText);
843
844 if (QueueCounter > 1)
845 Status = StatIdle;
846}
847 /*}}}*/
848// Acquire::Item::Start - Item has begun to download /*{{{*/
849// ---------------------------------------------------------------------
850/* Stash status and the file size. Note that setting Complete means
851 sub-phases of the acquire process such as decompression are operating */
852void pkgAcquire::Item::Start(string const &/*Message*/, unsigned long long const Size)
853{
854 Status = StatFetching;
855 ErrorText.clear();
856 if (FileSize == 0 && Complete == false)
857 FileSize = Size;
858}
859 /*}}}*/
860// Acquire::Item::VerifyDone - check if Item was downloaded OK /*{{{*/
861/* Note that hash-verification is 'hardcoded' in acquire-worker and has
862 * already passed if this method is called. */
863bool pkgAcquire::Item::VerifyDone(std::string const &Message,
864 pkgAcquire::MethodConfig const * const /*Cnf*/)
865{
866 std::string const FileName = LookupTag(Message,"Filename");
867 if (FileName.empty() == true)
868 {
869 Status = StatError;
870 ErrorText = "Method gave a blank filename";
871 return false;
872 }
873
874 return true;
875}
876 /*}}}*/
877// Acquire::Item::Done - Item downloaded OK /*{{{*/
878void pkgAcquire::Item::Done(string const &/*Message*/, HashStringList const &Hashes,
879 pkgAcquire::MethodConfig const * const /*Cnf*/)
880{
881 // We just downloaded something..
882 if (FileSize == 0)
883 {
884 unsigned long long const downloadedSize = Hashes.FileSize();
885 if (downloadedSize != 0)
886 {
887 FileSize = downloadedSize;
888 }
889 }
890 Status = StatDone;
891 ErrorText = string();
892 Owner->Dequeue(this);
893}
894 /*}}}*/
895// Acquire::Item::Rename - Rename a file /*{{{*/
896// ---------------------------------------------------------------------
897/* This helper function is used by a lot of item methods as their final
898 step */
899bool pkgAcquire::Item::Rename(string const &From,string const &To)
900{
901 if (From == To || rename(From.c_str(),To.c_str()) == 0)
902 return true;
903
904 std::string S;
905 strprintf(S, _("rename failed, %s (%s -> %s)."), strerror(errno),
906 From.c_str(),To.c_str());
907 Status = StatError;
908 if (ErrorText.empty())
909 ErrorText = S;
910 else
911 ErrorText = ErrorText + ": " + S;
912 return false;
913}
914 /*}}}*/
915void pkgAcquire::Item::Dequeue() /*{{{*/
916{
917 Owner->Dequeue(this);
918}
919 /*}}}*/
920bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
921{
922 if (RealFileExists(DestFile))
923 Rename(DestFile, DestFile + ".FAILED");
924
925 std::string errtext;
926 switch (error)
927 {
928 case HashSumMismatch:
929 errtext = _("Hash Sum mismatch");
930 break;
931 case SizeMismatch:
932 errtext = _("Size mismatch");
933 Status = StatAuthError;
934 break;
935 case InvalidFormat:
936 errtext = _("Invalid file format");
937 Status = StatError;
938 // do not report as usually it's not the mirror's fault, but a Portal/Proxy
939 break;
940 case SignatureError:
941 errtext = _("Signature error");
942 Status = StatError;
943 break;
944 case NotClearsigned:
945 strprintf(errtext, _("Clearsigned file isn't valid, got '%s' (does the network require authentication?)"), "NOSPLIT");
946 Status = StatAuthError;
947 break;
948 case MaximumSizeExceeded:
949 // the method is expected to report a good error for this
950 break;
951 case PDiffError:
952 // no handling here, done by callers
953 break;
954 }
955 if (ErrorText.empty())
956 ErrorText = errtext;
957 return false;
958}
959 /*}}}*/
960void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess)/*{{{*/
961{
962 ActiveSubprocess = subprocess;
963 APT_IGNORE_DEPRECATED(Mode = ActiveSubprocess.c_str();)
964}
965 /*}}}*/
966// Acquire::Item::ReportMirrorFailure /*{{{*/
967void pkgAcquire::Item::ReportMirrorFailure(std::string const &FailCode)
968{
969 ReportMirrorFailureToCentral(*this, FailCode, FailCode);
970}
971 /*}}}*/
972std::string pkgAcquire::Item::HashSum() const /*{{{*/
973{
974 HashStringList const hashes = GetExpectedHashes();
975 HashString const * const hs = hashes.find(NULL);
976 return hs != NULL ? hs->toStr() : "";
977}
978 /*}}}*/
979
980pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/
981 pkgAcqMetaClearSig * const transactionManager, IndexTarget const &target) :
982 pkgAcquire::Item(Owner), d(NULL), Target(target), TransactionManager(transactionManager)
983{
984 if (TransactionManager != this)
985 TransactionManager->Add(this);
986}
987 /*}}}*/
988pkgAcqTransactionItem::~pkgAcqTransactionItem() /*{{{*/
989{
990}
991 /*}}}*/
992HashStringList pkgAcqTransactionItem::GetExpectedHashesFor(std::string const &MetaKey) const /*{{{*/
993{
994 return GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, MetaKey);
995}
996 /*}}}*/
997
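// Load the previously downloaded (In)Release file into LastMetaIndexParser so that
// later checks (the date comparison in VerifyVendor and the per-file hash comparison
// in QueueIndexes) can detect changes against the old, still valid release.
// Intentionally a no-op on an IMS hit.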
998static void LoadLastMetaIndexParser(pkgAcqMetaClearSig * const TransactionManager, std::string const &FinalRelease, std::string const &FinalInRelease)/*{{{*/
999{
1000 if (TransactionManager->IMSHit == true)
1001 return;
1002 if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease))
1003 {
1004 TransactionManager->LastMetaIndexParser = TransactionManager->MetaIndexParser->UnloadedClone();
1005 if (TransactionManager->LastMetaIndexParser != NULL)
1006 {
1007 _error->PushToStack();
1008 if (RealFileExists(FinalInRelease))
1009 TransactionManager->LastMetaIndexParser->Load(FinalInRelease, NULL);
1010 else
1011 TransactionManager->LastMetaIndexParser->Load(FinalRelease, NULL);
1012 // it's unlikely to happen, but if what we have is bad, ignore it
1013 if (_error->PendingError())
1014 {
1015 delete TransactionManager->LastMetaIndexParser;
1016 TransactionManager->LastMetaIndexParser = NULL;
1017 }
1018 _error->RevertToStack();
1019 }
1020 }
1021}
1022 /*}}}*/
1023
1024// AcqMetaBase - Constructor /*{{{*/
1025pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire * const Owner,
1026 pkgAcqMetaClearSig * const TransactionManager,
1027 IndexTarget const &DataTarget)
1028: pkgAcqTransactionItem(Owner, TransactionManager, DataTarget), d(NULL),
1029 AuthPass(false), IMSHit(false), State(TransactionStarted)
1030{
1031}
1032 /*}}}*/
1033// AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/
1034void pkgAcqMetaBase::Add(pkgAcqTransactionItem * const I)
1035{
1036 Transaction.push_back(I);
1037}
1038 /*}}}*/
1039// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/
1040void pkgAcqMetaBase::AbortTransaction()
1041{
1042 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1043 std::clog << "AbortTransaction: " << TransactionManager << std::endl;
1044
1045 switch (TransactionManager->State)
1046 {
1047 case TransactionStarted: break;
1048 case TransactionAbort: _error->Fatal("Transaction %s was already aborted and is aborted again", TransactionManager->Target.URI.c_str()); return;
1049 case TransactionCommit: _error->Fatal("Transaction %s was already aborted and is now committed", TransactionManager->Target.URI.c_str()); return;
1050 }
1051 TransactionManager->State = TransactionAbort;
1052
1053 // ensure the toplevel is in error state too
1054 for (std::vector<pkgAcqTransactionItem*>::iterator I = Transaction.begin();
1055 I != Transaction.end(); ++I)
1056 {
1057 if ((*I)->Status != pkgAcquire::Item::StatFetching)
1058 Owner->Dequeue(*I);
1059 (*I)->TransactionState(TransactionAbort);
1060 }
1061 Transaction.clear();
1062}
1063 /*}}}*/
1064// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/
1065APT_PURE bool pkgAcqMetaBase::TransactionHasError() const
1066{
1067 for (std::vector<pkgAcqTransactionItem*>::const_iterator I = Transaction.begin();
1068 I != Transaction.end(); ++I)
1069 {
1070 switch((*I)->Status) {
1071 case StatDone: break;
1072 case StatIdle: break;
1073 case StatAuthError: return true;
1074 case StatError: return true;
1075 case StatTransientNetworkError: return true;
1076 case StatFetching: break;
1077 }
1078 }
1079 return false;
1080}
1081 /*}}}*/
1082// AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/
1083void pkgAcqMetaBase::CommitTransaction()
1084{
1085 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1086 std::clog << "CommitTransaction: " << this << std::endl;
1087
1088 switch (TransactionManager->State)
1089 {
1090 case TransactionStarted: break;
1091 case TransactionAbort: _error->Fatal("Transaction %s was already committed and is now aborted", TransactionManager->Target.URI.c_str()); return;
1092 case TransactionCommit: _error->Fatal("Transaction %s was already committed and is again committed", TransactionManager->Target.URI.c_str()); return;
1093 }
1094 TransactionManager->State = TransactionCommit;
1095
1096 // move new files into place *and* remove files that are not
1097 // part of the transaction but are still on disk
1098 for (std::vector<pkgAcqTransactionItem*>::iterator I = Transaction.begin();
1099 I != Transaction.end(); ++I)
1100 {
1101 (*I)->TransactionState(TransactionCommit);
1102 }
1103 Transaction.clear();
1104}
1105 /*}}}*/
1106// AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/
1107void pkgAcqMetaBase::TransactionStageCopy(pkgAcqTransactionItem * const I,
1108 const std::string &From,
1109 const std::string &To)
1110{
1111 I->PartialFile = From;
1112 I->DestFile = To;
1113}
1114 /*}}}*/
1115// AcqMetaBase::TransactionStageRemoval - Stage a file for removal /*{{{*/
1116void pkgAcqMetaBase::TransactionStageRemoval(pkgAcqTransactionItem * const I,
1117 const std::string &FinalFile)
1118{
1119 I->PartialFile = "";
1120 I->DestFile = FinalFile;
1121}
1122 /*}}}*/
1123// AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/
1124/* This method is called from ::Failed handlers. If it returns true,
1125 no fallback to other files or modes is performed */
1126bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message)
1127{
1128 string const Final = I->GetFinalFilename();
1129 std::string const GPGError = LookupTag(Message, "Message");
1130 if (FileExists(Final))
1131 {
1132 I->Status = StatTransientNetworkError;
1133 _error->Warning(_("An error occurred during the signature verification. "
1134 "The repository is not updated and the previous index files will be used. "
1135 "GPG error: %s: %s"),
1136 Desc.Description.c_str(),
1137 GPGError.c_str());
1138 RunScripts("APT::Update::Auth-Failure");
1139 return true;
1140 } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) {
1141 /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */
1142 _error->Error(_("GPG error: %s: %s"),
1143 Desc.Description.c_str(),
1144 GPGError.c_str());
1145 I->Status = StatAuthError;
1146 return true;
1147 } else {
1148 _error->Warning(_("GPG error: %s: %s"),
1149 Desc.Description.c_str(),
1150 GPGError.c_str());
1151 }
1152 // gpgv method failed
1153 ReportMirrorFailureToCentral(*this, "GPGFailure", GPGError);
1154 return false;
1155}
1156 /*}}}*/
1157// AcqMetaBase::Custom600Headers - Get header for AcqMetaBase /*{{{*/
1158// ---------------------------------------------------------------------
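// The block below produces a method-configuration header string along the lines of
// (illustrative values):
//   "\nIndex-File: true\nMaximum-Size: 10000000\nLast-Modified: Thu, 01 Jan 2015 00:00:00 GMT"
// where Last-Modified is only emitted if the final file already exists on disk.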
1159string pkgAcqMetaBase::Custom600Headers() const
1160{
1161 std::string Header = "\nIndex-File: true";
1162 std::string MaximumSize;
1163 strprintf(MaximumSize, "\nMaximum-Size: %i",
1164 _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000));
1165 Header += MaximumSize;
1166
1167 string const FinalFile = GetFinalFilename();
1168 struct stat Buf;
1169 if (stat(FinalFile.c_str(),&Buf) == 0)
1170 Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
1171
1172 return Header;
1173}
1174 /*}}}*/
1175// AcqMetaBase::QueueForSignatureVerify /*{{{*/
1176void pkgAcqMetaBase::QueueForSignatureVerify(pkgAcqTransactionItem * const I, std::string const &File, std::string const &Signature)
1177{
1178 AuthPass = true;
1179 I->Desc.URI = "gpgv:" + Signature;
1180 I->DestFile = File;
1181 QueueURI(I->Desc);
1182 I->SetActiveSubprocess("gpgv");
1183}
1184 /*}}}*/
1185// AcqMetaBase::CheckDownloadDone /*{{{*/
1186bool pkgAcqMetaBase::CheckDownloadDone(pkgAcqTransactionItem * const I, const std::string &Message, HashStringList const &Hashes) const
1187{
1188 // We have just finished downloading a Release file (it is not
1189 // verified yet)
1190
1191 // Save the final base URI we got this Release file from
1192 if (I->UsedMirror.empty() == false && _config->FindB("Acquire::SameMirrorForAllIndexes", true))
1193 {
1194 if (APT::String::Endswith(I->Desc.URI, "InRelease"))
1195 TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("InRelease"));
1196 else if (APT::String::Endswith(I->Desc.URI, "Release"))
1197 TransactionManager->BaseURI = I->Desc.URI.substr(0, I->Desc.URI.length() - strlen("Release"));
1198 }
1199
1200 std::string const FileName = LookupTag(Message,"Filename");
1201 if (FileName != I->DestFile && RealFileExists(I->DestFile) == false)
1202 {
1203 I->Local = true;
1204 I->Desc.URI = "copy:" + FileName;
1205 I->QueueURI(I->Desc);
1206 return false;
1207 }
1208
1209 // make sure to verify against the right file on I-M-S hit
1210 bool IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"), false);
1211 if (IMSHit == false && Hashes.usable())
1212 {
1213 // detect IMS-Hits servers haven't detected by Hash comparison
1214 std::string const FinalFile = I->GetFinalFilename();
1215 if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true)
1216 {
1217 IMSHit = true;
1218 RemoveFile("CheckDownloadDone", I->DestFile);
1219 }
1220 }
1221
1222 if(IMSHit == true)
1223 {
1224 // for simplicity, the transaction manager is always InRelease
1225 // even if it doesn't exist.
1226 TransactionManager->IMSHit = true;
1227 I->PartialFile = I->DestFile = I->GetFinalFilename();
1228 }
1229
1230 // set Item to complete as the remaining work is all local (verify etc)
1231 I->Complete = true;
1232
1233 return true;
1234}
1235 /*}}}*/
1236bool pkgAcqMetaBase::CheckAuthDone(string const &Message) /*{{{*/
1237{
1238 // At this point, the gpgv method has succeeded, so there is a
1239 // valid signature from a key in the trusted keyring. We
1240 // perform additional verification of its contents, and use them
1241 // to verify the indexes we are about to download
1242 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1243 std::cerr << "Signature verification succeeded: " << DestFile << std::endl;
1244
1245 if (TransactionManager->IMSHit == false)
1246 {
1247 // open the last (In)Release if we have it
1248 std::string const FinalFile = GetFinalFilename();
1249 std::string FinalRelease;
1250 std::string FinalInRelease;
1251 if (APT::String::Endswith(FinalFile, "InRelease"))
1252 {
1253 FinalInRelease = FinalFile;
1254 FinalRelease = FinalFile.substr(0, FinalFile.length() - strlen("InRelease")) + "Release";
1255 }
1256 else
1257 {
1258 FinalInRelease = FinalFile.substr(0, FinalFile.length() - strlen("Release")) + "InRelease";
1259 FinalRelease = FinalFile;
1260 }
1261 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1262 }
1263
1264 bool const GoodAuth = TransactionManager->MetaIndexParser->Load(DestFile, &ErrorText);
1265 if (GoodAuth == false && AllowInsecureRepositories(InsecureType::WEAK, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == false)
1266 {
1267 Status = StatAuthError;
1268 return false;
1269 }
1270
1271 if (!VerifyVendor(Message))
1272 {
1273 Status = StatAuthError;
1274 return false;
1275 }
1276
1277 // Download further indexes with verification
1278 TransactionManager->QueueIndexes(GoodAuth);
1279
1280 return GoodAuth;
1281}
1282 /*}}}*/
1283void pkgAcqMetaClearSig::QueueIndexes(bool const verify) /*{{{*/
1284{
1285 // at this point the real Items are loaded in the fetcher
1286 ExpectedAdditionalItems = 0;
1287
1288 std::set<std::string> targetsSeen;
1289 bool const hasReleaseFile = TransactionManager->MetaIndexParser != NULL;
1290 bool const metaBaseSupportsByHash = hasReleaseFile && TransactionManager->MetaIndexParser->GetSupportsAcquireByHash();
1291 bool hasHashes = true;
1292 auto IndexTargets = TransactionManager->MetaIndexParser->GetIndexTargets();
1293 if (hasReleaseFile && verify == false)
1294 hasHashes = std::any_of(IndexTargets.begin(), IndexTargets.end(),
1295 [&](IndexTarget const &Target) { return TransactionManager->MetaIndexParser->Exists(Target.MetaKey); });
1296 for (auto&& Target: IndexTargets)
1297 {
1298 // if we have already seen a target created by the target that this one is declared
1299 // a fallback of, we skip acquiring the fallback (but we make sure we clean up)
1300 if (targetsSeen.find(Target.Option(IndexTarget::FALLBACK_OF)) != targetsSeen.end())
1301 {
1302 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1303 new CleanupItem(Owner, TransactionManager, Target);
1304 continue;
1305 }
1306 // all is an implementation detail. Users shouldn't use this as arch
1307 // We need this support trickery here as e.g. Debian has binary-all files already,
1308 // but arch:all packages are still in the arch:any files, so we would waste precious
1309 // download time, bandwidth and diskspace for nothing, BUT Debian doesn't feature all
1310 // in the set of supported architectures, so we can filter based on this property rather
1311 // than invent an entirely new flag we would need to carry for all of eternity.
1312 if (hasReleaseFile && Target.Option(IndexTarget::ARCHITECTURE) == "all")
1313 {
1314 if (TransactionManager->MetaIndexParser->IsArchitectureAllSupportedFor(Target) == false)
1315 {
1316 new CleanupItem(Owner, TransactionManager, Target);
1317 continue;
1318 }
1319 }
1320
1321 bool trypdiff = Target.OptionBool(IndexTarget::PDIFFS);
1322 if (hasReleaseFile == true)
1323 {
1324 if (TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false)
1325 {
1326 // optional targets that we do not have in the Release file are skipped
1327 if (hasHashes == true && Target.IsOptional)
1328 {
1329 new CleanupItem(Owner, TransactionManager, Target);
1330 continue;
1331 }
1332
1333 std::string const &arch = Target.Option(IndexTarget::ARCHITECTURE);
1334 if (arch.empty() == false)
1335 {
1336 if (TransactionManager->MetaIndexParser->IsArchitectureSupported(arch) == false)
1337 {
1338 new CleanupItem(Owner, TransactionManager, Target);
1339 _error->Notice(_("Skipping acquire of configured file '%s' as repository '%s' doesn't support architecture '%s'"),
1340 Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str(), arch.c_str());
1341 continue;
1342 }
1343 // if the architecture is officially supported but currently no packages for it are available,
1344 // ignore silently as this is pretty much the same as just shipping an empty file.
1345 // if we don't know which architectures are supported, we do NOT ignore it to notify user about this
1346 if (hasHashes == true && TransactionManager->MetaIndexParser->IsArchitectureSupported("*undefined*") == false)
1347 {
1348 new CleanupItem(Owner, TransactionManager, Target);
1349 continue;
1350 }
1351 }
1352
1353 if (hasHashes == true)
1354 {
1355 Status = StatAuthError;
1356 strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), Target.MetaKey.c_str());
1357 return;
1358 }
1359 else
1360 {
1361 new pkgAcqIndex(Owner, TransactionManager, Target);
1362 continue;
1363 }
1364 }
1365 else if (verify)
1366 {
1367 auto const hashes = GetExpectedHashesFor(Target.MetaKey);
1368 if (hashes.empty() == false)
1369 {
1370 if (hashes.usable() == false && TargetIsAllowedToBe(TransactionManager->Target, InsecureType::WEAK) == false)
1371 {
1372 new CleanupItem(Owner, TransactionManager, Target);
1373 _error->Warning(_("Skipping acquire of configured file '%s' as repository '%s' provides only weak security information for it"),
1374 Target.MetaKey.c_str(), TransactionManager->Target.Description.c_str());
1375 continue;
1376 }
1377 // empty files are skipped as acquiring the very small compressed files is a waste of time
1378 else if (hashes.FileSize() == 0)
1379 {
1380 new CleanupItem(Owner, TransactionManager, Target);
1381 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1382 continue;
1383 }
1384 }
1385 }
1386
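// Worked example for the filtering below (illustrative, not in the original source):
// with COMPRESSIONTYPES = "xz gz uncompressed" and a Release file that only lists
// Packages.xz, the remove_if pass keeps just "xz"; if by-hash is supported (or
// forced) the option is then rewritten to "by-hash xz".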
1387 // autoselect the compression method
1388 std::vector<std::string> types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
1389 types.erase(std::remove_if(types.begin(), types.end(), [&](std::string const &t) {
1390 if (t == "uncompressed")
1391 return TransactionManager->MetaIndexParser->Exists(Target.MetaKey) == false;
1392 std::string const MetaKey = Target.MetaKey + "." + t;
1393 return TransactionManager->MetaIndexParser->Exists(MetaKey) == false;
1394 }), types.end());
1395 if (types.empty() == false)
1396 {
1397 std::ostringstream os;
1398 // add the special compressiontype byhash first if supported
1399 std::string const useByHashConf = Target.Option(IndexTarget::BY_HASH);
1400 bool useByHash = false;
1401 if(useByHashConf == "force")
1402 useByHash = true;
1403 else
1404 useByHash = StringToBool(useByHashConf) == true && metaBaseSupportsByHash;
1405 if (useByHash == true)
1406 os << "by-hash ";
1407 std::copy(types.begin(), types.end()-1, std::ostream_iterator<std::string>(os, " "));
1408 os << *types.rbegin();
1409 Target.Options["COMPRESSIONTYPES"] = os.str();
1410 }
1411 else
1412 Target.Options["COMPRESSIONTYPES"].clear();
1413
1414 std::string filename = GetExistingFilename(GetFinalFileNameFromURI(Target.URI));
1415 if (filename.empty() == false)
1416 {
1417 // if the Release file is a hit and we have an index it must be the current one
1418 if (TransactionManager->IMSHit == true)
1419 ;
1420 else if (TransactionManager->LastMetaIndexParser != NULL)
1421 {
1422 // see if the file changed since the last Release file
1423 // we use the uncompressed files as we might compress differently compared to the server,
1424 // so the hashes might not match, even if they contain the same data.
1425 HashStringList const newFile = GetExpectedHashesFromFor(TransactionManager->MetaIndexParser, Target.MetaKey);
1426 HashStringList const oldFile = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey);
1427 if (newFile != oldFile)
1428 filename.clear();
1429 }
1430 else
1431 filename.clear();
1432 }
1433 else
1434 trypdiff = false; // no file to patch
1435
1436 if (filename.empty() == false)
1437 {
1438 new NoActionItem(Owner, Target, filename);
1439 std::string const idxfilename = GetFinalFileNameFromURI(GetDiffIndexURI(Target));
1440 if (FileExists(idxfilename))
1441 new NoActionItem(Owner, Target, idxfilename);
1442 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1443 continue;
1444 }
1445
1446 // check if we have patches available
1447 trypdiff &= TransactionManager->MetaIndexParser->Exists(GetDiffIndexFileName(Target.MetaKey));
1448 }
1449 else
1450 {
1451 // if we have no file to patch, no point in trying
1452 trypdiff &= (GetExistingFilename(GetFinalFileNameFromURI(Target.URI)).empty() == false);
1453 }
1454
1455 // no point in patching from local sources
1456 if (trypdiff)
1457 {
1458 std::string const proto = Target.URI.substr(0, strlen("file:/"));
1459 if (proto == "file:/" || proto == "copy:/" || proto == "cdrom:")
1460 trypdiff = false;
1461 }
1462
1463 // Queue the Index file (Packages, Sources, Translation-$foo, …)
1464 targetsSeen.emplace(Target.Option(IndexTarget::CREATED_BY));
1465 if (trypdiff)
1466 new pkgAcqDiffIndex(Owner, TransactionManager, Target);
1467 else
1468 new pkgAcqIndex(Owner, TransactionManager, Target);
1469 }
1470}
1471 /*}}}*/
1472bool pkgAcqMetaBase::VerifyVendor(string const &) /*{{{*/
1473{
1474 string Transformed = TransactionManager->MetaIndexParser->GetExpectedDist();
1475
1476 if (Transformed == "../project/experimental")
1477 {
1478 Transformed = "experimental";
1479 }
1480
1481 auto pos = Transformed.rfind('/');
1482 if (pos != string::npos)
1483 {
1484 Transformed = Transformed.substr(0, pos);
1485 }
1486
1487 if (Transformed == ".")
1488 {
1489 Transformed = "";
1490 }
1491
1492 if (TransactionManager->MetaIndexParser->GetValidUntil() > 0)
1493 {
1494 time_t const invalid_since = time(NULL) - TransactionManager->MetaIndexParser->GetValidUntil();
1495 if (invalid_since > 0)
1496 {
1497 std::string errmsg;
1498 strprintf(errmsg,
1499 // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
1500 // the time since then the file is invalid - formatted in the same way as in
1501 // the download progress display (e.g. 7d 3h 42min 1s)
1502 _("Release file for %s is expired (invalid since %s). "
1503 "Updates for this repository will not be applied."),
1504 Target.URI.c_str(), TimeToStr(invalid_since).c_str());
1505 if (ErrorText.empty())
1506 ErrorText = errmsg;
1507 return _error->Error("%s", errmsg.c_str());
1508 }
1509 }
1510
1511 /* Did we get a file older than what we have? This is a last minute IMS hit and doubles
1512 as protection against being downgraded to older (still valid) files */
1513 if (TransactionManager->IMSHit == false && TransactionManager->LastMetaIndexParser != NULL &&
1514 TransactionManager->LastMetaIndexParser->GetDate() > TransactionManager->MetaIndexParser->GetDate())
1515 {
1516 TransactionManager->IMSHit = true;
1517 RemoveFile("VerifyVendor", DestFile);
1518 PartialFile = DestFile = GetFinalFilename();
1519 // load the 'old' file into the 'new' one instead of flipping pointers, as
1520 // the new one isn't owned by us while the old one is, so flipping would confuse the cleanup.
1521 TransactionManager->MetaIndexParser->swapLoad(TransactionManager->LastMetaIndexParser);
1522 delete TransactionManager->LastMetaIndexParser;
1523 TransactionManager->LastMetaIndexParser = NULL;
1524 }
1525
1526 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1527 {
1528 std::cerr << "Got Codename: " << TransactionManager->MetaIndexParser->GetCodename() << std::endl;
1529 std::cerr << "Expecting Dist: " << TransactionManager->MetaIndexParser->GetExpectedDist() << std::endl;
1530 std::cerr << "Transformed Dist: " << Transformed << std::endl;
1531 }
1532
1533 if (TransactionManager->MetaIndexParser->CheckDist(Transformed) == false)
1534 {
1535 // This might become fatal one day
1536// Status = StatAuthError;
1537// ErrorText = "Conflicting distribution; expected "
1538// + MetaIndexParser->GetExpectedDist() + " but got "
1539// + MetaIndexParser->GetCodename();
1540// return false;
1541 if (!Transformed.empty())
1542 {
1543 _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
1544 Desc.Description.c_str(),
1545 Transformed.c_str(),
1546 TransactionManager->MetaIndexParser->GetCodename().c_str());
1547 }
1548 }
1549
1550 return true;
1551}
1552 /*}}}*/
1553pkgAcqMetaBase::~pkgAcqMetaBase()
1554{
1555}
1556
1557pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire * const Owner, /*{{{*/
1558 IndexTarget const &ClearsignedTarget,
1559 IndexTarget const &DetachedDataTarget, IndexTarget const &DetachedSigTarget,
1560 metaIndex * const MetaIndexParser) :
1561 pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget),
1562 d(NULL), DetachedDataTarget(DetachedDataTarget),
1563 MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL)
1564{
1565 // index targets + (worst case:) Release/Release.gpg
1566 ExpectedAdditionalItems = std::numeric_limits<decltype(ExpectedAdditionalItems)>::max();
1567 TransactionManager->Add(this);
1568}
1569 /*}}}*/
1570pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
1571{
1572 if (LastMetaIndexParser != NULL)
1573 delete LastMetaIndexParser;
1574}
1575 /*}}}*/
1576// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
1577string pkgAcqMetaClearSig::Custom600Headers() const
1578{
1579 string Header = pkgAcqMetaBase::Custom600Headers();
1580 Header += "\nFail-Ignore: true";
1581 std::string const key = TransactionManager->MetaIndexParser->GetSignedBy();
1582 if (key.empty() == false)
1583 Header += "\nSigned-By: " + key;
1584
1585 return Header;
1586}
1587 /*}}}*/
1588void pkgAcqMetaClearSig::Finished() /*{{{*/
1589{
1590 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1591 std::clog << "Finished: " << DestFile <<std::endl;
1592 if(TransactionManager->State == TransactionStarted &&
1593 TransactionManager->TransactionHasError() == false)
1594 TransactionManager->CommitTransaction();
1595}
1596 /*}}}*/
1597bool pkgAcqMetaClearSig::VerifyDone(std::string const &Message, /*{{{*/
1598 pkgAcquire::MethodConfig const * const Cnf)
1599{
1600 Item::VerifyDone(Message, Cnf);
1601
1602 if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile))
1603 return RenameOnError(NotClearsigned);
1604
1605 return true;
1606}
1607 /*}}}*/
1608// pkgAcqMetaClearSig::Done - We got a file /*{{{*/
1609void pkgAcqMetaClearSig::Done(std::string const &Message,
1610 HashStringList const &Hashes,
1611 pkgAcquire::MethodConfig const * const Cnf)
1612{
1613 Item::Done(Message, Hashes, Cnf);
1614
1615 if(AuthPass == false)
1616 {
1617 if(CheckDownloadDone(this, Message, Hashes) == true)
1618 QueueForSignatureVerify(this, DestFile, DestFile);
1619 return;
1620 }
1621 else if(CheckAuthDone(Message) == true)
1622 {
1623 if (TransactionManager->IMSHit == false)
1624 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
1625 else if (RealFileExists(GetFinalFilename()) == false)
1626 {
	 // We got an IMS hit for the InRelease file, but we don't have it on
	 // disk, which means a valid Release/Release.gpg combo stepped in. We
	 // have to 'acquire' them now so that list cleanup doesn't remove them.
1630 new NoActionItem(Owner, DetachedDataTarget);
1631 new NoActionItem(Owner, DetachedSigTarget);
1632 }
1633 }
1634 else if (Status != StatAuthError)
1635 {
1636 string const FinalFile = GetFinalFileNameFromURI(DetachedDataTarget.URI);
1637 string const OldFile = GetFinalFilename();
1638 if (TransactionManager->IMSHit == false)
1639 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
1640 else if (RealFileExists(OldFile) == false)
1641 new NoActionItem(Owner, DetachedDataTarget);
1642 else
1643 TransactionManager->TransactionStageCopy(this, OldFile, FinalFile);
1644 }
1645}
1646 /*}}}*/
1647void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) /*{{{*/
1648{
1649 Item::Failed(Message, Cnf);
1650
1651 if (AuthPass == false)
1652 {
1653 if (Status == StatAuthError || Status == StatTransientNetworkError)
1654 {
	 // if we expected a ClearTextSignature (InRelease) but got a network
	 // error or a file that wasn't valid, we end up here (see VerifyDone).
	 // As this is usually caused by web portals we do not try Release/Release.gpg
	 // as that is going to fail anyway; instead we abort this try (LP#346386)
1659 TransactionManager->AbortTransaction();
1660 return;
1661 }
1662
      // Queue the 'old' InRelease file for removal if we try Release.gpg
      // as otherwise the file will stay around and give a false-auth
      // impression (CVE-2012-0214)
1666 TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
1667 Status = StatDone;
1668
1669 new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget);
1670 }
1671 else
1672 {
1673 if(CheckStopAuthentication(this, Message))
1674 return;
1675
1676 if(AllowInsecureRepositories(InsecureType::UNSIGNED, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1677 {
1678 Status = StatDone;
1679
1680 /* InRelease files become Release files, otherwise
1681 * they would be considered as trusted later on */
1682 string const FinalRelease = GetFinalFileNameFromURI(DetachedDataTarget.URI);
1683 string const PartialRelease = GetPartialFileNameFromURI(DetachedDataTarget.URI);
1684 string const FinalReleasegpg = GetFinalFileNameFromURI(DetachedSigTarget.URI);
1685 string const FinalInRelease = GetFinalFilename();
1686 Rename(DestFile, PartialRelease);
1687 TransactionManager->TransactionStageCopy(this, PartialRelease, FinalRelease);
1688 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1689
1690 // we parse the indexes here because at this point the user wanted
1691 // a repository that may potentially harm him
1692 if (TransactionManager->MetaIndexParser->Load(PartialRelease, &ErrorText) == false || VerifyVendor(Message) == false)
1693 /* expired Release files are still a problem you need extra force for */;
1694 else
1695 TransactionManager->QueueIndexes(true);
1696 }
1697 }
1698}
1699 /*}}}*/
1700
1701pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner, /*{{{*/
1702 pkgAcqMetaClearSig * const TransactionManager,
1703 IndexTarget const &DataTarget,
1704 IndexTarget const &DetachedSigTarget) :
1705 pkgAcqMetaBase(Owner, TransactionManager, DataTarget), d(NULL),
1706 DetachedSigTarget(DetachedSigTarget)
1707{
1708 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1709 std::clog << "New pkgAcqMetaIndex with TransactionManager "
1710 << this->TransactionManager << std::endl;
1711
1712 DestFile = GetPartialFileNameFromURI(DataTarget.URI);
1713
1714 // Create the item
1715 Desc.Description = DataTarget.Description;
1716 Desc.Owner = this;
1717 Desc.ShortDesc = DataTarget.ShortDesc;
1718 Desc.URI = DataTarget.URI;
1719 QueueURI(Desc);
1720}
1721 /*}}}*/
1722void pkgAcqMetaIndex::Done(string const &Message, /*{{{*/
1723 HashStringList const &Hashes,
1724 pkgAcquire::MethodConfig const * const Cfg)
1725{
1726 Item::Done(Message,Hashes,Cfg);
1727
1728 if(CheckDownloadDone(this, Message, Hashes))
1729 {
      // we have a Release file, now download the Signature; all further
      // verification and queueing of additional downloads will be done in
      // the pkgAcqMetaSig::Done() code
1733 new pkgAcqMetaSig(Owner, TransactionManager, DetachedSigTarget, this);
1734 }
1735}
1736 /*}}}*/
1737// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/
1738void pkgAcqMetaIndex::Failed(string const &Message,
1739 pkgAcquire::MethodConfig const * const Cnf)
1740{
1741 pkgAcquire::Item::Failed(Message, Cnf);
1742 Status = StatDone;
1743
   // No Release file was present, so fall back to queueing Packages files
   // without verification; only allow going further if the user
   // explicitly wants it
1747 if(AllowInsecureRepositories(InsecureType::NORELEASE, Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1748 {
1749 // ensure old Release files are removed
1750 TransactionManager->TransactionStageRemoval(this, GetFinalFilename());
1751
1752 // queue without any kind of hashsum support
1753 TransactionManager->QueueIndexes(false);
1754 }
1755}
1756 /*}}}*/
1757std::string pkgAcqMetaIndex::DescURI() const /*{{{*/
1758{
1759 return Target.URI;
1760}
1761 /*}}}*/
1762pkgAcqMetaIndex::~pkgAcqMetaIndex() {}
1763
1764// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/
1765pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire * const Owner,
1766 pkgAcqMetaClearSig * const TransactionManager,
1767 IndexTarget const &Target,
1768 pkgAcqMetaIndex * const MetaIndex) :
1769 pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL), MetaIndex(MetaIndex)
1770{
1771 DestFile = GetPartialFileNameFromURI(Target.URI);
1772
   // remove any partially downloaded sig-file in partial/.
   // it may confuse proxies and is too small to warrant a
   // partial download anyway
1776 RemoveFile("pkgAcqMetaSig", DestFile);
1777
1778 // set the TransactionManager
1779 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
1780 std::clog << "New pkgAcqMetaSig with TransactionManager "
1781 << TransactionManager << std::endl;
1782
1783 // Create the item
1784 Desc.Description = Target.Description;
1785 Desc.Owner = this;
1786 Desc.ShortDesc = Target.ShortDesc;
1787 Desc.URI = Target.URI;
1788
1789 // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors),
1790 // so we skip the download step and go instantly to verification
1791 if (TransactionManager->IMSHit == true && RealFileExists(GetFinalFilename()))
1792 {
1793 Complete = true;
1794 Status = StatDone;
1795 PartialFile = DestFile = GetFinalFilename();
1796 MetaIndexFileSignature = DestFile;
1797 MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile);
1798 }
1799 else
1800 QueueURI(Desc);
1801}
1802 /*}}}*/
1803pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
1804{
1805}
1806 /*}}}*/
1807// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
1808std::string pkgAcqMetaSig::Custom600Headers() const
1809{
1810 std::string Header = pkgAcqTransactionItem::Custom600Headers();
1811 std::string const key = TransactionManager->MetaIndexParser->GetSignedBy();
1812 if (key.empty() == false)
1813 Header += "\nSigned-By: " + key;
1814 return Header;
1815}
1816 /*}}}*/
1817// AcqMetaSig::Done - The signature was downloaded/verified /*{{{*/
1818void pkgAcqMetaSig::Done(string const &Message, HashStringList const &Hashes,
1819 pkgAcquire::MethodConfig const * const Cfg)
1820{
1821 if (MetaIndexFileSignature.empty() == false)
1822 {
1823 DestFile = MetaIndexFileSignature;
1824 MetaIndexFileSignature.clear();
1825 }
1826 Item::Done(Message, Hashes, Cfg);
1827
1828 if(MetaIndex->AuthPass == false)
1829 {
1830 if(MetaIndex->CheckDownloadDone(this, Message, Hashes) == true)
1831 {
1832 // destfile will be modified to point to MetaIndexFile for the
1833 // gpgv method, so we need to save it here
1834 MetaIndexFileSignature = DestFile;
1835 MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile);
1836 }
1837 return;
1838 }
1839 else if(MetaIndex->CheckAuthDone(Message) == true)
1840 {
1841 if (TransactionManager->IMSHit == false)
1842 {
1843 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
1844 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename());
1845 }
1846 }
1847 else if (MetaIndex->Status != StatAuthError)
1848 {
1849 std::string const FinalFile = MetaIndex->GetFinalFilename();
1850 if (TransactionManager->IMSHit == false)
1851 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalFile);
1852 else
1853 TransactionManager->TransactionStageCopy(MetaIndex, FinalFile, FinalFile);
1854 }
1855}
1856 /*}}}*/
1857void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
1858{
1859 Item::Failed(Message,Cnf);
1860
1861 // check if we need to fail at this point
1862 if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message))
1863 return;
1864
1865 // ensures that a Release.gpg file in the lists/ is removed by the transaction
1866 TransactionManager->TransactionStageRemoval(this, DestFile);
1867
1868 // only allow going further if the user explicitly wants it
1869 if (AllowInsecureRepositories(InsecureType::UNSIGNED, MetaIndex->Target.Description, TransactionManager->MetaIndexParser, TransactionManager, this) == true)
1870 {
1871 string const FinalRelease = MetaIndex->GetFinalFilename();
1872 string const FinalInRelease = TransactionManager->GetFinalFilename();
1873 LoadLastMetaIndexParser(TransactionManager, FinalRelease, FinalInRelease);
1874
1875 // we parse the indexes here because at this point the user wanted
1876 // a repository that may potentially harm him
1877 bool const GoodLoad = TransactionManager->MetaIndexParser->Load(MetaIndex->DestFile, &ErrorText);
1878 if (MetaIndex->VerifyVendor(Message) == false)
1879 /* expired Release files are still a problem you need extra force for */;
1880 else
1881 TransactionManager->QueueIndexes(GoodLoad);
1882
1883 TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, FinalRelease);
1884 }
1885 else if (TransactionManager->IMSHit == false)
1886 Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED");
1887
1888 // FIXME: this is used often (e.g. in pkgAcqIndexTrans) so refactor
1889 if (Cnf->LocalOnly == true ||
1890 StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
1891 {
1892 // Ignore this
1893 Status = StatDone;
1894 }
1895}
1896 /*}}}*/
1897
1898
1899// AcqBaseIndex - Constructor /*{{{*/
1900pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire * const Owner,
1901 pkgAcqMetaClearSig * const TransactionManager,
1902 IndexTarget const &Target)
1903: pkgAcqTransactionItem(Owner, TransactionManager, Target), d(NULL)
1904{
1905}
1906 /*}}}*/
1907void pkgAcqBaseIndex::Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
1908{
1909 pkgAcquire::Item::Failed(Message, Cnf);
1910 if (Status != StatAuthError)
1911 return;
1912
1913 ErrorText.append("Release file created at: ");
1914 auto const timespec = TransactionManager->MetaIndexParser->GetDate();
1915 if (timespec == 0)
1916 ErrorText.append("<unknown>");
1917 else
1918 ErrorText.append(TimeRFC1123(timespec, true));
1919 ErrorText.append("\n");
1920}
1921 /*}}}*/
1922pkgAcqBaseIndex::~pkgAcqBaseIndex() {}
1923
1924// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
1925// ---------------------------------------------------------------------
/* Get the DiffIndex file first and see if there are patches available.
 * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
 * patches. If anything goes wrong in that process, it will fall back to
 * the original Packages file.
1930 */
1931pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner,
1932 pkgAcqMetaClearSig * const TransactionManager,
1933 IndexTarget const &Target)
1934 : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL), diffs(NULL)
1935{
1936 // FIXME: Magic number as an upper bound on pdiffs we will reasonably acquire
1937 ExpectedAdditionalItems = 40;
1938
1939 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
1940
1941 Desc.Owner = this;
1942 Desc.Description = GetDiffIndexFileName(Target.Description);
1943 Desc.ShortDesc = Target.ShortDesc;
1944 Desc.URI = GetDiffIndexURI(Target);
1945
1946 DestFile = GetPartialFileNameFromURI(Desc.URI);
1947
1948 if(Debug)
1949 std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
1950
1951 QueueURI(Desc);
1952}
1953 /*}}}*/
// AcqDiffIndex::Custom600Headers - Insert custom request headers	/*{{{*/
1955// ---------------------------------------------------------------------
1956/* The only header we use is the last-modified header. */
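// The returned headers typically look like
//   Index-File: true
//   Last-Modified: Thu, 27 Apr 2017 08:12:33 GMT
// which allows e.g. the http method to answer with an IMS hit if the
// server copy is unchanged.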
1957string pkgAcqDiffIndex::Custom600Headers() const
1958{
1959 if (TransactionManager->LastMetaIndexParser != NULL)
1960 return "\nIndex-File: true";
1961
1962 string const Final = GetFinalFilename();
1963
1964 if(Debug)
1965 std::clog << "Custom600Header-IMS: " << Final << std::endl;
1966
1967 struct stat Buf;
1968 if (stat(Final.c_str(),&Buf) != 0)
1969 return "\nIndex-File: true";
1970
1971 return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
1972}
1973 /*}}}*/
1974void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/
1975{
1976 // list cleanup needs to know that this file as well as the already
1977 // present index is ours, so we create an empty diff to save it for us
1978 new pkgAcqIndexDiffs(Owner, TransactionManager, Target);
1979}
1980 /*}}}*/
1981static bool RemoveFileForBootstrapLinking(bool const Debug, std::string const &For, std::string const &Boot)/*{{{*/
1982{
1983 if (FileExists(Boot) && RemoveFile("Bootstrap-linking", Boot) == false)
1984 {
1985 if (Debug)
1986 std::clog << "Bootstrap-linking for patching " << For
1987 << " by removing stale " << Boot << " failed!" << std::endl;
1988 return false;
1989 }
1990 return true;
1991}
1992 /*}}}*/
1993bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/
1994{
1995 ExpectedAdditionalItems = 0;
1996 // failing here is fine: our caller will take care of trying to
1997 // get the complete file if patching fails
1998 if(Debug)
1999 std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
2000 << std::endl;
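   // The Index file is expected to carry stanzas roughly like
   //   SHA256-Current: <hash of the current full index> <size>
   //   SHA256-History:
   //    <hash of the full index after this patch> <size> 2017-04-27-0815.09
   //   SHA256-Patches:
   //    <hash of the uncompressed patch> <size> 2017-04-27-0815.09
   //   SHA256-Download:
   //    <hash of the compressed patch> <size> 2017-04-27-0815.09.gz
   // (and the same again for the other supported hash types), which is what
   // the tag parsing below relies on.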
2001
2002 FileFd Fd(IndexDiffFile,FileFd::ReadOnly);
2003 pkgTagFile TF(&Fd);
2004 if (Fd.IsOpen() == false || Fd.Failed())
2005 return false;
2006
2007 pkgTagSection Tags;
2008 if(unlikely(TF.Step(Tags) == false))
2009 return false;
2010
2011 HashStringList ServerHashes;
2012 unsigned long long ServerSize = 0;
2013
2014 auto const &posix = std::locale("C.UTF-8");
2015 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
2016 {
2017 std::string tagname = *type;
2018 tagname.append("-Current");
2019 std::string const tmp = Tags.FindS(tagname.c_str());
2020 if (tmp.empty() == true)
2021 continue;
2022
2023 string hash;
2024 unsigned long long size;
2025 std::stringstream ss(tmp);
2026 ss.imbue(posix);
2027 ss >> hash >> size;
2028 if (unlikely(hash.empty() == true))
2029 continue;
2030 if (unlikely(ServerSize != 0 && ServerSize != size))
2031 continue;
2032 ServerHashes.push_back(HashString(*type, hash));
2033 ServerSize = size;
2034 }
2035
2036 if (ServerHashes.usable() == false)
2037 {
2038 if (Debug == true)
2039 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl;
2040 return false;
2041 }
2042
2043 std::string const CurrentPackagesFile = GetFinalFileNameFromURI(Target.URI);
2044 HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey);
2045 if (TargetFileHashes.usable() == false || ServerHashes != TargetFileHashes)
2046 {
2047 if (Debug == true)
2048 {
2049 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl;
2050 printHashSumComparison(CurrentPackagesFile, ServerHashes, TargetFileHashes);
2051 }
2052 return false;
2053 }
2054
2055 HashStringList LocalHashes;
2056 // try avoiding calculating the hash here as this is costly
2057 if (TransactionManager->LastMetaIndexParser != NULL)
2058 LocalHashes = GetExpectedHashesFromFor(TransactionManager->LastMetaIndexParser, Target.MetaKey);
2059 if (LocalHashes.usable() == false)
2060 {
2061 FileFd fd(CurrentPackagesFile, FileFd::ReadOnly, FileFd::Auto);
2062 Hashes LocalHashesCalc(ServerHashes);
2063 LocalHashesCalc.AddFD(fd);
2064 LocalHashes = LocalHashesCalc.GetHashStringList();
2065 }
2066
2067 if (ServerHashes == LocalHashes)
2068 {
      // we have the same hashes as the server so we are done here
2070 if(Debug)
2071 std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl;
2072 QueueOnIMSHit();
2073 return true;
2074 }
2075
2076 if(Debug)
2077 std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at "
2078 << CurrentPackagesFile << " " << LocalHashes.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl;
2079
2080 // historically, older hashes have more info than newer ones, so start
2081 // collecting with older ones first to avoid implementing complicated
2082 // information merging techniques… a failure is after all always
2083 // recoverable with a complete file and hashes aren't changed that often.
2084 std::vector<char const *> types;
2085 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
2086 types.push_back(*type);
2087
2088 // parse all of (provided) history
2089 vector<DiffInfo> available_patches;
2090 bool firstAcceptedHashes = true;
2091 for (auto type = types.crbegin(); type != types.crend(); ++type)
2092 {
2093 if (LocalHashes.find(*type) == NULL)
2094 continue;
2095
2096 std::string tagname = *type;
2097 tagname.append("-History");
2098 std::string const tmp = Tags.FindS(tagname.c_str());
2099 if (tmp.empty() == true)
2100 continue;
2101
2102 string hash, filename;
2103 unsigned long long size;
2104 std::stringstream ss(tmp);
2105 ss.imbue(posix);
2106
2107 while (ss >> hash >> size >> filename)
2108 {
2109 if (unlikely(hash.empty() == true || filename.empty() == true))
2110 continue;
2111
2112 // see if we have a record for this file already
2113 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2114 for (; cur != available_patches.end(); ++cur)
2115 {
2116 if (cur->file != filename)
2117 continue;
2118 cur->result_hashes.push_back(HashString(*type, hash));
2119 break;
2120 }
2121 if (cur != available_patches.end())
2122 continue;
2123 if (firstAcceptedHashes == true)
2124 {
2125 DiffInfo next;
2126 next.file = filename;
2127 next.result_hashes.push_back(HashString(*type, hash));
2128 next.result_hashes.FileSize(size);
2129 available_patches.push_back(next);
2130 }
2131 else
2132 {
2133 if (Debug == true)
2134 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2135 << " wasn't in the list for the first parsed hash! (history)" << std::endl;
2136 break;
2137 }
2138 }
2139 firstAcceptedHashes = false;
2140 }
2141
2142 if (unlikely(available_patches.empty() == true))
2143 {
2144 if (Debug)
2145 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
2146 << "Couldn't find any patches for the patch series." << std::endl;
2147 return false;
2148 }
2149
2150 for (auto type = types.crbegin(); type != types.crend(); ++type)
2151 {
2152 if (LocalHashes.find(*type) == NULL)
2153 continue;
2154
2155 std::string tagname = *type;
2156 tagname.append("-Patches");
2157 std::string const tmp = Tags.FindS(tagname.c_str());
2158 if (tmp.empty() == true)
2159 continue;
2160
2161 string hash, filename;
2162 unsigned long long size;
2163 std::stringstream ss(tmp);
2164 ss.imbue(posix);
2165
2166 while (ss >> hash >> size >> filename)
2167 {
2168 if (unlikely(hash.empty() == true || filename.empty() == true))
2169 continue;
2170
2171 // see if we have a record for this file already
2172 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2173 for (; cur != available_patches.end(); ++cur)
2174 {
2175 if (cur->file != filename)
2176 continue;
2177 if (cur->patch_hashes.empty())
2178 cur->patch_hashes.FileSize(size);
2179 cur->patch_hashes.push_back(HashString(*type, hash));
2180 break;
2181 }
2182 if (cur != available_patches.end())
2183 continue;
2184 if (Debug == true)
2185 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2186 << " wasn't in the list for the first parsed hash! (patches)" << std::endl;
2187 break;
2188 }
2189 }
2190
2191 for (auto type = types.crbegin(); type != types.crend(); ++type)
2192 {
2193 std::string tagname = *type;
2194 tagname.append("-Download");
2195 std::string const tmp = Tags.FindS(tagname.c_str());
2196 if (tmp.empty() == true)
2197 continue;
2198
2199 string hash, filename;
2200 unsigned long long size;
2201 std::stringstream ss(tmp);
2202 ss.imbue(posix);
2203
2204 // FIXME: all of pdiff supports only .gz compressed patches
2205 while (ss >> hash >> size >> filename)
2206 {
2207 if (unlikely(hash.empty() == true || filename.empty() == true))
2208 continue;
2209 if (unlikely(APT::String::Endswith(filename, ".gz") == false))
2210 continue;
2211 filename.erase(filename.length() - 3);
2212
2213 // see if we have a record for this file already
2214 std::vector<DiffInfo>::iterator cur = available_patches.begin();
2215 for (; cur != available_patches.end(); ++cur)
2216 {
2217 if (cur->file != filename)
2218 continue;
2219 if (cur->download_hashes.empty())
2220 cur->download_hashes.FileSize(size);
2221 cur->download_hashes.push_back(HashString(*type, hash));
2222 break;
2223 }
2224 if (cur != available_patches.end())
2225 continue;
2226 if (Debug == true)
2227 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
2228 << " wasn't in the list for the first parsed hash! (download)" << std::endl;
2229 break;
2230 }
2231 }
2232
2233
2234 bool foundStart = false;
2235 for (std::vector<DiffInfo>::iterator cur = available_patches.begin();
2236 cur != available_patches.end(); ++cur)
2237 {
2238 if (LocalHashes != cur->result_hashes)
2239 continue;
2240
2241 available_patches.erase(available_patches.begin(), cur);
2242 foundStart = true;
2243 break;
2244 }
2245
2246 if (foundStart == false || unlikely(available_patches.empty() == true))
2247 {
2248 if (Debug)
2249 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
2250 << "Couldn't find the start of the patch series." << std::endl;
2251 return false;
2252 }
2253
2254 for (auto const &patch: available_patches)
2255 if (patch.result_hashes.usable() == false ||
2256 patch.patch_hashes.usable() == false ||
2257 patch.download_hashes.usable() == false)
2258 {
2259 if (Debug)
2260 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": provides no usable hashes for " << patch.file
2261 << " so fallback to complete download" << std::endl;
2262 return false;
2263 }
2264
2265 // patching with too many files is rather slow compared to a fast download
2266 unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
2267 if (fileLimit != 0 && fileLimit < available_patches.size())
2268 {
2269 if (Debug)
2270 std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
2271 << ") so fallback to complete download" << std::endl;
2272 return false;
2273 }
2274
2275 // calculate the size of all patches we have to get
2276 unsigned short const sizeLimitPercent = _config->FindI("Acquire::PDiffs::SizeLimit", 100);
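   // the limit is a percentage of the (compressed) index we would otherwise
   // download: e.g. with a 10 MB Packages.xz the default of 100 allows up to
   // 10 MB of patches, a setting of 50 only up to 5 MB, before we fall back
   // to fetching the complete file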
2277 if (sizeLimitPercent > 0)
2278 {
2279 unsigned long long downloadSize = std::accumulate(available_patches.begin(),
2280 available_patches.end(), 0llu, [](unsigned long long const T, DiffInfo const &I) {
2281 return T + I.download_hashes.FileSize();
2282 });
2283 if (downloadSize != 0)
2284 {
2285 unsigned long long downloadSizeIdx = 0;
2286 auto const types = VectorizeString(Target.Option(IndexTarget::COMPRESSIONTYPES), ' ');
2287 for (auto const &t : types)
2288 {
2289 std::string MetaKey = Target.MetaKey;
2290 if (t != "uncompressed")
2291 MetaKey += '.' + t;
2292 HashStringList const hsl = GetExpectedHashesFor(MetaKey);
2293 if (unlikely(hsl.usable() == false))
2294 continue;
2295 downloadSizeIdx = hsl.FileSize();
2296 break;
2297 }
2298 unsigned long long const sizeLimit = downloadSizeIdx * sizeLimitPercent;
2299 if ((sizeLimit/100) < downloadSize)
2300 {
2301 if (Debug)
2302 std::clog << "Need " << downloadSize << " compressed bytes (Limit is " << (sizeLimit/100) << ", "
2303 << "original is " << downloadSizeIdx << ") so fallback to complete download" << std::endl;
2304 return false;
2305 }
2306 }
2307 }
2308
2309 // we have something, queue the diffs
2310 string::size_type const last_space = Description.rfind(" ");
2311 if(last_space != string::npos)
2312 Description.erase(last_space, Description.size()-last_space);
2313
   /* decide if we should download patches one by one or in one go:
      The first is good if the server merges patches, but many don't, so
      client-based merging can be attempted, in which case the second is better.
      "bad things" will happen if patches are merged on the server,
      but client-side merging is attempted as well */
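   // (e.g. setting Acquire::PDiffs::Merge "false"; in apt.conf forces the
   // one-by-one pkgAcqIndexDiffs path below)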
2319 bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
2320 if (pdiff_merge == true)
2321 {
2322 // reprepro adds this flag if it has merged patches on the server
2323 std::string const precedence = Tags.FindS("X-Patch-Precedence");
2324 pdiff_merge = (precedence != "merged");
2325 }
2326
2327 // clean the plate
2328 {
2329 std::string const Final = GetExistingFilename(CurrentPackagesFile);
2330 if (unlikely(Final.empty())) // because we wouldn't be called in such a case
2331 return false;
2332 std::string const PartialFile = GetPartialFileNameFromURI(Target.URI);
2333 std::string const PatchedFile = GetKeepCompressedFileName(PartialFile + "-patched", Target);
2334 if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile) == false ||
2335 RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile) == false)
2336 return false;
2337 for (auto const &ext : APT::Configuration::getCompressorExtensions())
2338 {
2339 if (RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PartialFile + ext) == false ||
2340 RemoveFileForBootstrapLinking(Debug, CurrentPackagesFile, PatchedFile + ext) == false)
2341 return false;
2342 }
2343 std::string const Ext = Final.substr(CurrentPackagesFile.length());
2344 std::string const Partial = PartialFile + Ext;
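      // e.g. if the file we already have is lists/..._Packages.xz then Ext is
      // ".xz" and partial/..._Packages.xz becomes a symlink to it, so the
      // patch machinery sees the compression extension it expects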
2345 if (symlink(Final.c_str(), Partial.c_str()) != 0)
2346 {
2347 if (Debug)
2348 std::clog << "Bootstrap-linking for patching " << CurrentPackagesFile
2349 << " by linking " << Final << " to " << Partial << " failed!" << std::endl;
2350 return false;
2351 }
2352 }
2353
2354 if (pdiff_merge == false)
2355 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, available_patches);
2356 else
2357 {
2358 diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
2359 for(size_t i = 0; i < available_patches.size(); ++i)
2360 (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager,
2361 Target,
2362 available_patches[i],
2363 diffs);
2364 }
2365
2366 Complete = false;
2367 Status = StatDone;
2368 Dequeue();
2369 return true;
2370}
2371 /*}}}*/
2372void pkgAcqDiffIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2373{
2374 pkgAcqBaseIndex::Failed(Message,Cnf);
2375 Status = StatDone;
2376 ExpectedAdditionalItems = 0;
2377
2378 if(Debug)
2379 std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
2380 << "Falling back to normal index file acquire" << std::endl;
2381
2382 new pkgAcqIndex(Owner, TransactionManager, Target);
2383}
2384 /*}}}*/
2385void pkgAcqDiffIndex::Done(string const &Message,HashStringList const &Hashes, /*{{{*/
2386 pkgAcquire::MethodConfig const * const Cnf)
2387{
2388 if(Debug)
2389 std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl;
2390
2391 Item::Done(Message, Hashes, Cnf);
2392
2393 string const FinalFile = GetFinalFilename();
2394 if(StringToBool(LookupTag(Message,"IMS-Hit"),false))
2395 DestFile = FinalFile;
2396
2397 if(ParseDiffIndex(DestFile) == false)
2398 {
2399 Failed("Message: Couldn't parse pdiff index", Cnf);
2400 // queue for final move - this should happen even if we fail
2401 // while parsing (e.g. on sizelimit) and download the complete file.
2402 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
2403 return;
2404 }
2405
2406 TransactionManager->TransactionStageCopy(this, DestFile, FinalFile);
2407
2408 Complete = true;
2409 Status = StatDone;
2410 Dequeue();
2411
2412 return;
2413}
2414 /*}}}*/
2415pkgAcqDiffIndex::~pkgAcqDiffIndex()
2416{
2417 if (diffs != NULL)
2418 delete diffs;
2419}
2420
2421// AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/
2422// ---------------------------------------------------------------------
/* The package diff is added to the queue. One object is constructed
 * for each diff and the index.
2425 */
2426pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire * const Owner,
2427 pkgAcqMetaClearSig * const TransactionManager,
2428 IndexTarget const &Target,
2429 vector<DiffInfo> const &diffs)
2430 : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL),
2431 available_patches(diffs)
2432{
2433 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
2434
2435 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2436
2437 Desc.Owner = this;
2438 Description = Target.Description;
2439 Desc.ShortDesc = Target.ShortDesc;
2440
2441 if(available_patches.empty() == true)
2442 {
2443 // we are done (yeah!), check hashes against the final file
2444 DestFile = GetKeepCompressedFileName(GetFinalFileNameFromURI(Target.URI), Target);
2445 Finish(true);
2446 }
2447 else
2448 {
2449 State = StateFetchDiff;
2450 QueueNextDiff();
2451 }
2452}
2453 /*}}}*/
2454void pkgAcqIndexDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2455{
2456 pkgAcqBaseIndex::Failed(Message,Cnf);
2457 Status = StatDone;
2458
2459 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
2460 if(Debug)
2461 std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
2462 << "Falling back to normal index file acquire " << std::endl;
2463 RenameOnError(PDiffError);
2464 std::string const patchname = GetDiffsPatchFileName(DestFile);
2465 if (RealFileExists(patchname))
2466 Rename(patchname, patchname + ".FAILED");
2467 std::string const UnpatchedFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2468 if (UnpatchedFile.empty() == false && FileExists(UnpatchedFile))
2469 Rename(UnpatchedFile, UnpatchedFile + ".FAILED");
2470 new pkgAcqIndex(Owner, TransactionManager, Target);
2471 Finish();
2472}
2473 /*}}}*/
2474// Finish - helper that cleans the item out of the fetcher queue /*{{{*/
2475void pkgAcqIndexDiffs::Finish(bool allDone)
2476{
2477 if(Debug)
2478 std::clog << "pkgAcqIndexDiffs::Finish(): "
2479 << allDone << " "
2480 << Desc.URI << std::endl;
2481
   // we restore the original name; this is required, otherwise
   // the file will be cleaned up
2484 if(allDone)
2485 {
2486 std::string const Final = GetKeepCompressedFileName(GetFinalFilename(), Target);
2487 TransactionManager->TransactionStageCopy(this, DestFile, Final);
2488
2489 // this is for the "real" finish
2490 Complete = true;
2491 Status = StatDone;
2492 Dequeue();
2493 if(Debug)
2494 std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl;
2495 return;
2496 }
2497 else
2498 DestFile.clear();
2499
2500 if(Debug)
2501 std::clog << "Finishing: " << Desc.URI << std::endl;
2502 Complete = false;
2503 Status = StatDone;
2504 Dequeue();
2505 return;
2506}
2507 /*}}}*/
2508bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
2509{
   // calculate the hashes of the just patched file
2511 std::string const PartialFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2512 if(unlikely(PartialFile.empty()))
2513 {
2514 Failed("Message: The file " + GetPartialFileNameFromURI(Target.URI) + " isn't available", NULL);
2515 return false;
2516 }
2517
2518 FileFd fd(PartialFile, FileFd::ReadOnly, FileFd::Extension);
2519 Hashes LocalHashesCalc;
2520 LocalHashesCalc.AddFD(fd);
2521 HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
2522
2523 if(Debug)
2524 std::clog << "QueueNextDiff: " << PartialFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl;
2525
2526 HashStringList const TargetFileHashes = GetExpectedHashesFor(Target.MetaKey);
2527 if (unlikely(LocalHashes.usable() == false || TargetFileHashes.usable() == false))
2528 {
2529 Failed("Local/Expected hashes are not usable for " + PartialFile, NULL);
2530 return false;
2531 }
2532
2533 // final file reached before all patches are applied
2534 if(LocalHashes == TargetFileHashes)
2535 {
2536 Finish(true);
2537 return true;
2538 }
2539
2540 // remove all patches until the next matching patch is found
2541 // this requires the Index file to be ordered
2542 available_patches.erase(available_patches.begin(),
2543 std::find_if(available_patches.begin(), available_patches.end(), [&](DiffInfo const &I) {
2544 return I.result_hashes == LocalHashes;
2545 }));
2546
2547 // error checking and falling back if no patch was found
2548 if(available_patches.empty() == true)
2549 {
2550 Failed("No patches left to reach target for " + PartialFile, NULL);
2551 return false;
2552 }
2553
2554 // queue the right diff
2555 Desc.URI = Target.URI + ".diff/" + available_patches[0].file + ".gz";
2556 Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
2557 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI + ".diff/" + available_patches[0].file), Target);
2558
2559 if(Debug)
2560 std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl;
2561
2562 QueueURI(Desc);
2563
2564 return true;
2565}
2566 /*}}}*/
2567void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/
2568 pkgAcquire::MethodConfig const * const Cnf)
2569{
2570 if (Debug)
2571 std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl;
2572
2573 Item::Done(Message, Hashes, Cnf);
2574
2575 std::string const UncompressedUnpatchedFile = GetPartialFileNameFromURI(Target.URI);
2576 std::string const UnpatchedFile = GetExistingFilename(UncompressedUnpatchedFile);
2577 std::string const PatchFile = GetDiffsPatchFileName(UnpatchedFile);
2578 std::string const PatchedFile = GetKeepCompressedFileName(UncompressedUnpatchedFile, Target);
2579
2580 switch (State)
2581 {
2582 // success in downloading a diff, enter ApplyDiff state
2583 case StateFetchDiff:
2584 Rename(DestFile, PatchFile);
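	 // rred picks up the patch staged next to the file it is asked to
	 // patch (the GetDiffsPatchFileName() name used above) and writes the
	 // patched result to the DestFile set below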
2585 DestFile = GetKeepCompressedFileName(UncompressedUnpatchedFile + "-patched", Target);
2586 if(Debug)
2587 std::clog << "Sending to rred method: " << UnpatchedFile << std::endl;
2588 State = StateApplyDiff;
2589 Local = true;
2590 Desc.URI = "rred:" + UnpatchedFile;
2591 QueueURI(Desc);
2592 SetActiveSubprocess("rred");
2593 return;
2594 // success in download/apply a diff, queue next (if needed)
2595 case StateApplyDiff:
2596 // remove the just applied patch and base file
2597 available_patches.erase(available_patches.begin());
2598 RemoveFile("pkgAcqIndexDiffs::Done", PatchFile);
2599 RemoveFile("pkgAcqIndexDiffs::Done", UnpatchedFile);
2600 if(Debug)
2601 std::clog << "Moving patched file in place: " << std::endl
2602 << DestFile << " -> " << PatchedFile << std::endl;
2603 Rename(DestFile, PatchedFile);
2604
2605 // see if there is more to download
2606 if(available_patches.empty() == false)
2607 {
2608 new pkgAcqIndexDiffs(Owner, TransactionManager, Target, available_patches);
2609 Finish();
2610 } else {
2611 DestFile = PatchedFile;
2612 Finish(true);
2613 }
2614 return;
2615 }
2616}
2617 /*}}}*/
2618std::string pkgAcqIndexDiffs::Custom600Headers() const /*{{{*/
2619{
2620 if(State != StateApplyDiff)
2621 return pkgAcqBaseIndex::Custom600Headers();
2622 std::ostringstream patchhashes;
2623 for (auto && hs : available_patches[0].result_hashes)
2624 patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
2625 for (auto && hs : available_patches[0].patch_hashes)
2626 patchhashes << "\nPatch-0-" << hs.HashType() << "-Hash: " << hs.HashValue();
2627 patchhashes << pkgAcqBaseIndex::Custom600Headers();
2628 return patchhashes.str();
2629}
2630 /*}}}*/
2631pkgAcqIndexDiffs::~pkgAcqIndexDiffs() {}
2632
2633// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
2634pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire * const Owner,
2635 pkgAcqMetaClearSig * const TransactionManager,
2636 IndexTarget const &Target,
2637 DiffInfo const &patch,
2638 std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
2639 : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL),
2640 patch(patch), allPatches(allPatches), State(StateFetchDiff)
2641{
2642 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
2643
2644 Desc.Owner = this;
2645 Description = Target.Description;
2646 Desc.ShortDesc = Target.ShortDesc;
2647 Desc.URI = Target.URI + ".diff/" + patch.file + ".gz";
2648 Desc.Description = Description + " " + patch.file + ".pdiff";
2649 DestFile = GetPartialFileNameFromURI(Desc.URI);
2650
2651 if(Debug)
2652 std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
2653
2654 QueueURI(Desc);
2655}
2656 /*}}}*/
2657void pkgAcqIndexMergeDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/
2658{
2659 if(Debug)
2660 std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
2661
2662 pkgAcqBaseIndex::Failed(Message,Cnf);
2663 Status = StatDone;
2664
2665 // check if we are the first to fail, otherwise we are done here
2666 State = StateDoneDiff;
2667 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2668 I != allPatches->end(); ++I)
2669 if ((*I)->State == StateErrorDiff)
2670 {
2671 State = StateErrorDiff;
2672 return;
2673 }
2674
2675 // first failure means we should fallback
2676 State = StateErrorDiff;
2677 if (Debug)
2678 std::clog << "Falling back to normal index file acquire" << std::endl;
2679 RenameOnError(PDiffError);
2680 if (RealFileExists(DestFile))
2681 Rename(DestFile, DestFile + ".FAILED");
2682 std::string const UnpatchedFile = GetExistingFilename(GetPartialFileNameFromURI(Target.URI));
2683 if (UnpatchedFile.empty() == false && FileExists(UnpatchedFile))
2684 Rename(UnpatchedFile, UnpatchedFile + ".FAILED");
2685 DestFile.clear();
2686 new pkgAcqIndex(Owner, TransactionManager, Target);
2687}
2688 /*}}}*/
2689void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/
2690 pkgAcquire::MethodConfig const * const Cnf)
2691{
2692 if(Debug)
2693 std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
2694
2695 Item::Done(Message, Hashes, Cnf);
2696
2697 if (std::any_of(allPatches->begin(), allPatches->end(),
2698 [](pkgAcqIndexMergeDiffs const * const P) { return P->State == StateErrorDiff; }))
2699 {
2700 if(Debug)
2701 std::clog << "Another patch failed already, no point in processing this one." << std::endl;
2702 State = StateErrorDiff;
2703 return;
2704 }
2705
2706 std::string const UncompressedUnpatchedFile = GetPartialFileNameFromURI(Target.URI);
2707 std::string const UnpatchedFile = GetExistingFilename(UncompressedUnpatchedFile);
2708 if (UnpatchedFile.empty())
2709 {
2710 _error->Fatal("Unpatched file %s doesn't exist (anymore)!", UncompressedUnpatchedFile.c_str());
2711 State = StateErrorDiff;
2712 return;
2713 }
2714 std::string const PatchFile = GetMergeDiffsPatchFileName(UnpatchedFile, patch.file);
2715 std::string const PatchedFile = GetKeepCompressedFileName(UncompressedUnpatchedFile, Target);
2716
2717 switch (State)
2718 {
2719 case StateFetchDiff:
2720 Rename(DestFile, PatchFile);
2721
2722 // check if this is the last completed diff
2723 State = StateDoneDiff;
2724 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2725 I != allPatches->end(); ++I)
2726 if ((*I)->State != StateDoneDiff)
2727 {
2728 if(Debug)
2729 std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl;
2730 return;
2731 }
2732 // this is the last completed diff, so we are ready to apply now
2733 DestFile = GetKeepCompressedFileName(UncompressedUnpatchedFile + "-patched", Target);
2734 if(Debug)
2735 std::clog << "Sending to rred method: " << UnpatchedFile << std::endl;
2736 State = StateApplyDiff;
2737 Local = true;
2738 Desc.URI = "rred:" + UnpatchedFile;
2739 QueueURI(Desc);
2740 SetActiveSubprocess("rred");
2741 return;
2742 case StateApplyDiff:
	 // success in downloading & applying all diffs, finalize and clean up
2744 if(Debug)
2745 std::clog << "Queue patched file in place: " << std::endl
2746 << DestFile << " -> " << PatchedFile << std::endl;
2747
2748 // queue for copy by the transaction manager
2749 TransactionManager->TransactionStageCopy(this, DestFile, GetKeepCompressedFileName(GetFinalFilename(), Target));
2750
2751 // ensure the ed's are gone regardless of list-cleanup
2752 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2753 I != allPatches->end(); ++I)
2754 RemoveFile("pkgAcqIndexMergeDiffs::Done", GetMergeDiffsPatchFileName(UnpatchedFile, (*I)->patch.file));
2755 RemoveFile("pkgAcqIndexMergeDiffs::Done", UnpatchedFile);
2756
2757 // all set and done
2758 Complete = true;
2759 if(Debug)
2760 std::clog << "allDone: " << DestFile << "\n" << std::endl;
2761 return;
2762 case StateDoneDiff: _error->Fatal("Done called for %s which is in an invalid Done state", PatchFile.c_str()); break;
2763 case StateErrorDiff: _error->Fatal("Done called for %s which is in an invalid Error state", PatchFile.c_str()); break;
2764 }
2765}
2766 /*}}}*/
2767std::string pkgAcqIndexMergeDiffs::Custom600Headers() const /*{{{*/
2768{
2769 if(State != StateApplyDiff)
2770 return pkgAcqBaseIndex::Custom600Headers();
2771 std::ostringstream patchhashes;
2772 unsigned int seen_patches = 0;
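   // for the rred method this produces headers roughly like
   //   Start-SHA256-Hash: <hash of the unpatched index>
   //   Patch-0-SHA256-Hash: <hash of the first patch>
   //   Patch-1-SHA256-Hash: <hash of the second patch>
   // so that the method can verify what it is working with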
2773 for (auto && hs : (*allPatches)[0]->patch.result_hashes)
2774 patchhashes << "\nStart-" << hs.HashType() << "-Hash: " << hs.HashValue();
2775 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
2776 I != allPatches->end(); ++I)
2777 {
2778 HashStringList const ExpectedHashes = (*I)->patch.patch_hashes;
2779 for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
2780 patchhashes << "\nPatch-" << std::to_string(seen_patches) << "-" << hs->HashType() << "-Hash: " << hs->HashValue();
2781 ++seen_patches;
2782 }
2783 patchhashes << pkgAcqBaseIndex::Custom600Headers();
2784 return patchhashes.str();
2785}
2786 /*}}}*/
2787pkgAcqIndexMergeDiffs::~pkgAcqIndexMergeDiffs() {}
2788
2789// AcqIndex::AcqIndex - Constructor /*{{{*/
2790pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner,
2791 pkgAcqMetaClearSig * const TransactionManager,
2792 IndexTarget const &Target)
2793 : pkgAcqBaseIndex(Owner, TransactionManager, Target), d(NULL), Stage(STAGE_DOWNLOAD),
2794 CompressionExtensions(Target.Option(IndexTarget::COMPRESSIONTYPES))
2795{
2796 Init(Target.URI, Target.Description, Target.ShortDesc);
2797
2798 if(_config->FindB("Debug::Acquire::Transaction", false) == true)
2799 std::clog << "New pkgIndex with TransactionManager "
2800 << TransactionManager << std::endl;
2801}
2802 /*}}}*/
// AcqIndex::Init - deferred Constructor					/*{{{*/
2804static void NextCompressionExtension(std::string &CurrentCompressionExtension, std::string &CompressionExtensions, bool const preview)
2805{
2806 size_t const nextExt = CompressionExtensions.find(' ');
2807 if (nextExt == std::string::npos)
2808 {
2809 CurrentCompressionExtension = CompressionExtensions;
2810 if (preview == false)
2811 CompressionExtensions.clear();
2812 }
2813 else
2814 {
2815 CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt);
2816 if (preview == false)
2817 CompressionExtensions = CompressionExtensions.substr(nextExt+1);
2818 }
2819}
2820void pkgAcqIndex::Init(string const &URI, string const &URIDesc,
2821 string const &ShortDesc)
2822{
2823 Stage = STAGE_DOWNLOAD;
2824
2825 DestFile = GetPartialFileNameFromURI(URI);
2826 NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, false);
2827
2828 // store file size of the download to ensure the fetcher gives
2829 // accurate progress reporting
2830 FileSize = GetExpectedHashes().FileSize();
2831
2832 if (CurrentCompressionExtension == "uncompressed")
2833 {
2834 Desc.URI = URI;
2835 }
2836 else if (CurrentCompressionExtension == "by-hash")
2837 {
2838 NextCompressionExtension(CurrentCompressionExtension, CompressionExtensions, true);
2839 if(unlikely(CurrentCompressionExtension.empty()))
2840 return;
2841 if (CurrentCompressionExtension != "uncompressed")
2842 {
2843 Desc.URI = URI + '.' + CurrentCompressionExtension;
2844 DestFile = DestFile + '.' + CurrentCompressionExtension;
2845 }
2846
2847 HashStringList const Hashes = GetExpectedHashes();
2848 HashString const * const TargetHash = Hashes.find(NULL);
2849 if (unlikely(TargetHash == nullptr))
2850 return;
2851 std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
2852 size_t const trailing_slash = Desc.URI.find_last_of("/");
2853 if (unlikely(trailing_slash == std::string::npos))
2854 return;
2855 Desc.URI = Desc.URI.replace(
2856 trailing_slash,
2857 Desc.URI.substr(trailing_slash+1).size()+1,
2858 ByHash);
2859 }
2860 else if (unlikely(CurrentCompressionExtension.empty()))
2861 return;
2862 else
2863 {
2864 Desc.URI = URI + '.' + CurrentCompressionExtension;
2865 DestFile = DestFile + '.' + CurrentCompressionExtension;
2866 }
2867
2868
2869 Desc.Description = URIDesc;
2870 Desc.Owner = this;
2871 Desc.ShortDesc = ShortDesc;
2872
2873 QueueURI(Desc);
2874}
2875 /*}}}*/
2876// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
2877// ---------------------------------------------------------------------
2878/* The only header we use is the last-modified header. */
2879string pkgAcqIndex::Custom600Headers() const
2880{
2881
2882 string msg = "\nIndex-File: true";
2883
2884 if (TransactionManager->LastMetaIndexParser == NULL)
2885 {
2886 std::string const Final = GetFinalFilename();
2887
2888 struct stat Buf;
2889 if (stat(Final.c_str(),&Buf) == 0)
2890 msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime, false);
2891 }
2892
2893 if(Target.IsOptional)
2894 msg += "\nFail-Ignore: true";
2895
2896 return msg;
2897}
2898 /*}}}*/
2899// AcqIndex::Failed - getting the indexfile failed /*{{{*/
2900void pkgAcqIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
2901{
2902 pkgAcqBaseIndex::Failed(Message,Cnf);
2903
   // authorisation mismatches will not be fixed by other compression types
2905 if (Status != StatAuthError)
2906 {
2907 if (CompressionExtensions.empty() == false)
2908 {
2909 Init(Target.URI, Desc.Description, Desc.ShortDesc);
2910 Status = StatIdle;
2911 return;
2912 }
2913 }
2914
2915 if(Target.IsOptional && GetExpectedHashes().empty() && Stage == STAGE_DOWNLOAD)
2916 Status = StatDone;
2917 else
2918 TransactionManager->AbortTransaction();
2919}
2920 /*}}}*/
2921// AcqIndex::Done - Finished a fetch /*{{{*/
2922// ---------------------------------------------------------------------
/* This goes through a number of states. On the initial fetch the
   method could possibly return an alternate filename which points
   to the uncompressed version of the file. If this is so, the file
   is copied into the partial directory. In all other cases the file
   is decompressed from the compressed download. */
2928void pkgAcqIndex::Done(string const &Message,
2929 HashStringList const &Hashes,
2930 pkgAcquire::MethodConfig const * const Cfg)
2931{
2932 Item::Done(Message,Hashes,Cfg);
2933
2934 switch(Stage)
2935 {
2936 case STAGE_DOWNLOAD:
2937 StageDownloadDone(Message);
2938 break;
2939 case STAGE_DECOMPRESS_AND_VERIFY:
2940 StageDecompressDone();
2941 break;
2942 }
2943}
2944 /*}}}*/
2945// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/
2946void pkgAcqIndex::StageDownloadDone(string const &Message)
2947{
2948 Local = true;
2949 Complete = true;
2950
2951 std::string const AltFilename = LookupTag(Message,"Alt-Filename");
2952 std::string Filename = LookupTag(Message,"Filename");
2953
   // we need to verify the file against the current Release file again
   // on an if-modified-since hit to avoid a stale attack against us
2956 if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
2957 {
      // link FinalFile into partial/ so that we check the hash again
2959 string const FinalFile = GetExistingFilename(GetFinalFileNameFromURI(Target.URI));
2960 if (symlink(FinalFile.c_str(), DestFile.c_str()) != 0)
2961 _error->WarningE("pkgAcqIndex::StageDownloadDone", "Symlinking final file %s back to %s failed", FinalFile.c_str(), DestFile.c_str());
2962 else
2963 {
2964 EraseFileName = DestFile;
2965 Filename = DestFile;
2966 }
2967 Stage = STAGE_DECOMPRESS_AND_VERIFY;
2968 Desc.URI = "store:" + Filename;
2969 QueueURI(Desc);
2970 SetActiveSubprocess(::URI(Desc.URI).Access);
2971 return;
2972 }
2973 // methods like file:// give us an alternative (uncompressed) file
2974 else if (Target.KeepCompressed == false && AltFilename.empty() == false)
2975 {
2976 Filename = AltFilename;
2977 EraseFileName.clear();
2978 }
   // Methods like "file:" will give us a (compressed) FileName that is
   // not the "DestFile" we set; in this case we uncompress from the local file
2981 else if (Filename != DestFile && RealFileExists(DestFile) == false)
2982 {
      // symlinking ensures that the filename can be used for compression detection,
      // which is e.g. needed for by-hash downloads whose URIs carry no extension
2985 if (symlink(Filename.c_str(),DestFile.c_str()) != 0)
2986 _error->WarningE("pkgAcqIndex::StageDownloadDone", "Symlinking file %s to %s failed", Filename.c_str(), DestFile.c_str());
2987 else
2988 {
2989 EraseFileName = DestFile;
2990 Filename = DestFile;
2991 }
2992 }
2993
2994 Stage = STAGE_DECOMPRESS_AND_VERIFY;
2995 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
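   // copy: just moves the data along while hashes are calculated; store:
   // additionally recompresses, so it is only needed if the extensions differ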
2996 if (Filename != DestFile && flExtension(Filename) == flExtension(DestFile))
2997 Desc.URI = "copy:" + Filename;
2998 else
2999 Desc.URI = "store:" + Filename;
3000 if (DestFile == Filename)
3001 {
3002 if (CurrentCompressionExtension == "uncompressed")
3003 return StageDecompressDone();
3004 DestFile = "/dev/null";
3005 }
3006
3007 if (EraseFileName.empty() && Filename != AltFilename)
3008 EraseFileName = Filename;
3009
3010 // queue uri for the next stage
3011 QueueURI(Desc);
3012 SetActiveSubprocess(::URI(Desc.URI).Access);
3013}
3014 /*}}}*/
3015// AcqIndex::StageDecompressDone - Final verification /*{{{*/
3016void pkgAcqIndex::StageDecompressDone()
3017{
3018 if (DestFile == "/dev/null")
3019 DestFile = GetKeepCompressedFileName(GetPartialFileNameFromURI(Target.URI), Target);
3020
3021 // Done, queue for rename on transaction finished
3022 TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
3023}
3024 /*}}}*/
3025pkgAcqIndex::~pkgAcqIndex() {}
3026
3027
3028// AcqArchive::AcqArchive - Constructor /*{{{*/
3029// ---------------------------------------------------------------------
/* This just sets up the initial fetch environment and queues the first
   possibility */
3032pkgAcqArchive::pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sources,
3033 pkgRecords * const Recs,pkgCache::VerIterator const &Version,
3034 string &StoreFilename) :
3035 Item(Owner), d(NULL), LocalSource(false), Version(Version), Sources(Sources), Recs(Recs),
3036 StoreFilename(StoreFilename), Vf(Version.FileList()),
3037 Trusted(false)
3038{
3039 Retries = _config->FindI("Acquire::Retries",0);
3040
3041 if (Version.Arch() == 0)
3042 {
3043 _error->Error(_("I wasn't able to locate a file for the %s package. "
3044 "This might mean you need to manually fix this package. "
3045 "(due to missing arch)"),
3046 Version.ParentPkg().FullName().c_str());
3047 return;
3048 }
3049
   /* We need to find a filename to determine the extension. We make the
      assumption here that all the available sources for this version share
      the same extension. */
   // Skip entries flagged NotSource, they do not have file fields.
3054 for (; Vf.end() == false; ++Vf)
3055 {
3056 if (Vf.File().Flagged(pkgCache::Flag::NotSource))
3057 continue;
3058 break;
3059 }
3060
   // Does not really matter here; we are going to fail out below
3062 if (Vf.end() != true)
3063 {
3064 // If this fails to get a file name we will bomb out below.
3065 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
3066 if (_error->PendingError() == true)
3067 return;
3068
3069 // Generate the final file name as: package_version_arch.foo
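      // e.g. "apt_1.6.1_amd64.deb"; the quoting keeps the name filesystem
      // safe, an epoch like "2:1.0-1" for example ends up as "2%3a1.0-1"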
3070 StoreFilename = QuoteString(Version.ParentPkg().Name(),"_:") + '_' +
3071 QuoteString(Version.VerStr(),"_:") + '_' +
3072 QuoteString(Version.Arch(),"_:.") +
3073 "." + flExtension(Parse.FileName());
3074 }
3075
   // check if we have one trusted source for the package. If so, switch
   // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode
3078 bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false);
3079 bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false);
3080 bool seenUntrusted = false;
3081 for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
3082 {
3083 pkgIndexFile *Index;
3084 if (Sources->FindIndex(i.File(),Index) == false)
3085 continue;
3086
3087 if (debugAuth == true)
3088 std::cerr << "Checking index: " << Index->Describe()
3089 << "(Trusted=" << Index->IsTrusted() << ")" << std::endl;
3090
3091 if (Index->IsTrusted() == true)
3092 {
3093 Trusted = true;
3094 if (allowUnauth == false)
3095 break;
3096 }
3097 else
3098 seenUntrusted = true;
3099 }
3100
3101 // "allow-unauthenticated" restores apts old fetching behaviour
3102 // that means that e.g. unauthenticated file:// uris are higher
3103 // priority than authenticated http:// uris
3104 if (allowUnauth == true && seenUntrusted == true)
3105 Trusted = false;
3106
3107 // Select a source
3108 if (QueueNext() == false && _error->PendingError() == false)
3109 _error->Error(_("Can't find a source to download version '%s' of '%s'"),
3110 Version.VerStr(), Version.ParentPkg().FullName(false).c_str());
3111}
3112 /*}}}*/
3113// AcqArchive::QueueNext - Queue the next file source /*{{{*/
3114// ---------------------------------------------------------------------
/* This queues the next available file version for download. It checks if
   the archive is already available in the cache and stashes the expected
   hashes for checking later. */
3118bool pkgAcqArchive::QueueNext()
3119{
3120 for (; Vf.end() == false; ++Vf)
3121 {
3122 pkgCache::PkgFileIterator const PkgF = Vf.File();
      // Ignore entries flagged NotSource
3124 if (PkgF.Flagged(pkgCache::Flag::NotSource))
3125 continue;
3126
3127 // Try to cross match against the source list
3128 pkgIndexFile *Index;
3129 if (Sources->FindIndex(PkgF, Index) == false)
3130 continue;
3131 LocalSource = PkgF.Flagged(pkgCache::Flag::LocalSource);
3132
3133 // only try to get a trusted package from another source if that source
3134 // is also trusted
3135 if(Trusted && !Index->IsTrusted())
3136 continue;
3137
3138 // Grab the text package record
3139 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
3140 if (_error->PendingError() == true)
3141 return false;
3142
3143 string PkgFile = Parse.FileName();
3144 ExpectedHashes = Parse.Hashes();
3145
3146 if (PkgFile.empty() == true)
3147 return _error->Error(_("The package index files are corrupted. No Filename: "
3148 "field for package %s."),
3149 Version.ParentPkg().Name());
3150
3151 Desc.URI = Index->ArchiveURI(PkgFile);
3152 Desc.Description = Index->ArchiveInfo(Version);
3153 Desc.Owner = this;
3154 Desc.ShortDesc = Version.ParentPkg().FullName(true);
3155
3156 // See if we already have the file. (Legacy filenames)
3157 FileSize = Version->Size;
3158 string FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(PkgFile);
3159 struct stat Buf;
3160 if (stat(FinalFile.c_str(),&Buf) == 0)
3161 {
3162 // Make sure the size matches
3163 if ((unsigned long long)Buf.st_size == Version->Size)
3164 {
3165 Complete = true;
3166 Local = true;
3167 Status = StatDone;
3168 StoreFilename = DestFile = FinalFile;
3169 return true;
3170 }
3171
3172 /* Hmm, we have a file and its size does not match; this means it is
3173 an old-style file left over from a mismatched architecture */
3174 RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
3175 }
3176
3177 // Check it again using the new style output filenames
3178 FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename);
3179 if (stat(FinalFile.c_str(),&Buf) == 0)
3180 {
3181 // Make sure the size matches
3182 if ((unsigned long long)Buf.st_size == Version->Size)
3183 {
3184 Complete = true;
3185 Local = true;
3186 Status = StatDone;
3187 StoreFilename = DestFile = FinalFile;
3188 return true;
3189 }
3190
3191 /* Hmm, we have a file and its size does not match; this shouldn't
3192 happen. */
3193 RemoveFile("pkgAcqArchive::QueueNext", FinalFile);
3194 }
3195
3196 DestFile = _config->FindDir("Dir::Cache::Archives") + "partial/" + flNotDir(StoreFilename);
3197
3198 // Check the destination file
3199 if (stat(DestFile.c_str(),&Buf) == 0)
3200 {
3201 // Hmm, the partial file is too big; erase it
3202 if ((unsigned long long)Buf.st_size > Version->Size)
3203 RemoveFile("pkgAcqArchive::QueueNext", DestFile);
3204 else
3205 PartialSize = Buf.st_size;
3206 }
3207
3208 // Disables download of archives - useful if no real installation follows,
3209 // e.g. if we are just interested in proposed installation order
3210 if (_config->FindB("Debug::pkgAcqArchive::NoQueue", false) == true)
3211 {
3212 Complete = true;
3213 Local = true;
3214 Status = StatDone;
3215 StoreFilename = DestFile = FinalFile;
3216 return true;
3217 }
3218
3219 // Create the item
3220 Local = false;
3221 QueueURI(Desc);
3222
3223 ++Vf;
3224 return true;
3225 }
3226 return false;
3227}
3228 /*}}}*/
3229// AcqArchive::Done - Finished fetching /*{{{*/
3230// ---------------------------------------------------------------------
3231/* */
3232void pkgAcqArchive::Done(string const &Message, HashStringList const &Hashes,
3233 pkgAcquire::MethodConfig const * const Cfg)
3234{
3235 Item::Done(Message, Hashes, Cfg);
3236
3237 // Grab the output filename
3238 std::string const FileName = LookupTag(Message,"Filename");
3239 if (DestFile != FileName && RealFileExists(DestFile) == false)
3240 {
3241 StoreFilename = DestFile = FileName;
3242 Local = true;
3243 Complete = true;
3244 return;
3245 }
3246
3247 // Done, move it into position
3248 string const FinalFile = GetFinalFilename();
3249 Rename(DestFile,FinalFile);
3250 StoreFilename = DestFile = FinalFile;
3251 Complete = true;
3252}
3253 /*}}}*/
3254// AcqArchive::Failed - Failure handler /*{{{*/
3255// ---------------------------------------------------------------------
3256/* Here we try other sources */
3257void pkgAcqArchive::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)
3258{
3259 Item::Failed(Message,Cnf);
3260
3261 /* We don't really want to retry on failed media swaps; this prevents
3262 that. An interesting observation is that permanent failures are not
3263 recorded. */
3264 if (Cnf->Removable == true &&
3265 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3266 {
3267 // Vf = Version.FileList();
3268 while (Vf.end() == false) ++Vf;
3269 StoreFilename = string();
3270 return;
3271 }
3272
3273 Status = StatIdle;
3274 if (QueueNext() == false)
3275 {
3276 // This is the retry counter
3277 if (Retries != 0 &&
3278 Cnf->LocalOnly == false &&
3279 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3280 {
3281 Retries--;
3282 Vf = Version.FileList();
3283 if (QueueNext() == true)
3284 return;
3285 }
3286
3287 StoreFilename = string();
3288 Status = StatError;
3289 }
3290}
3291 /*}}}*/
3292APT_PURE bool pkgAcqArchive::IsTrusted() const /*{{{*/
3293{
3294 return Trusted;
3295}
3296 /*}}}*/
3297void pkgAcqArchive::Finished() /*{{{*/
3298{
3299 if (Status == pkgAcquire::Item::StatDone &&
3300 Complete == true)
3301 return;
3302 StoreFilename = string();
3303}
3304 /*}}}*/
3305std::string pkgAcqArchive::DescURI() const /*{{{*/
3306{
3307 return Desc.URI;
3308}
3309 /*}}}*/
3310std::string pkgAcqArchive::ShortDesc() const /*{{{*/
3311{
3312 return Desc.ShortDesc;
3313}
3314 /*}}}*/
3315pkgAcqArchive::~pkgAcqArchive() {}
3316
3317// AcqChangelog::pkgAcqChangelog - Constructors /*{{{*/
3318class pkgAcqChangelog::Private
3319{
3320 public:
3321 std::string FinalFile;
3322};
3323pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::VerIterator const &Ver,
3324 std::string const &DestDir, std::string const &DestFilename) :
3325 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(Ver.SourcePkgName()), SrcVersion(Ver.SourceVerStr())
3326{
3327 Desc.URI = URI(Ver);
3328 Init(DestDir, DestFilename);
3329}
3330// Some parameters are char* here as they likely come from char* interfaces, which can also return NULL
3331pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner, pkgCache::RlsFileIterator const &RlsFile,
3332 char const * const Component, char const * const SrcName, char const * const SrcVersion,
3333 const string &DestDir, const string &DestFilename) :
3334 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(SrcName), SrcVersion(SrcVersion)
3335{
3336 Desc.URI = URI(RlsFile, Component, SrcName, SrcVersion);
3337 Init(DestDir, DestFilename);
3338}
3339pkgAcqChangelog::pkgAcqChangelog(pkgAcquire * const Owner,
3340 std::string const &URI, char const * const SrcName, char const * const SrcVersion,
3341 const string &DestDir, const string &DestFilename) :
3342 pkgAcquire::Item(Owner), d(new pkgAcqChangelog::Private()), SrcName(SrcName), SrcVersion(SrcVersion)
3343{
3344 Desc.URI = URI;
3345 Init(DestDir, DestFilename);
3346}
3347void pkgAcqChangelog::Init(std::string const &DestDir, std::string const &DestFilename)
3348{
3349 if (Desc.URI.empty())
3350 {
3351 Status = StatError;
3352 // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
3353 strprintf(ErrorText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
3354 // Let the error message print something sensible rather than "Failed to fetch /"
3355 if (DestFilename.empty())
3356 DestFile = SrcName + ".changelog";
3357 else
3358 DestFile = DestFilename;
3359 Desc.URI = "changelog:/" + DestFile;
3360 return;
3361 }
3362
3363 std::string DestFileName;
3364 if (DestFilename.empty())
3365 DestFileName = flCombine(DestFile, SrcName + ".changelog");
3366 else
3367 DestFileName = flCombine(DestFile, DestFilename);
3368
3369 std::string const SandboxUser = _config->Find("APT::Sandbox::User");
3370 std::string const systemTemp = GetTempDir(SandboxUser);
3371 char tmpname[1000];
3372 snprintf(tmpname, sizeof(tmpname), "%s/apt-changelog-XXXXXX", systemTemp.c_str());
3373 if (NULL == mkdtemp(tmpname))
3374 {
3375 _error->Errno("mkdtemp", "mkdtemp failed in changelog acquire of %s %s", SrcName.c_str(), SrcVersion.c_str());
3376 Status = StatError;
3377 return;
3378 }
3379 TemporaryDirectory = tmpname;
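   // At this point TemporaryDirectory is something like
   // "/tmp/apt-changelog-k2Xb4A" (illustrative name; the parent directory is
   // whatever GetTempDir() returns for the configured sandbox user).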
3380
3381 ChangeOwnerAndPermissionOfFile("Item::QueueURI", TemporaryDirectory.c_str(),
3382 SandboxUser.c_str(), "root", 0700);
3383
3384 DestFile = flCombine(TemporaryDirectory, DestFileName);
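   // If a changelog for this source was fetched into DestDir before, seed the
   // temporary file with a copy carrying the old modification time; presumably
   // this lets the download method answer with an IMS-Hit instead of
   // transferring the whole changelog again.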
3385 if (DestDir.empty() == false)
3386 {
3387 d->FinalFile = flCombine(DestDir, DestFileName);
3388 if (RealFileExists(d->FinalFile))
3389 {
3390 FileFd file1, file2;
3391 if (file1.Open(DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Exclusive) &&
3392 file2.Open(d->FinalFile, FileFd::ReadOnly) && CopyFile(file2, file1))
3393 {
3394 struct timeval times[2];
3395 times[0].tv_sec = times[1].tv_sec = file2.ModificationTime();
3396 times[0].tv_usec = times[1].tv_usec = 0;
3397 utimes(DestFile.c_str(), times);
3398 }
3399 }
3400 }
3401
3402 Desc.ShortDesc = "Changelog";
3403 strprintf(Desc.Description, "%s %s %s Changelog", URI::SiteOnly(Desc.URI).c_str(), SrcName.c_str(), SrcVersion.c_str());
3404 Desc.Owner = this;
3405 QueueURI(Desc);
3406}
3407 /*}}}*/
3408std::string pkgAcqChangelog::URI(pkgCache::VerIterator const &Ver) /*{{{*/
3409{
3410 std::string const confOnline = "Acquire::Changelogs::AlwaysOnline";
3411 bool AlwaysOnline = _config->FindB(confOnline, false);
3412 if (AlwaysOnline == false)
3413 for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
3414 {
3415 pkgCache::PkgFileIterator const PF = VF.File();
3416 if (PF.Flagged(pkgCache::Flag::NotSource) || PF->Release == 0)
3417 continue;
3418 pkgCache::RlsFileIterator const RF = PF.ReleaseFile();
3419 if (RF->Origin != 0 && _config->FindB(confOnline + "::Origin::" + RF.Origin(), false))
3420 {
3421 AlwaysOnline = true;
3422 break;
3423 }
3424 }
3425 if (AlwaysOnline == false)
3426 {
3427 pkgCache::PkgIterator const Pkg = Ver.ParentPkg();
3428 if (Pkg->CurrentVer != 0 && Pkg.CurrentVer() == Ver)
3429 {
3430 std::string const basename = std::string("/usr/share/doc/") + Pkg.Name() + "/changelog";
3431 std::string const debianname = basename + ".Debian";
3432 if (FileExists(debianname))
3433 return "copy://" + debianname;
3434 else if (FileExists(debianname + ".gz"))
3435 return "gzip://" + debianname + ".gz";
3436 else if (FileExists(basename))
3437 return "copy://" + basename;
3438 else if (FileExists(basename + ".gz"))
3439 return "gzip://" + basename + ".gz";
3440 }
3441 }
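   // For the installed version the branch above short-circuits to local
   // documentation, e.g. returning something like
   // "gzip:///usr/share/doc/apt/changelog.Debian.gz" (illustrative path for a
   // package named "apt"); the remote lookup below is only reached if this is
   // not the installed version, AlwaysOnline is set, or no local changelog
   // file exists.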
3442
3443 char const * const SrcName = Ver.SourcePkgName();
3444 char const * const SrcVersion = Ver.SourceVerStr();
3445 // find the first source for this version which promises a changelog
3446 for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
3447 {
3448 pkgCache::PkgFileIterator const PF = VF.File();
3449 if (PF.Flagged(pkgCache::Flag::NotSource) || PF->Release == 0)
3450 continue;
3451 pkgCache::RlsFileIterator const RF = PF.ReleaseFile();
3452 std::string const uri = URI(RF, PF.Component(), SrcName, SrcVersion);
3453 if (uri.empty())
3454 continue;
3455 return uri;
3456 }
3457 return "";
3458}
3459std::string pkgAcqChangelog::URITemplate(pkgCache::RlsFileIterator const &Rls)
3460{
3461 if (Rls.end() == true || (Rls->Label == 0 && Rls->Origin == 0))
3462 return "";
3463 std::string const serverConfig = "Acquire::Changelogs::URI";
3464 std::string server;
3465#define APT_EMPTY_SERVER \
3466 if (server.empty() == false) \
3467 { \
3468 if (server != "no") \
3469 return server; \
3470 return ""; \
3471 }
3472#define APT_CHECK_SERVER(X, Y) \
3473 if (Rls->X != 0) \
3474 { \
3475 std::string const specialServerConfig = serverConfig + "::" + Y + #X + "::" + Rls.X(); \
3476 server = _config->Find(specialServerConfig); \
3477 APT_EMPTY_SERVER \
3478 }
3479 // this way e.g. Debian-Security can fall back to Debian
3480 APT_CHECK_SERVER(Label, "Override::")
3481 APT_CHECK_SERVER(Origin, "Override::")
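   // Illustrative override (not a shipped default): a line in apt.conf like
   //   Acquire::Changelogs::URI::Override::Origin::MyDistro "https://changelogs.example.org/@CHANGEPATH@_changelog";
   // is returned directly here, while a value of "no" disables changelog
   // downloads for that origin via APT_EMPTY_SERVER.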
3482
3483 if (RealFileExists(Rls.FileName()))
3484 {
3485 _error->PushToStack();
3486 FileFd rf;
3487 /* This can be costly. A caller wanting to get millions of URIs might
3488 want to do this on its own once and use Override settings.
3489 We don't do this here as Origin/Label are not as unique as they
3490 should be, so this could produce request-order-dependent anomalies */
3491 if (OpenMaybeClearSignedFile(Rls.FileName(), rf) == true)
3492 {
3493 pkgTagFile TagFile(&rf, rf.Size());
3494 pkgTagSection Section;
3495 if (TagFile.Step(Section) == true)
3496 server = Section.FindS("Changelogs");
3497 }
3498 _error->RevertToStack();
3499 APT_EMPTY_SERVER
3500 }
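   // The Release file field consulted above looks roughly like (illustrative):
   //   Changelogs: https://metadata.example.org/changelogs/@CHANGEPATH@_changelog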
3501
3502 APT_CHECK_SERVER(Label, "")
3503 APT_CHECK_SERVER(Origin, "")
3504#undef APT_CHECK_SERVER
3505#undef APT_EMPTY_SERVER
3506 return "";
3507}
3508std::string pkgAcqChangelog::URI(pkgCache::RlsFileIterator const &Rls,
3509 char const * const Component, char const * const SrcName,
3510 char const * const SrcVersion)
3511{
3512 return URI(URITemplate(Rls), Component, SrcName, SrcVersion);
3513}
3514std::string pkgAcqChangelog::URI(std::string const &Template,
3515 char const * const Component, char const * const SrcName,
3516 char const * const SrcVersion)
3517{
3518 if (Template.find("@CHANGEPATH@") == std::string::npos)
3519 return "";
3520
3521 // the path is: COMPONENT/SRC/SRCNAME/SRCNAME_SRCVER, e.g. main/a/apt/apt_1.1 or contrib/liba/libapt/libapt_2.0
3522 std::string Src = SrcName;
3523 std::string path = APT::String::Startswith(SrcName, "lib") ? Src.substr(0, 4) : Src.substr(0,1);
3524 path.append("/").append(Src).append("/");
3525 path.append(Src).append("_").append(StripEpoch(SrcVersion));
3526 // we omit component for releases without one (= flat-style repositories)
3527 if (Component != NULL && strlen(Component) != 0)
3528 path = std::string(Component) + "/" + path;
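   // A sketch of the full substitution with made-up values: Template
   // "https://changelogs.example.org/@CHANGEPATH@_changelog", Component "main",
   // SrcName "apt" and SrcVersion "2:1.1-1" yield
   // "https://changelogs.example.org/main/a/apt/apt_1.1-1_changelog"
   // (the epoch is removed by StripEpoch; "lib" sources get a four-letter
   // prefix directory instead of a one-letter one).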
3529
3530 return SubstVar(Template, "@CHANGEPATH@", path);
3531}
3532 /*}}}*/
3533// AcqChangelog::Failed - Failure handler /*{{{*/
3534void pkgAcqChangelog::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf)
3535{
3536 Item::Failed(Message,Cnf);
3537
3538 std::string errText;
3539 // TRANSLATOR: %s=%s is sourcename=sourceversion, e.g. apt=1.1
3540 strprintf(errText, _("Changelog unavailable for %s=%s"), SrcName.c_str(), SrcVersion.c_str());
3541
3542 // Error is probably something technical like 404 Not Found
3543 if (ErrorText.empty())
3544 ErrorText = errText;
3545 else
3546 ErrorText = errText + " (" + ErrorText + ")";
3547}
3548 /*}}}*/
3549// AcqChangelog::Done - Item downloaded OK /*{{{*/
3550void pkgAcqChangelog::Done(string const &Message,HashStringList const &CalcHashes,
3551 pkgAcquire::MethodConfig const * const Cnf)
3552{
3553 Item::Done(Message,CalcHashes,Cnf);
3554 if (d->FinalFile.empty() == false)
3555 {
3556 if (RemoveFile("pkgAcqChangelog::Done", d->FinalFile) == false ||
3557 Rename(DestFile, d->FinalFile) == false)
3558 Status = StatError;
3559 }
3560
3561 Complete = true;
3562}
3563 /*}}}*/
3564pkgAcqChangelog::~pkgAcqChangelog() /*{{{*/
3565{
3566 if (TemporaryDirectory.empty() == false)
3567 {
3568 RemoveFile("~pkgAcqChangelog", DestFile);
3569 rmdir(TemporaryDirectory.c_str());
3570 }
3571 delete d;
3572}
3573 /*}}}*/
3574
3575// AcqFile::pkgAcqFile - Constructor /*{{{*/
3576pkgAcqFile::pkgAcqFile(pkgAcquire * const Owner,string const &URI, HashStringList const &Hashes,
3577 unsigned long long const Size,string const &Dsc,string const &ShortDesc,
3578 const string &DestDir, const string &DestFilename,
3579 bool const IsIndexFile) :
3580 Item(Owner), d(NULL), IsIndexFile(IsIndexFile), ExpectedHashes(Hashes)
3581{
3582 Retries = _config->FindI("Acquire::Retries",0);
3583
3584 if(!DestFilename.empty())
3585 DestFile = DestFilename;
3586 else if(!DestDir.empty())
3587 DestFile = DestDir + "/" + flNotDir(URI);
3588 else
3589 DestFile = flNotDir(URI);
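   // e.g. with URI "http://example.org/pool/f/foo/foo_1.0.dsc" (hypothetical)
   // and neither DestDir nor DestFilename given, flNotDir() strips the
   // directory part and the download lands in "./foo_1.0.dsc".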
3590
3591 // Create the item
3592 Desc.URI = URI;
3593 Desc.Description = Dsc;
3594 Desc.Owner = this;
3595
3596 // Set the short description as provided by the caller
3597 Desc.ShortDesc = ShortDesc;
3598
3599 // Get the transfer sizes
3600 FileSize = Size;
3601 struct stat Buf;
3602 if (stat(DestFile.c_str(),&Buf) == 0)
3603 {
3604 // Hmm, the partial file is too big; erase it
3605 if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
3606 RemoveFile("pkgAcqFile", DestFile);
3607 else
3608 PartialSize = Buf.st_size;
3609 }
3610
3611 QueueURI(Desc);
3612}
3613 /*}}}*/
3614// AcqFile::Done - Item downloaded OK /*{{{*/
3615void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes,
3616 pkgAcquire::MethodConfig const * const Cnf)
3617{
3618 Item::Done(Message,CalcHashes,Cnf);
3619
3620 std::string const FileName = LookupTag(Message,"Filename");
3621 Complete = true;
3622
3623 // The file's timestamp matches
3624 if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
3625 return;
3626
3627 // We have to copy it into place
3628 if (RealFileExists(DestFile.c_str()) == false)
3629 {
3630 Local = true;
3631 if (_config->FindB("Acquire::Source-Symlinks",true) == false ||
3632 Cnf->Removable == true)
3633 {
3634 Desc.URI = "copy:" + FileName;
3635 QueueURI(Desc);
3636 return;
3637 }
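      // Illustrative configuration: Acquire::Source-Symlinks "false"; in
      // apt.conf forces this copy fallback even for local non-removable
      // sources; otherwise a symlink to the fetched file is created below.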
3638
3639 // Erase the file if it is a symlink so we can overwrite it
3640 struct stat St;
3641 if (lstat(DestFile.c_str(),&St) == 0)
3642 {
3643 if (S_ISLNK(St.st_mode) != 0)
3644 RemoveFile("pkgAcqFile::Done", DestFile);
3645 }
3646
3647 // Symlink the file
3648 if (symlink(FileName.c_str(),DestFile.c_str()) != 0)
3649 {
3650 _error->PushToStack();
3651 _error->Errno("pkgAcqFile::Done", "Symlinking file %s failed", DestFile.c_str());
3652 std::stringstream msg;
3653 _error->DumpErrors(msg, GlobalError::DEBUG, false);
3654 _error->RevertToStack();
3655 ErrorText = msg.str();
3656 Status = StatError;
3657 Complete = false;
3658 }
3659 }
3660}
3661 /*}}}*/
3662// AcqFile::Failed - Failure handler /*{{{*/
3663// ---------------------------------------------------------------------
3664/* Here we try other sources */
3665void pkgAcqFile::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf)
3666{
3667 Item::Failed(Message,Cnf);
3668
3669 // This is the retry counter
3670 if (Retries != 0 &&
3671 Cnf->LocalOnly == false &&
3672 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
3673 {
3674 --Retries;
3675 QueueURI(Desc);
3676 Status = StatIdle;
3677 return;
3678 }
3679
3680}
3681 /*}}}*/
3682string pkgAcqFile::Custom600Headers() const /*{{{*/
3683{
3684 if (IsIndexFile)
3685 return "\nIndex-File: true";
3686 return "";
3687}
3688 /*}}}*/
3689pkgAcqFile::~pkgAcqFile() {}