1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: acquire-item.cc,v 1.46.2.9 2004/01/16 18:51:11 mdz Exp $
4 /* ######################################################################
5
6 Acquire Item - Item to acquire
7
8 Each item can download to exactly one file at a time. This means you
9 cannot create an item that fetches two uri's to two files at the same
10 time. The pkgAcqIndex class creates a second class upon instantiation
11 to fetch the other index files because of this.
12
13 ##################################################################### */
14 /*}}}*/
15 // Include Files /*{{{*/
16 #include <config.h>
17
18 #include <apt-pkg/acquire-item.h>
19 #include <apt-pkg/configuration.h>
20 #include <apt-pkg/aptconfiguration.h>
21 #include <apt-pkg/sourcelist.h>
22 #include <apt-pkg/error.h>
23 #include <apt-pkg/strutl.h>
24 #include <apt-pkg/fileutl.h>
25 #include <apt-pkg/sha1.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/indexrecords.h>
28 #include <apt-pkg/acquire.h>
29 #include <apt-pkg/hashes.h>
30 #include <apt-pkg/indexfile.h>
31 #include <apt-pkg/pkgcache.h>
32 #include <apt-pkg/cacheiterators.h>
33 #include <apt-pkg/pkgrecords.h>
34
35 #include <stddef.h>
36 #include <stdlib.h>
37 #include <string.h>
38 #include <iostream>
39 #include <vector>
40 #include <sys/stat.h>
41 #include <unistd.h>
42 #include <errno.h>
43 #include <string>
44 #include <sstream>
45 #include <stdio.h>
46 #include <ctime>
47
48 #include <apti18n.h>
49 /*}}}*/
50
51 using namespace std;
52
53 static void printHashSumComparision(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/
54 {
55 if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false)
56 return;
57 std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl;
58 for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs)
59 std::cerr << "\t- " << hs->toStr() << std::endl;
60 std::cerr << " Actual Hash: " << std::endl;
61 for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs)
62 std::cerr << "\t- " << hs->toStr() << std::endl;
63 }
64 /*}}}*/
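// A quick note on enabling the debug output above (illustrative usage; the
// option name is the one read by the function right above):
//
//   Debug::Acquire::HashSumMismatch "true";      // apt.conf snippet
//   $ apt-get update -o Debug::Acquire::HashSumMismatch=true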
65
66 // Acquire::Item::Item - Constructor /*{{{*/
67 #if __GNUC__ >= 4
68 #pragma GCC diagnostic push
69 #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
70 #endif
71 pkgAcquire::Item::Item(pkgAcquire *Owner, HashStringList const &ExpectedHashes) :
72 Owner(Owner), FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false),
73 Local(false), QueueCounter(0), ExpectedAdditionalItems(0),
74 ExpectedHashes(ExpectedHashes)
75 {
76 Owner->Add(this);
77 Status = StatIdle;
78 }
79 #if __GNUC__ >= 4
80 #pragma GCC diagnostic pop
81 #endif
82 /*}}}*/
83 // Acquire::Item::~Item - Destructor /*{{{*/
84 // ---------------------------------------------------------------------
85 /* */
86 pkgAcquire::Item::~Item()
87 {
88 Owner->Remove(this);
89 }
90 /*}}}*/
91 // Acquire::Item::Failed - Item failed to download /*{{{*/
92 // ---------------------------------------------------------------------
93 /* We return to an idle state if there are still other queues that could
94 fetch this object */
95 void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
96 {
97 Status = StatIdle;
98 ErrorText = LookupTag(Message,"Message");
99 UsedMirror = LookupTag(Message,"UsedMirror");
100 if (QueueCounter <= 1)
101 {
102       /* This indicates that the file is not available right now but might
103          be available sometime later. If we do a retry cycle then this should
104          be retried [CDROMs] */
105 if (Cnf->LocalOnly == true &&
106 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
107 {
108 Status = StatIdle;
109 Dequeue();
110 return;
111 }
112
113 Status = StatError;
114 Dequeue();
115 }
116
117 // report mirror failure back to LP if we actually use a mirror
118 string FailReason = LookupTag(Message, "FailReason");
119 if(FailReason.size() != 0)
120 ReportMirrorFailure(FailReason);
121 else
122 ReportMirrorFailure(ErrorText);
123 }
124 /*}}}*/
125 // Acquire::Item::Start - Item has begun to download /*{{{*/
126 // ---------------------------------------------------------------------
127 /* Stash status and the file size. Note that setting Complete means
128    sub-phases of the acquire process such as decompression are operating */
129 void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size)
130 {
131 Status = StatFetching;
132 if (FileSize == 0 && Complete == false)
133 FileSize = Size;
134 }
135 /*}}}*/
136 // Acquire::Item::Done - Item downloaded OK /*{{{*/
137 // ---------------------------------------------------------------------
138 /* */
139 void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringList const &/*Hash*/,
140 pkgAcquire::MethodConfig * /*Cnf*/)
141 {
142 // We just downloaded something..
143 string FileName = LookupTag(Message,"Filename");
144 UsedMirror = LookupTag(Message,"UsedMirror");
145 if (Complete == false && !Local && FileName == DestFile)
146 {
147 if (Owner->Log != 0)
148 Owner->Log->Fetched(Size,atoi(LookupTag(Message,"Resume-Point","0").c_str()));
149 }
150
151 if (FileSize == 0)
152 FileSize= Size;
153 Status = StatDone;
154 ErrorText = string();
155 Owner->Dequeue(this);
156 }
157 /*}}}*/
158 // Acquire::Item::Rename - Rename a file /*{{{*/
159 // ---------------------------------------------------------------------
160 /* This helper function is used by a lot of item methods as their final
161 step */
162 void pkgAcquire::Item::Rename(string From,string To)
163 {
164 if (rename(From.c_str(),To.c_str()) != 0)
165 {
166 char S[300];
167 snprintf(S,sizeof(S),_("rename failed, %s (%s -> %s)."),strerror(errno),
168 From.c_str(),To.c_str());
169 Status = StatError;
170 ErrorText = S;
171 }
172 }
173 /*}}}*/
174 bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
175 {
176 if(FileExists(DestFile))
177 Rename(DestFile, DestFile + ".FAILED");
178
179 switch (error)
180 {
181 case HashSumMismatch:
182 ErrorText = _("Hash Sum mismatch");
183 Status = StatAuthError;
184 ReportMirrorFailure("HashChecksumFailure");
185 break;
186 case SizeMismatch:
187 ErrorText = _("Size mismatch");
188 Status = StatAuthError;
189 ReportMirrorFailure("SizeFailure");
190 break;
191 case InvalidFormat:
192 ErrorText = _("Invalid file format");
193 Status = StatError;
194          // do not report as usually it's not the mirror's fault, but the Portal/Proxy
195 break;
196 }
197 return false;
198 }
199 /*}}}*/
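// Note on the ".FAILED" convention used by RenameOnError(): the offending
// download is kept next to its destination for later inspection, e.g. an
// illustrative path would be
//   /var/lib/apt/lists/partial/<urified-name>.FAILED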
200 // Acquire::Item::ReportMirrorFailure /*{{{*/
201 // ---------------------------------------------------------------------
202 void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
203 {
204 // we only act if a mirror was used at all
205 if(UsedMirror.empty())
206 return;
207 #if 0
208 std::cerr << "\nReportMirrorFailure: "
209 << UsedMirror
210 << " Uri: " << DescURI()
211 << " FailCode: "
212 << FailCode << std::endl;
213 #endif
214 const char *Args[40];
215 unsigned int i = 0;
216 string report = _config->Find("Methods::Mirror::ProblemReporting",
217 "/usr/lib/apt/apt-report-mirror-failure");
218 if(!FileExists(report))
219 return;
220 Args[i++] = report.c_str();
221 Args[i++] = UsedMirror.c_str();
222 Args[i++] = DescURI().c_str();
223 Args[i++] = FailCode.c_str();
224 Args[i++] = NULL;
225 pid_t pid = ExecFork();
226 if(pid < 0)
227 {
228 _error->Error("ReportMirrorFailure Fork failed");
229 return;
230 }
231 else if(pid == 0)
232 {
233 execvp(Args[0], (char**)Args);
234 std::cerr << "Could not exec " << Args[0] << std::endl;
235 _exit(100);
236 }
237 if(!ExecWait(pid, "report-mirror-failure"))
238 {
239 _error->Warning("Couldn't report problem to '%s'",
240 _config->Find("Methods::Mirror::ProblemReporting").c_str());
241 }
242 }
243 /*}}}*/
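// The hook invoked above is an external script that receives the mirror,
// the URI and the failure code as positional arguments, roughly:
//
//   /usr/lib/apt/apt-report-mirror-failure <UsedMirror> <URI> <FailCode>
//
// The path can be overridden via Methods::Mirror::ProblemReporting; if the
// script does not exist the failure report is silently skipped.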
244 // AcqSubIndex::AcqSubIndex - Constructor /*{{{*/
245 // ---------------------------------------------------------------------
246 /* Get a sub-index file based on checksums from a 'master' file and
247 possibly query additional files */
248 pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI,
249 string const &URIDesc, string const &ShortDesc,
250 HashStringList const &ExpectedHashes)
251 : Item(Owner, ExpectedHashes)
252 {
253 /* XXX: Beware: Currently this class does nothing (of value) anymore ! */
254 Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false);
255
256 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
257 DestFile += URItoFileName(URI);
258
259 Desc.URI = URI;
260 Desc.Description = URIDesc;
261 Desc.Owner = this;
262 Desc.ShortDesc = ShortDesc;
263
264 QueueURI(Desc);
265
266 if(Debug)
267 std::clog << "pkgAcqSubIndex: " << Desc.URI << std::endl;
268 }
269 /*}}}*/
270 // AcqSubIndex::Custom600Headers - Insert custom request headers /*{{{*/
271 // ---------------------------------------------------------------------
272 /* The only header we use is the last-modified header. */
273 string pkgAcqSubIndex::Custom600Headers() const
274 {
275 string Final = _config->FindDir("Dir::State::lists");
276 Final += URItoFileName(Desc.URI);
277
278 struct stat Buf;
279 if (stat(Final.c_str(),&Buf) != 0)
280 return "\nIndex-File: true\nFail-Ignore: true\n";
281 return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
282 }
283 /*}}}*/
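// The string built above is appended to the request message sent to the
// transport method, so a typical fetch carries headers roughly like this
// (the Last-Modified value is illustrative):
//
//   Index-File: true
//   Fail-Ignore: true
//   Last-Modified: Sat, 07 Jun 2014 10:32:06 GMT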
284 void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
285 {
286 if(Debug)
287 std::clog << "pkgAcqSubIndex failed: " << Desc.URI << " with " << Message << std::endl;
288
289 Complete = false;
290 Status = StatDone;
291 Dequeue();
292
293 // No good Index is provided
294 }
295 /*}}}*/
296 void pkgAcqSubIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
297 pkgAcquire::MethodConfig *Cnf)
298 {
299 if(Debug)
300 std::clog << "pkgAcqSubIndex::Done(): " << Desc.URI << std::endl;
301
302 string FileName = LookupTag(Message,"Filename");
303 if (FileName.empty() == true)
304 {
305 Status = StatError;
306 ErrorText = "Method gave a blank filename";
307 return;
308 }
309
310 if (FileName != DestFile)
311 {
312 Local = true;
313 Desc.URI = "copy:" + FileName;
314 QueueURI(Desc);
315 return;
316 }
317
318 Item::Done(Message, Size, Hashes, Cnf);
319
320 string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI);
321
322 /* Downloaded invalid transindex => Error (LP: #346386) (Closes: #627642) */
323 indexRecords SubIndexParser;
324 if (FileExists(DestFile) == true && !SubIndexParser.Load(DestFile)) {
325 Status = StatError;
326 ErrorText = SubIndexParser.ErrorText;
327 return;
328 }
329
330 // success in downloading the index
331 // rename the index
332 if(Debug)
333 std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl;
334 Rename(DestFile,FinalFile);
335 chmod(FinalFile.c_str(),0644);
336 DestFile = FinalFile;
337
338 if(ParseIndex(DestFile) == false)
339 return Failed("", NULL);
340
341 Complete = true;
342 Status = StatDone;
343 Dequeue();
344 return;
345 }
346 /*}}}*/
347 bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/
348 {
349 indexRecords SubIndexParser;
350 if (FileExists(IndexFile) == false || SubIndexParser.Load(IndexFile) == false)
351 return false;
352    // do something with the downloaded index
353 return true;
354 }
355 /*}}}*/
356 // AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/
357 // ---------------------------------------------------------------------
358 /* Get the DiffIndex file first and see if there are patches available
359 * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the
360 * patches. If anything goes wrong in that process, it will fall back to
361 * the original packages file
362 */
363 pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
364 IndexTarget const * const Target,
365 HashStringList const &ExpectedHashes,
366 indexRecords *MetaIndexParser)
367 : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser)
368 {
369
370 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
371
372 RealURI = Target->URI;
373 Desc.Owner = this;
374 Desc.Description = Target->Description + "/DiffIndex";
375 Desc.ShortDesc = Target->ShortDesc;
376 Desc.URI = Target->URI + ".diff/Index";
377
378 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
379 DestFile += URItoFileName(Desc.URI);
380
381 if(Debug)
382 std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl;
383
384 // look for the current package file
385 CurrentPackagesFile = _config->FindDir("Dir::State::lists");
386 CurrentPackagesFile += URItoFileName(RealURI);
387
388 // FIXME: this file:/ check is a hack to prevent fetching
389 // from local sources. this is really silly, and
390 // should be fixed cleanly as soon as possible
391 if(!FileExists(CurrentPackagesFile) ||
392 Desc.URI.substr(0,strlen("file:/")) == "file:/")
393 {
394 // we don't have a pkg file or we don't want to queue
395       Failed("No index file, local or cancelled by user", NULL);
396 return;
397 }
398
399 if(Debug)
400 std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): "
401 << CurrentPackagesFile << std::endl;
402
403 QueueURI(Desc);
404
405 }
406 /*}}}*/
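// Rough shape of the pdiff machinery started by this constructor: the
// ".diff/Index" file fetched here is parsed by ParseDiffIndex() below, which
// queues either a pkgAcqIndexDiffs (patches applied one after another) or a
// set of pkgAcqIndexMergeDiffs (patches fetched in parallel, applied in one
// rred run); if anything fails, a plain pkgAcqIndex fetches the full file.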
407 // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
408 // ---------------------------------------------------------------------
409 /* The only header we use is the last-modified header. */
410 string pkgAcqDiffIndex::Custom600Headers() const
411 {
412 string Final = _config->FindDir("Dir::State::lists");
413 Final += URItoFileName(Desc.URI);
414
415 if(Debug)
416 std::clog << "Custom600Header-IMS: " << Final << std::endl;
417
418 struct stat Buf;
419 if (stat(Final.c_str(),&Buf) != 0)
420 return "\nIndex-File: true";
421
422 return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
423 }
424 /*}}}*/
425 bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
426 {
427 // failing here is fine: our caller will take care of trying to
428 // get the complete file if patching fails
429 if(Debug)
430 std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
431 << std::endl;
432
433 FileFd Fd(IndexDiffFile,FileFd::ReadOnly);
434 pkgTagFile TF(&Fd);
435 if (_error->PendingError() == true)
436 return false;
437
438 pkgTagSection Tags;
439 if(unlikely(TF.Step(Tags) == false))
440 return false;
441
442 HashStringList ServerHashes;
443 unsigned long long ServerSize = 0;
444
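   // For orientation, the stanza parsed below looks roughly like this
   // (hashes, sizes and patch names are illustrative; any type from
   // HashString::SupportedHashes() may appear instead of SHA256):
   //
   //   SHA256-Current: <hash> 9090290
   //   SHA256-History:
   //    <hash> 9060230 2014-05-01-1406.28
   //   SHA256-Patches:
   //    <hash> 4673 2014-05-01-1406.28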
445 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
446 {
447 std::string tagname = *type;
448 tagname.append("-Current");
449 std::string const tmp = Tags.FindS(tagname.c_str());
450 if (tmp.empty() == true)
451 continue;
452
453 string hash;
454 unsigned long long size;
455 std::stringstream ss(tmp);
456 ss >> hash >> size;
457 if (unlikely(hash.empty() == true))
458 continue;
459 if (unlikely(ServerSize != 0 && ServerSize != size))
460 continue;
461 ServerHashes.push_back(HashString(*type, hash));
462 ServerSize = size;
463 }
464
465 if (ServerHashes.usable() == false)
466 {
467 if (Debug == true)
468 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl;
469 return false;
470 }
471
472 if (ServerHashes != HashSums())
473 {
474 if (Debug == true)
475 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl;
476 return false;
477 }
478
479 if (ServerHashes.VerifyFile(CurrentPackagesFile) == true)
480 {
481       // we have the same hashes as the server, so we are done here
482 if(Debug)
483 std::clog << "pkgAcqDiffIndex: Package file is up-to-date" << std::endl;
484 // list cleanup needs to know that this file as well as the already
485 // present index is ours, so we create an empty diff to save it for us
486 new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser);
487 return true;
488 }
489
490 FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
491 Hashes LocalHashesCalc;
492 LocalHashesCalc.AddFD(fd);
493 HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
494
495 if(Debug)
496 std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at "
497 << fd.Name() << " " << fd.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl;
498
499 // parse all of (provided) history
500 vector<DiffInfo> available_patches;
501 bool firstAcceptedHashes = true;
502 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
503 {
504 if (LocalHashes.find(*type) == NULL)
505 continue;
506
507 std::string tagname = *type;
508 tagname.append("-History");
509 std::string const tmp = Tags.FindS(tagname.c_str());
510 if (tmp.empty() == true)
511 continue;
512
513 string hash, filename;
514 unsigned long long size;
515 std::stringstream ss(tmp);
516
517 while (ss >> hash >> size >> filename)
518 {
519 if (unlikely(hash.empty() == true || filename.empty() == true))
520 continue;
521
522 // see if we have a record for this file already
523 std::vector<DiffInfo>::iterator cur = available_patches.begin();
524 for (; cur != available_patches.end(); ++cur)
525 {
526 if (cur->file != filename || unlikely(cur->result_size != size))
527 continue;
528 cur->result_hashes.push_back(HashString(*type, hash));
529 break;
530 }
531 if (cur != available_patches.end())
532 continue;
533 if (firstAcceptedHashes == true)
534 {
535 DiffInfo next;
536 next.file = filename;
537 next.result_hashes.push_back(HashString(*type, hash));
538 next.result_size = size;
539 next.patch_size = 0;
540 available_patches.push_back(next);
541 }
542 else
543 {
544 if (Debug == true)
545 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
546 << " wasn't in the list for the first parsed hash! (history)" << std::endl;
547 break;
548 }
549 }
550 firstAcceptedHashes = false;
551 }
552
553 if (unlikely(available_patches.empty() == true))
554 {
555 if (Debug)
556 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
557 << "Couldn't find any patches for the patch series." << std::endl;
558 return false;
559 }
560
561 for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
562 {
563 if (LocalHashes.find(*type) == NULL)
564 continue;
565
566 std::string tagname = *type;
567 tagname.append("-Patches");
568 std::string const tmp = Tags.FindS(tagname.c_str());
569 if (tmp.empty() == true)
570 continue;
571
572 string hash, filename;
573 unsigned long long size;
574 std::stringstream ss(tmp);
575
576 while (ss >> hash >> size >> filename)
577 {
578 if (unlikely(hash.empty() == true || filename.empty() == true))
579 continue;
580
581 // see if we have a record for this file already
582 std::vector<DiffInfo>::iterator cur = available_patches.begin();
583 for (; cur != available_patches.end(); ++cur)
584 {
585 if (cur->file != filename)
586 continue;
587 if (unlikely(cur->patch_size != 0 && cur->patch_size != size))
588 continue;
589 cur->patch_hashes.push_back(HashString(*type, hash));
590 cur->patch_size = size;
591 break;
592 }
593 if (cur != available_patches.end())
594 continue;
595 if (Debug == true)
596 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
597 << " wasn't in the list for the first parsed hash! (patches)" << std::endl;
598 break;
599 }
600 }
601
602 bool foundStart = false;
603 for (std::vector<DiffInfo>::iterator cur = available_patches.begin();
604 cur != available_patches.end(); ++cur)
605 {
606 if (LocalHashes != cur->result_hashes)
607 continue;
608
609 available_patches.erase(available_patches.begin(), cur);
610 foundStart = true;
611 break;
612 }
613
614 if (foundStart == false || unlikely(available_patches.empty() == true))
615 {
616 if (Debug)
617 std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
618 << "Couldn't find the start of the patch series." << std::endl;
619 return false;
620 }
621
622 // patching with too many files is rather slow compared to a fast download
623 unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
624 if (fileLimit != 0 && fileLimit < available_patches.size())
625 {
626 if (Debug)
627 std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
628 << ") so fallback to complete download" << std::endl;
629 return false;
630 }
631
632 // calculate the size of all patches we have to get
633 // note that all sizes are uncompressed, while we download compressed files
634 unsigned long long patchesSize = 0;
635 for (std::vector<DiffInfo>::const_iterator cur = available_patches.begin();
636 cur != available_patches.end(); ++cur)
637 patchesSize += cur->patch_size;
638 unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
639 if (false && sizeLimit > 0 && (sizeLimit/100) < patchesSize)
640 {
641 if (Debug)
642 std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
643 << ") so fallback to complete download" << std::endl;
644 return false;
645 }
646
647 // we have something, queue the diffs
648 string::size_type const last_space = Description.rfind(" ");
649 if(last_space != string::npos)
650 Description.erase(last_space, Description.size()-last_space);
651
652    /* decide if we should download patches one by one or in one go:
653       The first is good if the server merges patches, but many don't, so
654       client-based merging can be attempted, in which case the second is better.
655       "bad things" will happen if patches are merged on the server,
656       but client-side merging is attempted as well */
657 bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
658 if (pdiff_merge == true)
659 {
660 // reprepro adds this flag if it has merged patches on the server
661 std::string const precedence = Tags.FindS("X-Patch-Precedence");
662 pdiff_merge = (precedence != "merged");
663 }
664
665 if (pdiff_merge == false)
666 {
667 new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser,
668 available_patches);
669 }
670 else
671 {
672 std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
673 for(size_t i = 0; i < available_patches.size(); ++i)
674 (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, Target,
675 ExpectedHashes,
676 MetaIndexParser,
677 available_patches[i],
678 diffs);
679 }
680
681 Complete = false;
682 Status = StatDone;
683 Dequeue();
684 return true;
685 }
686 /*}}}*/
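// The decisions above can be tuned via configuration; an illustrative
// apt.conf snippet for the options consulted in ParseDiffIndex():
//
//   Acquire::PDiffs::FileLimit "20";   // max number of patches worth fetching
//   Acquire::PDiffs::SizeLimit "100";  // patches may total at most 100% of the full file
//                                      // (note: this check is currently disabled above)
//   Acquire::PDiffs::Merge "true";     // allow client-side merged patching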
687 void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
688 {
689 if(Debug)
690 std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl
691 << "Falling back to normal index file acquire" << std::endl;
692
693 new pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser);
694
695 Complete = false;
696 Status = StatDone;
697 Dequeue();
698 }
699 /*}}}*/
700 void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
701 pkgAcquire::MethodConfig *Cnf)
702 {
703 if(Debug)
704 std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl;
705
706 Item::Done(Message, Size, Hashes, Cnf);
707
708 string FinalFile;
709 FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
710
711 // success in downloading the index
712 // rename the index
713 FinalFile += string(".IndexDiff");
714 if(Debug)
715 std::clog << "Renaming: " << DestFile << " -> " << FinalFile
716 << std::endl;
717 Rename(DestFile,FinalFile);
718 chmod(FinalFile.c_str(),0644);
719 DestFile = FinalFile;
720
721 if(!ParseDiffIndex(DestFile))
722 return Failed("Parsing pdiff Index failed", NULL);
723
724 Complete = true;
725 Status = StatDone;
726 Dequeue();
727 return;
728 }
729 /*}}}*/
730 // AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/
731 // ---------------------------------------------------------------------
732 /* The package diff is added to the queue. One object is constructed
733  * for each diff and the index
734  */
735 pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
736 struct IndexTarget const * const Target,
737 HashStringList const &ExpectedHashes,
738 indexRecords *MetaIndexParser,
739 vector<DiffInfo> diffs)
740 : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser),
741 available_patches(diffs)
742 {
743 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
744 DestFile += URItoFileName(Target->URI);
745
746 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
747
748 RealURI = Target->URI;
749 Desc.Owner = this;
750 Description = Target->Description;
751 Desc.ShortDesc = Target->ShortDesc;
752
753 if(available_patches.empty() == true)
754 {
755 // we are done (yeah!)
756 Finish(true);
757 }
758 else
759 {
760 // get the next diff
761 State = StateFetchDiff;
762 QueueNextDiff();
763 }
764 }
765 /*}}}*/
766 void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
767 {
768 if(Debug)
769 std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl
770 << "Falling back to normal index file acquire" << std::endl;
771 new pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser);
772 Finish();
773 }
774 /*}}}*/
775 // Finish - helper that cleans the item out of the fetcher queue /*{{{*/
776 void pkgAcqIndexDiffs::Finish(bool allDone)
777 {
778 // we restore the original name, this is required, otherwise
779 // the file will be cleaned
780 if(allDone)
781 {
782 DestFile = _config->FindDir("Dir::State::lists");
783 DestFile += URItoFileName(RealURI);
784
785 if(HashSums().usable() && !HashSums().VerifyFile(DestFile))
786 {
787 RenameOnError(HashSumMismatch);
788 Dequeue();
789 return;
790 }
791
792 // this is for the "real" finish
793 Complete = true;
794 Status = StatDone;
795 Dequeue();
796 if(Debug)
797 std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl;
798 return;
799 }
800
801 if(Debug)
802 std::clog << "Finishing: " << Desc.URI << std::endl;
803 Complete = false;
804 Status = StatDone;
805 Dequeue();
806 return;
807 }
808 /*}}}*/
809 bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
810 {
811
812    // calculate the hashes of the just-patched file
813 string FinalFile = _config->FindDir("Dir::State::lists");
814 FinalFile += URItoFileName(RealURI);
815
816 FileFd fd(FinalFile, FileFd::ReadOnly);
817 Hashes LocalHashesCalc;
818 LocalHashesCalc.AddFD(fd);
819 HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
820
821 if(Debug)
822 std::clog << "QueueNextDiff: " << FinalFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl;
823
824 if (unlikely(LocalHashes.usable() == false || ExpectedHashes.usable() == false))
825 {
826 Failed("Local/Expected hashes are not usable", NULL);
827 return false;
828 }
829
830 // final file reached before all patches are applied
831 if(LocalHashes == ExpectedHashes)
832 {
833 Finish(true);
834 return true;
835 }
836
837 // remove all patches until the next matching patch is found
838 // this requires the Index file to be ordered
839 for(vector<DiffInfo>::iterator I = available_patches.begin();
840 available_patches.empty() == false &&
841 I != available_patches.end() &&
842 I->result_hashes != LocalHashes;
843 ++I)
844 {
845 available_patches.erase(I);
846 }
847
848 // error checking and falling back if no patch was found
849 if(available_patches.empty() == true)
850 {
851 Failed("No patches left to reach target", NULL);
852 return false;
853 }
854
855 // queue the right diff
856 Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz";
857 Desc.Description = Description + " " + available_patches[0].file + string(".pdiff");
858 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
859 DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file);
860
861 if(Debug)
862 std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl;
863
864 QueueURI(Desc);
865
866 return true;
867 }
868 /*}}}*/
869 void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringList const &Hashes, /*{{{*/
870 pkgAcquire::MethodConfig *Cnf)
871 {
872 if(Debug)
873 std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl;
874
875 Item::Done(Message, Size, Hashes, Cnf);
876
877 string FinalFile;
878 FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI);
879
880 // success in downloading a diff, enter ApplyDiff state
881 if(State == StateFetchDiff)
882 {
883 FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip);
884 class Hashes LocalHashesCalc;
885 LocalHashesCalc.AddFD(fd);
886 HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
887
888 if (fd.Size() != available_patches[0].patch_size ||
889 available_patches[0].patch_hashes != LocalHashes)
890 {
891 Failed("Patch has Size/Hashsum mismatch", NULL);
892 return;
893 }
894
895       // rred expects the patch as $FinalFile.ed
896 Rename(DestFile,FinalFile+".ed");
897
898 if(Debug)
899 std::clog << "Sending to rred method: " << FinalFile << std::endl;
900
901 State = StateApplyDiff;
902 Local = true;
903 Desc.URI = "rred:" + FinalFile;
904 QueueURI(Desc);
905 ActiveSubprocess = "rred";
906 #if __GNUC__ >= 4
907 #pragma GCC diagnostic push
908 #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
909 #endif
910 Mode = "rred";
911 #if __GNUC__ >= 4
912 #pragma GCC diagnostic pop
913 #endif
914 return;
915 }
916
917
918 // success in download/apply a diff, queue next (if needed)
919 if(State == StateApplyDiff)
920 {
921 // remove the just applied patch
922 available_patches.erase(available_patches.begin());
923 unlink((FinalFile + ".ed").c_str());
924
925 // move into place
926 if(Debug)
927 {
928 std::clog << "Moving patched file in place: " << std::endl
929 << DestFile << " -> " << FinalFile << std::endl;
930 }
931 Rename(DestFile,FinalFile);
932 chmod(FinalFile.c_str(),0644);
933
934 // see if there is more to download
935 if(available_patches.empty() == false) {
936 new pkgAcqIndexDiffs(Owner, Target,
937 ExpectedHashes, MetaIndexParser,
938 available_patches);
939 return Finish();
940 } else
941 return Finish(true);
942 }
943 }
944 /*}}}*/
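// Sketch of the serial patching loop implemented by pkgAcqIndexDiffs: each
// patch is fetched as <RealURI>.diff/<name>.gz, checked against the
// patch_hashes/patch_size recorded in the Index, renamed to <FinalFile>.ed
// and handed to the rred method via a "rred:<FinalFile>" URI; once applied,
// the next patch is queued until the local hashes match ExpectedHashes.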
945 // AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/
946 pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner,
947 struct IndexTarget const * const Target,
948 HashStringList const &ExpectedHashes,
949 indexRecords *MetaIndexParser,
950 DiffInfo const &patch,
951 std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches)
952 : pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser),
953 patch(patch), allPatches(allPatches), State(StateFetchDiff)
954 {
955
956 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
957 DestFile += URItoFileName(Target->URI);
958
959 Debug = _config->FindB("Debug::pkgAcquire::Diffs",false);
960
961 RealURI = Target->URI;
962 Desc.Owner = this;
963 Description = Target->Description;
964 Desc.ShortDesc = Target->ShortDesc;
965
966 Desc.URI = RealURI + ".diff/" + patch.file + ".gz";
967 Desc.Description = Description + " " + patch.file + string(".pdiff");
968 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
969 DestFile += URItoFileName(RealURI + ".diff/" + patch.file);
970
971 if(Debug)
972 std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl;
973
974 QueueURI(Desc);
975 }
976 /*}}}*/
977 void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
978 {
979 if(Debug)
980 std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl;
981 Complete = false;
982 Status = StatDone;
983 Dequeue();
984
985 // check if we are the first to fail, otherwise we are done here
986 State = StateDoneDiff;
987 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
988 I != allPatches->end(); ++I)
989 if ((*I)->State == StateErrorDiff)
990 return;
991
992 // first failure means we should fallback
993 State = StateErrorDiff;
994 std::clog << "Falling back to normal index file acquire" << std::endl;
995 new pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser);
996 }
997 /*}}}*/
998 void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
999 pkgAcquire::MethodConfig *Cnf)
1000 {
1001 if(Debug)
1002 std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl;
1003
1004 Item::Done(Message,Size,Hashes,Cnf);
1005
1006 string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
1007
1008 if (State == StateFetchDiff)
1009 {
1010 FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip);
1011 class Hashes LocalHashesCalc;
1012 LocalHashesCalc.AddFD(fd);
1013 HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
1014
1015 if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes)
1016 {
1017 Failed("Patch has Size/Hashsum mismatch", NULL);
1018 return;
1019 }
1020
1021 // rred expects the patch as $FinalFile.ed.$patchname.gz
1022 Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz");
1023
1024 // check if this is the last completed diff
1025 State = StateDoneDiff;
1026 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
1027 I != allPatches->end(); ++I)
1028 if ((*I)->State != StateDoneDiff)
1029 {
1030 if(Debug)
1031 std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl;
1032 return;
1033 }
1034
1035 // this is the last completed diff, so we are ready to apply now
1036 State = StateApplyDiff;
1037
1038 if(Debug)
1039 std::clog << "Sending to rred method: " << FinalFile << std::endl;
1040
1041 Local = true;
1042 Desc.URI = "rred:" + FinalFile;
1043 QueueURI(Desc);
1044 ActiveSubprocess = "rred";
1045 #if __GNUC__ >= 4
1046 #pragma GCC diagnostic push
1047 #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
1048 #endif
1049 Mode = "rred";
1050 #if __GNUC__ >= 4
1051 #pragma GCC diagnostic pop
1052 #endif
1053 return;
1054 }
1055 // success in download/apply all diffs, clean up
1056 else if (State == StateApplyDiff)
1057 {
1058 // see if we really got the expected file
1059 if(ExpectedHashes.usable() && !ExpectedHashes.VerifyFile(DestFile))
1060 {
1061 RenameOnError(HashSumMismatch);
1062 return;
1063 }
1064
1065 // move the result into place
1066 if(Debug)
1067 std::clog << "Moving patched file in place: " << std::endl
1068 << DestFile << " -> " << FinalFile << std::endl;
1069 Rename(DestFile, FinalFile);
1070 chmod(FinalFile.c_str(), 0644);
1071
1072 // otherwise lists cleanup will eat the file
1073 DestFile = FinalFile;
1074
1075 // ensure the ed's are gone regardless of list-cleanup
1076 for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin();
1077 I != allPatches->end(); ++I)
1078 {
1079 std::string patch = FinalFile + ".ed." + (*I)->patch.file + ".gz";
1080 unlink(patch.c_str());
1081 }
1082
1083 // all set and done
1084 Complete = true;
1085 if(Debug)
1086 std::clog << "allDone: " << DestFile << "\n" << std::endl;
1087 }
1088 }
1089 /*}}}*/
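// In the merged variant all patches are fetched in parallel and stored as
// <FinalFile>.ed.<patchname>.gz; the item finishing last switches to
// StateApplyDiff and triggers a single rred run that applies the whole
// batch, after which the leftover .ed.*.gz files are unlinked.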
1090 // AcqIndex::AcqIndex - Constructor /*{{{*/
1091 // ---------------------------------------------------------------------
1092 /* The package file is added to the queue and a second class is
1093 instantiated to fetch the revision file */
1094 pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner,
1095 string URI,string URIDesc,string ShortDesc,
1096 HashStringList const &ExpectedHash, string comprExt)
1097 : pkgAcqBaseIndex(Owner, NULL, ExpectedHash, NULL), RealURI(URI)
1098 {
1099 if(comprExt.empty() == true)
1100 {
1101 // autoselect the compression method
1102 std::vector<std::string> types = APT::Configuration::getCompressionTypes();
1103 for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
1104 comprExt.append(*t).append(" ");
1105 if (comprExt.empty() == false)
1106 comprExt.erase(comprExt.end()-1);
1107 }
1108 CompressionExtension = comprExt;
1109
1110 Init(URI, URIDesc, ShortDesc);
1111 }
1112 pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target,
1113 HashStringList const &ExpectedHash,
1114 indexRecords *MetaIndexParser)
1115 : pkgAcqBaseIndex(Owner, Target, ExpectedHash, MetaIndexParser),
1116 RealURI(Target->URI)
1117 {
1118 // autoselect the compression method
1119 std::vector<std::string> types = APT::Configuration::getCompressionTypes();
1120 CompressionExtension = "";
1121 if (ExpectedHashes.usable())
1122 {
1123 for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
1124 if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true)
1125 CompressionExtension.append(*t).append(" ");
1126 }
1127 else
1128 {
1129 for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
1130 CompressionExtension.append(*t).append(" ");
1131 }
1132 if (CompressionExtension.empty() == false)
1133 CompressionExtension.erase(CompressionExtension.end()-1);
1134
1135 Init(Target->URI, Target->Description, Target->ShortDesc);
1136 }
1137 /*}}}*/
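// CompressionExtension is kept as a space-separated priority list, for
// example (depending on Acquire::CompressionTypes) something like
//
//   "xz bz2 gz uncompressed"
//
// Init() always tries the first entry, and Failed() pops it off to retry
// with the next one until the list is exhausted.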
1138 // AcqIndex::Init - deferred Constructor					/*{{{*/
1139 void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) {
1140 Decompression = false;
1141 Erase = false;
1142
1143 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
1144 DestFile += URItoFileName(URI);
1145
1146 std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
1147 std::string MetaKey;
1148 if (comprExt == "uncompressed")
1149 {
1150 Desc.URI = URI;
1151 if(Target)
1152 MetaKey = string(Target->MetaKey);
1153 }
1154 else
1155 {
1156 Desc.URI = URI + '.' + comprExt;
1157 DestFile = DestFile + '.' + comprExt;
1158 if(Target)
1159 MetaKey = string(Target->MetaKey) + '.' + comprExt;
1160 }
1161
1162 // load the filesize
1163 if(MetaIndexParser)
1164 {
1165 indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
1166 if(Record)
1167 FileSize = Record->Size;
1168
1169 InitByHashIfNeeded(MetaKey);
1170 }
1171
1172 Desc.Description = URIDesc;
1173 Desc.Owner = this;
1174 Desc.ShortDesc = ShortDesc;
1175
1176 QueueURI(Desc);
1177 }
1178 /*}}}*/
1179 // AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/
1180 // ---------------------------------------------------------------------
1181 /* */
1182 void pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey)
1183 {
1184 // TODO:
1185 // - (maybe?) add support for by-hash into the sources.list as flag
1186 // - make apt-ftparchive generate the hashes (and expire?)
1187 std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash";
1188 if(_config->FindB("APT::Acquire::By-Hash", false) == true ||
1189 _config->FindB(HostKnob, false) == true ||
1190 MetaIndexParser->GetSupportsAcquireByHash())
1191 {
1192 indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey);
1193 if(Record)
1194 {
1195 // FIXME: should we really use the best hash here? or a fixed one?
1196 const HashString *TargetHash = Record->Hashes.find("");
1197 std::string ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue();
1198 size_t trailing_slash = Desc.URI.find_last_of("/");
1199 Desc.URI = Desc.URI.replace(
1200 trailing_slash,
1201 Desc.URI.substr(trailing_slash+1).size()+1,
1202 ByHash);
1203 } else {
1204 _error->Warning(
1205 "Fetching ByHash requested but can not find record for %s",
1206 MetaKey.c_str());
1207 }
1208 }
1209 }
1210 /*}}}*/
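// Effect of the rewrite above, using an illustrative mirror layout and a
// made-up digest: the last path component is replaced with the by-hash
// location, turning
//   http://deb.example.org/debian/dists/sid/main/binary-amd64/Packages.xz
// into
//   http://deb.example.org/debian/dists/sid/main/binary-amd64/by-hash/SHA256/<hexdigest>
// It is enabled via APT::Acquire::By-Hash, the per-host
// APT::Acquire::<host>::By-Hash knob, or a Release file that advertises
// by-hash support.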
1211 // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/
1212 // ---------------------------------------------------------------------
1213 /* The only header we use is the last-modified header. */
1214 string pkgAcqIndex::Custom600Headers() const
1215 {
1216 std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
1217 string Final = _config->FindDir("Dir::State::lists");
1218 Final += URItoFileName(RealURI);
1219 if (_config->FindB("Acquire::GzipIndexes",false))
1220 Final += compExt;
1221
1222 string msg = "\nIndex-File: true";
1223
1224 struct stat Buf;
1225 if (stat(Final.c_str(),&Buf) == 0)
1226 msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
1227
1228 return msg;
1229 }
1230 /*}}}*/
1231 void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
1232 {
1233 size_t const nextExt = CompressionExtension.find(' ');
1234 if (nextExt != std::string::npos)
1235 {
1236 CompressionExtension = CompressionExtension.substr(nextExt+1);
1237 Init(RealURI, Desc.Description, Desc.ShortDesc);
1238 return;
1239 }
1240
1241 // on decompression failure, remove bad versions in partial/
1242 if (Decompression && Erase) {
1243 string s = _config->FindDir("Dir::State::lists") + "partial/";
1244 s.append(URItoFileName(RealURI));
1245 unlink(s.c_str());
1246 }
1247
1248 Item::Failed(Message,Cnf);
1249 }
1250 /*}}}*/
1251 // pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/
1252 std::string pkgAcqIndex::GetFinalFilename(std::string const &URI,
1253 std::string const &compExt)
1254 {
1255 std::string FinalFile = _config->FindDir("Dir::State::lists");
1256 FinalFile += URItoFileName(URI);
1257 if (_config->FindB("Acquire::GzipIndexes",false) == true)
1258 FinalFile += '.' + compExt;
1259 return FinalFile;
1260 }
1261 /*}}}*/
1262 // AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/
1263 void pkgAcqIndex::ReverifyAfterIMS(std::string const &FileName)
1264 {
1265 std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
1266 if (_config->FindB("Acquire::GzipIndexes",false) == true)
1267 DestFile += compExt;
1268
1269 string FinalFile = GetFinalFilename(RealURI, compExt);
1270 Rename(FinalFile, FileName);
1271 Decompression = true;
1272 Desc.URI = "copy:" + FileName;
1273 QueueURI(Desc);
1274 }
1275 /*}}}*/
1276 // AcqIndex::Done - Finished a fetch /*{{{*/
1277 // ---------------------------------------------------------------------
1278 /* This goes through a number of states.. On the initial fetch the
1279 method could possibly return an alternate filename which points
1280 to the uncompressed version of the file. If this is so the file
1281 is copied into the partial directory. In all other cases the file
1282 is decompressed with a gzip uri. */
1283 void pkgAcqIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes,
1284 pkgAcquire::MethodConfig *Cfg)
1285 {
1286 Item::Done(Message,Size,Hashes,Cfg);
1287 std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
1288
1289 if (Decompression == true)
1290 {
1291 if (ExpectedHashes.usable() && ExpectedHashes != Hashes)
1292 {
1293 Desc.URI = RealURI;
1294 RenameOnError(HashSumMismatch);
1295 printHashSumComparision(RealURI, ExpectedHashes, Hashes);
1296 return;
1297 }
1298
1299 // FIXME: this can go away once we only ever download stuff that
1300 // has a valid hash and we never do GET based probing
1301 //
1302 /* Always verify the index file for correctness (all indexes must
1303 * have a Package field) (LP: #346386) (Closes: #627642)
1304 */
1305 FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Extension);
1306 // Only test for correctness if the file is not empty (empty is ok)
1307 if (fd.Size() > 0)
1308 {
1309 pkgTagSection sec;
1310 pkgTagFile tag(&fd);
1311
1312 // all our current indexes have a field 'Package' in each section
1313 if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false)
1314 {
1315 RenameOnError(InvalidFormat);
1316 return;
1317 }
1318 }
1319
1320 // Done, move it into position
1321 string FinalFile = GetFinalFilename(RealURI, compExt);
1322 Rename(DestFile,FinalFile);
1323 chmod(FinalFile.c_str(),0644);
1324
1325 /* We restore the original name to DestFile so that the clean operation
1326 will work OK */
1327 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
1328 DestFile += URItoFileName(RealURI);
1329 if (_config->FindB("Acquire::GzipIndexes",false))
1330 DestFile += '.' + compExt;
1331
1332 // Remove the compressed version.
1333 if (Erase == true)
1334 unlink(DestFile.c_str());
1335
1336 return;
1337 }
1338
1339 Erase = false;
1340 Complete = true;
1341
1342    // Handle the uncompressed (Alt-Filename) case
1343 string FileName = LookupTag(Message,"Alt-Filename");
1344 if (FileName.empty() == false)
1345 {
1346 Decompression = true;
1347 Local = true;
1348 DestFile += ".decomp";
1349 Desc.URI = "copy:" + FileName;
1350 QueueURI(Desc);
1351 ActiveSubprocess = "copy";
1352 #if __GNUC__ >= 4
1353 #pragma GCC diagnostic push
1354 #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
1355 #endif
1356 Mode = "copy";
1357 #if __GNUC__ >= 4
1358 #pragma GCC diagnostic pop
1359 #endif
1360 return;
1361 }
1362
1363 FileName = LookupTag(Message,"Filename");
1364 if (FileName.empty() == true)
1365 {
1366 Status = StatError;
1367 ErrorText = "Method gave a blank filename";
1368 }
1369
1370 if (FileName == DestFile)
1371 Erase = true;
1372 else
1373 Local = true;
1374
1375 // do not reverify cdrom sources as apt-cdrom may rewrite the Packages
1376    // file when it's doing the indexcopy
1377 if (RealURI.substr(0,6) == "cdrom:" &&
1378 StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
1379 return;
1380
1381    // The file's timestamp matches: for non-local URLs reverify the local
1382    // file; for local files, uncompress again to ensure the hashsum still
1383    // matches the Release file
1384 if (!Local && StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
1385 {
1386 // set destfile to the final destfile
1387 if(_config->FindB("Acquire::GzipIndexes",false) == false)
1388 {
1389 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
1390 DestFile += URItoFileName(RealURI);
1391 }
1392
1393 ReverifyAfterIMS(FileName);
1394 return;
1395 }
1396 string decompProg;
1397
1398 // If we enable compressed indexes, queue for hash verification
1399 if (_config->FindB("Acquire::GzipIndexes",false))
1400 {
1401 DestFile = _config->FindDir("Dir::State::lists");
1402 DestFile += URItoFileName(RealURI) + '.' + compExt;
1403
1404 Decompression = true;
1405 Desc.URI = "copy:" + FileName;
1406 QueueURI(Desc);
1407
1408 return;
1409 }
1410
1411    // get the binary name for the configured compression type
1412    decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),"");
1413    if (decompProg.empty() == true && compExt == "uncompressed")
1414       decompProg = "copy";
1415    else if (decompProg.empty() == true)
1416    {
1417       _error->Error("Unsupported extension: %s", compExt.c_str());
1418       return;
1419    }
1420
1421 Decompression = true;
1422 DestFile += ".decomp";
1423 Desc.URI = decompProg + ":" + FileName;
1424 QueueURI(Desc);
1425
1426 ActiveSubprocess = decompProg;
1427 #if __GNUC__ >= 4
1428 #pragma GCC diagnostic push
1429 #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
1430 #endif
1431 Mode = ActiveSubprocess.c_str();
1432 #if __GNUC__ >= 4
1433 #pragma GCC diagnostic pop
1434 #endif
1435 }
1436 /*}}}*/
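// Two-pass flow of pkgAcqIndex::Done() in short: the first pass receives the
// compressed (or copied) file and requeues it through a
// "<decompressor>:<file>" or "copy:<file>" URI; the second pass
// (Decompression == true) verifies the hashes, sanity-checks the format and
// moves the result from partial/ into Dir::State::lists. With
// Acquire::GzipIndexes "true"; the compressed file itself is kept and
// re-verified instead of being unpacked.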
1437 // AcqIndexTrans::pkgAcqIndexTrans - Constructor /*{{{*/
1438 // ---------------------------------------------------------------------
1439 /* The Translation file is added to the queue */
1440 pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
1441 string URI,string URIDesc,string ShortDesc)
1442 : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashStringList(), "")
1443 {
1444 }
1445 pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const * const Target,
1446 HashStringList const &ExpectedHashes, indexRecords *MetaIndexParser)
1447 : pkgAcqIndex(Owner, Target, ExpectedHashes, MetaIndexParser)
1448 {
1449 // load the filesize
1450 indexRecords::checkSum *Record = MetaIndexParser->Lookup(string(Target->MetaKey));
1451 if(Record)
1452 FileSize = Record->Size;
1453 }
1454 /*}}}*/
1455 // AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/
1456 // ---------------------------------------------------------------------
1457 string pkgAcqIndexTrans::Custom600Headers() const
1458 {
1459 std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' '));
1460 string Final = _config->FindDir("Dir::State::lists");
1461 Final += URItoFileName(RealURI);
1462 if (_config->FindB("Acquire::GzipIndexes",false))
1463 Final += compExt;
1464
1465 struct stat Buf;
1466 if (stat(Final.c_str(),&Buf) != 0)
1467 return "\nFail-Ignore: true\nIndex-File: true";
1468 return "\nFail-Ignore: true\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
1469 }
1470 /*}}}*/
1471 // AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/
1472 // ---------------------------------------------------------------------
1473 /* */
1474 void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
1475 {
1476 size_t const nextExt = CompressionExtension.find(' ');
1477 if (nextExt != std::string::npos)
1478 {
1479 CompressionExtension = CompressionExtension.substr(nextExt+1);
1480 Init(RealURI, Desc.Description, Desc.ShortDesc);
1481 Status = StatIdle;
1482 return;
1483 }
1484
1485 if (Cnf->LocalOnly == true ||
1486 StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
1487 {
1488 // Ignore this
1489 Status = StatDone;
1490 Complete = false;
1491 Dequeue();
1492 return;
1493 }
1494
1495 Item::Failed(Message,Cnf);
1496 }
1497 /*}}}*/
1498 pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/
1499 string URI,string URIDesc,string ShortDesc,
1500 string MetaIndexURI, string MetaIndexURIDesc,
1501 string MetaIndexShortDesc,
1502 const vector<IndexTarget*>* IndexTargets,
1503 indexRecords* MetaIndexParser) :
1504 Item(Owner, HashStringList()), RealURI(URI), MetaIndexURI(MetaIndexURI),
1505 MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
1506 MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets)
1507 {
1508 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
1509 DestFile += URItoFileName(URI);
1510
1511    // remove any partially downloaded sig-file in partial/.
1512 // it may confuse proxies and is too small to warrant a
1513 // partial download anyway
1514 unlink(DestFile.c_str());
1515
1516 // Create the item
1517 Desc.Description = URIDesc;
1518 Desc.Owner = this;
1519 Desc.ShortDesc = ShortDesc;
1520 Desc.URI = URI;
1521
1522 string Final = _config->FindDir("Dir::State::lists");
1523 Final += URItoFileName(RealURI);
1524 if (RealFileExists(Final) == true)
1525 {
1526 // File was already in place. It needs to be re-downloaded/verified
1527 // because Release might have changed, we do give it a different
1528 // name than DestFile because otherwise the http method will
1529 // send If-Range requests and there are too many broken servers
1530 // out there that do not understand them
1531 LastGoodSig = DestFile+".reverify";
1532 Rename(Final,LastGoodSig);
1533 }
1534
1535 // we expect the indextargets + one additional Release file
1536 ExpectedAdditionalItems = IndexTargets->size() + 1;
1537
1538 QueueURI(Desc);
1539 }
1540 /*}}}*/
1541 pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
1542 {
1543 // if the file was never queued undo file-changes done in the constructor
1544 if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false &&
1545 LastGoodSig.empty() == false)
1546 {
1547 string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
1548 if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
1549 Rename(LastGoodSig, Final);
1550 }
1551
1552 }
1553 /*}}}*/
1554 // pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/
1555 // ---------------------------------------------------------------------
1556 /* The only header we use is the last-modified header. */
1557 string pkgAcqMetaSig::Custom600Headers() const
1558 {
1559 struct stat Buf;
1560 if (stat(LastGoodSig.c_str(),&Buf) != 0)
1561 return "\nIndex-File: true";
1562
1563 return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
1564 }
1565
1566 void pkgAcqMetaSig::Done(string Message,unsigned long long Size, HashStringList const &Hashes,
1567 pkgAcquire::MethodConfig *Cfg)
1568 {
1569 Item::Done(Message, Size, Hashes, Cfg);
1570
1571 string FileName = LookupTag(Message,"Filename");
1572 if (FileName.empty() == true)
1573 {
1574 Status = StatError;
1575 ErrorText = "Method gave a blank filename";
1576 return;
1577 }
1578
1579 if (FileName != DestFile)
1580 {
1581 // We have to copy it into place
1582 Local = true;
1583 Desc.URI = "copy:" + FileName;
1584 QueueURI(Desc);
1585 return;
1586 }
1587
1588 Complete = true;
1589
1590 // at this point pkgAcqMetaIndex takes over
1591 ExpectedAdditionalItems = 0;
1592
1593 // put the last known good file back on i-m-s hit (it will
1594 // be re-verified again)
1595 // Else do nothing, we have the new file in DestFile then
1596 if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
1597 Rename(LastGoodSig, DestFile);
1598
1599 // queue a pkgAcqMetaIndex to be verified against the sig we just retrieved
1600 new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc,
1601 MetaIndexShortDesc, DestFile, IndexTargets,
1602 MetaIndexParser);
1603
1604 }
1605 /*}}}*/
1606 void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/
1607 {
1608 string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
1609
1610 // at this point pkgAcqMetaIndex takes over
1611 ExpectedAdditionalItems = 0;
1612
1613 // if we get a network error we fail gracefully
1614 if(Status == StatTransientNetworkError)
1615 {
1616 Item::Failed(Message,Cnf);
1617 // move the sigfile back on transient network failures
1618 if(FileExists(LastGoodSig))
1619 Rename(LastGoodSig,Final);
1620
1621       // set the status back to StatTransientNetworkError, Item::Failed likes to reset it
1622 Status = pkgAcquire::Item::StatTransientNetworkError;
1623 return;
1624 }
1625
1626 // Delete any existing sigfile when the acquire failed
1627 unlink(Final.c_str());
1628
1629 // queue a pkgAcqMetaIndex with no sigfile
1630 new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
1631 "", IndexTargets, MetaIndexParser);
1632
1633 if (Cnf->LocalOnly == true ||
1634 StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
1635 {
1636 // Ignore this
1637 Status = StatDone;
1638 Complete = false;
1639 Dequeue();
1640 return;
1641 }
1642
1643 Item::Failed(Message,Cnf);
1644 }
1645 /*}}}*/
1646 pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/
1647 string URI,string URIDesc,string ShortDesc,
1648 string SigFile,
1649 const vector<IndexTarget*>* IndexTargets,
1650 indexRecords* MetaIndexParser) :
1651 Item(Owner, HashStringList()), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets),
1652 MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false)
1653 {
1654 DestFile = _config->FindDir("Dir::State::lists") + "partial/";
1655 DestFile += URItoFileName(URI);
1656
1657 // Create the item
1658 Desc.Description = URIDesc;
1659 Desc.Owner = this;
1660 Desc.ShortDesc = ShortDesc;
1661 Desc.URI = URI;
1662
1663    // we expect more items
1664 ExpectedAdditionalItems = IndexTargets->size();
1665
1666 QueueURI(Desc);
1667 }
1668 /*}}}*/
1669 // pkgAcqMetaIndex::Custom600Headers - Insert custom request headers /*{{{*/
1670 // ---------------------------------------------------------------------
1671 /* The only header we use is the last-modified header. */
1672 string pkgAcqMetaIndex::Custom600Headers() const
1673 {
1674 string Final = _config->FindDir("Dir::State::lists");
1675 Final += URItoFileName(RealURI);
1676
1677 struct stat Buf;
1678 if (stat(Final.c_str(),&Buf) != 0)
1679 return "\nIndex-File: true";
1680
1681 return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
1682 }
1683 /*}}}*/
1684 void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/
1685 pkgAcquire::MethodConfig *Cfg)
1686 {
1687 Item::Done(Message,Size,Hashes,Cfg);
1688
1689 // MetaIndexes are done in two passes: one to download the
1690 // metaindex with an appropriate method, and a second to verify it
1691 // with the gpgv method
1692
1693 if (AuthPass == true)
1694 {
1695 AuthDone(Message);
1696
1697 // all cool, move Release file into place
1698 Complete = true;
1699 }
1700 else
1701 {
1702 RetrievalDone(Message);
1703 if (!Complete)
1704 // Still more retrieving to do
1705 return;
1706
1707 if (SigFile == "")
1708 {
1709 // There was no signature file, so we are finished. Download
1710 // the indexes and do only hashsum verification if possible
1711 MetaIndexParser->Load(DestFile);
1712 QueueIndexes(false);
1713 }
1714 else
1715 {
1716 // FIXME: move this into pkgAcqMetaClearSig::Done on the next
1717 // ABI break
1718
1719 // if we expect a clear-text signature (InRelease), ensure that
1720 // this is what we got; if not, fail so that a plain
1721 // Release/Release.gpg pair gets queued instead, see #346386
1722 if (SigFile == DestFile && !StartsWithGPGClearTextSignature(DestFile))
1723 {
1724 Failed(Message, Cfg);
1725 return;
1726 }
1727
1728 // There was a signature file, so pass it to gpgv for
1729 // verification
1730 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1731 std::cerr << "Metaindex acquired, queueing gpg verification ("
1732 << SigFile << "," << DestFile << ")\n";
1733 AuthPass = true;
1734 Desc.URI = "gpgv:" + SigFile;
1735 QueueURI(Desc);
1736 ActiveSubprocess = "gpgv";
1737 #if __GNUC__ >= 4
1738 #pragma GCC diagnostic push
1739 #pragma GCC diagnostic ignored "-Wdeprecated-declarations"
1740 #endif
1741 Mode = "gpgv";
1742 #if __GNUC__ >= 4
1743 #pragma GCC diagnostic pop
1744 #endif
1745 return;
1746 }
1747 }
1748
1749 if (Complete == true)
1750 {
1751 string FinalFile = _config->FindDir("Dir::State::lists");
1752 FinalFile += URItoFileName(RealURI);
1753 if (SigFile == DestFile)
1754 SigFile = FinalFile;
1755 Rename(DestFile,FinalFile);
1756 chmod(FinalFile.c_str(),0644);
1757 DestFile = FinalFile;
1758 }
1759 }
1760 /*}}}*/
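// The two-pass flow described above, reduced to a rough phase sketch.
// MetaIndexPhase and NextMetaIndexPhase are hypothetical names, not apt API.
#if 0
enum MetaIndexPhase { PhaseRetrieval, PhaseVerification, PhaseFinished };

static MetaIndexPhase NextMetaIndexPhase(MetaIndexPhase const Current, bool const HaveSigFile)
{
   if (Current == PhaseRetrieval)
      // without a signature file there is nothing to hand to the gpgv method
      return HaveSigFile ? PhaseVerification : PhaseFinished;
   // after the gpgv pass (or once finished) there is nothing left to do here
   return PhaseFinished;
}
#endif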
1761 void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/
1762 {
1763 // We have just finished downloading a Release file (it is not
1764 // verified yet)
1765
1766 string FileName = LookupTag(Message,"Filename");
1767 if (FileName.empty() == true)
1768 {
1769 Status = StatError;
1770 ErrorText = "Method gave a blank filename";
1771 return;
1772 }
1773
1774 if (FileName != DestFile)
1775 {
1776 Local = true;
1777 Desc.URI = "copy:" + FileName;
1778 QueueURI(Desc);
1779 return;
1780 }
1781
1782 // make sure to verify against the right file on I-M-S hit
1783 IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false);
1784 if(IMSHit)
1785 {
1786 string FinalFile = _config->FindDir("Dir::State::lists");
1787 FinalFile += URItoFileName(RealURI);
1788 if (SigFile == DestFile)
1789 {
1790 SigFile = FinalFile;
1791 // the pkgAcqMetaClearSig constructor moved it out of the way;
1792 // on an IMS hit move the 'old' file back into place
1793 string const OldClearSig = DestFile + ".reverify";
1794 if (RealFileExists(OldClearSig) == true)
1795 Rename(OldClearSig, FinalFile);
1796 }
1797 DestFile = FinalFile;
1798 }
1799 Complete = true;
1800 }
1801 /*}}}*/
1802 void pkgAcqMetaIndex::AuthDone(string Message) /*{{{*/
1803 {
1804 // At this point, the gpgv method has succeeded, so there is a
1805 // valid signature from a key in the trusted keyring. We
1806 // perform additional verification of its contents, and use them
1807 // to verify the indexes we are about to download
1808
1809 if (!MetaIndexParser->Load(DestFile))
1810 {
1811 Status = StatAuthError;
1812 ErrorText = MetaIndexParser->ErrorText;
1813 return;
1814 }
1815
1816 if (!VerifyVendor(Message))
1817 {
1818 return;
1819 }
1820
1821 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1822 std::cerr << "Signature verification succeeded: "
1823 << DestFile << std::endl;
1824
1825 // do not trust any previously unverified content that we may have
1826 string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI));
1827 if (DestFile != SigFile)
1828 LastGoodSigFile.append(".gpg");
1829 LastGoodSigFile.append(".reverify");
1830 if(IMSHit == false && RealFileExists(LastGoodSigFile) == false)
1831 {
1832 for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
1833 Target != IndexTargets->end();
1834 ++Target)
1835 {
1836 // remove old indexes
1837 std::string const index = _config->FindDir("Dir::State::lists") +
1838 URItoFileName((*Target)->URI);
1839 unlink(index.c_str());
1840 // and also any old compressed variants of the index
1841 std::vector<std::string> types = APT::Configuration::getCompressionTypes();
1842 for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
1843 {
1844 std::string const compressed = index + '.' + (*t);
1845 unlink(compressed.c_str());
1846 }
1847 }
1848 }
1849
1850
1851 // Download further indexes with verification
1852 QueueIndexes(true);
1853
1854 // is it a clearsigned MetaIndex file?
1855 if (DestFile == SigFile)
1856 return;
1857
1858 // Done, move signature file into position
1859 string VerifiedSigFile = _config->FindDir("Dir::State::lists") +
1860 URItoFileName(RealURI) + ".gpg";
1861 Rename(SigFile,VerifiedSigFile);
1862 chmod(VerifiedSigFile.c_str(),0644);
1863 }
1864 /*}}}*/
1865 void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/
1866 {
1867 #if 0
1868 /* Reject invalid, existing Release files (LP: #346386) (Closes: #627642)
1869 * FIXME: Disabled; it breaks unsigned repositories without hashes */
1870 if (!verify && FileExists(DestFile) && !MetaIndexParser->Load(DestFile))
1871 {
1872 Status = StatError;
1873 ErrorText = MetaIndexParser->ErrorText;
1874 return;
1875 }
1876 #endif
1877 bool transInRelease = false;
1878 {
1879 std::vector<std::string> const keys = MetaIndexParser->MetaKeys();
1880 for (std::vector<std::string>::const_iterator k = keys.begin(); k != keys.end(); ++k)
1881 // FIXME: Feels wrong to check for a hardcoded string here, but what else should we do…
1882 if (k->find("Translation-") != std::string::npos)
1883 {
1884 transInRelease = true;
1885 break;
1886 }
1887 }
1888
1889 // at this point the real Items are loaded in the fetcher
1890 ExpectedAdditionalItems = 0;
1891 for (vector <IndexTarget*>::const_iterator Target = IndexTargets->begin();
1892 Target != IndexTargets->end();
1893 ++Target)
1894 {
1895 HashStringList ExpectedIndexHashes;
1896 const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey);
1897 bool compressedAvailable = false;
1898 if (Record == NULL)
1899 {
1900 if ((*Target)->IsOptional() == true)
1901 {
1902 std::vector<std::string> types = APT::Configuration::getCompressionTypes();
1903 for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t)
1904 if (MetaIndexParser->Exists((*Target)->MetaKey + "." + *t) == true)
1905 {
1906 compressedAvailable = true;
1907 break;
1908 }
1909 }
1910 else if (verify == true)
1911 {
1912 Status = StatAuthError;
1913 strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str());
1914 return;
1915 }
1916 }
1917 else
1918 {
1919 ExpectedIndexHashes = Record->Hashes;
1920 if (_config->FindB("Debug::pkgAcquire::Auth", false))
1921 {
1922 std::cerr << "Queueing: " << (*Target)->URI << std::endl
1923 << "Expected Hash:" << std::endl;
1924 for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs)
1925 std::cerr << "\t- " << hs->toStr() << std::endl;
1926 std::cerr << "For: " << Record->MetaKeyFilename << std::endl;
1927 }
1928 if (verify == true && ExpectedIndexHashes.empty() == true && (*Target)->IsOptional() == false)
1929 {
1930 Status = StatAuthError;
1931 strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str());
1932 return;
1933 }
1934 }
1935
1936 if ((*Target)->IsOptional() == true)
1937 {
1938 if ((*Target)->IsSubIndex() == true)
1939 new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description,
1940 (*Target)->ShortDesc, ExpectedIndexHashes);
1941 else if (transInRelease == false || Record != NULL || compressedAvailable == true)
1942 {
1943 if (_config->FindB("Acquire::PDiffs",true) == true && transInRelease == true &&
1944 MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)
1945 new pkgAcqDiffIndex(Owner, *Target, ExpectedIndexHashes, MetaIndexParser);
1946 else
1947 new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHashes, MetaIndexParser);
1948 }
1949 continue;
1950 }
1951
1952 /* Queue the Packages file (either diffs or the full file, depending
1953 on the user's option) - we also check whether the PDiff Index file is listed
1954 in the Meta-Index file. Ideally pkgAcqDiffIndex would test this
1955 instead, but passing the required info to it is too much hassle */
1956 if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false ||
1957 MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true))
1958 new pkgAcqDiffIndex(Owner, *Target, ExpectedIndexHashes, MetaIndexParser);
1959 else
1960 new pkgAcqIndex(Owner, *Target, ExpectedIndexHashes, MetaIndexParser);
1961 }
1962 }
1963 /*}}}*/
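// A sketch of the per-target pdiff decision made above: pdiffs are used when
// enabled and (if we verify) the Release file lists a .diff/Index entry for
// the target, otherwise the full index is fetched. UsePDiffsFor is a
// hypothetical helper name, not apt API.
#if 0
static bool UsePDiffsFor(indexRecords * const Parser, std::string const &MetaKey,
			 bool const verify)
{
   if (_config->FindB("Acquire::PDiffs", true) == false)
      return false;
   // without verification we cannot consult the Release file, so try pdiffs
   if (verify == false)
      return true;
   return Parser->Exists(MetaKey + ".diff/Index");
}
#endif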
1964 bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/
1965 {
1966 string::size_type pos;
1967
1968 // check for missing signatures (which were not fatal, otherwise we
1969 // would have bombed out earlier)
1970 string missingkeys;
1971 string msg = _("There is no public key available for the "
1972 "following key IDs:\n");
1973 pos = Message.find("NO_PUBKEY ");
1974 if (pos != std::string::npos)
1975 {
1976 string::size_type start = pos+strlen("NO_PUBKEY ");
1977 string Fingerprint = Message.substr(start, Message.find("\n")-start);
1978 missingkeys += (Fingerprint);
1979 }
1980 if(!missingkeys.empty())
1981 _error->Warning("%s", (msg + missingkeys).c_str());
1982
1983 string Transformed = MetaIndexParser->GetExpectedDist();
1984
1985 if (Transformed == "../project/experimental")
1986 {
1987 Transformed = "experimental";
1988 }
1989
1990 pos = Transformed.rfind('/');
1991 if (pos != string::npos)
1992 {
1993 Transformed = Transformed.substr(0, pos);
1994 }
1995
1996 if (Transformed == ".")
1997 {
1998 Transformed = "";
1999 }
2000
2001 if (_config->FindB("Acquire::Check-Valid-Until", true) == true &&
2002 MetaIndexParser->GetValidUntil() > 0) {
2003 time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil();
2004 if (invalid_since > 0)
2005 // TRANSLATOR: The first %s is the URL of the bad Release file, the second is
2006 // the time for which the file has been invalid - formatted in the same way as in
2007 // the download progress display (e.g. 7d 3h 42min 1s)
2008 return _error->Error(
2009 _("Release file for %s is expired (invalid since %s). "
2010 "Updates for this repository will not be applied."),
2011 RealURI.c_str(), TimeToStr(invalid_since).c_str());
2012 }
2013
2014 if (_config->FindB("Debug::pkgAcquire::Auth", false))
2015 {
2016 std::cerr << "Got Codename: " << MetaIndexParser->GetDist() << std::endl;
2017 std::cerr << "Expecting Dist: " << MetaIndexParser->GetExpectedDist() << std::endl;
2018 std::cerr << "Transformed Dist: " << Transformed << std::endl;
2019 }
2020
2021 if (MetaIndexParser->CheckDist(Transformed) == false)
2022 {
2023 // This might become fatal one day
2024 // Status = StatAuthError;
2025 // ErrorText = "Conflicting distribution; expected "
2026 // + MetaIndexParser->GetExpectedDist() + " but got "
2027 // + MetaIndexParser->GetDist();
2028 // return false;
2029 if (!Transformed.empty())
2030 {
2031 _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"),
2032 Desc.Description.c_str(),
2033 Transformed.c_str(),
2034 MetaIndexParser->GetDist().c_str());
2035 }
2036 }
2037
2038 return true;
2039 }
2040 /*}}}*/
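// A sketch of the expected-dist normalisation performed above, with
// hypothetical example inputs; TransformExpectedDist is not apt API.
//   "../project/experimental" -> "experimental"
//   "wheezy/updates"          -> "wheezy"   (strip after the last '/')
//   "."                       -> ""         (flat repositories)
#if 0
static std::string TransformExpectedDist(std::string Dist)
{
   if (Dist == "../project/experimental")
      Dist = "experimental";
   std::string::size_type const Slash = Dist.rfind('/');
   if (Slash != std::string::npos)
      Dist = Dist.substr(0, Slash);
   if (Dist == ".")
      Dist = "";
   return Dist;
}
#endif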
2041 // pkgAcqMetaIndex::Failed - no Release file or no signature file present /*{{{*/
2042 // ---------------------------------------------------------------------
2043 /* */
2044 void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)
2045 {
2046 if (AuthPass == true)
2047 {
2048 // gpgv method failed; if we have a good signature from a previous run, fall back to it
2049 string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI));
2050 if (DestFile != SigFile)
2051 LastGoodSigFile.append(".gpg");
2052 LastGoodSigFile.append(".reverify");
2053
2054 if(FileExists(LastGoodSigFile))
2055 {
2056 string VerifiedSigFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
2057 if (DestFile != SigFile)
2058 VerifiedSigFile.append(".gpg");
2059 Rename(LastGoodSigFile, VerifiedSigFile);
2060 Status = StatTransientNetworkError;
2061 _error->Warning(_("An error occurred during the signature "
2062 "verification. The repository is not updated "
2063 "and the previous index files will be used. "
2064 "GPG error: %s: %s\n"),
2065 Desc.Description.c_str(),
2066 LookupTag(Message,"Message").c_str());
2067 RunScripts("APT::Update::Auth-Failure");
2068 return;
2069 } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) {
2070 /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */
2071 _error->Error(_("GPG error: %s: %s"),
2072 Desc.Description.c_str(),
2073 LookupTag(Message,"Message").c_str());
2074 return;
2075 } else {
2076 _error->Warning(_("GPG error: %s: %s"),
2077 Desc.Description.c_str(),
2078 LookupTag(Message,"Message").c_str());
2079 }
2080 // gpgv method failed
2081 ReportMirrorFailure("GPGFailure");
2082 }
2083
2084 /* Always move the meta index, even if gpgv failed. This ensures
2085 * that PackageFile objects are correctly filled in */
2086 if (FileExists(DestFile)) {
2087 string FinalFile = _config->FindDir("Dir::State::lists");
2088 FinalFile += URItoFileName(RealURI);
2089 /* InRelease files become Release files, otherwise
2090 * they would be considered as trusted later on */
2091 if (SigFile == DestFile) {
2092 RealURI = RealURI.replace(RealURI.rfind("InRelease"), 9,
2093 "Release");
2094 FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9,
2095 "Release");
2096 SigFile = FinalFile;
2097 }
2098 Rename(DestFile,FinalFile);
2099 chmod(FinalFile.c_str(),0644);
2100
2101 DestFile = FinalFile;
2102 }
2103
2104 // No Release file was present, or verification failed, so fall
2105 // back to queueing Packages files without verification
2106 QueueIndexes(false);
2107 }
2108 /*}}}*/
2109 pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/
2110 string const &URI, string const &URIDesc, string const &ShortDesc,
2111 string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc,
2112 string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc,
2113 const vector<IndexTarget*>* IndexTargets,
2114 indexRecords* MetaIndexParser) :
2115 pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser),
2116 MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc),
2117 MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc)
2118 {
2119 SigFile = DestFile;
2120
2121 // index targets + (worst case:) Release/Release.gpg
2122 ExpectedAdditionalItems = IndexTargets->size() + 2;
2123
2124
2125 // keep the old InRelease around in case of transient network errors
2126 string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
2127 if (RealFileExists(Final) == true)
2128 {
2129 string const LastGoodSig = DestFile + ".reverify";
2130 Rename(Final,LastGoodSig);
2131 }
2132 }
2133 /*}}}*/
2134 pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
2135 {
2136 // if the file was never queued, undo the file changes done in the constructor
2137 if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false)
2138 {
2139 string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI);
2140 string const LastGoodSig = DestFile + ".reverify";
2141 if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true)
2142 Rename(LastGoodSig, Final);
2143 }
2144 }
2145 /*}}}*/
2146 // pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/
2147 // ---------------------------------------------------------------------
2148 // FIXME: this can go away once the InRelease file is used widely
2149 string pkgAcqMetaClearSig::Custom600Headers() const
2150 {
2151 string Final = _config->FindDir("Dir::State::lists");
2152 Final += URItoFileName(RealURI);
2153
2154 struct stat Buf;
2155 if (stat(Final.c_str(),&Buf) != 0)
2156 {
2157 Final = DestFile + ".reverify";
2158 if (stat(Final.c_str(),&Buf) != 0)
2159 return "\nIndex-File: true\nFail-Ignore: true\n";
2160 }
2161
2162 return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
2163 }
2164 /*}}}*/
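// A sketch of the Last-Modified source selection above: prefer the stored
// InRelease file, fall back to the ".reverify" copy set aside by the
// pkgAcqMetaClearSig constructor, otherwise send no timestamp.
// ClearSigTimestampSource is a hypothetical helper name.
#if 0
static std::string ClearSigTimestampSource(std::string const &Final, std::string const &Reverify)
{
   struct stat Buf;
   if (stat(Final.c_str(), &Buf) == 0)
      return Final;
   if (stat(Reverify.c_str(), &Buf) == 0)
      return Reverify;
   return "";   // no local copy at all -> send no Last-Modified
}
#endif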
2165 void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/
2166 {
2167 // we failed, we will not get additional items from this method
2168 ExpectedAdditionalItems = 0;
2169
2170 if (AuthPass == false)
2171 {
2172 // Remove the 'old' InRelease file if we try Release.gpg now, as otherwise
2173 // the file would stay around and give a false impression of authentication (CVE-2012-0214)
2174 string FinalFile = _config->FindDir("Dir::State::lists");
2175 FinalFile.append(URItoFileName(RealURI));
2176 if (FileExists(FinalFile))
2177 unlink(FinalFile.c_str());
2178
2179 new pkgAcqMetaSig(Owner,
2180 MetaSigURI, MetaSigURIDesc, MetaSigShortDesc,
2181 MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc,
2182 IndexTargets, MetaIndexParser);
2183 if (Cnf->LocalOnly == true ||
2184 StringToBool(LookupTag(Message, "Transient-Failure"), false) == false)
2185 Dequeue();
2186 }
2187 else
2188 pkgAcqMetaIndex::Failed(Message, Cnf);
2189 }
2190 /*}}}*/
2191 // AcqArchive::AcqArchive - Constructor /*{{{*/
2192 // ---------------------------------------------------------------------
2193 /* This just sets up the initial fetch environment and queues the first
2194 possibility */
2195 pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
2196 pkgRecords *Recs,pkgCache::VerIterator const &Version,
2197 string &StoreFilename) :
2198 Item(Owner, HashStringList()), Version(Version), Sources(Sources), Recs(Recs),
2199 StoreFilename(StoreFilename), Vf(Version.FileList()),
2200 Trusted(false)
2201 {
2202 Retries = _config->FindI("Acquire::Retries",0);
2203
2204 if (Version.Arch() == 0)
2205 {
2206 _error->Error(_("I wasn't able to locate a file for the %s package. "
2207 "This might mean you need to manually fix this package. "
2208 "(due to missing arch)"),
2209 Version.ParentPkg().FullName().c_str());
2210 return;
2211 }
2212
2213 /* We need to find a filename to determine the extension. We make the
2214 assumption here that all the available sources for this version share
2215 the same extension. */
2216 // Skip entries flagged NotSource; they do not have file fields.
2217 for (; Vf.end() == false; ++Vf)
2218 {
2219 if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
2220 continue;
2221 break;
2222 }
2223
2224 // Does not really matter here.. we are going to fail out below
2225 if (Vf.end() != true)
2226 {
2227 // If this fails to get a file name we will bomb out below.
2228 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
2229 if (_error->PendingError() == true)
2230 return;
2231
2232 // Generate the final file name as: package_version_arch.foo
2233 StoreFilename = QuoteString(Version.ParentPkg().Name(),"_:") + '_' +
2234 QuoteString(Version.VerStr(),"_:") + '_' +
2235 QuoteString(Version.Arch(),"_:.") +
2236 "." + flExtension(Parse.FileName());
2237 }
2238
2239 // check if we have at least one trusted source for the package. If so, switch
2240 // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode
2241 bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false);
2242 bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false);
2243 bool seenUntrusted = false;
2244 for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
2245 {
2246 pkgIndexFile *Index;
2247 if (Sources->FindIndex(i.File(),Index) == false)
2248 continue;
2249
2250 if (debugAuth == true)
2251 std::cerr << "Checking index: " << Index->Describe()
2252 << "(Trusted=" << Index->IsTrusted() << ")" << std::endl;
2253
2254 if (Index->IsTrusted() == true)
2255 {
2256 Trusted = true;
2257 if (allowUnauth == false)
2258 break;
2259 }
2260 else
2261 seenUntrusted = true;
2262 }
2263
2264 // "allow-unauthenticated" restores apts old fetching behaviour
2265 // that means that e.g. unauthenticated file:// uris are higher
2266 // priority than authenticated http:// uris
2267 if (allowUnauth == true && seenUntrusted == true)
2268 Trusted = false;
2269
2270 // Select a source
2271 if (QueueNext() == false && _error->PendingError() == false)
2272 _error->Error(_("Can't find a source to download version '%s' of '%s'"),
2273 Version.VerStr(), Version.ParentPkg().FullName(false).c_str());
2274 }
2275 /*}}}*/
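// A sketch of the Trusted decision made in the constructor above: one trusted
// source normally pins the download to trusted sources, but with
// APT::Get::AllowUnauthenticated a single untrusted source restores the old
// behaviour. ResolveTrusted is a hypothetical helper name, not apt API.
#if 0
static bool ResolveTrusted(bool const allowUnauth, bool const seenTrusted, bool const seenUntrusted)
{
   if (seenTrusted == false)
      return false;
   if (allowUnauth == true && seenUntrusted == true)
      return false;
   return true;
}
#endif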
2276 // AcqArchive::QueueNext - Queue the next file source /*{{{*/
2277 // ---------------------------------------------------------------------
2278 /* This queues the next available file version for download. It checks if
2279 the archive is already available in the cache and stashes the expected
2280 hashes for checking later. */
2281 bool pkgAcqArchive::QueueNext()
2282 {
2283 for (; Vf.end() == false; ++Vf)
2284 {
2285 // Ignore entries flagged NotSource
2286 if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
2287 continue;
2288
2289 // Try to cross match against the source list
2290 pkgIndexFile *Index;
2291 if (Sources->FindIndex(Vf.File(),Index) == false)
2292 continue;
2293
2294 // only try to get a trusted package from another source if that source
2295 // is also trusted
2296 if(Trusted && !Index->IsTrusted())
2297 continue;
2298
2299 // Grab the text package record
2300 pkgRecords::Parser &Parse = Recs->Lookup(Vf);
2301 if (_error->PendingError() == true)
2302 return false;
2303
2304 string PkgFile = Parse.FileName();
2305 ExpectedHashes = Parse.Hashes();
2306
2307 if (PkgFile.empty() == true)
2308 return _error->Error(_("The package index files are corrupted. No Filename: "
2309 "field for package %s."),
2310 Version.ParentPkg().Name());
2311
2312 Desc.URI = Index->ArchiveURI(PkgFile);
2313 Desc.Description = Index->ArchiveInfo(Version);
2314 Desc.Owner = this;
2315 Desc.ShortDesc = Version.ParentPkg().FullName(true);
2316
2317 // See if we already have the file. (Legacy filenames)
2318 FileSize = Version->Size;
2319 string FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(PkgFile);
2320 struct stat Buf;
2321 if (stat(FinalFile.c_str(),&Buf) == 0)
2322 {
2323 // Make sure the size matches
2324 if ((unsigned long long)Buf.st_size == Version->Size)
2325 {
2326 Complete = true;
2327 Local = true;
2328 Status = StatDone;
2329 StoreFilename = DestFile = FinalFile;
2330 return true;
2331 }
2332
2333 /* Hmm, we have a file and its size does not match; this means it is
2334 an old-style file for a mismatched architecture */
2335 unlink(FinalFile.c_str());
2336 }
2337
2338 // Check it again using the new style output filenames
2339 FinalFile = _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename);
2340 if (stat(FinalFile.c_str(),&Buf) == 0)
2341 {
2342 // Make sure the size matches
2343 if ((unsigned long long)Buf.st_size == Version->Size)
2344 {
2345 Complete = true;
2346 Local = true;
2347 Status = StatDone;
2348 StoreFilename = DestFile = FinalFile;
2349 return true;
2350 }
2351
2352 /* Hmm, we have a file and its size does not match; this shouldn't
2353 happen. */
2354 unlink(FinalFile.c_str());
2355 }
2356
2357 DestFile = _config->FindDir("Dir::Cache::Archives") + "partial/" + flNotDir(StoreFilename);
2358
2359 // Check the destination file
2360 if (stat(DestFile.c_str(),&Buf) == 0)
2361 {
2362 // If the partial file is too big erase it, otherwise resume from it
2363 if ((unsigned long long)Buf.st_size > Version->Size)
2364 unlink(DestFile.c_str());
2365 else
2366 PartialSize = Buf.st_size;
2367 }
2368
2369 // Disables download of archives - useful if no real installation follows,
2370 // e.g. if we are just interested in proposed installation order
2371 if (_config->FindB("Debug::pkgAcqArchive::NoQueue", false) == true)
2372 {
2373 Complete = true;
2374 Local = true;
2375 Status = StatDone;
2376 StoreFilename = DestFile = FinalFile;
2377 return true;
2378 }
2379
2380 // Create the item
2381 Local = false;
2382 QueueURI(Desc);
2383
2384 ++Vf;
2385 return true;
2386 }
2387 return false;
2388 }
2389 /*}}}*/
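// A sketch of the partial-file rule used by QueueNext() above: a leftover
// partial download is resumed only if it is not larger than the expected
// archive, otherwise it is discarded. UsablePartialSize is a hypothetical
// helper name, not apt API.
#if 0
static unsigned long long UsablePartialSize(std::string const &Partial, unsigned long long const Expected)
{
   struct stat Buf;
   if (stat(Partial.c_str(), &Buf) != 0)
      return 0;
   if ((unsigned long long)Buf.st_size > Expected)
   {
      unlink(Partial.c_str());
      return 0;
   }
   return Buf.st_size;
}
#endif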
2390 // AcqArchive::Done - Finished fetching /*{{{*/
2391 // ---------------------------------------------------------------------
2392 /* */
2393 void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList const &CalcHashes,
2394 pkgAcquire::MethodConfig *Cfg)
2395 {
2396 Item::Done(Message, Size, CalcHashes, Cfg);
2397
2398 // Check the size
2399 if (Size != Version->Size)
2400 {
2401 RenameOnError(SizeMismatch);
2402 return;
2403 }
2404
2405 // FIXME: could this usable() check impose *any* sort of security issue?
2406 if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes)
2407 {
2408 RenameOnError(HashSumMismatch);
2409 printHashSumComparision(DestFile, ExpectedHashes, CalcHashes);
2410 return;
2411 }
2412
2413 // Grab the output filename
2414 string FileName = LookupTag(Message,"Filename");
2415 if (FileName.empty() == true)
2416 {
2417 Status = StatError;
2418 ErrorText = "Method gave a blank filename";
2419 return;
2420 }
2421
2422 Complete = true;
2423
2424 // The method already has the file somewhere else; just reference it there
2425 if (FileName != DestFile)
2426 {
2427 StoreFilename = DestFile = FileName;
2428 Local = true;
2429 return;
2430 }
2431
2432 // Done, move it into position
2433 string FinalFile = _config->FindDir("Dir::Cache::Archives");
2434 FinalFile += flNotDir(StoreFilename);
2435 Rename(DestFile,FinalFile);
2436
2437 StoreFilename = DestFile = FinalFile;
2438 Complete = true;
2439 }
2440 /*}}}*/
2441 // AcqArchive::Failed - Failure handler /*{{{*/
2442 // ---------------------------------------------------------------------
2443 /* Here we try other sources */
2444 void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
2445 {
2446 ErrorText = LookupTag(Message,"Message");
2447
2448 /* We don't really want to retry on failed media swaps; this prevents
2449 that. An interesting observation is that permanent failures are not
2450 recorded. */
2451 if (Cnf->Removable == true &&
2452 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
2453 {
2454 // Vf = Version.FileList();
2455 while (Vf.end() == false) ++Vf;
2456 StoreFilename = string();
2457 Item::Failed(Message,Cnf);
2458 return;
2459 }
2460
2461 if (QueueNext() == false)
2462 {
2463 // This is the retry counter
2464 if (Retries != 0 &&
2465 Cnf->LocalOnly == false &&
2466 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
2467 {
2468 Retries--;
2469 Vf = Version.FileList();
2470 if (QueueNext() == true)
2471 return;
2472 }
2473
2474 StoreFilename = string();
2475 Item::Failed(Message,Cnf);
2476 }
2477 }
2478 /*}}}*/
2479 // AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/
2480 // ---------------------------------------------------------------------
2481 APT_PURE bool pkgAcqArchive::IsTrusted() const
2482 {
2483 return Trusted;
2484 }
2485 /*}}}*/
2486 // AcqArchive::Finished - Fetching has finished, tidy up /*{{{*/
2487 // ---------------------------------------------------------------------
2488 /* */
2489 void pkgAcqArchive::Finished()
2490 {
2491 if (Status == pkgAcquire::Item::StatDone &&
2492 Complete == true)
2493 return;
2494 StoreFilename = string();
2495 }
2496 /*}}}*/
2497 // AcqFile::pkgAcqFile - Constructor /*{{{*/
2498 // ---------------------------------------------------------------------
2499 /* The file is added to the queue */
2500 pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashes,
2501 unsigned long long Size,string Dsc,string ShortDesc,
2502 const string &DestDir, const string &DestFilename,
2503 bool IsIndexFile) :
2504 Item(Owner, Hashes), IsIndexFile(IsIndexFile)
2505 {
2506 Retries = _config->FindI("Acquire::Retries",0);
2507
2508 if(!DestFilename.empty())
2509 DestFile = DestFilename;
2510 else if(!DestDir.empty())
2511 DestFile = DestDir + "/" + flNotDir(URI);
2512 else
2513 DestFile = flNotDir(URI);
2514
2515 // Create the item
2516 Desc.URI = URI;
2517 Desc.Description = Dsc;
2518 Desc.Owner = this;
2519
2520 // Set the short description to the archive component
2521 Desc.ShortDesc = ShortDesc;
2522
2523 // Get the transfer sizes
2524 FileSize = Size;
2525 struct stat Buf;
2526 if (stat(DestFile.c_str(),&Buf) == 0)
2527 {
2528 // If the partial file is too big erase it, otherwise resume from it
2529 if ((Size > 0) && (unsigned long long)Buf.st_size > Size)
2530 unlink(DestFile.c_str());
2531 else
2532 PartialSize = Buf.st_size;
2533 }
2534
2535 QueueURI(Desc);
2536 }
2537 /*}}}*/
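// A sketch of the destination choice made in the constructor above: an
// explicit filename wins, then a destination directory plus the last URI
// component, then just the last URI component. PickDestFile is a hypothetical
// helper name, not apt API.
#if 0
static std::string PickDestFile(std::string const &URI, std::string const &DestDir,
				std::string const &DestFilename)
{
   if (DestFilename.empty() == false)
      return DestFilename;
   if (DestDir.empty() == false)
      return DestDir + "/" + flNotDir(URI);
   return flNotDir(URI);
}
#endif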
2538 // AcqFile::Done - Item downloaded OK /*{{{*/
2539 // ---------------------------------------------------------------------
2540 /* */
2541 void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList const &CalcHashes,
2542 pkgAcquire::MethodConfig *Cnf)
2543 {
2544 Item::Done(Message,Size,CalcHashes,Cnf);
2545
2546 // Check the hash
2547 if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes)
2548 {
2549 RenameOnError(HashSumMismatch);
2550 printHashSumComparision(DestFile, ExpectedHashes, CalcHashes);
2551 return;
2552 }
2553
2554 string FileName = LookupTag(Message,"Filename");
2555 if (FileName.empty() == true)
2556 {
2557 Status = StatError;
2558 ErrorText = "Method gave a blank filename";
2559 return;
2560 }
2561
2562 Complete = true;
2563
2564 // The file's timestamp matches, so we are done
2565 if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true)
2566 return;
2567
2568 // We have to copy it into place
2569 if (FileName != DestFile)
2570 {
2571 Local = true;
2572 if (_config->FindB("Acquire::Source-Symlinks",true) == false ||
2573 Cnf->Removable == true)
2574 {
2575 Desc.URI = "copy:" + FileName;
2576 QueueURI(Desc);
2577 return;
2578 }
2579
2580 // Erase the file if it is a symlink so we can overwrite it
2581 struct stat St;
2582 if (lstat(DestFile.c_str(),&St) == 0)
2583 {
2584 if (S_ISLNK(St.st_mode) != 0)
2585 unlink(DestFile.c_str());
2586 }
2587
2588 // Symlink the file
2589 if (symlink(FileName.c_str(),DestFile.c_str()) != 0)
2590 {
2591 ErrorText = "Link to " + DestFile + " failure ";
2592 Status = StatError;
2593 Complete = false;
2594 }
2595 }
2596 }
2597 /*}}}*/
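// A sketch of the copy-vs-symlink choice made in Done() above: copying is
// forced for removable media or when Acquire::Source-Symlinks is disabled.
// CopyInsteadOfSymlink is a hypothetical helper name, not apt API.
#if 0
static bool CopyInsteadOfSymlink(pkgAcquire::MethodConfig const * const Cnf)
{
   return _config->FindB("Acquire::Source-Symlinks", true) == false ||
	  Cnf->Removable == true;
}
#endif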
2598 // AcqFile::Failed - Failure handler /*{{{*/
2599 // ---------------------------------------------------------------------
2600 /* Here we try other sources */
2601 void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
2602 {
2603 ErrorText = LookupTag(Message,"Message");
2604
2605 // This is the retry counter
2606 if (Retries != 0 &&
2607 Cnf->LocalOnly == false &&
2608 StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
2609 {
2610 Retries--;
2611 QueueURI(Desc);
2612 return;
2613 }
2614
2615 Item::Failed(Message,Cnf);
2616 }
2617 /*}}}*/
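// A sketch of the retry rule shared by pkgAcqArchive::Failed and
// pkgAcqFile::Failed above: retry only transient failures of non-local
// sources while the Acquire::Retries budget lasts. ShouldRetry is a
// hypothetical helper name, not apt API.
#if 0
static bool ShouldRetry(int &Retries, pkgAcquire::MethodConfig const * const Cnf,
			std::string const &Message)
{
   if (Retries == 0 || Cnf->LocalOnly == true)
      return false;
   if (StringToBool(LookupTag(Message, "Transient-Failure"), false) == false)
      return false;
   --Retries;
   return true;
}
#endif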
2618 // AcqFile::Custom600Headers - Insert custom request headers /*{{{*/
2619 // ---------------------------------------------------------------------
2620 /* The only header we use is the last-modified header. */
2621 string pkgAcqFile::Custom600Headers() const
2622 {
2623 if (IsIndexFile)
2624 return "\nIndex-File: true";
2625 return "";
2626 }
2627 /*}}}*/