apt-pkg/pkgcachegen.cc
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #define APT_COMPATIBILITY 986
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26
27 #include <apt-pkg/tagfile.h>
28
29 #include <apti18n.h>
30
31 #include <vector>
32
33 #include <sys/stat.h>
34 #include <unistd.h>
35 #include <errno.h>
36 #include <stdio.h>
37 /*}}}*/
38 typedef vector<pkgIndexFile *>::iterator FileIterator;
39 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
40
41 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
42 // ---------------------------------------------------------------------
43 /* We set the dirty flag and make sure that it is written to disk */
44 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
45 Map(*pMap), Cache(pMap,false), Progress(Prog),
46 FoundFileDeps(0)
47 {
48 CurrentFile = 0;
49 memset(UniqHash,0,sizeof(UniqHash));
50
51 if (_error->PendingError() == true)
52 return;
53
54 if (Map.Size() == 0)
55 {
56 // Setup the map interface..
57 Cache.HeaderP = (pkgCache::Header *)Map.Data();
58 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
59 return;
60
61 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
62
63 // Starting header
64 *Cache.HeaderP = pkgCache::Header();
65 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
66 Cache.HeaderP->VerSysName = idxVerSysName;
67 map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
68 Cache.HeaderP->Architecture = idxArchitecture;
69 if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
70 return;
71 Cache.ReMap();
72 }
73 else
74 {
75 // Map directly from the existing file
76 Cache.ReMap();
77 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
78 if (Cache.VS != _system->VS)
79 {
80 _error->Error(_("Cache has an incompatible versioning system"));
81 return;
82 }
83 }
84
85 Cache.HeaderP->Dirty = true;
86 Map.Sync(0,sizeof(pkgCache::Header));
87 }
88 /*}}}*/
89 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
90 // ---------------------------------------------------------------------
91 /* We sync the data then unset the dirty flag in two steps so as to
92    avoid a problem during a crash */
93 pkgCacheGenerator::~pkgCacheGenerator()
94 {
95 if (_error->PendingError() == true)
96 return;
97 if (Map.Sync() == false)
98 return;
99
100 Cache.HeaderP->Dirty = false;
101 Map.Sync(0,sizeof(pkgCache::Header));
102 }
103 /*}}}*/
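/* Crash-safety note for the constructor/destructor pair above: the
   constructor sets Dirty and syncs only the header, while the destructor
   first syncs the whole map and only then clears Dirty and syncs the header
   again. If the process dies in between, the on-disk cache still carries
   Dirty == true, so whatever later maps the file can tell it was never
   finished and rebuild it instead of trusting its contents. */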
104 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
105 if (oldMap == newMap)
106 return;
107
108 Cache.ReMap(false);
109
110 CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;
111
112 for (size_t i = 0; i < _count(UniqHash); ++i)
113 if (UniqHash[i] != 0)
114 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;
115
116 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
117 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
118 (*i)->ReMap(oldMap, newMap);
119 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
120 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
121 (*i)->ReMap(oldMap, newMap);
122 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
123 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
124 (*i)->ReMap(oldMap, newMap);
125 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
126 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
127 (*i)->ReMap(oldMap, newMap);
128 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
129 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
130 (*i)->ReMap(oldMap, newMap);
131 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
132 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
133 (*i)->ReMap(oldMap, newMap);
134 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
135 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
136 (*i)->ReMap(oldMap, newMap);
137 } /*}}}*/
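// Note on the fix-up loops above: the Dynamic<> wrapper (declared in
// pkgcachegen.h) registers the wrapped iterator in its static toReMap list
// for the lifetime of the wrapper, which is what lets ReMap() adjust live
// iterators after the moveable mmap has been relocated. The pattern used
// throughout this file looks like this (mirroring MergeList below):
//
//    pkgCache::PkgIterator Pkg;
//    Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);  // registered until scope exit
//    // ... allocations may now grow and move the map; Pkg stays valid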
138 // CacheGenerator::WriteStringInMap /*{{{*/
139 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
140 const unsigned long &Len) {
141 void const * const oldMap = Map.Data();
142 map_ptrloc const index = Map.WriteString(String, Len);
143 if (index != 0)
144 ReMap(oldMap, Map.Data());
145 return index;
146 }
147 /*}}}*/
148 // CacheGenerator::WriteStringInMap /*{{{*/
149 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
150 void const * const oldMap = Map.Data();
151 map_ptrloc const index = Map.WriteString(String);
152 if (index != 0)
153 ReMap(oldMap, Map.Data());
154 return index;
155 }
156 /*}}}*/
157 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
158 void const * const oldMap = Map.Data();
159 map_ptrloc const index = Map.Allocate(size);
160 if (index != 0)
161 ReMap(oldMap, Map.Data());
162 return index;
163 }
164 /*}}}*/
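/* The three helpers above funnel every allocation through the moveable
   DynamicMMap and call ReMap() whenever Map.Data() has changed, so iterators
   guarded by Dynamic<> survive a relocation automatically. Raw pointers and
   map_ptrloc* cursors do not, which is why callers such as MergeList keep the
   old base address and rebase by hand, roughly (names illustrative):

      void const * const oldMap = Map.Data();
      map_ptrloc const idx = WriteStringInMap(SomeString);   // may grow the map
      if (oldMap != Map.Data())
         LastPtr += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
*/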
165 // CacheGenerator::MergeList - Merge the package list /*{{{*/
166 // ---------------------------------------------------------------------
167 /* This provides the generation of the entries in the cache. Each loop
168    iteration processes a single package record from the underlying parse engine. */
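/* In outline: an empty version string means the record only carries
   description data, so we merely attach any new description to the versions
   we already know. Otherwise the version list (kept sorted by CmpVersion,
   with VersionHash as tie-breaker for equal strings) is searched; a match is
   just linked to the current file, a miss gets a freshly allocated Version,
   VerFile and Description. When OutVer is given, the first merged record is
   returned immediately, which is used for single-record parsing. */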
169 bool pkgCacheGenerator::MergeList(ListParser &List,
170 pkgCache::VerIterator *OutVer)
171 {
172 List.Owner = this;
173
174 unsigned int Counter = 0;
175 while (List.Step() == true)
176 {
177 string const PackageName = List.Package();
178 if (PackageName.empty() == true)
179 return false;
180
181 string const Arch = List.Architecture();
182
183 // Get a pointer to the package structure
184 pkgCache::PkgIterator Pkg;
185 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
186 if (NewPackage(Pkg, PackageName, Arch) == false)
187 return _error->Error(_("Error occurred while processing %s (NewPackage)"),PackageName.c_str());
188 Counter++;
189 if (Counter % 100 == 0 && Progress != 0)
190 Progress->Progress(List.Offset());
191
192 /* Get a pointer to the version structure. We know the list is sorted
193 so we use that fact in the search. Insertion of new versions is
194 done with correct sorting */
195 string Version = List.Version();
196 if (Version.empty() == true)
197 {
198 // we first process the package, then the descriptions
199 	 // (this has the bonus that we get an MMap error when we run out
200 // of MMap space)
201 pkgCache::VerIterator Ver(Cache);
202 Dynamic<pkgCache::VerIterator> DynVer(Ver);
203 if (List.UsePackage(Pkg, Ver) == false)
204 return _error->Error(_("Error occurred while processing %s (UsePackage1)"),
205 PackageName.c_str());
206
207 // Find the right version to write the description
208 MD5SumValue CurMd5 = List.Description_md5();
209 Ver = Pkg.VersionList();
210
211 for (; Ver.end() == false; ++Ver)
212 {
213 pkgCache::DescIterator Desc = Ver.DescriptionList();
214 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
215 map_ptrloc *LastDesc = &Ver->DescriptionList;
216 bool duplicate=false;
217
218 // don't add a new description if we have one for the given
219 // md5 && language
220 for ( ; Desc.end() == false; Desc++)
221 if (MD5SumValue(Desc.md5()) == CurMd5 &&
222 Desc.LanguageCode() == List.DescriptionLanguage())
223 duplicate=true;
224 if(duplicate)
225 continue;
226
227 for (Desc = Ver.DescriptionList();
228 Desc.end() == false;
229 LastDesc = &Desc->NextDesc, Desc++)
230 {
231 if (MD5SumValue(Desc.md5()) == CurMd5)
232 {
233 // Add new description
234 void const * const oldMap = Map.Data();
235 map_ptrloc const descindex = NewDescription(Desc, List.DescriptionLanguage(), CurMd5, *LastDesc);
236 if (oldMap != Map.Data())
237 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
238 *LastDesc = descindex;
239 Desc->ParentPkg = Pkg.Index();
240
241 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
242 return _error->Error(_("Error occurred while processing %s (NewFileDesc1)"),PackageName.c_str());
243 break;
244 }
245 }
246 }
247
248 continue;
249 }
250
251 pkgCache::VerIterator Ver = Pkg.VersionList();
252 Dynamic<pkgCache::VerIterator> DynVer(Ver);
253 map_ptrloc *LastVer = &Pkg->VersionList;
254 void const * oldMap = Map.Data();
255 int Res = 1;
256 unsigned long const Hash = List.VersionHash();
257 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
258 {
259 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
260 	 // Version is higher than the current version - insert here
261 if (Res > 0)
262 break;
263 	 // Version strings are equal - is the hash also equal?
264 if (Res == 0 && Ver->Hash == Hash)
265 break;
266 	 // proceed with the next one till we either have the right version
267 	 // or find another version (which will be lower)
268 }
269
270 /* We already have a version for this item, record that we saw it */
271 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
272 {
273 if (List.UsePackage(Pkg,Ver) == false)
274 return _error->Error(_("Error occurred while processing %s (UsePackage2)"),
275 PackageName.c_str());
276
277 if (NewFileVer(Ver,List) == false)
278 return _error->Error(_("Error occurred while processing %s (NewFileVer1)"),
279 PackageName.c_str());
280
281 // Read only a single record and return
282 if (OutVer != 0)
283 {
284 *OutVer = Ver;
285 FoundFileDeps |= List.HasFileDeps();
286 return true;
287 }
288
289 continue;
290 }
291
292 // Add a new version
293 map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
294 if (verindex == 0 && _error->PendingError())
295 return _error->Error(_("Error occurred while processing %s (NewVersion%d)"),
296 PackageName.c_str(), 1);
297
298 if (oldMap != Map.Data())
299 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
300 *LastVer = verindex;
301 Ver->ParentPkg = Pkg.Index();
302 Ver->Hash = Hash;
303
304 if (List.NewVersion(Ver) == false)
305 return _error->Error(_("Error occurred while processing %s (NewVersion%d)"),
306 PackageName.c_str(), 2);
307
308 if (List.UsePackage(Pkg,Ver) == false)
309 return _error->Error(_("Error occurred while processing %s (UsePackage3)"),
310 PackageName.c_str());
311
312 if (NewFileVer(Ver,List) == false)
313 return _error->Error(_("Error occurred while processing %s (NewVersion%d)"),
314 PackageName.c_str(), 3);
315
316 // Read only a single record and return
317 if (OutVer != 0)
318 {
319 *OutVer = Ver;
320 FoundFileDeps |= List.HasFileDeps();
321 return true;
322 }
323
324 /* Record the Description data. Description data always exist in
325 Packages and Translation-* files. */
326 pkgCache::DescIterator Desc = Ver.DescriptionList();
327 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
328 map_ptrloc *LastDesc = &Ver->DescriptionList;
329
330 // Skip to the end of description set
331 for (; Desc.end() == false; LastDesc = &Desc->NextDesc, Desc++);
332
333 // Add new description
334 oldMap = Map.Data();
335 map_ptrloc const descindex = NewDescription(Desc, List.DescriptionLanguage(), List.Description_md5(), *LastDesc);
336 if (oldMap != Map.Data())
337 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
338 *LastDesc = descindex;
339 Desc->ParentPkg = Pkg.Index();
340
341 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
342 return _error->Error(_("Error occurred while processing %s (NewFileDesc2)"),PackageName.c_str());
343 }
344
345 FoundFileDeps |= List.HasFileDeps();
346
347 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
348 return _error->Error(_("Wow, you exceeded the number of package "
349 "names this APT is capable of."));
350 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
351 return _error->Error(_("Wow, you exceeded the number of versions "
352 "this APT is capable of."));
353 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
354 return _error->Error(_("Wow, you exceeded the number of descriptions "
355 "this APT is capable of."));
356 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
357 return _error->Error(_("Wow, you exceeded the number of dependencies "
358 "this APT is capable of."));
359 return true;
360 }
361 /*}}}*/
362 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
363 // ---------------------------------------------------------------------
364 /* If we found any file depends while parsing the main list we need to
365 resolve them. Since it is undesired to load the entire list of files
366    into the cache as virtual packages we do a two-stage effort. MergeList
367    identifies the file depends and this creates Provides for them by
368    re-parsing all the indexes. */
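/* A "file depend" is a dependency whose target starts with '/' (a path
   rather than a package name); ListParser::NewDepends below flags them via
   FoundFileDeps. BuildCache then drives this second pass over all index
   files once the first MergeList pass reports HasFileDeps(). */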
369 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
370 {
371 List.Owner = this;
372
373 unsigned int Counter = 0;
374 while (List.Step() == true)
375 {
376 string PackageName = List.Package();
377 if (PackageName.empty() == true)
378 return false;
379 string Version = List.Version();
380 if (Version.empty() == true)
381 continue;
382
383 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
384 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
385 if (Pkg.end() == true)
386 return _error->Error(_("Error occurred while processing %s (FindPkg)"),
387 PackageName.c_str());
388 Counter++;
389 if (Counter % 100 == 0 && Progress != 0)
390 Progress->Progress(List.Offset());
391
392 unsigned long Hash = List.VersionHash();
393 pkgCache::VerIterator Ver = Pkg.VersionList();
394 Dynamic<pkgCache::VerIterator> DynVer(Ver);
395 for (; Ver.end() == false; Ver++)
396 {
397 	 if (Ver->Hash == Hash && Version == Ver.VerStr()) // compare contents, not pointers
398 {
399 if (List.CollectFileProvides(Cache,Ver) == false)
400 return _error->Error(_("Error occurred while processing %s (CollectFileProvides)"),PackageName.c_str());
401 break;
402 }
403 }
404
405 if (Ver.end() == true)
406 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
407 }
408
409 return true;
410 }
411 /*}}}*/
412 // CacheGenerator::NewGroup - Add a new group /*{{{*/
413 // ---------------------------------------------------------------------
414 /* This creates a new group structure and adds it to the hash table */
415 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
416 {
417 Grp = Cache.FindGrp(Name);
418 if (Grp.end() == false)
419 return true;
420
421 // Get a structure
422 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
423 if (unlikely(Group == 0))
424 return false;
425
426 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
427 map_ptrloc const idxName = WriteStringInMap(Name);
428 if (unlikely(idxName == 0))
429 return false;
430 Grp->Name = idxName;
431
432 // Insert it into the hash table
433 unsigned long const Hash = Cache.Hash(Name);
434 Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
435 Cache.HeaderP->GrpHashTable[Hash] = Group;
436
437 Grp->ID = Cache.HeaderP->GroupCount++;
438 return true;
439 }
440 /*}}}*/
441 // CacheGenerator::NewPackage - Add a new package /*{{{*/
442 // ---------------------------------------------------------------------
443 /* This creates a new package structure and adds it to the hash table */
444 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
445 const string &Arch) {
446 pkgCache::GrpIterator Grp;
447 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
448 if (unlikely(NewGroup(Grp, Name) == false))
449 return false;
450
451 Pkg = Grp.FindPkg(Arch);
452 if (Pkg.end() == false)
453 return true;
454
455 // Get a structure
456 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
457 if (unlikely(Package == 0))
458 return false;
459 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
460
461 // Insert the package into our package list
462 if (Grp->FirstPackage == 0) // the group is new
463 {
464 // Insert it into the hash table
465 unsigned long const Hash = Cache.Hash(Name);
466 Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
467 Cache.HeaderP->PkgHashTable[Hash] = Package;
468 Grp->FirstPackage = Package;
469 }
470 else // Group the Packages together
471 {
472 // this package is the new last package
473 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
474 Pkg->NextPackage = LastPkg->NextPackage;
475 LastPkg->NextPackage = Package;
476 }
477 Grp->LastPackage = Package;
478
479 // Set the name, arch and the ID
480 Pkg->Name = Grp->Name;
481 Pkg->Group = Grp.Index();
482 map_ptrloc const idxArch = WriteUniqString((Arch == "all") ? _config->Find("APT::Architecture") : Arch.c_str());
483 if (unlikely(idxArch == 0))
484 return false;
485 Pkg->Arch = idxArch;
486 Pkg->ID = Cache.HeaderP->PackageCount++;
487
488 return true;
489 }
490 /*}}}*/
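/* Resulting layout of the two structures above: groups are hashed by name
   into GrpHashTable and hold FirstPackage/LastPackage markers, while the
   per-architecture packages of one group form a contiguous run inside the
   PkgHashTable chain (new members are spliced in right after LastPackage).
   Grp.FindPkg()/NextPkg() walk exactly that run. */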
491 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
492 // ---------------------------------------------------------------------
493 /* */
494 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
495 ListParser &List)
496 {
497 if (CurrentFile == 0)
498 return true;
499
500 // Get a structure
501 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
502 if (VerFile == 0)
503       return false;
504
505 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
506 VF->File = CurrentFile - Cache.PkgFileP;
507
508 // Link it to the end of the list
509 map_ptrloc *Last = &Ver->FileList;
510 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; V++)
511 Last = &V->NextFile;
512 VF->NextFile = *Last;
513 *Last = VF.Index();
514
515 VF->Offset = List.Offset();
516 VF->Size = List.Size();
517 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
518 Cache.HeaderP->MaxVerFileSize = VF->Size;
519 Cache.HeaderP->VerFileCount++;
520
521 return true;
522 }
523 /*}}}*/
524 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
525 // ---------------------------------------------------------------------
526 /* This puts a version structure in the linked list */
527 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
528 const string &VerStr,
529 unsigned long Next)
530 {
531 // Get a structure
532 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
533 if (Version == 0)
534 return 0;
535
536 // Fill it in
537 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
538 Ver->NextVer = Next;
539 Ver->ID = Cache.HeaderP->VersionCount++;
540 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
541 if (unlikely(idxVerStr == 0))
542 return 0;
543 Ver->VerStr = idxVerStr;
544
545 return Version;
546 }
547 /*}}}*/
548 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
549 // ---------------------------------------------------------------------
550 /* */
551 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
552 ListParser &List)
553 {
554 if (CurrentFile == 0)
555 return true;
556
557 // Get a structure
558 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
559 if (DescFile == 0)
560 return false;
561
562 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
563 DF->File = CurrentFile - Cache.PkgFileP;
564
565 // Link it to the end of the list
566 map_ptrloc *Last = &Desc->FileList;
567 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; D++)
568 Last = &D->NextFile;
569
570 DF->NextFile = *Last;
571 *Last = DF.Index();
572
573 DF->Offset = List.Offset();
574 DF->Size = List.Size();
575 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
576 Cache.HeaderP->MaxDescFileSize = DF->Size;
577 Cache.HeaderP->DescFileCount++;
578
579 return true;
580 }
581 /*}}}*/
582 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
583 // ---------------------------------------------------------------------
584 /* This puts a description structure in the linked list */
585 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
586 const string &Lang,
587 const MD5SumValue &md5sum,
588 map_ptrloc Next)
589 {
590 // Get a structure
591 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
592 if (Description == 0)
593 return 0;
594
595 // Fill it in
596 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
597 Desc->NextDesc = Next;
598 Desc->ID = Cache.HeaderP->DescriptionCount++;
599 map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
600 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
601 if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
602 return 0;
603 Desc->language_code = idxlanguage_code;
604 Desc->md5sum = idxmd5sum;
605
606 return Description;
607 }
608 /*}}}*/
609 // CacheGenerator::FinishCache - do various finish operations /*{{{*/
610 // ---------------------------------------------------------------------
611 /* This prepares the Cache for delivery */
612 bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
613 {
614 // FIXME: add progress reporting for this operation
615    // Do we have different architectures in our groups?
616 vector<string> archs = APT::Configuration::getArchitectures();
617 if (archs.size() > 1)
618 {
619 // Create Conflicts in between the group
620 pkgCache::GrpIterator G = GetCache().GrpBegin();
621 Dynamic<pkgCache::GrpIterator> DynG(G);
622 for (; G.end() != true; G++)
623 {
624 string const PkgName = G.Name();
625 pkgCache::PkgIterator P = G.PackageList();
626 Dynamic<pkgCache::PkgIterator> DynP(P);
627 for (; P.end() != true; P = G.NextPkg(P))
628 {
629 pkgCache::PkgIterator allPkg;
630 Dynamic<pkgCache::PkgIterator> DynallPkg(allPkg);
631 pkgCache::VerIterator V = P.VersionList();
632 Dynamic<pkgCache::VerIterator> DynV(V);
633 for (; V.end() != true; V++)
634 {
635 char const * const Arch = P.Arch();
636 map_ptrloc *OldDepLast = NULL;
637 /* MultiArch handling introduces a lot of implicit Dependencies:
638 - MultiArch: same → Co-Installable if they have the same version
639 - Architecture: all → Need to be Co-Installable for internal reasons
640 - All others conflict with all other group members */
641 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
642 for (vector<string>::const_iterator A = archs.begin(); A != archs.end(); ++A)
643 {
644 if (Arch == 0 || *A == Arch)
645 continue;
646 	       /* We allow only one installed arch at a time
647 per group, therefore each group member conflicts
648 with all other group members */
649 pkgCache::PkgIterator D = G.FindPkg(*A);
650 Dynamic<pkgCache::PkgIterator> DynD(D);
651 if (D.end() == true)
652 continue;
653 if (coInstall == true)
654 {
655 // Replaces: ${self}:other ( << ${binary:Version})
656 NewDepends(D, V, V.VerStr(),
657 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
658 OldDepLast);
659 // Breaks: ${self}:other (!= ${binary:Version})
660 NewDepends(D, V, V.VerStr(),
661 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
662 OldDepLast);
663 } else {
664 // Conflicts: ${self}:other
665 NewDepends(D, V, "",
666 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
667 OldDepLast);
668 }
669 }
670 }
671 }
672 }
673 }
674 return true;
675 }
676 /*}}}*/
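// Illustration of what FinishCache (above) injects for a group that exists
// for more than one configured architecture (package name purely for
// illustration): a "MultiArch: same" libfoo gets, per foreign-arch sibling,
//    Replaces: libfoo:other (<< $installed-version)
//    Breaks:   libfoo:other (!= $installed-version)
// so only equal versions are co-installable, while every other package gets
// an unversioned Conflicts against each of its foreign-arch siblings.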
677 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
678 // ---------------------------------------------------------------------
679 /* This creates a dependency element in the tree. It is linked to the
680 version and to the package that it is pointing to. */
681 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
682 pkgCache::VerIterator &Ver,
683 string const &Version,
684 unsigned int const &Op,
685 unsigned int const &Type,
686 				   map_ptrloc* &OldDepLast)
687 {
688 void const * const oldMap = Map.Data();
689 // Get a structure
690 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
691 if (unlikely(Dependency == 0))
692 return false;
693
694 // Fill it in
695 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
696 Dynamic<pkgCache::DepIterator> DynDep(Dep);
697 Dep->ParentVer = Ver.Index();
698 Dep->Type = Type;
699 Dep->CompareOp = Op;
700 Dep->ID = Cache.HeaderP->DependsCount++;
701
702 // Probe the reverse dependency list for a version string that matches
703 if (Version.empty() == false)
704 {
705 /* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
706 if (I->Version != 0 && I.TargetVer() == Version)
707 Dep->Version = I->Version;*/
708 if (Dep->Version == 0) {
709 map_ptrloc const index = WriteStringInMap(Version);
710 if (unlikely(index == 0))
711 return false;
712 Dep->Version = index;
713 }
714 }
715
716 // Link it to the package
717 Dep->Package = Pkg.Index();
718 Dep->NextRevDepends = Pkg->RevDepends;
719 Pkg->RevDepends = Dep.Index();
720
721 // Do we know where to link the Dependency to?
722 if (OldDepLast == NULL)
723 {
724 OldDepLast = &Ver->DependsList;
725 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; D++)
726 OldDepLast = &D->NextDepends;
727 } else if (oldMap != Map.Data())
728 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
729
730 Dep->NextDepends = *OldDepLast;
731 *OldDepLast = Dep.Index();
732 OldDepLast = &Dep->NextDepends;
733
734 return true;
735 }
736 /*}}}*/
737 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
738 // ---------------------------------------------------------------------
739 /* This creates a Group and the Package to link this dependency to if
740    needed and also handles the caching of the old end point */
741 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
742 const string &PackageName,
743 const string &Arch,
744 const string &Version,
745 unsigned int Op,
746 unsigned int Type)
747 {
748 pkgCache::GrpIterator Grp;
749 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
750 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
751 return false;
752
753 // Locate the target package
754 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
755 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
756 if (Pkg.end() == true) {
757 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
758 return false;
759 }
760
761 // Is it a file dependency?
762 if (unlikely(PackageName[0] == '/'))
763 FoundFileDeps = true;
764
765 /* Caching the old end point speeds up generation substantially */
766 if (OldDepVer != Ver) {
767 OldDepLast = NULL;
768 OldDepVer = Ver;
769 }
770
771 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
772 }
773 /*}}}*/
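/* Note on the caching above: OldDepLast points at the NextDepends slot of
   the last dependency attached to OldDepVer. As long as the parser keeps
   feeding dependencies for the same version, pkgCacheGenerator::NewDepends
   can append in constant time instead of rescanning the version's whole
   dependency list for every single entry. */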
774 // ListParser::NewProvides - Create a Provides element /*{{{*/
775 // ---------------------------------------------------------------------
776 /* */
777 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
778 const string &PkgName,
779 const string &PkgArch,
780 const string &Version)
781 {
782 pkgCache &Cache = Owner->Cache;
783
784    // We do not add self-referencing provides
785 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
786 (PkgArch == "all" && _config->Find("APT::Architecture") == Ver.ParentPkg().Arch())))
787 return true;
788
789 // Get a structure
790 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
791 if (unlikely(Provides == 0))
792 return false;
793 Cache.HeaderP->ProvidesCount++;
794
795 // Fill it in
796 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
797 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
798 Prv->Version = Ver.Index();
799 Prv->NextPkgProv = Ver->ProvidesList;
800 Ver->ProvidesList = Prv.Index();
801 if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
802 return false;
803
804 // Locate the target package
805 pkgCache::PkgIterator Pkg;
806 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
807 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
808 return false;
809
810 // Link it to the package
811 Prv->ParentPkg = Pkg.Index();
812 Prv->NextProvides = Pkg->ProvidesList;
813 Pkg->ProvidesList = Prv.Index();
814
815 return true;
816 }
817 /*}}}*/
818 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
819 // ---------------------------------------------------------------------
820 /* This is used to select which file is to be associated with all newly
821 added versions. The caller is responsible for setting the IMS fields. */
822 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
823 const pkgIndexFile &Index,
824 unsigned long Flags)
825 {
826 // Get some space for the structure
827 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
828 if (unlikely(idxFile == 0))
829 return false;
830 CurrentFile = Cache.PkgFileP + idxFile;
831
832 // Fill it in
833 map_ptrloc const idxFileName = WriteStringInMap(File);
834 map_ptrloc const idxSite = WriteUniqString(Site);
835 if (unlikely(idxFileName == 0 || idxSite == 0))
836 return false;
837 CurrentFile->FileName = idxFileName;
838 CurrentFile->Site = idxSite;
839 CurrentFile->NextFile = Cache.HeaderP->FileList;
840 CurrentFile->Flags = Flags;
841 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
842 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
843 if (unlikely(idxIndexType == 0))
844 return false;
845 CurrentFile->IndexType = idxIndexType;
846 PkgFileName = File;
847 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
848 Cache.HeaderP->PackageFileCount++;
849
850 if (Progress != 0)
851 Progress->SubProgress(Index.Size());
852 return true;
853 }
854 /*}}}*/
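/* SelectFile (above) is what makes NewFileVer and NewFileDesc meaningful:
   they associate records with CurrentFile and quietly do nothing while no
   index file has been selected. The IMS fields mentioned in the comment are
   filled in afterwards by the calling index file code. */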
855 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
856 // ---------------------------------------------------------------------
857 /* This is used to create handles to strings. Given the same text it
858 always returns the same number */
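/* Implementation note: the UniqHash bucket below, indexed by a tiny hash of
   the first two characters, is only a memo of the most recently seen string
   per bucket; the authoritative store is the sorted StringItem list in the
   cache header, which is searched (and extended in sort order) on a bucket
   miss. */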
859 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
860 unsigned int Size)
861 {
862 /* We use a very small transient hash table here; this speeds up generation
863 by a fair amount on slower machines */
864 pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
865 if (Bucket != 0 &&
866 stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
867 return Bucket->String;
868
869 // Search for an insertion point
870 pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
871 int Res = 1;
872 map_ptrloc *Last = &Cache.HeaderP->StringList;
873 for (; I != Cache.StringItemP; Last = &I->NextItem,
874 I = Cache.StringItemP + I->NextItem)
875 {
876 Res = stringcmp(S,S+Size,Cache.StrP + I->String);
877 if (Res >= 0)
878 break;
879 }
880
881 // Match
882 if (Res == 0)
883 {
884 Bucket = I;
885 return I->String;
886 }
887
888 // Get a structure
889 void const * const oldMap = Map.Data();
890 map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
891 if (Item == 0)
892 return 0;
893
894 map_ptrloc const idxString = WriteStringInMap(S,Size);
895 if (unlikely(idxString == 0))
896 return 0;
897 if (oldMap != Map.Data()) {
898 Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
899 I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
900 }
901 *Last = Item;
902
903 // Fill in the structure
904 pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
905 ItemP->NextItem = I - Cache.StringItemP;
906 ItemP->String = idxString;
907
908 Bucket = ItemP;
909 return ItemP->String;
910 }
911 /*}}}*/
912 // CheckValidity - Check that a cache is up-to-date /*{{{*/
913 // ---------------------------------------------------------------------
914 /* This just verifies that each file in the list of index files exists,
915    has attributes matching the cache, and that the cache does not have
916 any extra files. */
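/* A rough sketch of the contract: every index file that claims to have
   packages must be findable in the cache with matching IMS data, and -- via
   the Visited array below -- every PackageFile recorded in the cache must
   correspond to one of the given index files. A mismatch in either direction
   makes the cache stale and forces the callers to rebuild it. */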
917 static bool CheckValidity(const string &CacheFile, FileIterator Start,
918 FileIterator End,MMap **OutMap = 0)
919 {
920 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
921 // No file, certainly invalid
922 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
923 {
924 if (Debug == true)
925 std::clog << "CacheFile doesn't exist" << std::endl;
926 return false;
927 }
928
929 // Map it
930 FileFd CacheF(CacheFile,FileFd::ReadOnly);
931 SPtr<MMap> Map = new MMap(CacheF,0);
932 pkgCache Cache(Map);
933 if (_error->PendingError() == true || Map->Size() == 0)
934 {
935 if (Debug == true)
936 std::clog << "Errors are pending or Map is empty()" << std::endl;
937 _error->Discard();
938 return false;
939 }
940
941 /* Now we check every index file, see if it is in the cache,
942 verify the IMS data and check that it is on the disk too.. */
943 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
944 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
945 for (; Start != End; Start++)
946 {
947 if (Debug == true)
948 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
949 if ((*Start)->HasPackages() == false)
950 {
951 if (Debug == true)
952 std::clog << "Has NO packages" << std::endl;
953 continue;
954 }
955
956 if ((*Start)->Exists() == false)
957 {
958 #if 0 // mvo: we no longer give a message here (Default Sources spec)
959 _error->WarningE("stat",_("Couldn't stat source package list %s"),
960 (*Start)->Describe().c_str());
961 #endif
962 if (Debug == true)
963 std::clog << "file doesn't exist" << std::endl;
964 continue;
965 }
966
967 // FindInCache is also expected to do an IMS check.
968 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
969 if (File.end() == true)
970 {
971 if (Debug == true)
972 std::clog << "FindInCache returned end-Pointer" << std::endl;
973 return false;
974 }
975
976 Visited[File->ID] = true;
977 if (Debug == true)
978 std::clog << "with ID " << File->ID << " is valid" << std::endl;
979 }
980
981 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
982 if (Visited[I] == false)
983 {
984 if (Debug == true)
985 	    std::clog << "File with ID " << I << " wasn't visited" << std::endl;
986 return false;
987 }
988
989 if (_error->PendingError() == true)
990 {
991 if (Debug == true)
992 {
993 std::clog << "Validity failed because of pending errors:" << std::endl;
994 _error->DumpErrors();
995 }
996 _error->Discard();
997 return false;
998 }
999
1000 if (OutMap != 0)
1001 *OutMap = Map.UnGuard();
1002 return true;
1003 }
1004 /*}}}*/
1005 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1006 // ---------------------------------------------------------------------
1007 /* Size is kind of an abstract notion that is only used for the progress
1008 meter */
1009 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1010 {
1011 unsigned long TotalSize = 0;
1012 for (; Start != End; Start++)
1013 {
1014 if ((*Start)->HasPackages() == false)
1015 continue;
1016 TotalSize += (*Start)->Size();
1017 }
1018 return TotalSize;
1019 }
1020 /*}}}*/
1021 // BuildCache - Merge the list of index files into the cache /*{{{*/
1022 // ---------------------------------------------------------------------
1023 /* */
1024 static bool BuildCache(pkgCacheGenerator &Gen,
1025 OpProgress *Progress,
1026 unsigned long &CurrentSize,unsigned long TotalSize,
1027 FileIterator Start, FileIterator End)
1028 {
1029 FileIterator I;
1030 for (I = Start; I != End; I++)
1031 {
1032 if ((*I)->HasPackages() == false)
1033 continue;
1034
1035 if ((*I)->Exists() == false)
1036 continue;
1037
1038 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1039 {
1040 _error->Warning("Duplicate sources.list entry %s",
1041 (*I)->Describe().c_str());
1042 continue;
1043 }
1044
1045 unsigned long Size = (*I)->Size();
1046 if (Progress != NULL)
1047 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1048 CurrentSize += Size;
1049
1050 if ((*I)->Merge(Gen,Progress) == false)
1051 return false;
1052 }
1053
1054 if (Gen.HasFileDeps() == true)
1055 {
1056 if (Progress != NULL)
1057 Progress->Done();
1058 TotalSize = ComputeSize(Start, End);
1059 CurrentSize = 0;
1060 for (I = Start; I != End; I++)
1061 {
1062 unsigned long Size = (*I)->Size();
1063 if (Progress != NULL)
1064 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1065 CurrentSize += Size;
1066 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1067 return false;
1068 }
1069 }
1070
1071 return true;
1072 }
1073 /*}}}*/
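/* BuildCache (above) is the driver used by both cache builders below: pass
   one merges every index that exists and is not already in the cache, pass
   two (only when the generator reports HasFileDeps()) re-walks the same
   files with MergeFileProvides, resetting the progress meter in between. */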
1074 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1075 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1076 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1077 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1078 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1079 Flags |= MMap::Moveable;
1080 if (_config->FindB("APT::Cache-Fallback", false) == true)
1081 Flags |= MMap::Fallback;
1082 if (CacheF != NULL)
1083 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1084 else
1085 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1086 }
1087 /*}}}*/
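// The sizing knobs read in CreateDynamicMMap (above) can be overridden from
// apt configuration; a sketch of the syntax with the compiled-in defaults
// spelled out (values in bytes, purely illustrative):
//
//    APT::Cache-Start "25165824";    // initial size of the mmap
//    APT::Cache-Grow  "1048576";     // step by which it grows when full
//    APT::Cache-Limit "0";           // 0 means no upper bound
//    APT::Cache-Fallback "false";    // "true" adds the MMap::Fallback flag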
1088 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1089 // ---------------------------------------------------------------------
1090 /* This makes sure that the status cache (the cache that has all
1091 index files from the sources list and all local ones) is ready
1092 to be mmaped. If OutMap is not zero then a MMap object representing
1093    the cache will be stored there. This is pretty much mandatory if you
1094 are using AllowMem. AllowMem lets the function be run as non-root
1095 where it builds the cache 'fast' into a memory buffer. */
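/* A rough usage sketch (illustrative only, error handling omitted):

      pkgSourceList List;
      List.ReadMainList();                 // sources.list + fragments
      OpProgress Prog;
      MMap *Map = 0;
      if (pkgCacheGenerator::MakeStatusCache(List, &Prog, &Map, true) == true)
      {
         pkgCache Cache(Map);              // ready-to-use package cache
         // ... use Cache, then delete Map when done
      }
*/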
1096 __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1097 MMap **OutMap, bool AllowMem)
1098 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
1099 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
1100 MMap **OutMap,bool AllowMem)
1101 {
1102 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1103
1104 vector<pkgIndexFile *> Files;
1105 for (vector<metaIndex *>::const_iterator i = List.begin();
1106 i != List.end();
1107 i++)
1108 {
1109 vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
1110 for (vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
1111 j != Indexes->end();
1112 j++)
1113 Files.push_back (*j);
1114 }
1115
1116 unsigned long const EndOfSource = Files.size();
1117 if (_system->AddStatusFiles(Files) == false)
1118 return false;
1119
1120 // Decide if we can write to the files..
1121 string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
1122 string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
1123
1124 // ensure the cache directory exists
1125 if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
1126 {
1127 string dir = _config->FindDir("Dir::Cache");
1128 size_t const len = dir.size();
1129 if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
1130 dir = dir.substr(0, len - 5);
1131 if (CacheFile.empty() == false)
1132 CreateDirectory(dir, flNotFile(CacheFile));
1133 if (SrcCacheFile.empty() == false)
1134 CreateDirectory(dir, flNotFile(SrcCacheFile));
1135 }
1136
1137 // Decide if we can write to the cache
1138 bool Writeable = false;
1139 if (CacheFile.empty() == false)
1140 Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
1141 else
1142 if (SrcCacheFile.empty() == false)
1143 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
1144 if (Debug == true)
1145 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
1146
1147 if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
1148 return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
1149
1150 if (Progress != NULL)
1151 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1152
1153 // Cache is OK, Fin.
1154 if (CheckValidity(CacheFile,Files.begin(),Files.end(),OutMap) == true)
1155 {
1156 if (Progress != NULL)
1157 Progress->OverallProgress(1,1,1,_("Reading package lists"));
1158 if (Debug == true)
1159 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
1160 return true;
1161 }
1162 else if (Debug == true)
1163 std::clog << "pkgcache.bin is NOT valid" << std::endl;
1164
1165 /* At this point we know we need to reconstruct the package cache,
1166 begin. */
1167 SPtr<FileFd> CacheF;
1168 SPtr<DynamicMMap> Map;
1169 if (Writeable == true && CacheFile.empty() == false)
1170 {
1171 unlink(CacheFile.c_str());
1172 CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
1173 fchmod(CacheF->Fd(),0644);
1174 Map = CreateDynamicMMap(CacheF, MMap::Public);
1175 if (_error->PendingError() == true)
1176 return false;
1177 if (Debug == true)
1178 std::clog << "Open filebased MMap" << std::endl;
1179 }
1180 else
1181 {
1182 // Just build it in memory..
1183 Map = CreateDynamicMMap(NULL);
1184 if (Debug == true)
1185 std::clog << "Open memory Map (not filebased)" << std::endl;
1186 }
1187
1188 // Lets try the source cache.
1189 unsigned long CurrentSize = 0;
1190 unsigned long TotalSize = 0;
1191 if (CheckValidity(SrcCacheFile,Files.begin(),
1192 Files.begin()+EndOfSource) == true)
1193 {
1194 if (Debug == true)
1195 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
1196 // Preload the map with the source cache
1197 FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
1198 unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
1199 if ((alloc == 0 && _error->PendingError())
1200 || SCacheF.Read((unsigned char *)Map->Data() + alloc,
1201 SCacheF.Size()) == false)
1202 return false;
1203
1204 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1205
1206 // Build the status cache
1207 pkgCacheGenerator Gen(Map.Get(),Progress);
1208 if (_error->PendingError() == true)
1209 return false;
1210 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1211 Files.begin()+EndOfSource,Files.end()) == false)
1212 return false;
1213
1214 // FIXME: move me to a better place
1215 Gen.FinishCache(Progress);
1216 }
1217 else
1218 {
1219 if (Debug == true)
1220 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
1221 TotalSize = ComputeSize(Files.begin(),Files.end());
1222
1223 // Build the source cache
1224 pkgCacheGenerator Gen(Map.Get(),Progress);
1225 if (_error->PendingError() == true)
1226 return false;
1227 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1228 Files.begin(),Files.begin()+EndOfSource) == false)
1229 return false;
1230
1231 // Write it back
1232 if (Writeable == true && SrcCacheFile.empty() == false)
1233 {
1234 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
1235 if (_error->PendingError() == true)
1236 return false;
1237
1238 fchmod(SCacheF.Fd(),0644);
1239
1240 // Write out the main data
1241 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
1242 return _error->Error(_("IO Error saving source cache"));
1243 SCacheF.Sync();
1244
1245 // Write out the proper header
1246 Gen.GetCache().HeaderP->Dirty = false;
1247 if (SCacheF.Seek(0) == false ||
1248 SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
1249 return _error->Error(_("IO Error saving source cache"));
1250 Gen.GetCache().HeaderP->Dirty = true;
1251 SCacheF.Sync();
1252 }
1253
1254 // Build the status cache
1255 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1256 Files.begin()+EndOfSource,Files.end()) == false)
1257 return false;
1258
1259 // FIXME: move me to a better place
1260 Gen.FinishCache(Progress);
1261 }
1262 if (Debug == true)
1263 std::clog << "Caches are ready for shipping" << std::endl;
1264
1265 if (_error->PendingError() == true)
1266 return false;
1267 if (OutMap != 0)
1268 {
1269 if (CacheF != 0)
1270 {
1271 delete Map.UnGuard();
1272 *OutMap = new MMap(*CacheF,0);
1273 }
1274 else
1275 {
1276 *OutMap = Map.UnGuard();
1277 }
1278 }
1279
1280 return true;
1281 }
1282 /*}}}*/
1283 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1284 // ---------------------------------------------------------------------
1285 /* */
1286 __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1287 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1288 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1289 {
1290 vector<pkgIndexFile *> Files;
1291 unsigned long EndOfSource = Files.size();
1292 if (_system->AddStatusFiles(Files) == false)
1293 return false;
1294
1295 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1296 unsigned long CurrentSize = 0;
1297 unsigned long TotalSize = 0;
1298
1299 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1300
1301 // Build the status cache
1302 if (Progress != NULL)
1303 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1304 pkgCacheGenerator Gen(Map.Get(),Progress);
1305 if (_error->PendingError() == true)
1306 return false;
1307 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1308 Files.begin()+EndOfSource,Files.end()) == false)
1309 return false;
1310
1311 // FIXME: move me to a better place
1312 Gen.FinishCache(Progress);
1313
1314 if (_error->PendingError() == true)
1315 return false;
1316 *OutMap = Map.UnGuard();
1317
1318 return true;
1319 }
1320 /*}}}*/