]> git.saurik.com Git - apt.git/blob - apt-pkg/pkgcachegen.cc
3545517fe67c03922c67b142b84a8fd0c2e46986
[apt.git] / apt-pkg / pkgcachegen.cc
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
38 typedef vector<pkgIndexFile *>::iterator FileIterator;
39 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
40
41 bool IsDuplicateDescription(pkgCache::DescIterator Desc,
42 MD5SumValue const &CurMd5, std::string const &CurLang);
43
44 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
45 // ---------------------------------------------------------------------
/* We set the dirty flag and make sure that it is written to the disk.
   On an empty map a fresh cache header is allocated and initialized;
   on a non-empty map the existing cache is attached and checked for a
   compatible versioning system. */
47 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
48 Map(*pMap), Cache(pMap,false), Progress(Prog),
49 FoundFileDeps(0)
50 {
   // no index file selected yet; transient unique-string hash is empty
51 CurrentFile = 0;
52 memset(UniqHash,0,sizeof(UniqHash));
53
54 if (_error->PendingError() == true)
55 return;
56
57 if (Map.Size() == 0)
58 {
59 // Setup the map interface..
   // the header always lives at the very start of the mapping
60 Cache.HeaderP = (pkgCache::Header *)Map.Data();
61 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
62 return;
63
64 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
65
66 // Starting header
67 *Cache.HeaderP = pkgCache::Header();
   // WriteStringInMap() can remap the whole file (see ReMap below), so
   // store the returned indices first and only then assign them through
   // the (possibly moved and re-read) header pointer
68 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
69 Cache.HeaderP->VerSysName = idxVerSysName;
70 map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
71 Cache.HeaderP->Architecture = idxArchitecture;
72 if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
73 return;
74 Cache.ReMap();
75 }
76 else
77 {
78 // Map directly from the existing file
79 Cache.ReMap();
80 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
   // refuse caches built for a different version comparison scheme
81 if (Cache.VS != _system->VS)
82 {
83 _error->Error(_("Cache has an incompatible versioning system"));
84 return;
85 }
86 }
87
   // mark the cache dirty until the destructor completes successfully,
   // so a crash mid-generation is detectable on the next run
88 Cache.HeaderP->Dirty = true;
89 Map.Sync(0,sizeof(pkgCache::Header));
90 }
91 /*}}}*/
92 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
93 // ---------------------------------------------------------------------
94 /* We sync the data then unset the dirty flag in two steps so as to
95 advoid a problem during a crash */
96 pkgCacheGenerator::~pkgCacheGenerator()
97 {
98 if (_error->PendingError() == true)
99 return;
100 if (Map.Sync() == false)
101 return;
102
103 Cache.HeaderP->Dirty = false;
104 Cache.HeaderP->CacheFileSize = Map.Size();
105 Map.Sync(0,sizeof(pkgCache::Header));
106 }
107 /*}}}*/
108 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
   // Called after the DynamicMMap moved the cache to a new address:
   // every raw pointer into the old mapping has to be rebased.
109 if (oldMap == newMap)
110 return;
111
112 if (_config->FindB("Debug::pkgCacheGen", false))
113 std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;
114
   // let the pkgCache re-read its base pointers from the new mapping
115 Cache.ReMap(false);
116
   // rebase our own raw pointers by the distance the mapping moved
117 CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;
118
119 for (size_t i = 0; i < _count(UniqHash); ++i)
120 if (UniqHash[i] != 0)
121 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;
122
   // fix up every live iterator that registered itself via Dynamic<>,
   // one pass per iterator type
123 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
124 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
125 (*i)->ReMap(oldMap, newMap);
126 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
127 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
128 (*i)->ReMap(oldMap, newMap);
129 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
130 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
131 (*i)->ReMap(oldMap, newMap);
132 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
133 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
134 (*i)->ReMap(oldMap, newMap);
135 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
136 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
137 (*i)->ReMap(oldMap, newMap);
138 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
139 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
140 (*i)->ReMap(oldMap, newMap);
141 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
142 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
143 (*i)->ReMap(oldMap, newMap);
144 } /*}}}*/
145 // CacheGenerator::WriteStringInMap /*{{{*/
146 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
147 const unsigned long &Len) {
148 void const * const oldMap = Map.Data();
149 map_ptrloc const index = Map.WriteString(String, Len);
150 if (index != 0)
151 ReMap(oldMap, Map.Data());
152 return index;
153 }
154 /*}}}*/
155 // CacheGenerator::WriteStringInMap /*{{{*/
156 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
157 void const * const oldMap = Map.Data();
158 map_ptrloc const index = Map.WriteString(String);
159 if (index != 0)
160 ReMap(oldMap, Map.Data());
161 return index;
162 }
163 /*}}}*/
164 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
165 void const * const oldMap = Map.Data();
166 map_ptrloc const index = Map.Allocate(size);
167 if (index != 0)
168 ReMap(oldMap, Map.Data());
169 return index;
170 }
171 /*}}}*/
172 // CacheGenerator::MergeList - Merge the package list /*{{{*/
173 // ---------------------------------------------------------------------
174 /* This provides the generation of the entries in the cache. Each loop
175 goes through a single package record from the underlying parse engine. */
176 bool pkgCacheGenerator::MergeList(ListParser &List,
177 pkgCache::VerIterator *OutVer)
178 {
179 List.Owner = this;
180
181 unsigned int Counter = 0;
182 while (List.Step() == true)
183 {
184 string const PackageName = List.Package();
185 if (PackageName.empty() == true)
186 return false;
187
188 Counter++;
189 if (Counter % 100 == 0 && Progress != 0)
190 Progress->Progress(List.Offset());
191
192 string Arch = List.Architecture();
193 string const Version = List.Version();
194 if (Version.empty() == true && Arch.empty() == true)
195 {
196 if (MergeListGroup(List, PackageName) == false)
197 return false;
198 }
199
200 if (Arch.empty() == true)
201 Arch = _config->Find("APT::Architecture");
202
203 // Get a pointer to the package structure
204 pkgCache::PkgIterator Pkg;
205 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
206 if (NewPackage(Pkg, PackageName, Arch) == false)
207 return _error->Error(_("Error occurred while processing %s (NewPackage)"),PackageName.c_str());
208
209
210 if (Version.empty() == true)
211 {
212 if (MergeListPackage(List, Pkg) == false)
213 return false;
214 }
215 else
216 {
217 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
218 return false;
219 }
220
221 if (OutVer != 0)
222 {
223 FoundFileDeps |= List.HasFileDeps();
224 return true;
225 }
226 }
227
228 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
229 return _error->Error(_("Wow, you exceeded the number of package "
230 "names this APT is capable of."));
231 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
232 return _error->Error(_("Wow, you exceeded the number of versions "
233 "this APT is capable of."));
234 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
235 return _error->Error(_("Wow, you exceeded the number of descriptions "
236 "this APT is capable of."));
237 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
238 return _error->Error(_("Wow, you exceeded the number of dependencies "
239 "this APT is capable of."));
240
241 FoundFileDeps |= List.HasFileDeps();
242 return true;
243 }
244 // CacheGenerator::MergeListGroup /*{{{*/
245 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
246 {
247 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
248 // a group has no data on it's own, only packages have it but these
249 // stanzas like this come from Translation- files to add descriptions,
250 // but without a version we don't need a description for it…
251 if (Grp.end() == true)
252 return true;
253 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
254
255 pkgCache::PkgIterator Pkg;
256 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
257 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
258 if (MergeListPackage(List, Pkg) == false)
259 return false;
260
261 return true;
262 }
263 /*}}}*/
264 // CacheGenerator::MergeListPackage /*{{{*/
   /* Merge a version-less stanza into an existing package: run the
      parser's package handling and attach the (translated) description
      to every version whose description md5 matches. */
265 bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
266 {
267 // we first process the package, then the descriptions
268 // (for deb this package processing is in fact a no-op)
269 pkgCache::VerIterator Ver(Cache);
270 Dynamic<pkgCache::VerIterator> DynVer(Ver);
271 if (List.UsePackage(Pkg, Ver) == false)
272 return _error->Error(_("Error occurred while processing %s (UsePackage1)"),
273 Pkg.Name());
274
275 // Find the right version to write the description
276 MD5SumValue CurMd5 = List.Description_md5();
277 std::string CurLang = List.DescriptionLanguage();
278
279 for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
280 {
281 pkgCache::DescIterator Desc = Ver.DescriptionList();
282 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
   // remember where a new description would be hooked into the list
283 map_ptrloc *LastDesc = &Ver->DescriptionList;
284
285 // don't add a new description if we have one for the given
286 // md5 && language
287 if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)
288 continue;
289
   // walk the description list looking for a matching md5 so the new
   // language variant can be appended behind it
290 for (Desc = Ver.DescriptionList();
291 Desc.end() == false;
292 LastDesc = &Desc->NextDesc, ++Desc)
293 {
294 if (MD5SumValue(Desc.md5()) != CurMd5)
295 continue;
296
297 // Add new description
   // NewDescription may grow (and therefore move) the map: rebase the
   // raw LastDesc pointer before writing through it
298 void const * const oldMap = Map.Data();
299 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
300 if (oldMap != Map.Data())
301 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
302 *LastDesc = descindex;
303 Desc->ParentPkg = Pkg.Index();
304
305 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
306 return _error->Error(_("Error occurred while processing %s (NewFileDesc1)"), Pkg.Name());
307 break;
308 }
309 }
310
311 return true;
312 }
313 /*}}}*/
314 // CacheGenerator::MergeListVersion /*{{{*/
   /* Merge a stanza with a version: locate or create the version entry
      (keeping the version list sorted), register the parsed file for it
      and record or reuse its description. */
315 bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
316 std::string const &Version, pkgCache::VerIterator* &OutVer)
317 {
318 pkgCache::VerIterator Ver = Pkg.VersionList();
319 Dynamic<pkgCache::VerIterator> DynVer(Ver);
   // insertion point into the (sorted) version list of this package
320 map_ptrloc *LastVer = &Pkg->VersionList;
   // snapshot of the map base so raw pointers can be rebased after a remap
321 void const * oldMap = Map.Data();
322
323 unsigned long const Hash = List.VersionHash();
324 if (Ver.end() == false)
325 {
326 /* We know the list is sorted so we use that fact in the search.
327 Insertion of new versions is done with correct sorting */
328 int Res = 1;
329 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
330 {
331 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
332 // Version is higher as current version - insert here
333 if (Res > 0)
334 break;
335 // Versionstrings are equal - is hash also equal?
336 if (Res == 0 && Ver->Hash == Hash)
337 break;
338 // proceed with the next till we have either the right
339 // or we found another version (which will be lower)
340 }
341
342 /* We already have a version for this item, record that we saw it */
343 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
344 {
345 if (List.UsePackage(Pkg,Ver) == false)
346 return _error->Error(_("Error occurred while processing %s (UsePackage2)"),
347 Pkg.Name());
348
349 if (NewFileVer(Ver,List) == false)
350 return _error->Error(_("Error occurred while processing %s (NewFileVer1)"),
351 Pkg.Name());
352
353 // Read only a single record and return
354 if (OutVer != 0)
355 {
356 *OutVer = Ver;
357 return true;
358 }
359
360 return true;
361 }
362 }
363
364 // Add a new version
365 map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
366 if (verindex == 0 && _error->PendingError())
367 return _error->Error(_("Error occurred while processing %s (NewVersion%d)"),
368 Pkg.Name(), 1);
369
   // NewVersion may have remapped the cache - rebase LastVer before use
370 if (oldMap != Map.Data())
371 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
372 *LastVer = verindex;
373 Ver->ParentPkg = Pkg.Index();
374 Ver->Hash = Hash;
375
376 if (List.NewVersion(Ver) == false)
377 return _error->Error(_("Error occurred while processing %s (NewVersion%d)"),
378 Pkg.Name(), 2);
379
380 if (List.UsePackage(Pkg,Ver) == false)
381 return _error->Error(_("Error occurred while processing %s (UsePackage3)"),
382 Pkg.Name());
383
384 if (NewFileVer(Ver,List) == false)
385 return _error->Error(_("Error occurred while processing %s (NewVersion%d)"),
386 Pkg.Name(), 3);
387
388 // Read only a single record and return
389 if (OutVer != 0)
390 {
391 *OutVer = Ver;
392 return true;
393 }
394
395 /* Record the Description (it is not translated) */
396 MD5SumValue CurMd5 = List.Description_md5();
397 if (CurMd5.Value().empty() == true)
398 return true;
399 std::string CurLang = List.DescriptionLanguage();
400
401 /* Before we add a new description we first search in the group for
402 a version with a description of the same MD5 - if so we reuse this
403 description group instead of creating our own for this version */
404 pkgCache::GrpIterator Grp = Pkg.Group();
405 for (pkgCache::PkgIterator P = Grp.PackageList();
406 P.end() == false; P = Grp.NextPkg(P))
407 {
408 for (pkgCache::VerIterator V = P.VersionList();
409 V.end() == false; ++V)
410 {
   // empty language means: match on md5 only
411 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
412 continue;
413 Ver->DescriptionList = V->DescriptionList;
414 return true;
415 }
416 }
417
418 // We haven't found reusable descriptions, so add the first description
419 pkgCache::DescIterator Desc = Ver.DescriptionList();
420 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
421 map_ptrloc *LastDesc = &Ver->DescriptionList;
422
   // NewDescription may remap the cache - rebase LastDesc afterwards
423 oldMap = Map.Data();
424 map_ptrloc const descindex = NewDescription(Desc, List.DescriptionLanguage(), List.Description_md5(), *LastDesc);
425 if (oldMap != Map.Data())
426 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
427 *LastDesc = descindex;
428 Desc->ParentPkg = Pkg.Index();
429
430 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
431 return _error->Error(_("Error occurred while processing %s (NewFileDesc2)"),Pkg.Name());
432
433 return true;
434 }
435 /*}}}*/
436 /*}}}*/
437 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
438 // ---------------------------------------------------------------------
439 /* If we found any file depends while parsing the main list we need to
440 resolve them. Since it is undesired to load the entire list of files
441 into the cache as virtual packages we do a two stage effort. MergeList
442 identifies the file depends and this creates Provdies for them by
443 re-parsing all the indexs. */
444 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
445 {
446 List.Owner = this;
447
448 unsigned int Counter = 0;
449 while (List.Step() == true)
450 {
451 string PackageName = List.Package();
452 if (PackageName.empty() == true)
453 return false;
454 string Version = List.Version();
455 if (Version.empty() == true)
456 continue;
457
458 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
459 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
460 if (Pkg.end() == true)
461 return _error->Error(_("Error occurred while processing %s (FindPkg)"),
462 PackageName.c_str());
463 Counter++;
464 if (Counter % 100 == 0 && Progress != 0)
465 Progress->Progress(List.Offset());
466
467 unsigned long Hash = List.VersionHash();
468 pkgCache::VerIterator Ver = Pkg.VersionList();
469 Dynamic<pkgCache::VerIterator> DynVer(Ver);
470 for (; Ver.end() == false; ++Ver)
471 {
472 if (Ver->Hash == Hash && Version.c_str() == Ver.VerStr())
473 {
474 if (List.CollectFileProvides(Cache,Ver) == false)
475 return _error->Error(_("Error occurred while processing %s (CollectFileProvides)"),PackageName.c_str());
476 break;
477 }
478 }
479
480 if (Ver.end() == true)
481 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
482 }
483
484 return true;
485 }
486 /*}}}*/
487 // CacheGenerator::NewGroup - Add a new group /*{{{*/
488 // ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table.
   Returns an existing group unchanged if the name is already known.
   NOTE(review): AllocateInMap/WriteStringInMap may remap the cache;
   Grp survives this only because callers wrap it in Dynamic<> - confirm
   for any new caller. */
490 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
491 {
492 Grp = Cache.FindGrp(Name);
493 if (Grp.end() == false)
494 return true;
495
496 // Get a structure
497 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
498 if (unlikely(Group == 0))
499 return false;
500
501 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   // store the index first: the write may remap and move the group
502 map_ptrloc const idxName = WriteStringInMap(Name);
503 if (unlikely(idxName == 0))
504 return false;
505 Grp->Name = idxName;
506
507 // Insert it into the hash table
   // new entry becomes the head of its hash bucket chain
508 unsigned long const Hash = Cache.Hash(Name);
509 Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
510 Cache.HeaderP->GrpHashTable[Hash] = Group;
511
512 Grp->ID = Cache.HeaderP->GroupCount++;
513 return true;
514 }
515 /*}}}*/
516 // CacheGenerator::NewPackage - Add a new package /*{{{*/
517 // ---------------------------------------------------------------------
/* This creates a new package structure (within its group, creating the
   group as well if needed) and adds it to the hash table. An existing
   package of that name and architecture is returned unchanged. */
519 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
520 const string &Arch) {
521 pkgCache::GrpIterator Grp;
522 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
523 if (unlikely(NewGroup(Grp, Name) == false))
524 return false;
525
526 Pkg = Grp.FindPkg(Arch);
527 if (Pkg.end() == false)
528 return true;
529
530 // Get a structure
531 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
532 if (unlikely(Package == 0))
533 return false;
534 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
535
536 // Insert the package into our package list
537 if (Grp->FirstPackage == 0) // the group is new
538 {
539 // Insert it into the hash table
540 unsigned long const Hash = Cache.Hash(Name);
541 Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
542 Cache.HeaderP->PkgHashTable[Hash] = Package;
543 Grp->FirstPackage = Package;
544 }
545 else // Group the Packages together
546 {
547 // this package is the new last package
548 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
549 Pkg->NextPackage = LastPkg->NextPackage;
550 LastPkg->NextPackage = Package;
551 }
552 Grp->LastPackage = Package;
553
554 // Set the name, arch and the ID
   // the name string is shared with the group, no second copy is stored
555 Pkg->Name = Grp->Name;
556 Pkg->Group = Grp.Index();
557 // all is mapped to the native architecture
558 map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
559 if (unlikely(idxArch == 0))
560 return false;
561 Pkg->Arch = idxArch;
562 Pkg->ID = Cache.HeaderP->PackageCount++;
563
564 return true;
565 }
566 /*}}}*/
567 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
568 // ---------------------------------------------------------------------
569 /* */
570 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
571 ListParser &List)
572 {
573 if (CurrentFile == 0)
574 return true;
575
576 // Get a structure
577 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
578 if (VerFile == 0)
579 return 0;
580
581 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
582 VF->File = CurrentFile - Cache.PkgFileP;
583
584 // Link it to the end of the list
585 map_ptrloc *Last = &Ver->FileList;
586 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
587 Last = &V->NextFile;
588 VF->NextFile = *Last;
589 *Last = VF.Index();
590
591 VF->Offset = List.Offset();
592 VF->Size = List.Size();
593 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
594 Cache.HeaderP->MaxVerFileSize = VF->Size;
595 Cache.HeaderP->VerFileCount++;
596
597 return true;
598 }
599 /*}}}*/
600 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
601 // ---------------------------------------------------------------------
/* This puts a version structure in the linked list, hooked in before
   the entry given by Next. Returns the map offset of the new version,
   or 0 on failure. */
603 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
604 const string &VerStr,
605 unsigned long Next)
606 {
607 // Get a structure
608 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
609 if (Version == 0)
610 return 0;
611
612 // Fill it in
613 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
614 Ver->NextVer = Next;
615 Ver->ID = Cache.HeaderP->VersionCount++;
   // store the index first: the write may remap and move the version
616 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
617 if (unlikely(idxVerStr == 0))
618 return 0;
619 Ver->VerStr = idxVerStr;
620
621 return Version;
622 }
623 /*}}}*/
624 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
625 // ---------------------------------------------------------------------
626 /* */
627 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
628 ListParser &List)
629 {
630 if (CurrentFile == 0)
631 return true;
632
633 // Get a structure
634 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
635 if (DescFile == 0)
636 return false;
637
638 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
639 DF->File = CurrentFile - Cache.PkgFileP;
640
641 // Link it to the end of the list
642 map_ptrloc *Last = &Desc->FileList;
643 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
644 Last = &D->NextFile;
645
646 DF->NextFile = *Last;
647 *Last = DF.Index();
648
649 DF->Offset = List.Offset();
650 DF->Size = List.Size();
651 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
652 Cache.HeaderP->MaxDescFileSize = DF->Size;
653 Cache.HeaderP->DescFileCount++;
654
655 return true;
656 }
657 /*}}}*/
658 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
659 // ---------------------------------------------------------------------
/* This puts a description structure in the linked list, hooked in
   before the entry given by Next. Returns the map offset of the new
   description, or 0 on failure. */
661 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
662 const string &Lang,
663 const MD5SumValue &md5sum,
664 map_ptrloc Next)
665 {
666 // Get a structure
667 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
668 if (Description == 0)
669 return 0;
670
671 // Fill it in
672 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
673 Desc->NextDesc = Next;
674 Desc->ID = Cache.HeaderP->DescriptionCount++;
   // store the indices first: each write may remap and move Desc
675 map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
676 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
677 if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
678 return 0;
679 Desc->language_code = idxlanguage_code;
680 Desc->md5sum = idxmd5sum;
681
682 return Description;
683 }
684 /*}}}*/
685 // CacheGenerator::FinishCache - do various finish operations /*{{{*/
686 // ---------------------------------------------------------------------
/* This prepares the Cache for delivery. On multi-arch systems implicit
   conflicts (or breaks/replaces for MultiArch:same versions) are added
   between the per-architecture siblings of every group. */
688 bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
689 {
690 // FIXME: add progress reporting for this operation
691 // Do we have different architectures in the groups ?
692 vector<string> archs = APT::Configuration::getArchitectures();
693 if (archs.size() > 1)
694 {
695 // Create Conflicts in between the group
696 pkgCache::GrpIterator G = GetCache().GrpBegin();
697 Dynamic<pkgCache::GrpIterator> DynG(G);
698 for (; G.end() != true; ++G)
699 {
700 string const PkgName = G.Name();
701 pkgCache::PkgIterator P = G.PackageList();
702 Dynamic<pkgCache::PkgIterator> DynP(P);
703 for (; P.end() != true; P = G.NextPkg(P))
704 {
705 pkgCache::PkgIterator allPkg;
706 Dynamic<pkgCache::PkgIterator> DynallPkg(allPkg);
707 pkgCache::VerIterator V = P.VersionList();
708 Dynamic<pkgCache::VerIterator> DynV(V);
709 for (; V.end() != true; ++V)
710 {
711 // copy P.Arch() into a string here as a cache remap
712 // in NewDepends() later may alter the pointer location
713 string Arch = P.Arch() == NULL ? "" : P.Arch();
   // reset the cached insertion point for each version's deps list
714 map_ptrloc *OldDepLast = NULL;
715 /* MultiArch handling introduces a lot of implicit Dependencies:
716 - MultiArch: same → Co-Installable if they have the same version
717 - Architecture: all → Need to be Co-Installable for internal reasons
718 - All others conflict with all other group members */
719 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
720 for (vector<string>::const_iterator A = archs.begin(); A != archs.end(); ++A)
721 {
   // never against our own architecture
722 if (*A == Arch)
723 continue;
724 /* We allow only one installed arch at the time
725 per group, therefore each group member conflicts
726 with all other group members */
727 pkgCache::PkgIterator D = G.FindPkg(*A);
728 Dynamic<pkgCache::PkgIterator> DynD(D);
729 if (D.end() == true)
730 continue;
731 if (coInstall == true)
732 {
733 // Replaces: ${self}:other ( << ${binary:Version})
734 NewDepends(D, V, V.VerStr(),
735 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
736 OldDepLast);
737 // Breaks: ${self}:other (!= ${binary:Version})
738 NewDepends(D, V, V.VerStr(),
739 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
740 OldDepLast);
741 } else {
742 // Conflicts: ${self}:other
743 NewDepends(D, V, "",
744 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
745 OldDepLast);
746 }
747 }
748 }
749 }
750 }
751 }
752 return true;
753 }
754 /*}}}*/
755 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
756 // ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version it belongs to and to the package it is pointing to; OldDepLast
   caches the tail of the version's dependency list across calls. */
759 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
760 pkgCache::VerIterator &Ver,
761 string const &Version,
762 unsigned int const &Op,
763 unsigned int const &Type,
764 map_ptrloc* &OldDepLast)
765 {
   // snapshot of the map base: OldDepLast is a raw pointer and must be
   // rebased if any allocation below remaps the cache
766 void const * const oldMap = Map.Data();
767 // Get a structure
768 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
769 if (unlikely(Dependency == 0))
770 return false;
771
772 // Fill it in
773 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
774 Dynamic<pkgCache::DepIterator> DynDep(Dep);
775 Dep->ParentVer = Ver.Index();
776 Dep->Type = Type;
777 Dep->CompareOp = Op;
778 Dep->ID = Cache.HeaderP->DependsCount++;
779
780 // Probe the reverse dependency list for a version string that matches
781 if (Version.empty() == false)
782 {
783 /* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
784 if (I->Version != 0 && I.TargetVer() == Version)
785 Dep->Version = I->Version;*/
   // NOTE(review): this relies on the freshly allocated Dependency
   // being zero-initialized (string-sharing probe above is disabled) -
   // confirm against DynamicMMap::Allocate semantics
786 if (Dep->Version == 0) {
787 map_ptrloc const index = WriteStringInMap(Version);
788 if (unlikely(index == 0))
789 return false;
790 Dep->Version = index;
791 }
792 }
793
794 // Link it to the package
795 Dep->Package = Pkg.Index();
796 Dep->NextRevDepends = Pkg->RevDepends;
797 Pkg->RevDepends = Dep.Index();
798
799 // Do we know where to link the Dependency to?
800 if (OldDepLast == NULL)
801 {
   // first dependency for this version: walk to the tail of the list
802 OldDepLast = &Ver->DependsList;
803 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
804 OldDepLast = &D->NextDepends;
805 } else if (oldMap != Map.Data())
   // the cache moved since the caller obtained OldDepLast - rebase it
806 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
807
808 Dep->NextDepends = *OldDepLast;
809 *OldDepLast = Dep.Index();
810 OldDepLast = &Dep->NextDepends;
811
812 return true;
813 }
814 /*}}}*/
815 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
816 // ---------------------------------------------------------------------
817 /* This creates a Group and the Package to link this dependency to if
818 needed and handles also the caching of the old endpoint */
819 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
820 const string &PackageName,
821 const string &Arch,
822 const string &Version,
823 unsigned int Op,
824 unsigned int Type)
825 {
826 pkgCache::GrpIterator Grp;
827 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
828 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
829 return false;
830
831 // Locate the target package
832 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
833 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
834 if (Pkg.end() == true) {
835 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
836 return false;
837 }
838
839 // Is it a file dependency?
840 if (unlikely(PackageName[0] == '/'))
841 FoundFileDeps = true;
842
843 /* Caching the old end point speeds up generation substantially */
844 if (OldDepVer != Ver) {
845 OldDepLast = NULL;
846 OldDepVer = Ver;
847 }
848
849 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
850 }
851 /*}}}*/
852 // ListParser::NewProvides - Create a Provides element /*{{{*/
853 // ---------------------------------------------------------------------
854 /* */
855 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
856 const string &PkgName,
857 const string &PkgArch,
858 const string &Version)
859 {
860 pkgCache &Cache = Owner->Cache;
861
862 // We do not add self referencing provides
863 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
864 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
865 return true;
866
867 // Get a structure
868 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
869 if (unlikely(Provides == 0))
870 return false;
871 Cache.HeaderP->ProvidesCount++;
872
873 // Fill it in
874 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
875 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
876 Prv->Version = Ver.Index();
877 Prv->NextPkgProv = Ver->ProvidesList;
878 Ver->ProvidesList = Prv.Index();
879 if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
880 return false;
881
882 // Locate the target package
883 pkgCache::PkgIterator Pkg;
884 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
885 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
886 return false;
887
888 // Link it to the package
889 Prv->ParentPkg = Pkg.Index();
890 Prv->NextProvides = Pkg->ProvidesList;
891 Pkg->ProvidesList = Prv.Index();
892
893 return true;
894 }
895 /*}}}*/
896 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
897 // ---------------------------------------------------------------------
898 /* This is used to select which file is to be associated with all newly
899 added versions. The caller is responsible for setting the IMS fields. */
900 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
901 const pkgIndexFile &Index,
902 unsigned long Flags)
903 {
904 // Get some space for the structure
905 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
906 if (unlikely(idxFile == 0))
907 return false;
   // CurrentFile is a raw pointer; ReMap() rebases it on later remaps
908 CurrentFile = Cache.PkgFileP + idxFile;
909
910 // Fill it in
   // store the indices first: each write may remap and move CurrentFile
911 map_ptrloc const idxFileName = WriteStringInMap(File);
912 map_ptrloc const idxSite = WriteUniqString(Site);
913 if (unlikely(idxFileName == 0 || idxSite == 0))
914 return false;
915 CurrentFile->FileName = idxFileName;
916 CurrentFile->Site = idxSite;
   // hook the new file in at the head of the file list
917 CurrentFile->NextFile = Cache.HeaderP->FileList;
918 CurrentFile->Flags = Flags;
919 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
920 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
921 if (unlikely(idxIndexType == 0))
922 return false;
923 CurrentFile->IndexType = idxIndexType;
924 PkgFileName = File;
925 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
926 Cache.HeaderP->PackageFileCount++;
927
928 if (Progress != 0)
929 Progress->SubProgress(Index.Size());
930 return true;
931 }
932 /*}}}*/
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number. Returns 0 on allocation failure. */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   // Bucket caches the last hit for this 2-char hash; a match short-circuits
   // the list walk entirely.
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point in the intrusive StringItem list;
   // the list is kept ordered so the walk can stop at the first entry
   // comparing <= S (Res == 0 means the string already exists).
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
        I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
         break;
   }

   // Match
   if (Res == 0)
   {
      Bucket = I;
      return I->String;
   }

   // Get a structure. Remember the old base pointer: the allocations
   // below may move the mmap, invalidating Last and I.
   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
      return 0;
   // Rebase the raw pointers onto the (possibly) relocated map
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
   }
   // Splice the new item in front of I
   *Last = Item;

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   // Remember the fresh entry in the transient hash cache
   Bucket = ItemP;
   return ItemP->String;
}
989 /*}}}*/
990 // CheckValidity - Check that a cache is up-to-date /*{{{*/
991 // ---------------------------------------------------------------------
992 /* This just verifies that each file in the list of index files exists,
993 has matching attributes with the cache and the cache does not have
994 any extra files. */
995 static bool CheckValidity(const string &CacheFile,
996 pkgSourceList &List,
997 FileIterator Start,
998 FileIterator End,
999 MMap **OutMap = 0)
1000 {
1001 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1002 // No file, certainly invalid
1003 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1004 {
1005 if (Debug == true)
1006 std::clog << "CacheFile doesn't exist" << std::endl;
1007 return false;
1008 }
1009
1010 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1011 {
1012 if (Debug == true)
1013 std::clog << "sources.list is newer than the cache" << std::endl;
1014 return false;
1015 }
1016
1017 // Map it
1018 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1019 SPtr<MMap> Map = new MMap(CacheF,0);
1020 pkgCache Cache(Map);
1021 if (_error->PendingError() == true || Map->Size() == 0)
1022 {
1023 if (Debug == true)
1024 std::clog << "Errors are pending or Map is empty()" << std::endl;
1025 _error->Discard();
1026 return false;
1027 }
1028
1029 /* Now we check every index file, see if it is in the cache,
1030 verify the IMS data and check that it is on the disk too.. */
1031 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1032 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1033 for (; Start != End; ++Start)
1034 {
1035 if (Debug == true)
1036 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1037 if ((*Start)->HasPackages() == false)
1038 {
1039 if (Debug == true)
1040 std::clog << "Has NO packages" << std::endl;
1041 continue;
1042 }
1043
1044 if ((*Start)->Exists() == false)
1045 {
1046 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1047 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1048 (*Start)->Describe().c_str());
1049 #endif
1050 if (Debug == true)
1051 std::clog << "file doesn't exist" << std::endl;
1052 continue;
1053 }
1054
1055 // FindInCache is also expected to do an IMS check.
1056 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1057 if (File.end() == true)
1058 {
1059 if (Debug == true)
1060 std::clog << "FindInCache returned end-Pointer" << std::endl;
1061 return false;
1062 }
1063
1064 Visited[File->ID] = true;
1065 if (Debug == true)
1066 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1067 }
1068
1069 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1070 if (Visited[I] == false)
1071 {
1072 if (Debug == true)
1073 std::clog << "File with ID" << I << " wasn't visited" << std::endl;
1074 return false;
1075 }
1076
1077 if (_error->PendingError() == true)
1078 {
1079 if (Debug == true)
1080 {
1081 std::clog << "Validity failed because of pending errors:" << std::endl;
1082 _error->DumpErrors();
1083 }
1084 _error->Discard();
1085 return false;
1086 }
1087
1088 if (OutMap != 0)
1089 *OutMap = Map.UnGuard();
1090 return true;
1091 }
1092 /*}}}*/
1093 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1094 // ---------------------------------------------------------------------
1095 /* Size is kind of an abstract notion that is only used for the progress
1096 meter */
1097 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1098 {
1099 unsigned long TotalSize = 0;
1100 for (; Start != End; ++Start)
1101 {
1102 if ((*Start)->HasPackages() == false)
1103 continue;
1104 TotalSize += (*Start)->Size();
1105 }
1106 return TotalSize;
1107 }
1108 /*}}}*/
1109 // BuildCache - Merge the list of index files into the cache /*{{{*/
1110 // ---------------------------------------------------------------------
1111 /* */
1112 static bool BuildCache(pkgCacheGenerator &Gen,
1113 OpProgress *Progress,
1114 unsigned long &CurrentSize,unsigned long TotalSize,
1115 FileIterator Start, FileIterator End)
1116 {
1117 FileIterator I;
1118 for (I = Start; I != End; ++I)
1119 {
1120 if ((*I)->HasPackages() == false)
1121 continue;
1122
1123 if ((*I)->Exists() == false)
1124 continue;
1125
1126 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1127 {
1128 _error->Warning("Duplicate sources.list entry %s",
1129 (*I)->Describe().c_str());
1130 continue;
1131 }
1132
1133 unsigned long Size = (*I)->Size();
1134 if (Progress != NULL)
1135 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1136 CurrentSize += Size;
1137
1138 if ((*I)->Merge(Gen,Progress) == false)
1139 return false;
1140 }
1141
1142 if (Gen.HasFileDeps() == true)
1143 {
1144 if (Progress != NULL)
1145 Progress->Done();
1146 TotalSize = ComputeSize(Start, End);
1147 CurrentSize = 0;
1148 for (I = Start; I != End; ++I)
1149 {
1150 unsigned long Size = (*I)->Size();
1151 if (Progress != NULL)
1152 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1153 CurrentSize += Size;
1154 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1155 return false;
1156 }
1157 }
1158
1159 return true;
1160 }
1161 /*}}}*/
1162 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1163 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1164 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1165 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1166 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1167 Flags |= MMap::Moveable;
1168 if (_config->FindB("APT::Cache-Fallback", false) == true)
1169 Flags |= MMap::Fallback;
1170 if (CacheF != NULL)
1171 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1172 else
1173 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1174 }
1175 /*}}}*/
1176 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1177 // ---------------------------------------------------------------------
1178 /* This makes sure that the status cache (the cache that has all
1179 index files from the sources list and all local ones) is ready
1180 to be mmaped. If OutMap is not zero then a MMap object representing
1181 the cache will be stored there. This is pretty much mandetory if you
1182 are using AllowMem. AllowMem lets the function be run as non-root
1183 where it builds the cache 'fast' into a memory buffer. */
1184 __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1185 MMap **OutMap, bool AllowMem)
1186 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   // Collect all index files from every metaIndex in the source list
   vector<pkgIndexFile *> Files;
   for (vector<metaIndex *>::const_iterator i = List.begin();
        i != List.end();
        ++i)
   {
      vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end();
	   ++j)
         Files.push_back (*j);
   }
   
   // Everything before EndOfSource came from sources.list; status files
   // (added by the system) follow after this index.
   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      // strip a trailing "apt/" component so the directories are created
      // relative to the parent cache directory
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
	 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
   
   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin. (CheckValidity also hands the mapped cache to OutMap)
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
	std::clog << "pkgcache.bin is NOT valid" << std::endl;
   
   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      // Try a file-backed map first; on failure fall back to memory
      // (error stack keeps the failure silent when AllowMem permits it)
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
      {
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	 if (Debug == true)
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 Writeable = false;
	 if (AllowMem == false)
	 {
	    _error->MergeWithStack();
	    return false;
	 }
	 _error->RevertToStack();
      }
      else if (Debug == true)
      {
	 _error->MergeWithStack();
	 std::clog << "Open filebased MMap" << std::endl;
      }
   }
   if (Writeable == false || CacheFile.empty() == true)
   {
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      if (Debug == true)
	 std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Lets try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache so only the status files
      // need to be parsed
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
       || SCacheF.Read((unsigned char *)Map->Data() + alloc,
		       SCacheF.Size()) == false)
	 return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache on top of the preloaded source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   else
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());
      
      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)
	 return false;
      
      // Write it back
      if (Writeable == true && SrcCacheFile.empty() == false)
      {
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	    return false;
	 
	 fchmod(SCacheF.Fd(),0644);
	 
	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 SCacheF.Sync();
	 
	 // Write out the proper header: the cache was written with Dirty
	 // set, so rewrite just the header with Dirty cleared, then
	 // restore the in-memory flag for the ongoing build.
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;
	 SCacheF.Sync();
      }
      
      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
	 // File-backed build: re-map the finished cache file read-only
	 delete Map.UnGuard();
	 *OutMap = new MMap(*CacheF,0);
      }
      else
      {
	 // Memory build: hand over ownership of the dynamic map
	 *OutMap = Map.UnGuard();
      }      
   }
   
   return true;
}
1386 /*}}}*/
1387 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1388 // ---------------------------------------------------------------------
1389 /* */
1390 __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1391 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1392 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1393 {
1394 vector<pkgIndexFile *> Files;
1395 unsigned long EndOfSource = Files.size();
1396 if (_system->AddStatusFiles(Files) == false)
1397 return false;
1398
1399 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1400 unsigned long CurrentSize = 0;
1401 unsigned long TotalSize = 0;
1402
1403 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1404
1405 // Build the status cache
1406 if (Progress != NULL)
1407 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1408 pkgCacheGenerator Gen(Map.Get(),Progress);
1409 if (_error->PendingError() == true)
1410 return false;
1411 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1412 Files.begin()+EndOfSource,Files.end()) == false)
1413 return false;
1414
1415 // FIXME: move me to a better place
1416 Gen.FinishCache(Progress);
1417
1418 if (_error->PendingError() == true)
1419 return false;
1420 *OutMap = Map.UnGuard();
1421
1422 return true;
1423 }
1424 /*}}}*/
1425 // IsDuplicateDescription /*{{{*/
1426 bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1427 MD5SumValue const &CurMd5, std::string const &CurLang)
1428 {
1429 for ( ; Desc.end() == false; ++Desc)
1430 if (MD5SumValue(Desc.md5()) == CurMd5 && Desc.LanguageCode() == CurLang)
1431 return true;
1432 return false;
1433 }
1434 /*}}}*/
1435