1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
38 typedef std::vector<pkgIndexFile *>::iterator FileIterator;
39 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
40
41 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
42 MD5SumValue const &CurMd5, std::string const &CurLang);
43
44 using std::string;
45
46 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
47 // ---------------------------------------------------------------------
48 /* We set the dirty flag and make sure that it is written to the disk */
49 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
50 Map(*pMap), Cache(pMap,false), Progress(Prog),
51 FoundFileDeps(0)
52 {
53 CurrentFile = 0;
54 memset(UniqHash,0,sizeof(UniqHash));
55
56 if (_error->PendingError() == true)
57 return;
58
59 if (Map.Size() == 0)
60 {
61 // Set up the map interface..
62 Cache.HeaderP = (pkgCache::Header *)Map.Data();
63 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
64 return;
65
66 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
67
68 // Starting header
69 *Cache.HeaderP = pkgCache::Header();
70 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
71 Cache.HeaderP->VerSysName = idxVerSysName;
72 // this pointer is set in ReMap, but we need it now for WriteUniqString
73 Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
74 map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
75 Cache.HeaderP->Architecture = idxArchitecture;
76 if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
77 return;
78 Cache.ReMap();
79 }
80 else
81 {
82 // Map directly from the existing file
83 Cache.ReMap();
84 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
85 if (Cache.VS != _system->VS)
86 {
87 _error->Error(_("Cache has an incompatible versioning system"));
88 return;
89 }
90 }
91
92 Cache.HeaderP->Dirty = true;
93 Map.Sync(0,sizeof(pkgCache::Header));
94 }
95 /*}}}*/
96 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
97 // ---------------------------------------------------------------------
98 /* We sync the data then unset the dirty flag in two steps so as to
99 avoid a problem during a crash */
100 pkgCacheGenerator::~pkgCacheGenerator()
101 {
102 if (_error->PendingError() == true)
103 return;
104 if (Map.Sync() == false)
105 return;
106
107 Cache.HeaderP->Dirty = false;
108 Cache.HeaderP->CacheFileSize = Map.Size();
109 Map.Sync(0,sizeof(pkgCache::Header));
110 }
111 /*}}}*/
112 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
113 if (oldMap == newMap)
114 return;
115
116 if (_config->FindB("Debug::pkgCacheGen", false))
117 std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;
118
119 Cache.ReMap(false);
120
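// All pointers we kept into the old mapping (CurrentFile, the UniqHash
// buckets and every iterator registered through Dynamic<>) are shifted below
// by the distance between the two mappings, expressed in units of the
// pointed-to type, so they end up at the same objects in the new mapping.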
121 CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;
122
123 for (size_t i = 0; i < _count(UniqHash); ++i)
124 if (UniqHash[i] != 0)
125 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;
126
127 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
128 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
129 (*i)->ReMap(oldMap, newMap);
130 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
131 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
132 (*i)->ReMap(oldMap, newMap);
133 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
134 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
135 (*i)->ReMap(oldMap, newMap);
136 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
137 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
138 (*i)->ReMap(oldMap, newMap);
139 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
140 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
141 (*i)->ReMap(oldMap, newMap);
142 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
143 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
144 (*i)->ReMap(oldMap, newMap);
145 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
146 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
147 (*i)->ReMap(oldMap, newMap);
148 } /*}}}*/
149 // CacheGenerator::WriteStringInMap /*{{{*/
150 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
151 const unsigned long &Len) {
152 void const * const oldMap = Map.Data();
153 map_ptrloc const index = Map.WriteString(String, Len);
154 if (index != 0)
155 ReMap(oldMap, Map.Data());
156 return index;
157 }
158 /*}}}*/
159 // CacheGenerator::WriteStringInMap /*{{{*/
160 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
161 void const * const oldMap = Map.Data();
162 map_ptrloc const index = Map.WriteString(String);
163 if (index != 0)
164 ReMap(oldMap, Map.Data());
165 return index;
166 }
167 /*}}}*/
168 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
169 void const * const oldMap = Map.Data();
170 map_ptrloc const index = Map.Allocate(size);
171 if (index != 0)
172 ReMap(oldMap, Map.Data());
173 return index;
174 }
175 /*}}}*/
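// Note: each of the three helpers above may grow (and therefore move) the
// DynamicMMap, which is why they capture the old base address and call
// ReMap() so that all registered iterators stay valid.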
176 // CacheGenerator::MergeList - Merge the package list /*{{{*/
177 // ---------------------------------------------------------------------
178 /* This drives the generation of the entries in the cache. Each loop
179 goes through a single package record from the underlying parse engine. */
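// Callers normally reach this indirectly: BuildCache() below hands the
// generator to pkgIndexFile::Merge(), which supplies the ListParser that
// matches its index format.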
180 bool pkgCacheGenerator::MergeList(ListParser &List,
181 pkgCache::VerIterator *OutVer)
182 {
183 List.Owner = this;
184
185 unsigned int Counter = 0;
186 while (List.Step() == true)
187 {
188 string const PackageName = List.Package();
189 if (PackageName.empty() == true)
190 return false;
191
192 Counter++;
193 if (Counter % 100 == 0 && Progress != 0)
194 Progress->Progress(List.Offset());
195
196 string Arch = List.Architecture();
197 string const Version = List.Version();
198 if (Version.empty() == true && Arch.empty() == true)
199 {
200 // package descriptions
201 if (MergeListGroup(List, PackageName) == false)
202 return false;
203 continue;
204 }
205
206 if (Arch.empty() == true)
207 {
208 // use the pseudo arch 'none' for arch-less packages
209 Arch = "none";
210 /* We might build a SingleArchCache here, which we don't want to blow up
211 into a proper MultiArchCache just for these :none packages, so just ensure
212 that we always have a native package structure first for SingleArch */
213 pkgCache::PkgIterator NP;
214 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
215 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
216 // TRANSLATOR: The first placeholder is a package name,
217 // the other two should be copied verbatim as they include debug info
218 return _error->Error(_("Error occurred while processing %s (%s%d)"),
219 PackageName.c_str(), "NewPackage", 0);
220 }
221
222 // Get a pointer to the package structure
223 pkgCache::PkgIterator Pkg;
224 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
225 if (NewPackage(Pkg, PackageName, Arch) == false)
226 // TRANSLATOR: The first placeholder is a package name,
227 // the other two should be copied verbatim as they include debug info
228 return _error->Error(_("Error occurred while processing %s (%s%d)"),
229 PackageName.c_str(), "NewPackage", 1);
230
231
232 if (Version.empty() == true)
233 {
234 if (MergeListPackage(List, Pkg) == false)
235 return false;
236 }
237 else
238 {
239 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
240 return false;
241 }
242
243 if (OutVer != 0)
244 {
245 FoundFileDeps |= List.HasFileDeps();
246 return true;
247 }
248 }
249
250 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
251 return _error->Error(_("Wow, you exceeded the number of package "
252 "names this APT is capable of."));
253 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
254 return _error->Error(_("Wow, you exceeded the number of versions "
255 "this APT is capable of."));
256 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
257 return _error->Error(_("Wow, you exceeded the number of descriptions "
258 "this APT is capable of."));
259 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
260 return _error->Error(_("Wow, you exceeded the number of dependencies "
261 "this APT is capable of."));
262
263 FoundFileDeps |= List.HasFileDeps();
264 return true;
265 }
266 // CacheGenerator::MergeListGroup /*{{{*/
267 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
268 {
269 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
270 // a group has no data of its own, only packages have it, but stanzas
271 // like this come from Translation- files to add descriptions, and
272 // without a version we don't need a description for it…
273 if (Grp.end() == true)
274 return true;
275 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
276
277 pkgCache::PkgIterator Pkg;
278 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
279 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
280 if (MergeListPackage(List, Pkg) == false)
281 return false;
282
283 return true;
284 }
285 /*}}}*/
286 // CacheGenerator::MergeListPackage /*{{{*/
287 bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
288 {
289 // we first process the package, then the descriptions
290 // (for deb this package processing is in fact a no-op)
291 pkgCache::VerIterator Ver(Cache);
292 Dynamic<pkgCache::VerIterator> DynVer(Ver);
293 if (List.UsePackage(Pkg, Ver) == false)
294 return _error->Error(_("Error occurred while processing %s (%s%d)"),
295 Pkg.Name(), "UsePackage", 1);
296
297 // Find the right version to write the description
298 MD5SumValue CurMd5 = List.Description_md5();
299 std::string CurLang = List.DescriptionLanguage();
300
301 for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
302 {
303 pkgCache::DescIterator VerDesc = Ver.DescriptionList();
304
305 // a version can only have one md5 describing it
306 if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
307 continue;
308
309 // don't add a new description if we have one for the given
310 // md5 && language
311 if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
312 continue;
313
314 pkgCache::DescIterator Desc;
315 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
316
317 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
318 if (unlikely(descindex == 0 && _error->PendingError()))
319 return _error->Error(_("Error occurred while processing %s (%s%d)"),
320 Pkg.Name(), "NewDescription", 1);
321
322 Desc->ParentPkg = Pkg.Index();
323
324 // we add at the end so that the start stays constant, as we need
325 // that to be able to share these lists efficiently
326 VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
327 for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
328 map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
329 *LastNextDesc = descindex;
330
331 if (NewFileDesc(Desc,List) == false)
332 return _error->Error(_("Error occurred while processing %s (%s%d)"),
333 Pkg.Name(), "NewFileDesc", 1);
334
335 // we can stop here as all "same" versions will share the description
336 break;
337 }
338
339 return true;
340 }
341 /*}}}*/
342 // CacheGenerator::MergeListVersion /*{{{*/
343 bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
344 std::string const &Version, pkgCache::VerIterator* &OutVer)
345 {
346 pkgCache::VerIterator Ver = Pkg.VersionList();
347 Dynamic<pkgCache::VerIterator> DynVer(Ver);
348 map_ptrloc *LastVer = &Pkg->VersionList;
349 void const * oldMap = Map.Data();
350
351 unsigned long const Hash = List.VersionHash();
352 if (Ver.end() == false)
353 {
354 /* We know the list is sorted so we use that fact in the search.
355 Insertion of new versions is done with correct sorting */
356 int Res = 1;
357 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
358 {
359 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
360 // Version is higher than the current version - insert here
361 if (Res > 0)
362 break;
363 // Version strings are equal - is the hash also equal?
364 if (Res == 0 && Ver->Hash == Hash)
365 break;
366 // proceed with the next one until we either have the right one
367 // or find another version (which will be lower)
368 }
369
370 /* We already have a version for this item, record that we saw it */
371 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
372 {
373 if (List.UsePackage(Pkg,Ver) == false)
374 return _error->Error(_("Error occurred while processing %s (%s%d)"),
375 Pkg.Name(), "UsePackage", 2);
376
377 if (NewFileVer(Ver,List) == false)
378 return _error->Error(_("Error occurred while processing %s (%s%d)"),
379 Pkg.Name(), "NewFileVer", 1);
380
381 // Read only a single record and return
382 if (OutVer != 0)
383 {
384 *OutVer = Ver;
385 return true;
386 }
387
388 return true;
389 }
390 }
391
392 // Add a new version
393 map_ptrloc const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
394 if (verindex == 0 && _error->PendingError())
395 return _error->Error(_("Error occurred while processing %s (%s%d)"),
396 Pkg.Name(), "NewVersion", 1);
397
398 if (oldMap != Map.Data())
399 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
400 *LastVer = verindex;
401
402 if (unlikely(List.NewVersion(Ver) == false))
403 return _error->Error(_("Error occurred while processing %s (%s%d)"),
404 Pkg.Name(), "NewVersion", 2);
405
406 if (unlikely(List.UsePackage(Pkg,Ver) == false))
407 return _error->Error(_("Error occurred while processing %s (%s%d)"),
408 Pkg.Name(), "UsePackage", 3);
409
410 if (unlikely(NewFileVer(Ver,List) == false))
411 return _error->Error(_("Error occurred while processing %s (%s%d)"),
412 Pkg.Name(), "NewFileVer", 2);
413
414 pkgCache::GrpIterator Grp = Pkg.Group();
415 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
416
417 /* If it is the first version of this package we need to add implicit
418 Multi-Arch dependencies to all other package versions in the group now -
419 otherwise we just add them for this new version */
420 if (Pkg.VersionList()->NextVer == 0)
421 {
422 pkgCache::PkgIterator P = Grp.PackageList();
423 Dynamic<pkgCache::PkgIterator> DynP(P);
424 for (; P.end() != true; P = Grp.NextPkg(P))
425 {
426 if (P->ID == Pkg->ID)
427 continue;
428 pkgCache::VerIterator V = P.VersionList();
429 Dynamic<pkgCache::VerIterator> DynV(V);
430 for (; V.end() != true; ++V)
431 if (unlikely(AddImplicitDepends(V, Pkg) == false))
432 return _error->Error(_("Error occurred while processing %s (%s%d)"),
433 Pkg.Name(), "AddImplicitDepends", 1);
434 }
435 /* :none packages are packages without an architecture. They are forbidden by
436 debian-policy, so usually they will only be in (old) dpkg status files -
437 and dpkg will complain about them - and are pretty rare. We therefore
438 usually do not create conflicts while the parent is created, but only if a :none
439 package (= the target) appears. This creates incorrect dependencies on :none
440 for architecture-specific dependencies on the package we copy from, but we
441 will ignore this bug as architecture-specific dependencies are only allowed
442 in jessie and until then the :none packages should be extinct (hopefully).
443 In other words: This should work long enough to allow graceful removal of
444 these packages, it is not supposed to allow users to keep using them … */
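// Example with hypothetical names: if another package declares
// 'Conflicts: foo' and a foo:none package appears later, the block below
// duplicates that Conflicts/Breaks/Replaces entry so it also targets foo:none.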
445 if (strcmp(Pkg.Arch(), "none") == 0)
446 {
447 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
448 if (M.end() == false && Pkg != M)
449 {
450 pkgCache::DepIterator D = M.RevDependsList();
451 Dynamic<pkgCache::DepIterator> DynD(D);
452 for (; D.end() == false; ++D)
453 {
454 if ((D->Type != pkgCache::Dep::Conflicts &&
455 D->Type != pkgCache::Dep::DpkgBreaks &&
456 D->Type != pkgCache::Dep::Replaces) ||
457 D.ParentPkg().Group() == Grp)
458 continue;
459
460 map_ptrloc *OldDepLast = NULL;
461 pkgCache::VerIterator ConVersion = D.ParentVer();
462 Dynamic<pkgCache::VerIterator> DynV(ConVersion);
463 // duplicate the Conflicts/Breaks/Replaces for :none arch
464 NewDepends(Pkg, ConVersion, D->Version,
465 D->CompareOp, D->Type, OldDepLast);
466 }
467 }
468 }
469 }
470 if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
471 return _error->Error(_("Error occurred while processing %s (%s%d)"),
472 Pkg.Name(), "AddImplicitDepends", 2);
473
474 // Read only a single record and return
475 if (OutVer != 0)
476 {
477 *OutVer = Ver;
478 return true;
479 }
480
481 /* Record the Description (it is not translated) */
482 MD5SumValue CurMd5 = List.Description_md5();
483 if (CurMd5.Value().empty() == true)
484 return true;
485 std::string CurLang = List.DescriptionLanguage();
486
487 /* Before we add a new description we first search in the group for
488 a version with a description of the same MD5 - if we find one we reuse its
489 description group instead of creating our own for this version */
490 for (pkgCache::PkgIterator P = Grp.PackageList();
491 P.end() == false; P = Grp.NextPkg(P))
492 {
493 for (pkgCache::VerIterator V = P.VersionList();
494 V.end() == false; ++V)
495 {
496 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
497 continue;
498 Ver->DescriptionList = V->DescriptionList;
499 return true;
500 }
501 }
502
503 // We haven't found a reusable description, so add the first description
504 pkgCache::DescIterator Desc = Ver.DescriptionList();
505 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
506
507 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
508 if (unlikely(descindex == 0 && _error->PendingError()))
509 return _error->Error(_("Error occurred while processing %s (%s%d)"),
510 Pkg.Name(), "NewDescription", 2);
511
512 Desc->ParentPkg = Pkg.Index();
513 Ver->DescriptionList = descindex;
514
515 if (NewFileDesc(Desc,List) == false)
516 return _error->Error(_("Error occurred while processing %s (%s%d)"),
517 Pkg.Name(), "NewFileDesc", 2);
518
519 return true;
520 }
521 /*}}}*/
522 /*}}}*/
523 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
524 // ---------------------------------------------------------------------
525 /* If we found any file depends while parsing the main list we need to
526 resolve them. Since we do not want to load the entire list of files
527 into the cache as virtual packages we do a two-stage effort. MergeList
528 identifies the file depends and this creates Provides for them by
529 re-parsing all the indexes. */
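// This second pass only attaches Provides to versions that already exist in
// the cache; records that no longer match a cached version merely trigger
// the warning at the end of the loop.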
530 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
531 {
532 List.Owner = this;
533
534 unsigned int Counter = 0;
535 while (List.Step() == true)
536 {
537 string PackageName = List.Package();
538 if (PackageName.empty() == true)
539 return false;
540 string Version = List.Version();
541 if (Version.empty() == true)
542 continue;
543
544 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
545 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
546 if (Pkg.end() == true)
547 return _error->Error(_("Error occurred while processing %s (%s%d)"),
548 PackageName.c_str(), "FindPkg", 1);
549 Counter++;
550 if (Counter % 100 == 0 && Progress != 0)
551 Progress->Progress(List.Offset());
552
553 unsigned long Hash = List.VersionHash();
554 pkgCache::VerIterator Ver = Pkg.VersionList();
555 Dynamic<pkgCache::VerIterator> DynVer(Ver);
556 for (; Ver.end() == false; ++Ver)
557 {
558 if (Ver->Hash == Hash && Version == Ver.VerStr())
559 {
560 if (List.CollectFileProvides(Cache,Ver) == false)
561 return _error->Error(_("Error occurred while processing %s (%s%d)"),
562 PackageName.c_str(), "CollectFileProvides", 1);
563 break;
564 }
565 }
566
567 if (Ver.end() == true)
568 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
569 }
570
571 return true;
572 }
573 /*}}}*/
574 // CacheGenerator::NewGroup - Add a new group /*{{{*/
575 // ---------------------------------------------------------------------
576 /* This creates a new group structure and adds it to the hash table */
577 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
578 {
579 Grp = Cache.FindGrp(Name);
580 if (Grp.end() == false)
581 return true;
582
583 // Get a structure
584 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
585 if (unlikely(Group == 0))
586 return false;
587
588 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
589 map_ptrloc const idxName = WriteStringInMap(Name);
590 if (unlikely(idxName == 0))
591 return false;
592 Grp->Name = idxName;
593
594 // Insert it into the hash table
595 unsigned long const Hash = Cache.Hash(Name);
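// Each hash chain is kept sorted case-insensitively: skip all entries that
// compare smaller than Name and splice the new group in before the first
// one that does not.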
596 map_ptrloc *insertAt = &Cache.HeaderP->GrpHashTable[Hash];
597 while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + *insertAt)->Name) > 0)
598 insertAt = &(Cache.GrpP + *insertAt)->Next;
599 Grp->Next = *insertAt;
600 *insertAt = Group;
601
602 Grp->ID = Cache.HeaderP->GroupCount++;
603 return true;
604 }
605 /*}}}*/
606 // CacheGenerator::NewPackage - Add a new package /*{{{*/
607 // ---------------------------------------------------------------------
608 /* This creates a new package structure and adds it to the hash table */
609 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
610 const string &Arch) {
611 pkgCache::GrpIterator Grp;
612 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
613 if (unlikely(NewGroup(Grp, Name) == false))
614 return false;
615
616 Pkg = Grp.FindPkg(Arch);
617 if (Pkg.end() == false)
618 return true;
619
620 // Get a structure
621 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
622 if (unlikely(Package == 0))
623 return false;
624 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
625
626 // Insert the package into our package list
627 if (Grp->FirstPackage == 0) // the group is new
628 {
629 Grp->FirstPackage = Package;
630 // Insert it into the hash table
631 unsigned long const Hash = Cache.Hash(Name);
632 map_ptrloc *insertAt = &Cache.HeaderP->PkgHashTable[Hash];
633 while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.PkgP + *insertAt)->Name) > 0)
634 insertAt = &(Cache.PkgP + *insertAt)->NextPackage;
635 Pkg->NextPackage = *insertAt;
636 *insertAt = Package;
637 }
638 else // Group the Packages together
639 {
640 // this package is the new last package
641 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
642 Pkg->NextPackage = LastPkg->NextPackage;
643 LastPkg->NextPackage = Package;
644 }
645 Grp->LastPackage = Package;
646
647 // Set the name, arch and the ID
648 Pkg->Name = Grp->Name;
649 Pkg->Group = Grp.Index();
650 // 'all' is mapped to the native architecture
651 map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
652 if (unlikely(idxArch == 0))
653 return false;
654 Pkg->Arch = idxArch;
655 Pkg->ID = Cache.HeaderP->PackageCount++;
656
657 return true;
658 }
659 /*}}}*/
660 // CacheGenerator::AddImplicitDepends /*{{{*/
661 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
662 pkgCache::PkgIterator &P,
663 pkgCache::VerIterator &V)
664 {
665 // copy P.Arch() into a string here as a cache remap
666 // in NewDepends() later may alter the pointer location
667 string Arch = P.Arch() == NULL ? "" : P.Arch();
668 map_ptrloc *OldDepLast = NULL;
669 /* MultiArch handling introduces a lot of implicit Dependencies:
670 - MultiArch: same → Co-Installable if they have the same version
671 - All others conflict with all other group members */
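// Example with a hypothetical libfoo 1.0 marked 'Multi-Arch: same': its new
// version gets implicit 'Replaces: libfoo:<other> (<< 1.0)' and
// 'Breaks: libfoo:<other> (!= 1.0)' entries against every other architecture
// in the group; without the flag a plain 'Conflicts: libfoo:<other>' is added.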
672 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
673 pkgCache::PkgIterator D = G.PackageList();
674 Dynamic<pkgCache::PkgIterator> DynD(D);
675 map_ptrloc const VerStrIdx = V->VerStr;
676 for (; D.end() != true; D = G.NextPkg(D))
677 {
678 if (Arch == D.Arch() || D->VersionList == 0)
679 continue;
680 /* We allow only one installed arch at a time
681 per group, therefore each group member conflicts
682 with all other group members */
683 if (coInstall == true)
684 {
685 // Replaces: ${self}:other ( << ${binary:Version})
686 NewDepends(D, V, VerStrIdx,
687 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
688 OldDepLast);
689 // Breaks: ${self}:other (!= ${binary:Version})
690 NewDepends(D, V, VerStrIdx,
691 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
692 OldDepLast);
693 } else {
694 // Conflicts: ${self}:other
695 NewDepends(D, V, 0,
696 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
697 OldDepLast);
698 }
699 }
700 return true;
701 }
702 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
703 pkgCache::PkgIterator &D)
704 {
705 /* MultiArch handling introduces a lot of implicit Dependencies:
706 - MultiArch: same → Co-Installable if they have the same version
707 - All others conflict with all other group members */
708 map_ptrloc *OldDepLast = NULL;
709 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
710 if (coInstall == true)
711 {
712 map_ptrloc const VerStrIdx = V->VerStr;
713 // Replaces: ${self}:other ( << ${binary:Version})
714 NewDepends(D, V, VerStrIdx,
715 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
716 OldDepLast);
717 // Breaks: ${self}:other (!= ${binary:Version})
718 NewDepends(D, V, VerStrIdx,
719 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
720 OldDepLast);
721 } else {
722 // Conflicts: ${self}:other
723 NewDepends(D, V, 0,
724 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
725 OldDepLast);
726 }
727 return true;
728 }
729
730 /*}}}*/
731 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
732 // ---------------------------------------------------------------------
733 /* */
734 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
735 ListParser &List)
736 {
737 if (CurrentFile == 0)
738 return true;
739
740 // Get a structure
741 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
742 if (VerFile == 0)
743 return 0;
744
745 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
746 VF->File = CurrentFile - Cache.PkgFileP;
747
748 // Link it to the end of the list
749 map_ptrloc *Last = &Ver->FileList;
750 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
751 Last = &V->NextFile;
752 VF->NextFile = *Last;
753 *Last = VF.Index();
754
755 VF->Offset = List.Offset();
756 VF->Size = List.Size();
757 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
758 Cache.HeaderP->MaxVerFileSize = VF->Size;
759 Cache.HeaderP->VerFileCount++;
760
761 return true;
762 }
763 /*}}}*/
764 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
765 // ---------------------------------------------------------------------
766 /* This puts a version structure in the linked list */
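// The returned value is the map offset of the new Version; the caller is
// responsible for linking it into the package's version list (as
// MergeListVersion above does via *LastVer).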
767 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
768 const string &VerStr,
769 map_ptrloc const ParentPkg,
770 unsigned long const Hash,
771 unsigned long Next)
772 {
773 // Get a structure
774 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
775 if (Version == 0)
776 return 0;
777
778 // Fill it in
779 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
780 //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
781 Ver->NextVer = Next;
782 Ver->ParentPkg = ParentPkg;
783 Ver->Hash = Hash;
784 Ver->ID = Cache.HeaderP->VersionCount++;
785
786 // try to find the version string in the group for reuse
787 pkgCache::PkgIterator Pkg = Ver.ParentPkg();
788 pkgCache::GrpIterator Grp = Pkg.Group();
789 if (Pkg.end() == false && Grp.end() == false)
790 {
791 for (pkgCache::PkgIterator P = Grp.PackageList(); P.end() == false; P = Grp.NextPkg(P))
792 {
793 if (Pkg == P)
794 continue;
795 for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; ++V)
796 {
797 int const cmp = strcmp(V.VerStr(), VerStr.c_str());
798 if (cmp == 0)
799 {
800 Ver->VerStr = V->VerStr;
801 return Version;
802 }
803 else if (cmp < 0)
804 break;
805 }
806 }
807 }
808 // haven't found the version string, so create
809 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
810 if (unlikely(idxVerStr == 0))
811 return 0;
812 Ver->VerStr = idxVerStr;
813 return Version;
814 }
815 /*}}}*/
816 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
817 // ---------------------------------------------------------------------
818 /* */
819 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
820 ListParser &List)
821 {
822 if (CurrentFile == 0)
823 return true;
824
825 // Get a structure
826 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
827 if (DescFile == 0)
828 return false;
829
830 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
831 DF->File = CurrentFile - Cache.PkgFileP;
832
833 // Link it to the end of the list
834 map_ptrloc *Last = &Desc->FileList;
835 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
836 Last = &D->NextFile;
837
838 DF->NextFile = *Last;
839 *Last = DF.Index();
840
841 DF->Offset = List.Offset();
842 DF->Size = List.Size();
843 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
844 Cache.HeaderP->MaxDescFileSize = DF->Size;
845 Cache.HeaderP->DescFileCount++;
846
847 return true;
848 }
849 /*}}}*/
850 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
851 // ---------------------------------------------------------------------
852 /* This puts a description structure in the linked list */
853 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
854 const string &Lang,
855 const MD5SumValue &md5sum,
856 map_ptrloc idxmd5str)
857 {
858 // Get a structure
859 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
860 if (Description == 0)
861 return 0;
862
863 // Fill it in
864 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
865 Desc->ID = Cache.HeaderP->DescriptionCount++;
866 map_ptrloc const idxlanguage_code = WriteUniqString(Lang);
867 if (unlikely(idxlanguage_code == 0))
868 return 0;
869 Desc->language_code = idxlanguage_code;
870
871 if (idxmd5str != 0)
872 Desc->md5sum = idxmd5str;
873 else
874 {
875 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
876 if (unlikely(idxmd5sum == 0))
877 return 0;
878 Desc->md5sum = idxmd5sum;
879 }
880
881 return Description;
882 }
883 /*}}}*/
884 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
885 // ---------------------------------------------------------------------
886 /* This creates a dependency element in the tree. It is linked to the
887 version and to the package that it is pointing to. */
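// New entries are appended through OldDepLast; ListParser::NewDepends below
// caches that end pointer per version so repeated calls do not have to
// rescan the whole dependency list.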
888 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
889 pkgCache::VerIterator &Ver,
890 string const &Version,
891 unsigned int const &Op,
892 unsigned int const &Type,
893 map_ptrloc* &OldDepLast)
894 {
895 map_ptrloc index = 0;
896 if (Version.empty() == false)
897 {
898 int const CmpOp = Op & 0x0F;
899 // =-deps are used (79:1) for lockstep on same-source packages (e.g. data-packages)
900 if (CmpOp == pkgCache::Dep::Equals && strcmp(Version.c_str(), Ver.VerStr()) == 0)
901 index = Ver->VerStr;
902
903 if (index == 0)
904 {
905 void const * const oldMap = Map.Data();
906 index = WriteStringInMap(Version);
907 if (unlikely(index == 0))
908 return false;
909 if (OldDepLast != 0 && oldMap != Map.Data())
910 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
911 }
912 }
913 return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast);
914 }
915 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
916 pkgCache::VerIterator &Ver,
917 map_ptrloc const Version,
918 unsigned int const &Op,
919 unsigned int const &Type,
920 map_ptrloc* &OldDepLast)
921 {
922 void const * const oldMap = Map.Data();
923 // Get a structure
924 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
925 if (unlikely(Dependency == 0))
926 return false;
927
928 // Fill it in
929 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
930 Dynamic<pkgCache::DepIterator> DynDep(Dep);
931 Dep->ParentVer = Ver.Index();
932 Dep->Type = Type;
933 Dep->CompareOp = Op;
934 Dep->Version = Version;
935 Dep->ID = Cache.HeaderP->DependsCount++;
936
937 // Link it to the package
938 Dep->Package = Pkg.Index();
939 Dep->NextRevDepends = Pkg->RevDepends;
940 Pkg->RevDepends = Dep.Index();
941
942 // Do we know where to link the Dependency to?
943 if (OldDepLast == NULL)
944 {
945 OldDepLast = &Ver->DependsList;
946 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
947 OldDepLast = &D->NextDepends;
948 } else if (oldMap != Map.Data())
949 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
950
951 Dep->NextDepends = *OldDepLast;
952 *OldDepLast = Dep.Index();
953 OldDepLast = &Dep->NextDepends;
954
955 return true;
956 }
957 /*}}}*/
958 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
959 // ---------------------------------------------------------------------
960 /* This creates a Group and the Package to link this dependency to if
961 needed, and also handles the caching of the old endpoint */
962 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
963 const string &PackageName,
964 const string &Arch,
965 const string &Version,
966 unsigned int Op,
967 unsigned int Type)
968 {
969 pkgCache::GrpIterator Grp;
970 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
971 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
972 return false;
973
974 // Locate the target package
975 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
976 // we don't create 'none' packages and their dependencies if we can avoid it …
977 if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
978 return true;
979 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
980 if (Pkg.end() == true) {
981 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
982 return false;
983 }
984
985 // Is it a file dependency?
986 if (unlikely(PackageName[0] == '/'))
987 FoundFileDeps = true;
988
989 /* Caching the old end point speeds up generation substantially */
990 if (OldDepVer != Ver) {
991 OldDepLast = NULL;
992 OldDepVer = Ver;
993 }
994
995 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
996 }
997 /*}}}*/
998 // ListParser::NewProvides - Create a Provides element /*{{{*/
999 // ---------------------------------------------------------------------
1000 /* */
1001 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
1002 const string &PkgName,
1003 const string &PkgArch,
1004 const string &Version)
1005 {
1006 pkgCache &Cache = Owner->Cache;
1007
1008 // We do not add self-referencing provides
1009 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
1010 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
1011 return true;
1012
1013 // Get a structure
1014 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
1015 if (unlikely(Provides == 0))
1016 return false;
1017 Cache.HeaderP->ProvidesCount++;
1018
1019 // Fill it in
1020 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
1021 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
1022 Prv->Version = Ver.Index();
1023 Prv->NextPkgProv = Ver->ProvidesList;
1024 Ver->ProvidesList = Prv.Index();
1025 if (Version.empty() == false) {
1026 map_ptrloc const idxProvideVersion = WriteString(Version);
1027 Prv->ProvideVersion = idxProvideVersion;
1028 if (unlikely(idxProvideVersion == 0))
1029 return false;
1030 }
1031
1032 // Locate the target package
1033 pkgCache::PkgIterator Pkg;
1034 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
1035 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
1036 return false;
1037
1038 // Link it to the package
1039 Prv->ParentPkg = Pkg.Index();
1040 Prv->NextProvides = Pkg->ProvidesList;
1041 Pkg->ProvidesList = Prv.Index();
1042
1043 return true;
1044 }
1045 /*}}}*/
1046 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
1047 // ---------------------------------------------------------------------
1048 /* This is used to select which file is to be associated with all newly
1049 added versions. The caller is responsible for setting the IMS fields. */
1050 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
1051 const pkgIndexFile &Index,
1052 unsigned long Flags)
1053 {
1054 // Get some space for the structure
1055 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
1056 if (unlikely(idxFile == 0))
1057 return false;
1058 CurrentFile = Cache.PkgFileP + idxFile;
1059
1060 // Fill it in
1061 map_ptrloc const idxFileName = WriteStringInMap(File);
1062 map_ptrloc const idxSite = WriteUniqString(Site);
1063 if (unlikely(idxFileName == 0 || idxSite == 0))
1064 return false;
1065 CurrentFile->FileName = idxFileName;
1066 CurrentFile->Site = idxSite;
1067 CurrentFile->NextFile = Cache.HeaderP->FileList;
1068 CurrentFile->Flags = Flags;
1069 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
1070 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
1071 if (unlikely(idxIndexType == 0))
1072 return false;
1073 CurrentFile->IndexType = idxIndexType;
1074 PkgFileName = File;
1075 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
1076 Cache.HeaderP->PackageFileCount++;
1077
1078 if (Progress != 0)
1079 Progress->SubProgress(Index.Size());
1080 return true;
1081 }
1082 /*}}}*/
1083 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
1084 // ---------------------------------------------------------------------
1085 /* This is used to create handles to strings. Given the same text it
1086 always returns the same number */
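// Unique strings are stored in a single sorted, singly-linked StringItem
// list; the small UniqHash array only caches the last hit per bucket so
// repeated lookups of the same string stay cheap.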
1087 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
1088 unsigned int Size)
1089 {
1090 /* We use a very small transient hash table here; this speeds up generation
1091 by a fair amount on slower machines */
1092 pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
1093 if (Bucket != 0 &&
1094 stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
1095 return Bucket->String;
1096
1097 // Search for an insertion point
1098 pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
1099 int Res = 1;
1100 map_ptrloc *Last = &Cache.HeaderP->StringList;
1101 for (; I != Cache.StringItemP; Last = &I->NextItem,
1102 I = Cache.StringItemP + I->NextItem)
1103 {
1104 Res = stringcmp(S,S+Size,Cache.StrP + I->String);
1105 if (Res >= 0)
1106 break;
1107 }
1108
1109 // Match
1110 if (Res == 0)
1111 {
1112 Bucket = I;
1113 return I->String;
1114 }
1115
1116 // Get a structure
1117 void const * const oldMap = Map.Data();
1118 map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
1119 if (Item == 0)
1120 return 0;
1121
1122 map_ptrloc const idxString = WriteStringInMap(S,Size);
1123 if (unlikely(idxString == 0))
1124 return 0;
1125 if (oldMap != Map.Data()) {
1126 Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
1127 I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
1128 }
1129 *Last = Item;
1130
1131 // Fill in the structure
1132 pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
1133 ItemP->NextItem = I - Cache.StringItemP;
1134 ItemP->String = idxString;
1135
1136 Bucket = ItemP;
1137 return ItemP->String;
1138 }
1139 /*}}}*/
1140 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1141 // ---------------------------------------------------------------------
1142 /* This just verifies that each file in the list of index files exists,
1143 has attributes matching the cache, and that the cache does not have
1144 any extra files. */
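// Returns false whenever the cache has to be rebuilt; on success the mapped
// cache is optionally handed back to the caller through OutMap.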
1145 static bool CheckValidity(const string &CacheFile,
1146 pkgSourceList &List,
1147 FileIterator Start,
1148 FileIterator End,
1149 MMap **OutMap = 0)
1150 {
1151 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1152 // No file, certainly invalid
1153 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1154 {
1155 if (Debug == true)
1156 std::clog << "CacheFile doesn't exist" << std::endl;
1157 return false;
1158 }
1159
1160 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1161 {
1162 if (Debug == true)
1163 std::clog << "sources.list is newer than the cache" << std::endl;
1164 return false;
1165 }
1166
1167 // Map it
1168 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1169 SPtr<MMap> Map = new MMap(CacheF,0);
1170 pkgCache Cache(Map);
1171 if (_error->PendingError() == true || Map->Size() == 0)
1172 {
1173 if (Debug == true)
1174 std::clog << "Errors are pending or Map is empty()" << std::endl;
1175 _error->Discard();
1176 return false;
1177 }
1178
1179 /* Now we check every index file, see if it is in the cache,
1180 verify the IMS data and check that it is on the disk too.. */
1181 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1182 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1183 for (; Start != End; ++Start)
1184 {
1185 if (Debug == true)
1186 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1187 if ((*Start)->HasPackages() == false)
1188 {
1189 if (Debug == true)
1190 std::clog << "Has NO packages" << std::endl;
1191 continue;
1192 }
1193
1194 if ((*Start)->Exists() == false)
1195 {
1196 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1197 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1198 (*Start)->Describe().c_str());
1199 #endif
1200 if (Debug == true)
1201 std::clog << "file doesn't exist" << std::endl;
1202 continue;
1203 }
1204
1205 // FindInCache is also expected to do an IMS check.
1206 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1207 if (File.end() == true)
1208 {
1209 if (Debug == true)
1210 std::clog << "FindInCache returned end-Pointer" << std::endl;
1211 return false;
1212 }
1213
1214 Visited[File->ID] = true;
1215 if (Debug == true)
1216 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1217 }
1218
1219 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1220 if (Visited[I] == false)
1221 {
1222 if (Debug == true)
1223 std::clog << "File with ID " << I << " wasn't visited" << std::endl;
1224 return false;
1225 }
1226
1227 if (_error->PendingError() == true)
1228 {
1229 if (Debug == true)
1230 {
1231 std::clog << "Validity failed because of pending errors:" << std::endl;
1232 _error->DumpErrors();
1233 }
1234 _error->Discard();
1235 return false;
1236 }
1237
1238 if (OutMap != 0)
1239 *OutMap = Map.UnGuard();
1240 return true;
1241 }
1242 /*}}}*/
1243 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1244 // ---------------------------------------------------------------------
1245 /* Size is kind of an abstract notion that is only used for the progress
1246 meter */
1247 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1248 {
1249 unsigned long TotalSize = 0;
1250 for (; Start != End; ++Start)
1251 {
1252 if ((*Start)->HasPackages() == false)
1253 continue;
1254 TotalSize += (*Start)->Size();
1255 }
1256 return TotalSize;
1257 }
1258 /*}}}*/
1259 // BuildCache - Merge the list of index files into the cache /*{{{*/
1260 // ---------------------------------------------------------------------
1261 /* */
1262 static bool BuildCache(pkgCacheGenerator &Gen,
1263 OpProgress *Progress,
1264 unsigned long &CurrentSize,unsigned long TotalSize,
1265 FileIterator Start, FileIterator End)
1266 {
1267 FileIterator I;
1268 for (I = Start; I != End; ++I)
1269 {
1270 if ((*I)->HasPackages() == false)
1271 continue;
1272
1273 if ((*I)->Exists() == false)
1274 continue;
1275
1276 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1277 {
1278 _error->Warning("Duplicate sources.list entry %s",
1279 (*I)->Describe().c_str());
1280 continue;
1281 }
1282
1283 unsigned long Size = (*I)->Size();
1284 if (Progress != NULL)
1285 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1286 CurrentSize += Size;
1287
1288 if ((*I)->Merge(Gen,Progress) == false)
1289 return false;
1290 }
1291
1292 if (Gen.HasFileDeps() == true)
1293 {
1294 if (Progress != NULL)
1295 Progress->Done();
1296 TotalSize = ComputeSize(Start, End);
1297 CurrentSize = 0;
1298 for (I = Start; I != End; ++I)
1299 {
1300 unsigned long Size = (*I)->Size();
1301 if (Progress != NULL)
1302 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1303 CurrentSize += Size;
1304 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1305 return false;
1306 }
1307 }
1308
1309 return true;
1310 }
1311 /*}}}*/
1312 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1313 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1314 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1315 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1316 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1317 Flags |= MMap::Moveable;
1318 if (_config->FindB("APT::Cache-Fallback", false) == true)
1319 Flags |= MMap::Fallback;
1320 if (CacheF != NULL)
1321 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1322 else
1323 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1324 }
1325 /*}}}*/
1326 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1327 // ---------------------------------------------------------------------
1328 /* This makes sure that the status cache (the cache that has all
1329 index files from the sources list and all local ones) is ready
1330 to be mmapped. If OutMap is not zero then an MMap object representing
1331 the cache will be stored there. This is pretty much mandatory if you
1332 are using AllowMem. AllowMem lets the function be run as non-root
1333 where it builds the cache 'fast' into a memory buffer. */
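// A minimal usage sketch (hypothetical caller; real callers also handle
// errors and own the returned map):
//   MMap *OutMap = NULL;
//   if (pkgCacheGenerator::MakeStatusCache(List, &Progress, &OutMap, true))
//      pkgCache Cache(OutMap);   // cache built from all index + status files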
1334 __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1335 MMap **OutMap, bool AllowMem)
1336 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
1337 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
1338 MMap **OutMap,bool AllowMem)
1339 {
1340 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1341
1342 std::vector<pkgIndexFile *> Files;
1343 for (std::vector<metaIndex *>::const_iterator i = List.begin();
1344 i != List.end();
1345 ++i)
1346 {
1347 std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
1348 for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
1349 j != Indexes->end();
1350 ++j)
1351 Files.push_back (*j);
1352 }
1353
1354 unsigned long const EndOfSource = Files.size();
1355 if (_system->AddStatusFiles(Files) == false)
1356 return false;
1357
1358 // Decide if we can write to the files..
1359 string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
1360 string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
1361
1362 // ensure the cache directory exists
1363 if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
1364 {
1365 string dir = _config->FindDir("Dir::Cache");
1366 size_t const len = dir.size();
1367 if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
1368 dir = dir.substr(0, len - 5);
1369 if (CacheFile.empty() == false)
1370 CreateDirectory(dir, flNotFile(CacheFile));
1371 if (SrcCacheFile.empty() == false)
1372 CreateDirectory(dir, flNotFile(SrcCacheFile));
1373 }
1374
1375 // Decide if we can write to the cache
1376 bool Writeable = false;
1377 if (CacheFile.empty() == false)
1378 Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
1379 else
1380 if (SrcCacheFile.empty() == false)
1381 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
1382 if (Debug == true)
1383 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
1384
1385 if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
1386 return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
1387
1388 if (Progress != NULL)
1389 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1390
1391 // Cache is OK, Fin.
1392 if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
1393 {
1394 if (Progress != NULL)
1395 Progress->OverallProgress(1,1,1,_("Reading package lists"));
1396 if (Debug == true)
1397 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
1398 return true;
1399 }
1400 else if (Debug == true)
1401 std::clog << "pkgcache.bin is NOT valid" << std::endl;
1402
1403 /* At this point we know we need to reconstruct the package cache,
1404 begin. */
1405 SPtr<FileFd> CacheF;
1406 SPtr<DynamicMMap> Map;
1407 if (Writeable == true && CacheFile.empty() == false)
1408 {
1409 _error->PushToStack();
1410 unlink(CacheFile.c_str());
1411 CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
1412 fchmod(CacheF->Fd(),0644);
1413 Map = CreateDynamicMMap(CacheF, MMap::Public);
1414 if (_error->PendingError() == true)
1415 {
1416 delete CacheF.UnGuard();
1417 delete Map.UnGuard();
1418 if (Debug == true)
1419 std::clog << "Open filebased MMap FAILED" << std::endl;
1420 Writeable = false;
1421 if (AllowMem == false)
1422 {
1423 _error->MergeWithStack();
1424 return false;
1425 }
1426 _error->RevertToStack();
1427 }
1428 else
1429 {
1430 _error->MergeWithStack();
1431 if (Debug == true)
1432 std::clog << "Open filebased MMap" << std::endl;
1433 }
1434 }
1435 if (Writeable == false || CacheFile.empty() == true)
1436 {
1437 // Just build it in memory..
1438 Map = CreateDynamicMMap(NULL);
1439 if (Debug == true)
1440 std::clog << "Open memory Map (not filebased)" << std::endl;
1441 }
1442
1443 // Let's try the source cache.
1444 unsigned long CurrentSize = 0;
1445 unsigned long TotalSize = 0;
1446 if (CheckValidity(SrcCacheFile, List, Files.begin(),
1447 Files.begin()+EndOfSource) == true)
1448 {
1449 if (Debug == true)
1450 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
1451 // Preload the map with the source cache
1452 FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
1453 unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
1454 if ((alloc == 0 && _error->PendingError())
1455 || SCacheF.Read((unsigned char *)Map->Data() + alloc,
1456 SCacheF.Size()) == false)
1457 return false;
1458
1459 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1460
1461 // Build the status cache
1462 pkgCacheGenerator Gen(Map.Get(),Progress);
1463 if (_error->PendingError() == true)
1464 return false;
1465 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1466 Files.begin()+EndOfSource,Files.end()) == false)
1467 return false;
1468 }
1469 else
1470 {
1471 if (Debug == true)
1472 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
1473 TotalSize = ComputeSize(Files.begin(),Files.end());
1474
1475 // Build the source cache
1476 pkgCacheGenerator Gen(Map.Get(),Progress);
1477 if (_error->PendingError() == true)
1478 return false;
1479 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1480 Files.begin(),Files.begin()+EndOfSource) == false)
1481 return false;
1482
1483 // Write it back
1484 if (Writeable == true && SrcCacheFile.empty() == false)
1485 {
1486 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
1487 if (_error->PendingError() == true)
1488 return false;
1489
1490 fchmod(SCacheF.Fd(),0644);
1491
1492 // Write out the main data
1493 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
1494 return _error->Error(_("IO Error saving source cache"));
1495 SCacheF.Sync();
1496
1497 // Write out the proper header
1498 Gen.GetCache().HeaderP->Dirty = false;
1499 if (SCacheF.Seek(0) == false ||
1500 SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
1501 return _error->Error(_("IO Error saving source cache"));
1502 Gen.GetCache().HeaderP->Dirty = true;
1503 SCacheF.Sync();
1504 }
1505
1506 // Build the status cache
1507 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1508 Files.begin()+EndOfSource,Files.end()) == false)
1509 return false;
1510 }
1511 if (Debug == true)
1512 std::clog << "Caches are ready for shipping" << std::endl;
1513
1514 if (_error->PendingError() == true)
1515 return false;
1516 if (OutMap != 0)
1517 {
1518 if (CacheF != 0)
1519 {
1520 delete Map.UnGuard();
1521 *OutMap = new MMap(*CacheF,0);
1522 }
1523 else
1524 {
1525 *OutMap = Map.UnGuard();
1526 }
1527 }
1528
1529 return true;
1530 }
1531 /*}}}*/
1532 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1533 // ---------------------------------------------------------------------
1534 /* */
1535 __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1536 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1537 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1538 {
1539 std::vector<pkgIndexFile *> Files;
1540 unsigned long EndOfSource = Files.size();
1541 if (_system->AddStatusFiles(Files) == false)
1542 return false;
1543
1544 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1545 unsigned long CurrentSize = 0;
1546 unsigned long TotalSize = 0;
1547
1548 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1549
1550 // Build the status cache
1551 if (Progress != NULL)
1552 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1553 pkgCacheGenerator Gen(Map.Get(),Progress);
1554 if (_error->PendingError() == true)
1555 return false;
1556 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1557 Files.begin()+EndOfSource,Files.end()) == false)
1558 return false;
1559
1560 if (_error->PendingError() == true)
1561 return false;
1562 *OutMap = Map.UnGuard();
1563
1564 return true;
1565 }
1566 /*}}}*/
1567 // IsDuplicateDescription /*{{{*/
1568 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1569 MD5SumValue const &CurMd5, std::string const &CurLang)
1570 {
1571 // Descriptions in the same linked list all have the same md5
1572 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1573 return false;
1574 for (; Desc.end() == false; ++Desc)
1575 if (Desc.LanguageCode() == CurLang)
1576 return true;
1577 return false;
1578 }
1579 /*}}}*/
1580 // CacheGenerator::FinishCache /*{{{*/
1581 bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
1582 {
1583 return true;
1584 }
1585 /*}}}*/