1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
38 typedef std::vector<pkgIndexFile *>::iterator FileIterator;
39 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
40
41 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
42 MD5SumValue const &CurMd5, std::string const &CurLang);
43
44 using std::string;
45
46 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
47 // ---------------------------------------------------------------------
48 /* We set the dirty flag and make sure that is written to the disk */
49 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
50 Map(*pMap), Cache(pMap,false), Progress(Prog),
51 FoundFileDeps(0)
52 {
53 CurrentFile = 0;
54 memset(UniqHash,0,sizeof(UniqHash));
55
56 if (_error->PendingError() == true)
57 return;
58
59 if (Map.Size() == 0)
60 {
61 // Set up the map interface.
62 Cache.HeaderP = (pkgCache::Header *)Map.Data();
63 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
64 return;
65
66 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
67
68 // Starting header
69 *Cache.HeaderP = pkgCache::Header();
70 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
71 Cache.HeaderP->VerSysName = idxVerSysName;
72 map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
73 Cache.HeaderP->Architecture = idxArchitecture;
74 if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
75 return;
76 Cache.ReMap();
77 }
78 else
79 {
80 // Map directly from the existing file
81 Cache.ReMap();
82 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
83 if (Cache.VS != _system->VS)
84 {
85 _error->Error(_("Cache has an incompatible versioning system"));
86 return;
87 }
88 }
89
90 Cache.HeaderP->Dirty = true;
91 Map.Sync(0,sizeof(pkgCache::Header));
92 }
93 /*}}}*/
94 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
95 // ---------------------------------------------------------------------
96 /* We sync the data and then unset the dirty flag in two steps so as to
97 avoid a problem during a crash */
98 pkgCacheGenerator::~pkgCacheGenerator()
99 {
100 if (_error->PendingError() == true)
101 return;
102 if (Map.Sync() == false)
103 return;
104
105 Cache.HeaderP->Dirty = false;
106 Cache.HeaderP->CacheFileSize = Map.Size();
107 Map.Sync(0,sizeof(pkgCache::Header));
108 }
109 /*}}}*/
110 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
111 if (oldMap == newMap)
112 return;
113
114 if (_config->FindB("Debug::pkgCacheGen", false))
115 std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;
116
117 Cache.ReMap(false);
118
119 CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;
120
121 for (size_t i = 0; i < _count(UniqHash); ++i)
122 if (UniqHash[i] != 0)
123 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;
124
125 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
126 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
127 (*i)->ReMap(oldMap, newMap);
128 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
129 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
130 (*i)->ReMap(oldMap, newMap);
131 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
132 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
133 (*i)->ReMap(oldMap, newMap);
134 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
135 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
136 (*i)->ReMap(oldMap, newMap);
137 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
138 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
139 (*i)->ReMap(oldMap, newMap);
140 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
141 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
142 (*i)->ReMap(oldMap, newMap);
143 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
144 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
145 (*i)->ReMap(oldMap, newMap);
146 } /*}}}*/
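// Illustrative sketch (not part of the APT code): the pointer arithmetic used
// in ReMap() above, shown in isolation. An absolute pointer into the old
// mapping keeps its element offset, so adding the typed difference of the two
// base addresses relocates it into the new mapping. The names RebaseExample
// and Item are hypothetical.
#if 0
static pkgCache::Package *RebaseExample(void const *oldMap, void const *newMap,
                                        pkgCache::Package *Item)
{
   // same idiom as "CurrentFile += (pkgCache::PackageFile*) newMap - ..."
   return Item + ((pkgCache::Package const *) newMap -
                  (pkgCache::Package const *) oldMap);
}
#endif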
147 // CacheGenerator::WriteStringInMap /*{{{*/
148 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
149 const unsigned long &Len) {
150 void const * const oldMap = Map.Data();
151 map_ptrloc const index = Map.WriteString(String, Len);
152 if (index != 0)
153 ReMap(oldMap, Map.Data());
154 return index;
155 }
156 /*}}}*/
157 // CacheGenerator::WriteStringInMap /*{{{*/
158 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
159 void const * const oldMap = Map.Data();
160 map_ptrloc const index = Map.WriteString(String);
161 if (index != 0)
162 ReMap(oldMap, Map.Data());
163 return index;
164 }
165 /*}}}*/
166 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
167 void const * const oldMap = Map.Data();
168 map_ptrloc const index = Map.Allocate(size);
169 if (index != 0)
170 ReMap(oldMap, Map.Data());
171 return index;
172 }
173 /*}}}*/
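// Illustrative sketch (not part of the APT code): the caller-side pattern that
// goes with the three helpers above. Any allocation may move the mapping, so a
// raw pointer into the map (Slot below, a hypothetical name) has to be
// re-anchored before it is dereferenced - the same dance MergeListVersion and
// MergeListPackage perform with their LastVer/LastDesc pointers.
#if 0
static void PatchAfterGrowthExample(DynamicMMap &Map, map_ptrloc *Slot,
                                    void const *oldMap, map_ptrloc NewIndex)
{
   // oldMap was captured with Map.Data() before the allocating call that
   // produced NewIndex; if the mapping moved, Slot must be shifted too.
   if (oldMap != Map.Data())
      Slot += (map_ptrloc *) Map.Data() - (map_ptrloc *) oldMap;
   *Slot = NewIndex;
}
#endif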
174 // CacheGenerator::MergeList - Merge the package list /*{{{*/
175 // ---------------------------------------------------------------------
176 /* This provides the generation of the entries in the cache. Each loop
177 goes through a single package record from the underlying parse engine. */
178 bool pkgCacheGenerator::MergeList(ListParser &List,
179 pkgCache::VerIterator *OutVer)
180 {
181 List.Owner = this;
182
183 unsigned int Counter = 0;
184 while (List.Step() == true)
185 {
186 string const PackageName = List.Package();
187 if (PackageName.empty() == true)
188 return false;
189
190 Counter++;
191 if (Counter % 100 == 0 && Progress != 0)
192 Progress->Progress(List.Offset());
193
194 string Arch = List.Architecture();
195 string const Version = List.Version();
196 if (Version.empty() == true && Arch.empty() == true)
197 {
198 // package descriptions
199 if (MergeListGroup(List, PackageName) == false)
200 return false;
201 continue;
202 }
203
204 if (Arch.empty() == true)
205 {
206 // use the pseudo arch 'none' for arch-less packages
207 Arch = "none";
208 /* We might be building a SingleArchCache here, which we don't want to
209 blow up into a proper MultiArchCache just for these :none packages, so
210 ensure that we always have a native package structure first for SingleArch */
211 pkgCache::PkgIterator NP;
212 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
213 // TRANSLATOR: The first placeholder is a package name,
214 // the other two should be copied verbatim as they include debug info
215 return _error->Error(_("Error occurred while processing %s (%s%d)"),
216 PackageName.c_str(), "NewPackage", 0);
217 }
218
219 // Get a pointer to the package structure
220 pkgCache::PkgIterator Pkg;
221 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
222 if (NewPackage(Pkg, PackageName, Arch) == false)
223 // TRANSLATOR: The first placeholder is a package name,
224 // the other two should be copied verbatim as they include debug info
225 return _error->Error(_("Error occurred while processing %s (%s%d)"),
226 PackageName.c_str(), "NewPackage", 1);
227
228
229 if (Version.empty() == true)
230 {
231 if (MergeListPackage(List, Pkg) == false)
232 return false;
233 }
234 else
235 {
236 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
237 return false;
238 }
239
240 if (OutVer != 0)
241 {
242 FoundFileDeps |= List.HasFileDeps();
243 return true;
244 }
245 }
246
247 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
248 return _error->Error(_("Wow, you exceeded the number of package "
249 "names this APT is capable of."));
250 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
251 return _error->Error(_("Wow, you exceeded the number of versions "
252 "this APT is capable of."));
253 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
254 return _error->Error(_("Wow, you exceeded the number of descriptions "
255 "this APT is capable of."));
256 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
257 return _error->Error(_("Wow, you exceeded the number of dependencies "
258 "this APT is capable of."));
259
260 FoundFileDeps |= List.HasFileDeps();
261 return true;
262 }
263 // CacheGenerator::MergeListGroup /*{{{*/
264 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
265 {
266 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
267 // a group has no data of its own, only packages have it. Stanzas
268 // like this come from Translation- files to add descriptions,
269 // but without a version we don't need a description for it…
270 if (Grp.end() == true)
271 return true;
272 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
273
274 pkgCache::PkgIterator Pkg;
275 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
276 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
277 if (MergeListPackage(List, Pkg) == false)
278 return false;
279
280 return true;
281 }
282 /*}}}*/
283 // CacheGenerator::MergeListPackage /*{{{*/
284 bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
285 {
286 // we first process the package, then the descriptions
287 // (for deb this package processing is in fact a no-op)
288 pkgCache::VerIterator Ver(Cache);
289 Dynamic<pkgCache::VerIterator> DynVer(Ver);
290 if (List.UsePackage(Pkg, Ver) == false)
291 return _error->Error(_("Error occurred while processing %s (%s%d)"),
292 Pkg.Name(), "UsePackage", 1);
293
294 // Find the right version to write the description
295 MD5SumValue CurMd5 = List.Description_md5();
296 std::string CurLang = List.DescriptionLanguage();
297
298 for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
299 {
300 pkgCache::DescIterator Desc = Ver.DescriptionList();
301
302 // a version can only have one md5 describing it
303 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
304 continue;
305
306 // don't add a new description if we have one for the given
307 // md5 && language
308 if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)
309 continue;
310
311 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
312 // we add at the end, so that the start is constant as we need
313 // that to be able to efficiently share these lists
314 map_ptrloc *LastDesc = &Ver->DescriptionList;
315 for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc);
316 if (Desc.end() == false)
317 LastDesc = &Desc->NextDesc;
318
319 void const * const oldMap = Map.Data();
320 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
321 if (unlikely(descindex == 0 && _error->PendingError()))
322 return _error->Error(_("Error occurred while processing %s (%s%d)"),
323 Pkg.Name(), "NewDescription", 1);
324 if (oldMap != Map.Data())
325 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
326 *LastDesc = descindex;
327 Desc->ParentPkg = Pkg.Index();
328
329 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
330 return _error->Error(_("Error occurred while processing %s (%s%d)"),
331 Pkg.Name(), "NewFileDesc", 1);
332
333 // we can stop here as all "same" versions will share the description
334 break;
335 }
336
337 return true;
338 }
339 /*}}}*/
340 // CacheGenerator::MergeListVersion /*{{{*/
341 bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
342 std::string const &Version, pkgCache::VerIterator* &OutVer)
343 {
344 pkgCache::VerIterator Ver = Pkg.VersionList();
345 Dynamic<pkgCache::VerIterator> DynVer(Ver);
346 map_ptrloc *LastVer = &Pkg->VersionList;
347 void const * oldMap = Map.Data();
348
349 unsigned long const Hash = List.VersionHash();
350 if (Ver.end() == false)
351 {
352 /* We know the list is sorted so we use that fact in the search.
353 Insertion of new versions is done with correct sorting */
354 int Res = 1;
355 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
356 {
357 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
358 // Version is higher than the current version - insert here
359 if (Res > 0)
360 break;
361 // Version strings are equal - is the hash also equal?
362 if (Res == 0 && Ver->Hash == Hash)
363 break;
364 // proceed with the next one until we have either the right version
365 // or we find another version (which will be lower)
366 }
367
368 /* We already have a version for this item, record that we saw it */
369 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
370 {
371 if (List.UsePackage(Pkg,Ver) == false)
372 return _error->Error(_("Error occurred while processing %s (%s%d)"),
373 Pkg.Name(), "UsePackage", 2);
374
375 if (NewFileVer(Ver,List) == false)
376 return _error->Error(_("Error occurred while processing %s (%s%d)"),
377 Pkg.Name(), "NewFileVer", 1);
378
379 // Read only a single record and return
380 if (OutVer != 0)
381 {
382 *OutVer = Ver;
383 return true;
384 }
385
386 return true;
387 }
388 }
389
390 // Add a new version
391 map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
392 if (verindex == 0 && _error->PendingError())
393 return _error->Error(_("Error occurred while processing %s (%s%d)"),
394 Pkg.Name(), "NewVersion", 1);
395
396 if (oldMap != Map.Data())
397 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
398 *LastVer = verindex;
399 Ver->ParentPkg = Pkg.Index();
400 Ver->Hash = Hash;
401
402 if (unlikely(List.NewVersion(Ver) == false))
403 return _error->Error(_("Error occurred while processing %s (%s%d)"),
404 Pkg.Name(), "NewVersion", 2);
405
406 if (unlikely(List.UsePackage(Pkg,Ver) == false))
407 return _error->Error(_("Error occurred while processing %s (%s%d)"),
408 Pkg.Name(), "UsePackage", 3);
409
410 if (unlikely(NewFileVer(Ver,List) == false))
411 return _error->Error(_("Error occurred while processing %s (%s%d)"),
412 Pkg.Name(), "NewFileVer", 2);
413
414 pkgCache::GrpIterator Grp = Pkg.Group();
415 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
416
417 /* If it is the first version of this package we need to add implicit
418 Multi-Arch dependencies to all other package versions in the group now -
419 otherwise we just add them for this new version */
420 if (Pkg.VersionList()->NextVer == 0)
421 {
422 pkgCache::PkgIterator P = Grp.PackageList();
423 Dynamic<pkgCache::PkgIterator> DynP(P);
424 for (; P.end() != true; P = Grp.NextPkg(P))
425 {
426 if (P->ID == Pkg->ID)
427 continue;
428 pkgCache::VerIterator V = P.VersionList();
429 Dynamic<pkgCache::VerIterator> DynV(V);
430 for (; V.end() != true; ++V)
431 if (unlikely(AddImplicitDepends(V, Pkg) == false))
432 return _error->Error(_("Error occurred while processing %s (%s%d)"),
433 Pkg.Name(), "AddImplicitDepends", 1);
434 }
435 /* :none packages are packages without an architecture. They are forbidden by
436 debian-policy, so usually they will only be in (old) dpkg status files -
437 and dpkg will complain about them - and are pretty rare. We therefore
438 usually do not create conflicts while the parent is created, but only if a :none
439 package (= the target) appears. This creates incorrect dependencies on :none
440 for architecture-specific dependencies on the package we copy from, but we
441 will ignore this bug as architecture-specific dependencies are only allowed
442 in jessie and until then the :none packages should be extinct (hopefully).
443 In other words: This should work long enough to allow graceful removal of
444 these packages, it is not supposed to allow users to keep using them … */
445 if (strcmp(Pkg.Arch(), "none") == 0)
446 {
447 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
448 if (M.end() == false && Pkg != M)
449 {
450 pkgCache::DepIterator D = M.RevDependsList();
451 Dynamic<pkgCache::DepIterator> DynD(D);
452 for (; D.end() == false; ++D)
453 {
454 if ((D->Type != pkgCache::Dep::Conflicts &&
455 D->Type != pkgCache::Dep::DpkgBreaks &&
456 D->Type != pkgCache::Dep::Replaces) ||
457 D.ParentPkg().Group() == Grp)
458 continue;
459
460 map_ptrloc *OldDepLast = NULL;
461 pkgCache::VerIterator ConVersion = D.ParentVer();
462 // duplicate the Conflicts/Breaks/Replaces for :none arch
463 if (D->Version == 0)
464 NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast);
465 else
466 NewDepends(Pkg, ConVersion, D.TargetVer(),
467 D->CompareOp, D->Type, OldDepLast);
468 }
469 }
470 }
471 }
472 if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
473 return _error->Error(_("Error occurred while processing %s (%s%d)"),
474 Pkg.Name(), "AddImplicitDepends", 2);
475
476 // Read only a single record and return
477 if (OutVer != 0)
478 {
479 *OutVer = Ver;
480 return true;
481 }
482
483 /* Record the Description (it is not translated) */
484 MD5SumValue CurMd5 = List.Description_md5();
485 if (CurMd5.Value().empty() == true)
486 return true;
487 std::string CurLang = List.DescriptionLanguage();
488
489 /* Before we add a new description we first search in the group for
490 a version with a description of the same MD5 - if one is found we reuse its
491 description list instead of creating our own for this version */
492 for (pkgCache::PkgIterator P = Grp.PackageList();
493 P.end() == false; P = Grp.NextPkg(P))
494 {
495 for (pkgCache::VerIterator V = P.VersionList();
496 V.end() == false; ++V)
497 {
498 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
499 continue;
500 Ver->DescriptionList = V->DescriptionList;
501 return true;
502 }
503 }
504
505 // We haven't found reusable descriptions, so add the first description
506 pkgCache::DescIterator Desc = Ver.DescriptionList();
507 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
508 map_ptrloc *LastDesc = &Ver->DescriptionList;
509
510 oldMap = Map.Data();
511 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
512 if (unlikely(descindex == 0 && _error->PendingError()))
513 return _error->Error(_("Error occurred while processing %s (%s%d)"),
514 Pkg.Name(), "NewDescription", 2);
515 if (oldMap != Map.Data())
516 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
517 *LastDesc = descindex;
518 Desc->ParentPkg = Pkg.Index();
519
520 if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
521 return _error->Error(_("Error occurred while processing %s (%s%d)"),
522 Pkg.Name(), "NewFileDesc", 2);
523
524 return true;
525 }
526 /*}}}*/
527 /*}}}*/
528 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
529 // ---------------------------------------------------------------------
530 /* If we found any file depends while parsing the main list we need to
531 resolve them. Since it is undesirable to load the entire list of files
532 into the cache as virtual packages we do a two-stage effort. MergeList
533 identifies the file depends and this pass creates Provides for them by
534 re-parsing all the indexes. */
535 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
536 {
537 List.Owner = this;
538
539 unsigned int Counter = 0;
540 while (List.Step() == true)
541 {
542 string PackageName = List.Package();
543 if (PackageName.empty() == true)
544 return false;
545 string Version = List.Version();
546 if (Version.empty() == true)
547 continue;
548
549 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
550 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
551 if (Pkg.end() == true)
552 return _error->Error(_("Error occurred while processing %s (%s%d)"),
553 PackageName.c_str(), "FindPkg", 1);
554 Counter++;
555 if (Counter % 100 == 0 && Progress != 0)
556 Progress->Progress(List.Offset());
557
558 unsigned long Hash = List.VersionHash();
559 pkgCache::VerIterator Ver = Pkg.VersionList();
560 Dynamic<pkgCache::VerIterator> DynVer(Ver);
561 for (; Ver.end() == false; ++Ver)
562 {
563 if (Ver->Hash == Hash && Version == Ver.VerStr())
564 {
565 if (List.CollectFileProvides(Cache,Ver) == false)
566 return _error->Error(_("Error occurred while processing %s (%s%d)"),
567 PackageName.c_str(), "CollectFileProvides", 1);
568 break;
569 }
570 }
571
572 if (Ver.end() == true)
573 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
574 }
575
576 return true;
577 }
578 /*}}}*/
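// Illustrative note (not part of the APT code): a "file depends" is a
// dependency whose target name starts with '/', e.g. "Depends: /bin/sh".
// MergeList only records that such targets were seen (FoundFileDeps); the
// second pass above re-parses the indexes and lets CollectFileProvides()
// register a Provides for every package that ships the file.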
579 // CacheGenerator::NewGroup - Add a new group /*{{{*/
580 // ---------------------------------------------------------------------
581 /* This creates a new group structure and adds it to the hash table */
582 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
583 {
584 Grp = Cache.FindGrp(Name);
585 if (Grp.end() == false)
586 return true;
587
588 // Get a structure
589 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
590 if (unlikely(Group == 0))
591 return false;
592
593 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
594 map_ptrloc const idxName = WriteStringInMap(Name);
595 if (unlikely(idxName == 0))
596 return false;
597 Grp->Name = idxName;
598
599 // Insert it into the hash table
600 unsigned long const Hash = Cache.Hash(Name);
601 Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
602 Cache.HeaderP->GrpHashTable[Hash] = Group;
603
604 Grp->ID = Cache.HeaderP->GroupCount++;
605 return true;
606 }
607 /*}}}*/
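// Illustrative sketch (not part of the APT code): the hash-table insertion
// NewGroup() performs above, in isolation. New entries are pushed at the head
// of the bucket's singly linked chain, and map offsets rather than pointers
// are stored so the chain survives a remap. The function name is hypothetical.
#if 0
static void HashInsertExample(pkgCache &Cache, map_ptrloc NewGrpIdx,
                              std::string const &Name)
{
   unsigned long const Hash = Cache.Hash(Name);          // pick the bucket
   pkgCache::Group *G = Cache.GrpP + NewGrpIdx;          // the new element
   G->Next = Cache.HeaderP->GrpHashTable[Hash];          // old head becomes next
   Cache.HeaderP->GrpHashTable[Hash] = NewGrpIdx;        // new element is the head
}
#endif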
608 // CacheGenerator::NewPackage - Add a new package /*{{{*/
609 // ---------------------------------------------------------------------
610 /* This creates a new package structure and adds it to the hash table */
611 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
612 const string &Arch) {
613 pkgCache::GrpIterator Grp;
614 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
615 if (unlikely(NewGroup(Grp, Name) == false))
616 return false;
617
618 Pkg = Grp.FindPkg(Arch);
619 if (Pkg.end() == false)
620 return true;
621
622 // Get a structure
623 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
624 if (unlikely(Package == 0))
625 return false;
626 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
627
628 // Insert the package into our package list
629 if (Grp->FirstPackage == 0) // the group is new
630 {
631 // Insert it into the hash table
632 unsigned long const Hash = Cache.Hash(Name);
633 Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
634 Cache.HeaderP->PkgHashTable[Hash] = Package;
635 Grp->FirstPackage = Package;
636 }
637 else // Group the Packages together
638 {
639 // this package is the new last package
640 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
641 Pkg->NextPackage = LastPkg->NextPackage;
642 LastPkg->NextPackage = Package;
643 }
644 Grp->LastPackage = Package;
645
646 // Set the name, arch and the ID
647 Pkg->Name = Grp->Name;
648 Pkg->Group = Grp.Index();
649 // "all" is mapped to the native architecture
650 map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
651 if (unlikely(idxArch == 0))
652 return false;
653 Pkg->Arch = idxArch;
654 Pkg->ID = Cache.HeaderP->PackageCount++;
655
656 return true;
657 }
658 /*}}}*/
659 // CacheGenerator::AddImplicitDepends /*{{{*/
660 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
661 pkgCache::PkgIterator &P,
662 pkgCache::VerIterator &V)
663 {
664 // copy P.Arch() into a string here as a cache remap
665 // in NewDepends() later may alter the pointer location
666 string Arch = P.Arch() == NULL ? "" : P.Arch();
667 map_ptrloc *OldDepLast = NULL;
668 /* MultiArch handling introduces a lot of implicit Dependencies:
669 - MultiArch: same → Co-Installable if they have the same version
670 - All others conflict with all other group members */
671 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
672 pkgCache::PkgIterator D = G.PackageList();
673 Dynamic<pkgCache::PkgIterator> DynD(D);
674 for (; D.end() != true; D = G.NextPkg(D))
675 {
676 if (Arch == D.Arch() || D->VersionList == 0)
677 continue;
678 /* We allow only one installed arch at a time
679 per group, therefore each group member conflicts
680 with all other group members */
681 if (coInstall == true)
682 {
683 // Replaces: ${self}:other ( << ${binary:Version})
684 NewDepends(D, V, V.VerStr(),
685 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
686 OldDepLast);
687 // Breaks: ${self}:other (!= ${binary:Version})
688 NewDepends(D, V, V.VerStr(),
689 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
690 OldDepLast);
691 } else {
692 // Conflicts: ${self}:other
693 NewDepends(D, V, "",
694 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
695 OldDepLast);
696 }
697 }
698 return true;
699 }
700 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
701 pkgCache::PkgIterator &D)
702 {
703 /* MultiArch handling introduces a lot of implicit Dependencies:
704 - MultiArch: same → Co-Installable if they have the same version
705 - All others conflict with all other group members */
706 map_ptrloc *OldDepLast = NULL;
707 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
708 if (coInstall == true)
709 {
710 // Replaces: ${self}:other ( << ${binary:Version})
711 NewDepends(D, V, V.VerStr(),
712 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
713 OldDepLast);
714 // Breaks: ${self}:other (!= ${binary:Version})
715 NewDepends(D, V, V.VerStr(),
716 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
717 OldDepLast);
718 } else {
719 // Conflicts: ${self}:other
720 NewDepends(D, V, "",
721 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
722 OldDepLast);
723 }
724 return true;
725 }
726
727 /*}}}*/
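// Illustrative note (not part of the APT code): for a hypothetical package
// "libfoo" at version 1.0 the helpers above generate, against every other
// architecture instance "libfoo:other" in the group,
//    Replaces: libfoo:other (<< 1.0)   and   Breaks: libfoo:other (!= 1.0)
// when the version is "Multi-Arch: same", and an unconditional
//    Conflicts: libfoo:other
// otherwise - enforcing either same-version co-installation or a single
// installed architecture per group.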
728 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
729 // ---------------------------------------------------------------------
730 /* */
731 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
732 ListParser &List)
733 {
734 if (CurrentFile == 0)
735 return true;
736
737 // Get a structure
738 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
739 if (VerFile == 0)
740 return false;
741
742 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
743 VF->File = CurrentFile - Cache.PkgFileP;
744
745 // Link it to the end of the list
746 map_ptrloc *Last = &Ver->FileList;
747 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
748 Last = &V->NextFile;
749 VF->NextFile = *Last;
750 *Last = VF.Index();
751
752 VF->Offset = List.Offset();
753 VF->Size = List.Size();
754 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
755 Cache.HeaderP->MaxVerFileSize = VF->Size;
756 Cache.HeaderP->VerFileCount++;
757
758 return true;
759 }
760 /*}}}*/
761 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
762 // ---------------------------------------------------------------------
763 /* This puts a version structure in the linked list */
764 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
765 const string &VerStr,
766 unsigned long Next)
767 {
768 // Get a structure
769 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
770 if (Version == 0)
771 return 0;
772
773 // Fill it in
774 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
775 Ver->NextVer = Next;
776 Ver->ID = Cache.HeaderP->VersionCount++;
777 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
778 if (unlikely(idxVerStr == 0))
779 return 0;
780 Ver->VerStr = idxVerStr;
781
782 return Version;
783 }
784 /*}}}*/
785 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
786 // ---------------------------------------------------------------------
787 /* */
788 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
789 ListParser &List)
790 {
791 if (CurrentFile == 0)
792 return true;
793
794 // Get a structure
795 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
796 if (DescFile == 0)
797 return false;
798
799 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
800 DF->File = CurrentFile - Cache.PkgFileP;
801
802 // Link it to the end of the list
803 map_ptrloc *Last = &Desc->FileList;
804 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
805 Last = &D->NextFile;
806
807 DF->NextFile = *Last;
808 *Last = DF.Index();
809
810 DF->Offset = List.Offset();
811 DF->Size = List.Size();
812 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
813 Cache.HeaderP->MaxDescFileSize = DF->Size;
814 Cache.HeaderP->DescFileCount++;
815
816 return true;
817 }
818 /*}}}*/
819 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
820 // ---------------------------------------------------------------------
821 /* This puts a description structure in the linked list */
822 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
823 const string &Lang,
824 const MD5SumValue &md5sum,
825 map_ptrloc Next)
826 {
827 // Get a structure
828 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
829 if (Description == 0)
830 return 0;
831
832 // Fill it in
833 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
834 Desc->NextDesc = Next;
835 Desc->ID = Cache.HeaderP->DescriptionCount++;
836 map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
837 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
838 if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
839 return 0;
840 Desc->language_code = idxlanguage_code;
841 Desc->md5sum = idxmd5sum;
842
843 return Description;
844 }
845 /*}}}*/
846 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
847 // ---------------------------------------------------------------------
848 /* This creates a dependency element in the tree. It is linked to the
849 version and to the package that it is pointing to. */
850 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
851 pkgCache::VerIterator &Ver,
852 string const &Version,
853 unsigned int const &Op,
854 unsigned int const &Type,
855 map_ptrloc* &OldDepLast)
856 {
857 void const * const oldMap = Map.Data();
858 // Get a structure
859 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
860 if (unlikely(Dependency == 0))
861 return false;
862
863 // Fill it in
864 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
865 Dynamic<pkgCache::DepIterator> DynDep(Dep);
866 Dep->ParentVer = Ver.Index();
867 Dep->Type = Type;
868 Dep->CompareOp = Op;
869 Dep->ID = Cache.HeaderP->DependsCount++;
870
871 // Probe the reverse dependency list for a version string that matches
872 if (Version.empty() == false)
873 {
874 /* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
875 if (I->Version != 0 && I.TargetVer() == Version)
876 Dep->Version = I->Version;*/
877 if (Dep->Version == 0) {
878 map_ptrloc const index = WriteStringInMap(Version);
879 if (unlikely(index == 0))
880 return false;
881 Dep->Version = index;
882 }
883 }
884
885 // Link it to the package
886 Dep->Package = Pkg.Index();
887 Dep->NextRevDepends = Pkg->RevDepends;
888 Pkg->RevDepends = Dep.Index();
889
890 // Do we know where to link the Dependency to?
891 if (OldDepLast == NULL)
892 {
893 OldDepLast = &Ver->DependsList;
894 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
895 OldDepLast = &D->NextDepends;
896 } else if (oldMap != Map.Data())
897 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
898
899 Dep->NextDepends = *OldDepLast;
900 *OldDepLast = Dep.Index();
901 OldDepLast = &Dep->NextDepends;
902
903 return true;
904 }
905 /*}}}*/
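// Illustrative sketch (not part of the APT code): the cached-tail trick used
// for OldDepLast above, shown on a plain singly linked list. Walking to the
// end once and then remembering the address of the last "next" field turns a
// series of appends from quadratic into linear work. Types and names here are
// hypothetical.
#if 0
struct Node { Node *Next; };
static void AppendCached(Node *&Head, Node **&Tail, Node *New)
{
   if (Tail == NULL)                     // first append: find the end once
      for (Tail = &Head; *Tail != NULL; Tail = &(*Tail)->Next)
         ;
   New->Next = *Tail;                    // *Tail is NULL at the end
   *Tail = New;                          // link the new node in
   Tail = &New->Next;                    // remember the new end for next time
}
#endif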
906 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
907 // ---------------------------------------------------------------------
908 /* This creates a Group and the Package to link this dependency to if
909 needed and also handles the caching of the old endpoint */
910 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
911 const string &PackageName,
912 const string &Arch,
913 const string &Version,
914 unsigned int Op,
915 unsigned int Type)
916 {
917 pkgCache::GrpIterator Grp;
918 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
919 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
920 return false;
921
922 // Locate the target package
923 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
924 // we don't create 'none' packages and their dependencies if we can avoid it …
925 if (Pkg.end() == true && Arch == "none")
926 return true;
927 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
928 if (Pkg.end() == true) {
929 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
930 return false;
931 }
932
933 // Is it a file dependency?
934 if (unlikely(PackageName[0] == '/'))
935 FoundFileDeps = true;
936
937 /* Caching the old end point speeds up generation substantially */
938 if (OldDepVer != Ver) {
939 OldDepLast = NULL;
940 OldDepVer = Ver;
941 }
942
943 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
944 }
945 /*}}}*/
946 // ListParser::NewProvides - Create a Provides element /*{{{*/
947 // ---------------------------------------------------------------------
948 /* */
949 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
950 const string &PkgName,
951 const string &PkgArch,
952 const string &Version)
953 {
954 pkgCache &Cache = Owner->Cache;
955
956 // We do not add self-referencing provides
957 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
958 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
959 return true;
960
961 // Get a structure
962 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
963 if (unlikely(Provides == 0))
964 return false;
965 Cache.HeaderP->ProvidesCount++;
966
967 // Fill it in
968 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
969 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
970 Prv->Version = Ver.Index();
971 Prv->NextPkgProv = Ver->ProvidesList;
972 Ver->ProvidesList = Prv.Index();
973 if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
974 return false;
975
976 // Locate the target package
977 pkgCache::PkgIterator Pkg;
978 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
979 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
980 return false;
981
982 // Link it to the package
983 Prv->ParentPkg = Pkg.Index();
984 Prv->NextProvides = Pkg->ProvidesList;
985 Pkg->ProvidesList = Prv.Index();
986
987 return true;
988 }
989 /*}}}*/
990 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
991 // ---------------------------------------------------------------------
992 /* This is used to select which file is to be associated with all newly
993 added versions. The caller is responsible for setting the IMS fields. */
994 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
995 const pkgIndexFile &Index,
996 unsigned long Flags)
997 {
998 // Get some space for the structure
999 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
1000 if (unlikely(idxFile == 0))
1001 return false;
1002 CurrentFile = Cache.PkgFileP + idxFile;
1003
1004 // Fill it in
1005 map_ptrloc const idxFileName = WriteStringInMap(File);
1006 map_ptrloc const idxSite = WriteUniqString(Site);
1007 if (unlikely(idxFileName == 0 || idxSite == 0))
1008 return false;
1009 CurrentFile->FileName = idxFileName;
1010 CurrentFile->Site = idxSite;
1011 CurrentFile->NextFile = Cache.HeaderP->FileList;
1012 CurrentFile->Flags = Flags;
1013 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
1014 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
1015 if (unlikely(idxIndexType == 0))
1016 return false;
1017 CurrentFile->IndexType = idxIndexType;
1018 PkgFileName = File;
1019 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
1020 Cache.HeaderP->PackageFileCount++;
1021
1022 if (Progress != 0)
1023 Progress->SubProgress(Index.Size());
1024 return true;
1025 }
1026 /*}}}*/
1027 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
1028 // ---------------------------------------------------------------------
1029 /* This is used to create handles to strings. Given the same text it
1030 always returns the same number */
1031 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
1032 unsigned int Size)
1033 {
1034 /* We use a very small transient hash table here; this speeds up generation
1035 by a fair amount on slower machines */
1036 pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
1037 if (Bucket != 0 &&
1038 stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
1039 return Bucket->String;
1040
1041 // Search for an insertion point
1042 pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
1043 int Res = 1;
1044 map_ptrloc *Last = &Cache.HeaderP->StringList;
1045 for (; I != Cache.StringItemP; Last = &I->NextItem,
1046 I = Cache.StringItemP + I->NextItem)
1047 {
1048 Res = stringcmp(S,S+Size,Cache.StrP + I->String);
1049 if (Res >= 0)
1050 break;
1051 }
1052
1053 // Match
1054 if (Res == 0)
1055 {
1056 Bucket = I;
1057 return I->String;
1058 }
1059
1060 // Get a structure
1061 void const * const oldMap = Map.Data();
1062 map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
1063 if (Item == 0)
1064 return 0;
1065
1066 map_ptrloc const idxString = WriteStringInMap(S,Size);
1067 if (unlikely(idxString == 0))
1068 return 0;
1069 if (oldMap != Map.Data()) {
1070 Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
1071 I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
1072 }
1073 *Last = Item;
1074
1075 // Fill in the structure
1076 pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
1077 ItemP->NextItem = I - Cache.StringItemP;
1078 ItemP->String = idxString;
1079
1080 Bucket = ItemP;
1081 return ItemP->String;
1082 }
1083 /*}}}*/
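// Illustrative sketch (not part of the APT code): the role of the UniqHash
// buckets used above. Each bucket only memoizes the string most recently
// requested for it; a hit skips the walk over the sorted in-map list, a miss
// simply falls through to it. Bucket count and names here are hypothetical.
#if 0
static char const *MemoLookupExample(char const *Memo[64], char const *S)
{
   unsigned const Bucket = (S[0]*5 + S[1]) % 64;     // same cheap two-char hash
   if (Memo[Bucket] != NULL && strcmp(Memo[Bucket], S) == 0)
      return Memo[Bucket];                           // hit: no list walk needed
   return NULL;                                      // miss: caller searches the list
}
#endif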
1084 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1085 // ---------------------------------------------------------------------
1086 /* This just verifies that each file in the list of index files exists,
1087 has attributes matching the cache, and that the cache does not have
1088 any extra files. */
1089 static bool CheckValidity(const string &CacheFile,
1090 pkgSourceList &List,
1091 FileIterator Start,
1092 FileIterator End,
1093 MMap **OutMap = 0)
1094 {
1095 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1096 // No file, certainly invalid
1097 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1098 {
1099 if (Debug == true)
1100 std::clog << "CacheFile doesn't exist" << std::endl;
1101 return false;
1102 }
1103
1104 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1105 {
1106 if (Debug == true)
1107 std::clog << "sources.list is newer than the cache" << std::endl;
1108 return false;
1109 }
1110
1111 // Map it
1112 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1113 SPtr<MMap> Map = new MMap(CacheF,0);
1114 pkgCache Cache(Map);
1115 if (_error->PendingError() == true || Map->Size() == 0)
1116 {
1117 if (Debug == true)
1118 std::clog << "Errors are pending or Map is empty()" << std::endl;
1119 _error->Discard();
1120 return false;
1121 }
1122
1123 /* Now we check every index file, see if it is in the cache,
1124 verify the IMS data and check that it is on the disk too. */
1125 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1126 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1127 for (; Start != End; ++Start)
1128 {
1129 if (Debug == true)
1130 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1131 if ((*Start)->HasPackages() == false)
1132 {
1133 if (Debug == true)
1134 std::clog << "Has NO packages" << std::endl;
1135 continue;
1136 }
1137
1138 if ((*Start)->Exists() == false)
1139 {
1140 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1141 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1142 (*Start)->Describe().c_str());
1143 #endif
1144 if (Debug == true)
1145 std::clog << "file doesn't exist" << std::endl;
1146 continue;
1147 }
1148
1149 // FindInCache is also expected to do an IMS check.
1150 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1151 if (File.end() == true)
1152 {
1153 if (Debug == true)
1154 std::clog << "FindInCache returned end-Pointer" << std::endl;
1155 return false;
1156 }
1157
1158 Visited[File->ID] = true;
1159 if (Debug == true)
1160 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1161 }
1162
1163 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1164 if (Visited[I] == false)
1165 {
1166 if (Debug == true)
1167 std::clog << "File with ID" << I << " wasn't visited" << std::endl;
1168 return false;
1169 }
1170
1171 if (_error->PendingError() == true)
1172 {
1173 if (Debug == true)
1174 {
1175 std::clog << "Validity failed because of pending errors:" << std::endl;
1176 _error->DumpErrors();
1177 }
1178 _error->Discard();
1179 return false;
1180 }
1181
1182 if (OutMap != 0)
1183 *OutMap = Map.UnGuard();
1184 return true;
1185 }
1186 /*}}}*/
1187 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1188 // ---------------------------------------------------------------------
1189 /* Size is kind of an abstract notion that is only used for the progress
1190 meter */
1191 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1192 {
1193 unsigned long TotalSize = 0;
1194 for (; Start != End; ++Start)
1195 {
1196 if ((*Start)->HasPackages() == false)
1197 continue;
1198 TotalSize += (*Start)->Size();
1199 }
1200 return TotalSize;
1201 }
1202 /*}}}*/
1203 // BuildCache - Merge the list of index files into the cache /*{{{*/
1204 // ---------------------------------------------------------------------
1205 /* */
1206 static bool BuildCache(pkgCacheGenerator &Gen,
1207 OpProgress *Progress,
1208 unsigned long &CurrentSize,unsigned long TotalSize,
1209 FileIterator Start, FileIterator End)
1210 {
1211 FileIterator I;
1212 for (I = Start; I != End; ++I)
1213 {
1214 if ((*I)->HasPackages() == false)
1215 continue;
1216
1217 if ((*I)->Exists() == false)
1218 continue;
1219
1220 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1221 {
1222 _error->Warning("Duplicate sources.list entry %s",
1223 (*I)->Describe().c_str());
1224 continue;
1225 }
1226
1227 unsigned long Size = (*I)->Size();
1228 if (Progress != NULL)
1229 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1230 CurrentSize += Size;
1231
1232 if ((*I)->Merge(Gen,Progress) == false)
1233 return false;
1234 }
1235
1236 if (Gen.HasFileDeps() == true)
1237 {
1238 if (Progress != NULL)
1239 Progress->Done();
1240 TotalSize = ComputeSize(Start, End);
1241 CurrentSize = 0;
1242 for (I = Start; I != End; ++I)
1243 {
1244 unsigned long Size = (*I)->Size();
1245 if (Progress != NULL)
1246 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1247 CurrentSize += Size;
1248 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1249 return false;
1250 }
1251 }
1252
1253 return true;
1254 }
1255 /*}}}*/
1256 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1257 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1258 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1259 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1260 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1261 Flags |= MMap::Moveable;
1262 if (_config->FindB("APT::Cache-Fallback", false) == true)
1263 Flags |= MMap::Fallback;
1264 if (CacheF != NULL)
1265 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1266 else
1267 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1268 }
1269 /*}}}*/
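// Example (values are illustrative): the options read above map directly to
// apt.conf settings, e.g.
//    APT::Cache-Start "50331648";    // initial size of the mmap in bytes
//    APT::Cache-Grow  "2097152";     // growth step once Start is exhausted
//    APT::Cache-Limit "0";           // upper bound; 0 means unlimited
//    APT::Cache-Fallback "false";    // if true, MMap::Fallback is added to the flags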
1270 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1271 // ---------------------------------------------------------------------
1272 /* This makes sure that the status cache (the cache that has all
1273 index files from the sources list and all local ones) is ready
1274 to be mmapped. If OutMap is not zero then a MMap object representing
1275 the cache will be stored there. This is pretty much mandatory if you
1276 are using AllowMem. AllowMem lets the function be run as non-root
1277 where it builds the cache 'fast' into a memory buffer. */
1278 __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1279 MMap **OutMap, bool AllowMem)
1280 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
1281 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
1282 MMap **OutMap,bool AllowMem)
1283 {
1284 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1285
1286 std::vector<pkgIndexFile *> Files;
1287 for (std::vector<metaIndex *>::const_iterator i = List.begin();
1288 i != List.end();
1289 ++i)
1290 {
1291 std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
1292 for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
1293 j != Indexes->end();
1294 ++j)
1295 Files.push_back (*j);
1296 }
1297
1298 unsigned long const EndOfSource = Files.size();
1299 if (_system->AddStatusFiles(Files) == false)
1300 return false;
1301
1302 // Decide if we can write to the files..
1303 string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
1304 string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
1305
1306 // ensure the cache directory exists
1307 if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
1308 {
1309 string dir = _config->FindDir("Dir::Cache");
1310 size_t const len = dir.size();
1311 if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
1312 dir = dir.substr(0, len - 5);
1313 if (CacheFile.empty() == false)
1314 CreateDirectory(dir, flNotFile(CacheFile));
1315 if (SrcCacheFile.empty() == false)
1316 CreateDirectory(dir, flNotFile(SrcCacheFile));
1317 }
1318
1319 // Decide if we can write to the cache
1320 bool Writeable = false;
1321 if (CacheFile.empty() == false)
1322 Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
1323 else
1324 if (SrcCacheFile.empty() == false)
1325 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
1326 if (Debug == true)
1327 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
1328
1329 if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
1330 return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
1331
1332 if (Progress != NULL)
1333 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1334
1335 // Cache is OK, Fin.
1336 if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
1337 {
1338 if (Progress != NULL)
1339 Progress->OverallProgress(1,1,1,_("Reading package lists"));
1340 if (Debug == true)
1341 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
1342 return true;
1343 }
1344 else if (Debug == true)
1345 std::clog << "pkgcache.bin is NOT valid" << std::endl;
1346
1347 /* At this point we know we need to reconstruct the package cache,
1348 begin. */
1349 SPtr<FileFd> CacheF;
1350 SPtr<DynamicMMap> Map;
1351 if (Writeable == true && CacheFile.empty() == false)
1352 {
1353 _error->PushToStack();
1354 unlink(CacheFile.c_str());
1355 CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
1356 fchmod(CacheF->Fd(),0644);
1357 Map = CreateDynamicMMap(CacheF, MMap::Public);
1358 if (_error->PendingError() == true)
1359 {
1360 delete CacheF.UnGuard();
1361 delete Map.UnGuard();
1362 if (Debug == true)
1363 std::clog << "Open filebased MMap FAILED" << std::endl;
1364 Writeable = false;
1365 if (AllowMem == false)
1366 {
1367 _error->MergeWithStack();
1368 return false;
1369 }
1370 _error->RevertToStack();
1371 }
1372 else
1373 {
1374 _error->MergeWithStack();
1375 if (Debug == true)
1376 std::clog << "Open filebased MMap" << std::endl;
1377 }
1378 }
1379 if (Writeable == false || CacheFile.empty() == true)
1380 {
1381 // Just build it in memory..
1382 Map = CreateDynamicMMap(NULL);
1383 if (Debug == true)
1384 std::clog << "Open memory Map (not filebased)" << std::endl;
1385 }
1386
1387 // Let's try the source cache.
1388 unsigned long CurrentSize = 0;
1389 unsigned long TotalSize = 0;
1390 if (CheckValidity(SrcCacheFile, List, Files.begin(),
1391 Files.begin()+EndOfSource) == true)
1392 {
1393 if (Debug == true)
1394 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
1395 // Preload the map with the source cache
1396 FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
1397 unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
1398 if ((alloc == 0 && _error->PendingError())
1399 || SCacheF.Read((unsigned char *)Map->Data() + alloc,
1400 SCacheF.Size()) == false)
1401 return false;
1402
1403 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1404
1405 // Build the status cache
1406 pkgCacheGenerator Gen(Map.Get(),Progress);
1407 if (_error->PendingError() == true)
1408 return false;
1409 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1410 Files.begin()+EndOfSource,Files.end()) == false)
1411 return false;
1412 }
1413 else
1414 {
1415 if (Debug == true)
1416 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
1417 TotalSize = ComputeSize(Files.begin(),Files.end());
1418
1419 // Build the source cache
1420 pkgCacheGenerator Gen(Map.Get(),Progress);
1421 if (_error->PendingError() == true)
1422 return false;
1423 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1424 Files.begin(),Files.begin()+EndOfSource) == false)
1425 return false;
1426
1427 // Write it back
1428 if (Writeable == true && SrcCacheFile.empty() == false)
1429 {
1430 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
1431 if (_error->PendingError() == true)
1432 return false;
1433
1434 fchmod(SCacheF.Fd(),0644);
1435
1436 // Write out the main data
1437 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
1438 return _error->Error(_("IO Error saving source cache"));
1439 SCacheF.Sync();
1440
1441 // Write out the proper header
1442 Gen.GetCache().HeaderP->Dirty = false;
1443 if (SCacheF.Seek(0) == false ||
1444 SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
1445 return _error->Error(_("IO Error saving source cache"));
1446 Gen.GetCache().HeaderP->Dirty = true;
1447 SCacheF.Sync();
1448 }
1449
1450 // Build the status cache
1451 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1452 Files.begin()+EndOfSource,Files.end()) == false)
1453 return false;
1454 }
1455 if (Debug == true)
1456 std::clog << "Caches are ready for shipping" << std::endl;
1457
1458 if (_error->PendingError() == true)
1459 return false;
1460 if (OutMap != 0)
1461 {
1462 if (CacheF != 0)
1463 {
1464 delete Map.UnGuard();
1465 *OutMap = new MMap(*CacheF,0);
1466 }
1467 else
1468 {
1469 *OutMap = Map.UnGuard();
1470 }
1471 }
1472
1473 return true;
1474 }
1475 /*}}}*/
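// Illustrative sketch (not part of the APT code): a minimal caller of
// MakeStatusCache() above, assuming configuration and the sources list have
// already been initialised elsewhere. Names are hypothetical.
#if 0
static bool BuildCacheExample(pkgSourceList &List, OpProgress &Prog)
{
   MMap *OutMap = NULL;
   if (pkgCacheGenerator::MakeStatusCache(List, &Prog, &OutMap,
                                          true /*AllowMem*/) == false)
      return false;
   pkgCache Cache(OutMap);      // the cache is ready to be used on the new map
   delete OutMap;
   return true;
}
#endif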
1476 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1477 // ---------------------------------------------------------------------
1478 /* */
1479 __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1480 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1481 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1482 {
1483 std::vector<pkgIndexFile *> Files;
1484 unsigned long EndOfSource = Files.size();
1485 if (_system->AddStatusFiles(Files) == false)
1486 return false;
1487
1488 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1489 unsigned long CurrentSize = 0;
1490 unsigned long TotalSize = 0;
1491
1492 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1493
1494 // Build the status cache
1495 if (Progress != NULL)
1496 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1497 pkgCacheGenerator Gen(Map.Get(),Progress);
1498 if (_error->PendingError() == true)
1499 return false;
1500 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1501 Files.begin()+EndOfSource,Files.end()) == false)
1502 return false;
1503
1504 if (_error->PendingError() == true)
1505 return false;
1506 *OutMap = Map.UnGuard();
1507
1508 return true;
1509 }
1510 /*}}}*/
1511 // IsDuplicateDescription /*{{{*/
1512 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1513 MD5SumValue const &CurMd5, std::string const &CurLang)
1514 {
1515 // Descriptions in the same linked list all have the same md5
1516 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1517 return false;
1518 for (; Desc.end() == false; ++Desc)
1519 if (Desc.LanguageCode() == CurLang)
1520 return true;
1521 return false;
1522 }
1523 /*}}}*/
1524 // CacheGenerator::FinishCache /*{{{*/
1525 bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
1526 {
1527 return true;
1528 }
1529 /*}}}*/