1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
38 typedef std::vector<pkgIndexFile *>::iterator FileIterator;
39 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
40
41 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
42 MD5SumValue const &CurMd5, std::string const &CurLang);
43
44 using std::string;
45
46 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
47 // ---------------------------------------------------------------------
48 /* We set the dirty flag and make sure that it is written to the disk */
49 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
50 Map(*pMap), Cache(pMap,false), Progress(Prog),
51 FoundFileDeps(0)
52 {
53 CurrentFile = 0;
54 memset(UniqHash,0,sizeof(UniqHash));
55
56 if (_error->PendingError() == true)
57 return;
58
59 if (Map.Size() == 0)
60 {
61 // Set up the map interface..
62 Cache.HeaderP = (pkgCache::Header *)Map.Data();
63 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
64 return;
65
66 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
67
68 // Starting header
69 *Cache.HeaderP = pkgCache::Header();
70 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
71 Cache.HeaderP->VerSysName = idxVerSysName;
72 // this pointer is set in ReMap, but we need it now for WriteUniqString
73 Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
74 map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
75 Cache.HeaderP->Architecture = idxArchitecture;
76 if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
77 return;
78 Cache.ReMap();
79 }
80 else
81 {
82 // Map directly from the existing file
83 Cache.ReMap();
84 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
85 if (Cache.VS != _system->VS)
86 {
87 _error->Error(_("Cache has an incompatible versioning system"));
88 return;
89 }
90 }
91
92 Cache.HeaderP->Dirty = true;
93 Map.Sync(0,sizeof(pkgCache::Header));
94 }
95 /*}}}*/
96 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
97 // ---------------------------------------------------------------------
98 /* We sync the data then unset the dirty flag in two steps so as to
99 avoid a problem during a crash */
100 pkgCacheGenerator::~pkgCacheGenerator()
101 {
102 if (_error->PendingError() == true)
103 return;
104 if (Map.Sync() == false)
105 return;
106
107 Cache.HeaderP->Dirty = false;
108 Cache.HeaderP->CacheFileSize = Map.Size();
109 Map.Sync(0,sizeof(pkgCache::Header));
110 }
111 /*}}}*/
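// ---------------------------------------------------------------------
/* The growable mmap can relocate when it runs out of space; adjust our
   cached pointers and every registered Dynamic iterator so that they
   point into the new mapping. */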
112 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
113 if (oldMap == newMap)
114 return;
115
116 if (_config->FindB("Debug::pkgCacheGen", false))
117 std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;
118
119 Cache.ReMap(false);
120
121 CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;
122
123 for (size_t i = 0; i < _count(UniqHash); ++i)
124 if (UniqHash[i] != 0)
125 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;
126
127 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
128 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
129 (*i)->ReMap(oldMap, newMap);
130 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
131 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
132 (*i)->ReMap(oldMap, newMap);
133 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
134 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
135 (*i)->ReMap(oldMap, newMap);
136 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
137 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
138 (*i)->ReMap(oldMap, newMap);
139 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
140 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
141 (*i)->ReMap(oldMap, newMap);
142 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
143 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
144 (*i)->ReMap(oldMap, newMap);
145 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
146 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
147 (*i)->ReMap(oldMap, newMap);
148 } /*}}}*/
149 // CacheGenerator::WriteStringInMap /*{{{*/
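// ---------------------------------------------------------------------
/* Write a string into the growable map; ReMap() then fixes up all cached
   pointers and iterators in case the write forced the mmap to relocate. */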
150 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
151 const unsigned long &Len) {
152 void const * const oldMap = Map.Data();
153 map_ptrloc const index = Map.WriteString(String, Len);
154 if (index != 0)
155 ReMap(oldMap, Map.Data());
156 return index;
157 }
158 /*}}}*/
159 // CacheGenerator::WriteStringInMap /*{{{*/
160 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
161 void const * const oldMap = Map.Data();
162 map_ptrloc const index = Map.WriteString(String);
163 if (index != 0)
164 ReMap(oldMap, Map.Data());
165 return index;
166 }
167 /*}}}*/
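// ---------------------------------------------------------------------
/* Allocate 'size' bytes in the map; ReMap() then fixes up all cached
   pointers and iterators in case the allocation forced the mmap to
   relocate. */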
168 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
169 void const * const oldMap = Map.Data();
170 map_ptrloc const index = Map.Allocate(size);
171 if (index != 0)
172 ReMap(oldMap, Map.Data());
173 return index;
174 }
175 /*}}}*/
176 // CacheGenerator::MergeList - Merge the package list /*{{{*/
177 // ---------------------------------------------------------------------
178 /* This generates the entries in the cache. Each loop iteration
179 processes a single package record from the underlying parse engine. */
180 bool pkgCacheGenerator::MergeList(ListParser &List,
181 pkgCache::VerIterator *OutVer)
182 {
183 List.Owner = this;
184
185 unsigned int Counter = 0;
186 while (List.Step() == true)
187 {
188 string const PackageName = List.Package();
189 if (PackageName.empty() == true)
190 return false;
191
192 Counter++;
193 if (Counter % 100 == 0 && Progress != 0)
194 Progress->Progress(List.Offset());
195
196 string Arch = List.Architecture();
197 string const Version = List.Version();
198 if (Version.empty() == true && Arch.empty() == true)
199 {
200 // package descriptions
201 if (MergeListGroup(List, PackageName) == false)
202 return false;
203 continue;
204 }
205
206 if (Arch.empty() == true)
207 {
208 // use the pseudo arch 'none' for arch-less packages
209 Arch = "none";
210 /* We might be building a SingleArchCache here, which we don't want to blow
211 up into a proper MultiArchCache just for these :none packages, so ensure
212 that we always have a native package structure first for SingleArch */
213 pkgCache::PkgIterator NP;
214 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
215 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
216 // TRANSLATOR: The first placeholder is a package name,
217 // the other two should be copied verbatim as they include debug info
218 return _error->Error(_("Error occurred while processing %s (%s%d)"),
219 PackageName.c_str(), "NewPackage", 0);
220 }
221
222 // Get a pointer to the package structure
223 pkgCache::PkgIterator Pkg;
224 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
225 if (NewPackage(Pkg, PackageName, Arch) == false)
226 // TRANSLATOR: The first placeholder is a package name,
227 // the other two should be copied verbatim as they include debug info
228 return _error->Error(_("Error occurred while processing %s (%s%d)"),
229 PackageName.c_str(), "NewPackage", 1);
230
231
232 if (Version.empty() == true)
233 {
234 if (MergeListPackage(List, Pkg) == false)
235 return false;
236 }
237 else
238 {
239 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
240 return false;
241 }
242
243 if (OutVer != 0)
244 {
245 FoundFileDeps |= List.HasFileDeps();
246 return true;
247 }
248 }
249
250 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
251 return _error->Error(_("Wow, you exceeded the number of package "
252 "names this APT is capable of."));
253 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
254 return _error->Error(_("Wow, you exceeded the number of versions "
255 "this APT is capable of."));
256 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
257 return _error->Error(_("Wow, you exceeded the number of descriptions "
258 "this APT is capable of."));
259 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
260 return _error->Error(_("Wow, you exceeded the number of dependencies "
261 "this APT is capable of."));
262
263 FoundFileDeps |= List.HasFileDeps();
264 return true;
265 }
266 // CacheGenerator::MergeListGroup /*{{{*/
267 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
268 {
269 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
270 // a group has no data on its own, only packages have it, but stanzas
271 // like this come from Translation- files to add descriptions, and
272 // without a version we don't need a description for it…
273 if (Grp.end() == true)
274 return true;
275 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
276
277 pkgCache::PkgIterator Pkg;
278 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
279 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
280 if (MergeListPackage(List, Pkg) == false)
281 return false;
282
283 return true;
284 }
285 /*}}}*/
286 // CacheGenerator::MergeListPackage /*{{{*/
287 bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
288 {
289 // we first process the package, then the descriptions
290 // (for deb this package processing is in fact a no-op)
291 pkgCache::VerIterator Ver(Cache);
292 Dynamic<pkgCache::VerIterator> DynVer(Ver);
293 if (List.UsePackage(Pkg, Ver) == false)
294 return _error->Error(_("Error occurred while processing %s (%s%d)"),
295 Pkg.Name(), "UsePackage", 1);
296
297 // Find the right version to write the description
298 MD5SumValue CurMd5 = List.Description_md5();
299 std::string CurLang = List.DescriptionLanguage();
300
301 for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
302 {
303 pkgCache::DescIterator VerDesc = Ver.DescriptionList();
304
305 // a version can only have one md5 describing it
306 if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
307 continue;
308
309 // don't add a new description if we have one for the given
310 // md5 && language
311 if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
312 continue;
313
314 pkgCache::DescIterator Desc;
315 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
316
317 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
318 if (unlikely(descindex == 0 && _error->PendingError()))
319 return _error->Error(_("Error occurred while processing %s (%s%d)"),
320 Pkg.Name(), "NewDescription", 1);
321
322 Desc->ParentPkg = Pkg.Index();
323
324 // we add at the end so that the start stays constant, which we need
325 // to be able to share these lists efficiently
326 VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
327 for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
328 map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
329 *LastNextDesc = descindex;
330
331 if (NewFileDesc(Desc,List) == false)
332 return _error->Error(_("Error occurred while processing %s (%s%d)"),
333 Pkg.Name(), "NewFileDesc", 1);
334
335 // we can stop here as all "same" versions will share the description
336 break;
337 }
338
339 return true;
340 }
341 /*}}}*/
342 // CacheGenerator::MergeListVersion /*{{{*/
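// ---------------------------------------------------------------------
/* Merge a single version stanza: find its place in the package's sorted
   version list, reuse an identical existing version or create a new one,
   and attach the file, description and implicit Multi-Arch data. */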
343 bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
344 std::string const &Version, pkgCache::VerIterator* &OutVer)
345 {
346 pkgCache::VerIterator Ver = Pkg.VersionList();
347 Dynamic<pkgCache::VerIterator> DynVer(Ver);
348 map_ptrloc *LastVer = &Pkg->VersionList;
349 void const * oldMap = Map.Data();
350
351 unsigned long const Hash = List.VersionHash();
352 if (Ver.end() == false)
353 {
354 /* We know the list is sorted so we use that fact in the search.
355 Insertion of new versions is done with correct sorting */
356 int Res = 1;
357 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
358 {
359 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
360 // Version is higher than the current version - insert here
361 if (Res > 0)
362 break;
363 // Version strings are equal - is the hash equal as well?
364 if (Res == 0 && Ver->Hash == Hash)
365 break;
366 // proceed to the next one until we either find the right version
367 // or reach a version that sorts lower
368 }
369
370 /* We already have a version for this item, record that we saw it */
371 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
372 {
373 if (List.UsePackage(Pkg,Ver) == false)
374 return _error->Error(_("Error occurred while processing %s (%s%d)"),
375 Pkg.Name(), "UsePackage", 2);
376
377 if (NewFileVer(Ver,List) == false)
378 return _error->Error(_("Error occurred while processing %s (%s%d)"),
379 Pkg.Name(), "NewFileVer", 1);
380
381 // Read only a single record and return
382 if (OutVer != 0)
383 {
384 *OutVer = Ver;
385 return true;
386 }
387
388 return true;
389 }
390 }
391
392 // Add a new version
393 map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
394 if (verindex == 0 && _error->PendingError())
395 return _error->Error(_("Error occurred while processing %s (%s%d)"),
396 Pkg.Name(), "NewVersion", 1);
397
398 if (oldMap != Map.Data())
399 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
400 *LastVer = verindex;
401 Ver->ParentPkg = Pkg.Index();
402 Ver->Hash = Hash;
403
404 if (unlikely(List.NewVersion(Ver) == false))
405 return _error->Error(_("Error occurred while processing %s (%s%d)"),
406 Pkg.Name(), "NewVersion", 2);
407
408 if (unlikely(List.UsePackage(Pkg,Ver) == false))
409 return _error->Error(_("Error occurred while processing %s (%s%d)"),
410 Pkg.Name(), "UsePackage", 3);
411
412 if (unlikely(NewFileVer(Ver,List) == false))
413 return _error->Error(_("Error occurred while processing %s (%s%d)"),
414 Pkg.Name(), "NewFileVer", 2);
415
416 pkgCache::GrpIterator Grp = Pkg.Group();
417 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
418
419 /* If it is the first version of this package we need to add implicit
420 Multi-Arch dependencies to all other package versions in the group now -
421 otherwise we just add them for this new version */
422 if (Pkg.VersionList()->NextVer == 0)
423 {
424 pkgCache::PkgIterator P = Grp.PackageList();
425 Dynamic<pkgCache::PkgIterator> DynP(P);
426 for (; P.end() != true; P = Grp.NextPkg(P))
427 {
428 if (P->ID == Pkg->ID)
429 continue;
430 pkgCache::VerIterator V = P.VersionList();
431 Dynamic<pkgCache::VerIterator> DynV(V);
432 for (; V.end() != true; ++V)
433 if (unlikely(AddImplicitDepends(V, Pkg) == false))
434 return _error->Error(_("Error occurred while processing %s (%s%d)"),
435 Pkg.Name(), "AddImplicitDepends", 1);
436 }
437 /* :none packages are packages without an architecture. They are forbidden by
438 debian-policy, so usually they will only be in (old) dpkg status files -
439 and dpkg will complain about them - and are pretty rare. We therefore
440 usually do not create conflicts while the parent is created, but only if a :none
441 package (= the target) appears. This creates incorrect dependencies on :none
442 for architecture-specific dependencies on the package we copy from, but we
443 will ignore this bug as architecture-specific dependencies are only allowed
444 in jessie and until then the :none packages should be extinct (hopefully).
445 In other words: This should work long enough to allow graceful removal of
446 these packages, it is not supposed to allow users to keep using them … */
447 if (strcmp(Pkg.Arch(), "none") == 0)
448 {
449 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
450 if (M.end() == false && Pkg != M)
451 {
452 pkgCache::DepIterator D = M.RevDependsList();
453 Dynamic<pkgCache::DepIterator> DynD(D);
454 for (; D.end() == false; ++D)
455 {
456 if ((D->Type != pkgCache::Dep::Conflicts &&
457 D->Type != pkgCache::Dep::DpkgBreaks &&
458 D->Type != pkgCache::Dep::Replaces) ||
459 D.ParentPkg().Group() == Grp)
460 continue;
461
462 map_ptrloc *OldDepLast = NULL;
463 pkgCache::VerIterator ConVersion = D.ParentVer();
464 Dynamic<pkgCache::VerIterator> DynV(ConVersion);
465 // duplicate the Conflicts/Breaks/Replaces for :none arch
466 if (D->Version == 0)
467 NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast);
468 else
469 NewDepends(Pkg, ConVersion, D.TargetVer(),
470 D->CompareOp, D->Type, OldDepLast);
471 }
472 }
473 }
474 }
475 if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
476 return _error->Error(_("Error occurred while processing %s (%s%d)"),
477 Pkg.Name(), "AddImplicitDepends", 2);
478
479 // Read only a single record and return
480 if (OutVer != 0)
481 {
482 *OutVer = Ver;
483 return true;
484 }
485
486 /* Record the Description (it is not translated) */
487 MD5SumValue CurMd5 = List.Description_md5();
488 if (CurMd5.Value().empty() == true)
489 return true;
490 std::string CurLang = List.DescriptionLanguage();
491
492 /* Before we add a new description we first search in the group for
493 a version with a description of the same MD5 - if one exists we reuse its
494 description list instead of creating our own for this version */
495 for (pkgCache::PkgIterator P = Grp.PackageList();
496 P.end() == false; P = Grp.NextPkg(P))
497 {
498 for (pkgCache::VerIterator V = P.VersionList();
499 V.end() == false; ++V)
500 {
501 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
502 continue;
503 Ver->DescriptionList = V->DescriptionList;
504 return true;
505 }
506 }
507
508 // We haven't found reusable descriptions, so add the first description
509 pkgCache::DescIterator Desc = Ver.DescriptionList();
510 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
511
512 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
513 if (unlikely(descindex == 0 && _error->PendingError()))
514 return _error->Error(_("Error occurred while processing %s (%s%d)"),
515 Pkg.Name(), "NewDescription", 2);
516
517 Desc->ParentPkg = Pkg.Index();
518 Ver->DescriptionList = descindex;
519
520 if (NewFileDesc(Desc,List) == false)
521 return _error->Error(_("Error occurred while processing %s (%s%d)"),
522 Pkg.Name(), "NewFileDesc", 2);
523
524 return true;
525 }
526 /*}}}*/
527 /*}}}*/
528 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
529 // ---------------------------------------------------------------------
530 /* If we found any file depends while parsing the main list we need to
531 resolve them. Since it is undesirable to load the entire list of files
532 into the cache as virtual packages we do a two-stage effort. MergeList
533 identifies the file depends and this creates Provides for them by
534 re-parsing all the indexes. */
535 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
536 {
537 List.Owner = this;
538
539 unsigned int Counter = 0;
540 while (List.Step() == true)
541 {
542 string PackageName = List.Package();
543 if (PackageName.empty() == true)
544 return false;
545 string Version = List.Version();
546 if (Version.empty() == true)
547 continue;
548
549 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
550 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
551 if (Pkg.end() == true)
552 return _error->Error(_("Error occurred while processing %s (%s%d)"),
553 PackageName.c_str(), "FindPkg", 1);
554 Counter++;
555 if (Counter % 100 == 0 && Progress != 0)
556 Progress->Progress(List.Offset());
557
558 unsigned long Hash = List.VersionHash();
559 pkgCache::VerIterator Ver = Pkg.VersionList();
560 Dynamic<pkgCache::VerIterator> DynVer(Ver);
561 for (; Ver.end() == false; ++Ver)
562 {
563 if (Ver->Hash == Hash && Version == Ver.VerStr())
564 {
565 if (List.CollectFileProvides(Cache,Ver) == false)
566 return _error->Error(_("Error occurred while processing %s (%s%d)"),
567 PackageName.c_str(), "CollectFileProvides", 1);
568 break;
569 }
570 }
571
572 if (Ver.end() == true)
573 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
574 }
575
576 return true;
577 }
578 /*}}}*/
579 // CacheGenerator::NewGroup - Add a new group /*{{{*/
580 // ---------------------------------------------------------------------
581 /* This creates a new group structure and adds it to the hash table */
582 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
583 {
584 Grp = Cache.FindGrp(Name);
585 if (Grp.end() == false)
586 return true;
587
588 // Get a structure
589 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
590 if (unlikely(Group == 0))
591 return false;
592
593 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
594 map_ptrloc const idxName = WriteStringInMap(Name);
595 if (unlikely(idxName == 0))
596 return false;
597 Grp->Name = idxName;
598
599 // Insert it into the hash table
600 unsigned long const Hash = Cache.Hash(Name);
601 Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
602 Cache.HeaderP->GrpHashTable[Hash] = Group;
603
604 Grp->ID = Cache.HeaderP->GroupCount++;
605 return true;
606 }
607 /*}}}*/
608 // CacheGenerator::NewPackage - Add a new package /*{{{*/
609 // ---------------------------------------------------------------------
610 /* This creates a new package structure and adds it to the hash table */
611 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
612 const string &Arch) {
613 pkgCache::GrpIterator Grp;
614 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
615 if (unlikely(NewGroup(Grp, Name) == false))
616 return false;
617
618 Pkg = Grp.FindPkg(Arch);
619 if (Pkg.end() == false)
620 return true;
621
622 // Get a structure
623 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
624 if (unlikely(Package == 0))
625 return false;
626 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
627
628 // Insert the package into our package list
629 if (Grp->FirstPackage == 0) // the group is new
630 {
631 // Insert it into the hash table
632 unsigned long const Hash = Cache.Hash(Name);
633 Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
634 Cache.HeaderP->PkgHashTable[Hash] = Package;
635 Grp->FirstPackage = Package;
636 }
637 else // Group the Packages together
638 {
639 // this package is the new last package
640 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
641 Pkg->NextPackage = LastPkg->NextPackage;
642 LastPkg->NextPackage = Package;
643 }
644 Grp->LastPackage = Package;
645
646 // Set the name, arch and the ID
647 Pkg->Name = Grp->Name;
648 Pkg->Group = Grp.Index();
649 // 'all' is mapped to the native architecture
650 map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
651 if (unlikely(idxArch == 0))
652 return false;
653 Pkg->Arch = idxArch;
654 Pkg->ID = Cache.HeaderP->PackageCount++;
655
656 return true;
657 }
658 /*}}}*/
659 // CacheGenerator::AddImplicitDepends /*{{{*/
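// ---------------------------------------------------------------------
/* Add the implicit Multi-Arch relations for a version: Breaks/Replaces
   against the other architectures of its group for 'Multi-Arch: same'
   packages, a plain Conflicts otherwise. */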
660 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
661 pkgCache::PkgIterator &P,
662 pkgCache::VerIterator &V)
663 {
664 // copy P.Arch() into a string here as a cache remap
665 // in NewDepends() later may alter the pointer location
666 string Arch = P.Arch() == NULL ? "" : P.Arch();
667 map_ptrloc *OldDepLast = NULL;
668 /* MultiArch handling introduces a lot of implicit Dependencies:
669 - MultiArch: same → Co-Installable if they have the same version
670 - All others conflict with all other group members */
671 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
672 pkgCache::PkgIterator D = G.PackageList();
673 Dynamic<pkgCache::PkgIterator> DynD(D);
674 for (; D.end() != true; D = G.NextPkg(D))
675 {
676 if (Arch == D.Arch() || D->VersionList == 0)
677 continue;
678 /* We allow only one installed arch at a time
679 per group, therefore each group member conflicts
680 with all other group members */
681 if (coInstall == true)
682 {
683 // Replaces: ${self}:other ( << ${binary:Version})
684 NewDepends(D, V, V.VerStr(),
685 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
686 OldDepLast);
687 // Breaks: ${self}:other (!= ${binary:Version})
688 NewDepends(D, V, V.VerStr(),
689 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
690 OldDepLast);
691 } else {
692 // Conflicts: ${self}:other
693 NewDepends(D, V, "",
694 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
695 OldDepLast);
696 }
697 }
698 return true;
699 }
700 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
701 pkgCache::PkgIterator &D)
702 {
703 /* MultiArch handling introduces a lot of implicit Dependencies:
704 - MultiArch: same → Co-Installable if they have the same version
705 - All others conflict with all other group members */
706 map_ptrloc *OldDepLast = NULL;
707 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
708 if (coInstall == true)
709 {
710 // Replaces: ${self}:other ( << ${binary:Version})
711 NewDepends(D, V, V.VerStr(),
712 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
713 OldDepLast);
714 // Breaks: ${self}:other (!= ${binary:Version})
715 NewDepends(D, V, V.VerStr(),
716 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
717 OldDepLast);
718 } else {
719 // Conflicts: ${self}:other
720 NewDepends(D, V, "",
721 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
722 OldDepLast);
723 }
724 return true;
725 }
726
727 /*}}}*/
728 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
729 // ---------------------------------------------------------------------
730 /* */
731 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
732 ListParser &List)
733 {
734 if (CurrentFile == 0)
735 return true;
736
737 // Get a structure
738 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
739 if (VerFile == 0)
740 return false;
741
742 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
743 VF->File = CurrentFile - Cache.PkgFileP;
744
745 // Link it to the end of the list
746 map_ptrloc *Last = &Ver->FileList;
747 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
748 Last = &V->NextFile;
749 VF->NextFile = *Last;
750 *Last = VF.Index();
751
752 VF->Offset = List.Offset();
753 VF->Size = List.Size();
754 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
755 Cache.HeaderP->MaxVerFileSize = VF->Size;
756 Cache.HeaderP->VerFileCount++;
757
758 return true;
759 }
760 /*}}}*/
761 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
762 // ---------------------------------------------------------------------
763 /* This puts a version structure in the linked list */
764 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
765 const string &VerStr,
766 unsigned long Next)
767 {
768 // Get a structure
769 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
770 if (Version == 0)
771 return 0;
772
773 // Fill it in
774 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
775 //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
776 Ver->NextVer = Next;
777 Ver->ID = Cache.HeaderP->VersionCount++;
778 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
779 if (unlikely(idxVerStr == 0))
780 return 0;
781 Ver->VerStr = idxVerStr;
782
783 return Version;
784 }
785 /*}}}*/
786 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
787 // ---------------------------------------------------------------------
788 /* */
789 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
790 ListParser &List)
791 {
792 if (CurrentFile == 0)
793 return true;
794
795 // Get a structure
796 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
797 if (DescFile == 0)
798 return false;
799
800 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
801 DF->File = CurrentFile - Cache.PkgFileP;
802
803 // Link it to the end of the list
804 map_ptrloc *Last = &Desc->FileList;
805 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
806 Last = &D->NextFile;
807
808 DF->NextFile = *Last;
809 *Last = DF.Index();
810
811 DF->Offset = List.Offset();
812 DF->Size = List.Size();
813 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
814 Cache.HeaderP->MaxDescFileSize = DF->Size;
815 Cache.HeaderP->DescFileCount++;
816
817 return true;
818 }
819 /*}}}*/
820 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
821 // ---------------------------------------------------------------------
822 /* This puts a description structure in the linked list */
823 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
824 const string &Lang,
825 const MD5SumValue &md5sum,
826 map_ptrloc idxmd5str)
827 {
828 // Get a structure
829 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
830 if (Description == 0)
831 return 0;
832
833 // Fill it in
834 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
835 Desc->ID = Cache.HeaderP->DescriptionCount++;
836 map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
837 if (unlikely(idxlanguage_code == 0))
838 return 0;
839 Desc->language_code = idxlanguage_code;
840
841 if (idxmd5str != 0)
842 Desc->md5sum = idxmd5str;
843 else
844 {
845 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
846 if (unlikely(idxmd5sum == 0))
847 return 0;
848 Desc->md5sum = idxmd5sum;
849 }
850
851 return Description;
852 }
853 /*}}}*/
854 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
855 // ---------------------------------------------------------------------
856 /* This creates a dependency element in the tree. It is linked to the
857 version and to the package that it is pointing to. */
858 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
859 pkgCache::VerIterator &Ver,
860 string const &Version,
861 unsigned int const &Op,
862 unsigned int const &Type,
863 map_ptrloc* &OldDepLast)
864 {
865 void const * const oldMap = Map.Data();
866 // Get a structure
867 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
868 if (unlikely(Dependency == 0))
869 return false;
870
871 // Fill it in
872 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
873 Dynamic<pkgCache::DepIterator> DynDep(Dep);
874 Dep->ParentVer = Ver.Index();
875 Dep->Type = Type;
876 Dep->CompareOp = Op;
877 Dep->ID = Cache.HeaderP->DependsCount++;
878
879 // Probe the reverse dependency list for a version string that matches
880 if (Version.empty() == false)
881 {
882 /* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
883 if (I->Version != 0 && I.TargetVer() == Version)
884 Dep->Version = I->Version;*/
885 if (Dep->Version == 0) {
886 map_ptrloc const index = WriteStringInMap(Version);
887 if (unlikely(index == 0))
888 return false;
889 Dep->Version = index;
890 }
891 }
892
893 // Link it to the package
894 Dep->Package = Pkg.Index();
895 Dep->NextRevDepends = Pkg->RevDepends;
896 Pkg->RevDepends = Dep.Index();
897
898 // Do we know where to link the Dependency to?
899 if (OldDepLast == NULL)
900 {
901 OldDepLast = &Ver->DependsList;
902 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
903 OldDepLast = &D->NextDepends;
904 } else if (oldMap != Map.Data())
905 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
906
907 Dep->NextDepends = *OldDepLast;
908 *OldDepLast = Dep.Index();
909 OldDepLast = &Dep->NextDepends;
910
911 return true;
912 }
913 /*}}}*/
914 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
915 // ---------------------------------------------------------------------
916 /* This creates a Group and the Package to link this dependency to if
917 needed and also handles the caching of the old endpoint */
918 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
919 const string &PackageName,
920 const string &Arch,
921 const string &Version,
922 unsigned int Op,
923 unsigned int Type)
924 {
925 pkgCache::GrpIterator Grp;
926 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
927 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
928 return false;
929
930 // Locate the target package
931 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
932 // we don't create 'none' packages and their dependencies if we can avoid it …
933 if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
934 return true;
935 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
936 if (Pkg.end() == true) {
937 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
938 return false;
939 }
940
941 // Is it a file dependency?
942 if (unlikely(PackageName[0] == '/'))
943 FoundFileDeps = true;
944
945 /* Caching the old end point speeds up generation substantially */
946 if (OldDepVer != Ver) {
947 OldDepLast = NULL;
948 OldDepVer = Ver;
949 }
950
951 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
952 }
953 /*}}}*/
954 // ListParser::NewProvides - Create a Provides element /*{{{*/
955 // ---------------------------------------------------------------------
956 /* */
957 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
958 const string &PkgName,
959 const string &PkgArch,
960 const string &Version)
961 {
962 pkgCache &Cache = Owner->Cache;
963
964 // We do not add self-referencing provides
965 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
966 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
967 return true;
968
969 // Get a structure
970 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
971 if (unlikely(Provides == 0))
972 return false;
973 Cache.HeaderP->ProvidesCount++;
974
975 // Fill it in
976 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
977 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
978 Prv->Version = Ver.Index();
979 Prv->NextPkgProv = Ver->ProvidesList;
980 Ver->ProvidesList = Prv.Index();
981 if (Version.empty() == false) {
982 map_ptrloc const idxProvideVersion = WriteString(Version);
983 Prv->ProvideVersion = idxProvideVersion;
984 if (unlikely(idxProvideVersion == 0))
985 return false;
986 }
987
988 // Locate the target package
989 pkgCache::PkgIterator Pkg;
990 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
991 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
992 return false;
993
994 // Link it to the package
995 Prv->ParentPkg = Pkg.Index();
996 Prv->NextProvides = Pkg->ProvidesList;
997 Pkg->ProvidesList = Prv.Index();
998
999 return true;
1000 }
1001 /*}}}*/
1002 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
1003 // ---------------------------------------------------------------------
1004 /* This is used to select which file is to be associated with all newly
1005 added versions. The caller is responsible for setting the IMS fields. */
1006 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
1007 const pkgIndexFile &Index,
1008 unsigned long Flags)
1009 {
1010 // Get some space for the structure
1011 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
1012 if (unlikely(idxFile == 0))
1013 return false;
1014 CurrentFile = Cache.PkgFileP + idxFile;
1015
1016 // Fill it in
1017 map_ptrloc const idxFileName = WriteStringInMap(File);
1018 map_ptrloc const idxSite = WriteUniqString(Site);
1019 if (unlikely(idxFileName == 0 || idxSite == 0))
1020 return false;
1021 CurrentFile->FileName = idxFileName;
1022 CurrentFile->Site = idxSite;
1023 CurrentFile->NextFile = Cache.HeaderP->FileList;
1024 CurrentFile->Flags = Flags;
1025 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
1026 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
1027 if (unlikely(idxIndexType == 0))
1028 return false;
1029 CurrentFile->IndexType = idxIndexType;
1030 PkgFileName = File;
1031 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
1032 Cache.HeaderP->PackageFileCount++;
1033
1034 if (Progress != 0)
1035 Progress->SubProgress(Index.Size());
1036 return true;
1037 }
1038 /*}}}*/
1039 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
1040 // ---------------------------------------------------------------------
1041 /* This is used to create handles to strings. Given the same text it
1042 always returns the same number */
1043 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
1044 unsigned int Size)
1045 {
1046 /* We use a very small transient hash table here; this speeds up generation
1047 by a fair amount on slower machines */
1048 pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
1049 if (Bucket != 0 &&
1050 stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
1051 return Bucket->String;
1052
1053 // Search for an insertion point
1054 pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
1055 int Res = 1;
1056 map_ptrloc *Last = &Cache.HeaderP->StringList;
1057 for (; I != Cache.StringItemP; Last = &I->NextItem,
1058 I = Cache.StringItemP + I->NextItem)
1059 {
1060 Res = stringcmp(S,S+Size,Cache.StrP + I->String);
1061 if (Res >= 0)
1062 break;
1063 }
1064
1065 // Match
1066 if (Res == 0)
1067 {
1068 Bucket = I;
1069 return I->String;
1070 }
1071
1072 // Get a structure
1073 void const * const oldMap = Map.Data();
1074 map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
1075 if (Item == 0)
1076 return 0;
1077
1078 map_ptrloc const idxString = WriteStringInMap(S,Size);
1079 if (unlikely(idxString == 0))
1080 return 0;
1081 if (oldMap != Map.Data()) {
1082 Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
1083 I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
1084 }
1085 *Last = Item;
1086
1087 // Fill in the structure
1088 pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
1089 ItemP->NextItem = I - Cache.StringItemP;
1090 ItemP->String = idxString;
1091
1092 Bucket = ItemP;
1093 return ItemP->String;
1094 }
1095 /*}}}*/
1096 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1097 // ---------------------------------------------------------------------
1098 /* This just verifies that each file in the list of index files exists,
1099 has attributes matching the cache, and that the cache does not have
1100 any extra files. */
1101 static bool CheckValidity(const string &CacheFile,
1102 pkgSourceList &List,
1103 FileIterator Start,
1104 FileIterator End,
1105 MMap **OutMap = 0)
1106 {
1107 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1108 // No file, certainly invalid
1109 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1110 {
1111 if (Debug == true)
1112 std::clog << "CacheFile doesn't exist" << std::endl;
1113 return false;
1114 }
1115
1116 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1117 {
1118 if (Debug == true)
1119 std::clog << "sources.list is newer than the cache" << std::endl;
1120 return false;
1121 }
1122
1123 // Map it
1124 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1125 SPtr<MMap> Map = new MMap(CacheF,0);
1126 pkgCache Cache(Map);
1127 if (_error->PendingError() == true || Map->Size() == 0)
1128 {
1129 if (Debug == true)
1130 std::clog << "Errors are pending or Map is empty()" << std::endl;
1131 _error->Discard();
1132 return false;
1133 }
1134
1135 /* Now we check every index file, see if it is in the cache,
1136 verify the IMS data and check that it is on the disk too.. */
1137 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1138 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1139 for (; Start != End; ++Start)
1140 {
1141 if (Debug == true)
1142 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1143 if ((*Start)->HasPackages() == false)
1144 {
1145 if (Debug == true)
1146 std::clog << "Has NO packages" << std::endl;
1147 continue;
1148 }
1149
1150 if ((*Start)->Exists() == false)
1151 {
1152 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1153 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1154 (*Start)->Describe().c_str());
1155 #endif
1156 if (Debug == true)
1157 std::clog << "file doesn't exist" << std::endl;
1158 continue;
1159 }
1160
1161 // FindInCache is also expected to do an IMS check.
1162 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1163 if (File.end() == true)
1164 {
1165 if (Debug == true)
1166 std::clog << "FindInCache returned end-Pointer" << std::endl;
1167 return false;
1168 }
1169
1170 Visited[File->ID] = true;
1171 if (Debug == true)
1172 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1173 }
1174
1175 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1176 if (Visited[I] == false)
1177 {
1178 if (Debug == true)
1179 std::clog << "File with ID" << I << " wasn't visited" << std::endl;
1180 return false;
1181 }
1182
1183 if (_error->PendingError() == true)
1184 {
1185 if (Debug == true)
1186 {
1187 std::clog << "Validity failed because of pending errors:" << std::endl;
1188 _error->DumpErrors();
1189 }
1190 _error->Discard();
1191 return false;
1192 }
1193
1194 if (OutMap != 0)
1195 *OutMap = Map.UnGuard();
1196 return true;
1197 }
1198 /*}}}*/
1199 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1200 // ---------------------------------------------------------------------
1201 /* Size is kind of an abstract notion that is only used for the progress
1202 meter */
1203 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1204 {
1205 unsigned long TotalSize = 0;
1206 for (; Start != End; ++Start)
1207 {
1208 if ((*Start)->HasPackages() == false)
1209 continue;
1210 TotalSize += (*Start)->Size();
1211 }
1212 return TotalSize;
1213 }
1214 /*}}}*/
1215 // BuildCache - Merge the list of index files into the cache /*{{{*/
1216 // ---------------------------------------------------------------------
1217 /* */
1218 static bool BuildCache(pkgCacheGenerator &Gen,
1219 OpProgress *Progress,
1220 unsigned long &CurrentSize,unsigned long TotalSize,
1221 FileIterator Start, FileIterator End)
1222 {
1223 FileIterator I;
1224 for (I = Start; I != End; ++I)
1225 {
1226 if ((*I)->HasPackages() == false)
1227 continue;
1228
1229 if ((*I)->Exists() == false)
1230 continue;
1231
1232 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1233 {
1234 _error->Warning("Duplicate sources.list entry %s",
1235 (*I)->Describe().c_str());
1236 continue;
1237 }
1238
1239 unsigned long Size = (*I)->Size();
1240 if (Progress != NULL)
1241 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1242 CurrentSize += Size;
1243
1244 if ((*I)->Merge(Gen,Progress) == false)
1245 return false;
1246 }
1247
1248 if (Gen.HasFileDeps() == true)
1249 {
1250 if (Progress != NULL)
1251 Progress->Done();
1252 TotalSize = ComputeSize(Start, End);
1253 CurrentSize = 0;
1254 for (I = Start; I != End; ++I)
1255 {
1256 unsigned long Size = (*I)->Size();
1257 if (Progress != NULL)
1258 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1259 CurrentSize += Size;
1260 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1261 return false;
1262 }
1263 }
1264
1265 return true;
1266 }
1267 /*}}}*/
1268 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1269 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1270 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1271 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1272 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1273 Flags |= MMap::Moveable;
1274 if (_config->FindB("APT::Cache-Fallback", false) == true)
1275 Flags |= MMap::Fallback;
1276 if (CacheF != NULL)
1277 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1278 else
1279 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1280 }
1281 /*}}}*/
1282 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1283 // ---------------------------------------------------------------------
1284 /* This makes sure that the status cache (the cache that has all
1285 index files from the sources list and all local ones) is ready
1286 to be mmapped. If OutMap is not zero then a MMap object representing
1287 the cache will be stored there. This is pretty much mandatory if you
1288 are using AllowMem. AllowMem lets the function be run as non-root
1289 where it builds the cache 'fast' into a memory buffer. */
1290 __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1291 MMap **OutMap, bool AllowMem)
1292 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
1293 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
1294 MMap **OutMap,bool AllowMem)
1295 {
1296 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1297
1298 std::vector<pkgIndexFile *> Files;
1299 for (std::vector<metaIndex *>::const_iterator i = List.begin();
1300 i != List.end();
1301 ++i)
1302 {
1303 std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
1304 for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
1305 j != Indexes->end();
1306 ++j)
1307 Files.push_back (*j);
1308 }
1309
1310 unsigned long const EndOfSource = Files.size();
1311 if (_system->AddStatusFiles(Files) == false)
1312 return false;
1313
1314 // Decide if we can write to the files..
1315 string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
1316 string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
1317
1318 // ensure the cache directory exists
1319 if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
1320 {
1321 string dir = _config->FindDir("Dir::Cache");
1322 size_t const len = dir.size();
1323 if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
1324 dir = dir.substr(0, len - 5);
1325 if (CacheFile.empty() == false)
1326 CreateDirectory(dir, flNotFile(CacheFile));
1327 if (SrcCacheFile.empty() == false)
1328 CreateDirectory(dir, flNotFile(SrcCacheFile));
1329 }
1330
1331 // Decide if we can write to the cache
1332 bool Writeable = false;
1333 if (CacheFile.empty() == false)
1334 Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
1335 else
1336 if (SrcCacheFile.empty() == false)
1337 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
1338 if (Debug == true)
1339 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
1340
1341 if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
1342 return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
1343
1344 if (Progress != NULL)
1345 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1346
1347 // Cache is OK, Fin.
1348 if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
1349 {
1350 if (Progress != NULL)
1351 Progress->OverallProgress(1,1,1,_("Reading package lists"));
1352 if (Debug == true)
1353 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
1354 return true;
1355 }
1356 else if (Debug == true)
1357 std::clog << "pkgcache.bin is NOT valid" << std::endl;
1358
1359 /* At this point we know we need to reconstruct the package cache,
1360 begin. */
1361 SPtr<FileFd> CacheF;
1362 SPtr<DynamicMMap> Map;
1363 if (Writeable == true && CacheFile.empty() == false)
1364 {
1365 _error->PushToStack();
1366 unlink(CacheFile.c_str());
1367 CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
1368 fchmod(CacheF->Fd(),0644);
1369 Map = CreateDynamicMMap(CacheF, MMap::Public);
1370 if (_error->PendingError() == true)
1371 {
1372 delete CacheF.UnGuard();
1373 delete Map.UnGuard();
1374 if (Debug == true)
1375 std::clog << "Open filebased MMap FAILED" << std::endl;
1376 Writeable = false;
1377 if (AllowMem == false)
1378 {
1379 _error->MergeWithStack();
1380 return false;
1381 }
1382 _error->RevertToStack();
1383 }
1384 else
1385 {
1386 _error->MergeWithStack();
1387 if (Debug == true)
1388 std::clog << "Open filebased MMap" << std::endl;
1389 }
1390 }
1391 if (Writeable == false || CacheFile.empty() == true)
1392 {
1393 // Just build it in memory..
1394 Map = CreateDynamicMMap(NULL);
1395 if (Debug == true)
1396 std::clog << "Open memory Map (not filebased)" << std::endl;
1397 }
1398
1399 // Let's try the source cache.
1400 unsigned long CurrentSize = 0;
1401 unsigned long TotalSize = 0;
1402 if (CheckValidity(SrcCacheFile, List, Files.begin(),
1403 Files.begin()+EndOfSource) == true)
1404 {
1405 if (Debug == true)
1406 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
1407 // Preload the map with the source cache
1408 FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
1409 unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
1410 if ((alloc == 0 && _error->PendingError())
1411 || SCacheF.Read((unsigned char *)Map->Data() + alloc,
1412 SCacheF.Size()) == false)
1413 return false;
1414
1415 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1416
1417 // Build the status cache
1418 pkgCacheGenerator Gen(Map.Get(),Progress);
1419 if (_error->PendingError() == true)
1420 return false;
1421 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1422 Files.begin()+EndOfSource,Files.end()) == false)
1423 return false;
1424 }
1425 else
1426 {
1427 if (Debug == true)
1428 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
1429 TotalSize = ComputeSize(Files.begin(),Files.end());
1430
1431 // Build the source cache
1432 pkgCacheGenerator Gen(Map.Get(),Progress);
1433 if (_error->PendingError() == true)
1434 return false;
1435 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1436 Files.begin(),Files.begin()+EndOfSource) == false)
1437 return false;
1438
1439 // Write it back
1440 if (Writeable == true && SrcCacheFile.empty() == false)
1441 {
1442 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
1443 if (_error->PendingError() == true)
1444 return false;
1445
1446 fchmod(SCacheF.Fd(),0644);
1447
1448 // Write out the main data
1449 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
1450 return _error->Error(_("IO Error saving source cache"));
1451 SCacheF.Sync();
1452
1453 // Write out the proper header
1454 Gen.GetCache().HeaderP->Dirty = false;
1455 if (SCacheF.Seek(0) == false ||
1456 SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
1457 return _error->Error(_("IO Error saving source cache"));
1458 Gen.GetCache().HeaderP->Dirty = true;
1459 SCacheF.Sync();
1460 }
1461
1462 // Build the status cache
1463 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1464 Files.begin()+EndOfSource,Files.end()) == false)
1465 return false;
1466 }
1467 if (Debug == true)
1468 std::clog << "Caches are ready for shipping" << std::endl;
1469
1470 if (_error->PendingError() == true)
1471 return false;
1472 if (OutMap != 0)
1473 {
1474 if (CacheF != 0)
1475 {
1476 delete Map.UnGuard();
1477 *OutMap = new MMap(*CacheF,0);
1478 }
1479 else
1480 {
1481 *OutMap = Map.UnGuard();
1482 }
1483 }
1484
1485 return true;
1486 }
1487 /*}}}*/
1488 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1489 // ---------------------------------------------------------------------
1490 /* */
1491 __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1492 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1493 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1494 {
1495 std::vector<pkgIndexFile *> Files;
1496 unsigned long EndOfSource = Files.size();
1497 if (_system->AddStatusFiles(Files) == false)
1498 return false;
1499
1500 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1501 unsigned long CurrentSize = 0;
1502 unsigned long TotalSize = 0;
1503
1504 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1505
1506 // Build the status cache
1507 if (Progress != NULL)
1508 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1509 pkgCacheGenerator Gen(Map.Get(),Progress);
1510 if (_error->PendingError() == true)
1511 return false;
1512 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1513 Files.begin()+EndOfSource,Files.end()) == false)
1514 return false;
1515
1516 if (_error->PendingError() == true)
1517 return false;
1518 *OutMap = Map.UnGuard();
1519
1520 return true;
1521 }
1522 /*}}}*/
1523 // IsDuplicateDescription /*{{{*/
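// ---------------------------------------------------------------------
/* Check whether the given description list already contains an entry
   with this md5sum and language code. */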
1524 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1525 MD5SumValue const &CurMd5, std::string const &CurLang)
1526 {
1527 // Descriptions in the same linked list all have the same md5
1528 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1529 return false;
1530 for (; Desc.end() == false; ++Desc)
1531 if (Desc.LanguageCode() == CurLang)
1532 return true;
1533 return false;
1534 }
1535 /*}}}*/
1536 // CacheGenerator::FinishCache /*{{{*/
1537 bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
1538 {
1539 return true;
1540 }
1541 /*}}}*/