1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
38 typedef std::vector<pkgIndexFile *>::iterator FileIterator;
39 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
40
41 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
42 MD5SumValue const &CurMd5, std::string const &CurLang);
43
44 using std::string;
45
46 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
47 // ---------------------------------------------------------------------
48 /* We set the dirty flag and make sure that is written to the disk */
49 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
50 Map(*pMap), Cache(pMap,false), Progress(Prog),
51 FoundFileDeps(0)
52 {
53 CurrentFile = 0;
54 memset(UniqHash,0,sizeof(UniqHash));
55
56 if (_error->PendingError() == true)
57 return;
58
59 if (Map.Size() == 0)
60 {
61 // Set up the map interface.
62 Cache.HeaderP = (pkgCache::Header *)Map.Data();
63 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
64 return;
65
66 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
67
68 // Starting header
69 *Cache.HeaderP = pkgCache::Header();
70 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
71 Cache.HeaderP->VerSysName = idxVerSysName;
72 // this pointer is set in ReMap, but we need it now for WriteUniqString
73 Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
74 map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
75 Cache.HeaderP->Architecture = idxArchitecture;
76 if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
77 return;
78 Cache.ReMap();
79 }
80 else
81 {
82 // Map directly from the existing file
83 Cache.ReMap();
84 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
85 if (Cache.VS != _system->VS)
86 {
87 _error->Error(_("Cache has an incompatible versioning system"));
88 return;
89 }
90 }
91
92 Cache.HeaderP->Dirty = true;
93 Map.Sync(0,sizeof(pkgCache::Header));
94 }
95 /*}}}*/
96 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
97 // ---------------------------------------------------------------------
98 /* We sync the data then unset the dirty flag in two steps so as to
99 avoid a problem during a crash */
100 pkgCacheGenerator::~pkgCacheGenerator()
101 {
102 if (_error->PendingError() == true)
103 return;
104 if (Map.Sync() == false)
105 return;
106
107 Cache.HeaderP->Dirty = false;
108 Cache.HeaderP->CacheFileSize = Map.Size();
109 Map.Sync(0,sizeof(pkgCache::Header));
110 }
111 /*}}}*/
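// Illustrative sketch (not part of the original file): the generator is meant to be
// short-lived - construct it on a DynamicMMap, merge one or more index files into
// it, and let the destructor sync the map and clear the Dirty flag. The setup below
// mirrors how MakeStatusCache() further down drives it; treat the exact calls and
// names as assumptions, not a fixed recipe.
#if 0
SPtr<DynamicMMap> Map = pkgCacheGenerator::CreateDynamicMMap(NULL);
{
   pkgCacheGenerator Gen(Map.Get(), NULL);       // sets HeaderP->Dirty = true
   if (_error->PendingError() == true)
      return false;
   // ... SelectFile()/MergeList() for every index file ...
}  // ~pkgCacheGenerator syncs the map and sets Dirty = false again
#endif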
112 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
113 if (oldMap == newMap)
114 return;
115
116 if (_config->FindB("Debug::pkgCacheGen", false))
117 std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;
118
119 Cache.ReMap(false);
120
121 CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;
122
123 for (size_t i = 0; i < _count(UniqHash); ++i)
124 if (UniqHash[i] != 0)
125 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;
126
127 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
128 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
129 (*i)->ReMap(oldMap, newMap);
130 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
131 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
132 (*i)->ReMap(oldMap, newMap);
133 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
134 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
135 (*i)->ReMap(oldMap, newMap);
136 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
137 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
138 (*i)->ReMap(oldMap, newMap);
139 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
140 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
141 (*i)->ReMap(oldMap, newMap);
142 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
143 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
144 (*i)->ReMap(oldMap, newMap);
145 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
146 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
147 (*i)->ReMap(oldMap, newMap);
148 } /*}}}*/
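// Illustrative sketch (not part of the original file): any local iterator that has
// to stay valid across a possible remap is wrapped in Dynamic<>, which registers
// its address in Dynamic<Iter>::toReMap so the loops in ReMap() above can adjust
// it. This is the pattern used throughout this file:
#if 0
pkgCache::PkgIterator Pkg;
Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);   // registered until DynPkg leaves scope
// WriteStringInMap()/AllocateInMap() may grow and move the mmap;
// ReMap() then fixes Pkg up through the registration above.
#endif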
149 // CacheGenerator::WriteStringInMap /*{{{*/
150 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
151 const unsigned long &Len) {
152 void const * const oldMap = Map.Data();
153 map_ptrloc const index = Map.WriteString(String, Len);
154 if (index != 0)
155 ReMap(oldMap, Map.Data());
156 return index;
157 }
158 /*}}}*/
159 // CacheGenerator::WriteStringInMap /*{{{*/
160 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
161 void const * const oldMap = Map.Data();
162 map_ptrloc const index = Map.WriteString(String);
163 if (index != 0)
164 ReMap(oldMap, Map.Data());
165 return index;
166 }
167 /*}}}*/
168 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
169 void const * const oldMap = Map.Data();
170 map_ptrloc const index = Map.Allocate(size);
171 if (index != 0)
172 ReMap(oldMap, Map.Data());
173 return index;
174 }
175 /*}}}*/
176 // CacheGenerator::MergeList - Merge the package list /*{{{*/
177 // ---------------------------------------------------------------------
178 /* This drives the generation of the entries in the cache. Each loop
179 iteration processes a single package record from the underlying parse engine. */
180 bool pkgCacheGenerator::MergeList(ListParser &List,
181 pkgCache::VerIterator *OutVer)
182 {
183 List.Owner = this;
184
185 unsigned int Counter = 0;
186 while (List.Step() == true)
187 {
188 string const PackageName = List.Package();
189 if (PackageName.empty() == true)
190 return false;
191
192 Counter++;
193 if (Counter % 100 == 0 && Progress != 0)
194 Progress->Progress(List.Offset());
195
196 string Arch = List.Architecture();
197 string const Version = List.Version();
198 if (Version.empty() == true && Arch.empty() == true)
199 {
200 // package descriptions
201 if (MergeListGroup(List, PackageName) == false)
202 return false;
203 continue;
204 }
205
206 if (Arch.empty() == true)
207 {
208 // use the pseudo arch 'none' for arch-less packages
209 Arch = "none";
210 /* We might be building a SingleArchCache here, which we don't want to blow up
211 to a proper MultiArchCache just for these :none packages, so just ensure
212 that we always have a native package structure first for SingleArch */
213 pkgCache::PkgIterator NP;
214 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
215 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
216 // TRANSLATOR: The first placeholder is a package name,
217 // the other two should be copied verbatim as they include debug info
218 return _error->Error(_("Error occurred while processing %s (%s%d)"),
219 PackageName.c_str(), "NewPackage", 0);
220 }
221
222 // Get a pointer to the package structure
223 pkgCache::PkgIterator Pkg;
224 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
225 if (NewPackage(Pkg, PackageName, Arch) == false)
226 // TRANSLATOR: The first placeholder is a package name,
227 // the other two should be copied verbatim as they include debug info
228 return _error->Error(_("Error occurred while processing %s (%s%d)"),
229 PackageName.c_str(), "NewPackage", 1);
230
231
232 if (Version.empty() == true)
233 {
234 if (MergeListPackage(List, Pkg) == false)
235 return false;
236 }
237 else
238 {
239 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
240 return false;
241 }
242
243 if (OutVer != 0)
244 {
245 FoundFileDeps |= List.HasFileDeps();
246 return true;
247 }
248 }
249
250 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
251 return _error->Error(_("Wow, you exceeded the number of package "
252 "names this APT is capable of."));
253 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
254 return _error->Error(_("Wow, you exceeded the number of versions "
255 "this APT is capable of."));
256 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
257 return _error->Error(_("Wow, you exceeded the number of descriptions "
258 "this APT is capable of."));
259 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
260 return _error->Error(_("Wow, you exceeded the number of dependencies "
261 "this APT is capable of."));
262
263 FoundFileDeps |= List.HasFileDeps();
264 return true;
265 }
266 // CacheGenerator::MergeListGroup /*{{{*/
267 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
268 {
269 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
270 // a group has no data on its own, only packages have it, but stanzas
271 // like this come from Translation- files to add descriptions,
272 // and without a version we don't need a description for it…
273 if (Grp.end() == true)
274 return true;
275 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
276
277 pkgCache::PkgIterator Pkg;
278 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
279 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
280 if (MergeListPackage(List, Pkg) == false)
281 return false;
282
283 return true;
284 }
285 /*}}}*/
286 // CacheGenerator::MergeListPackage /*{{{*/
287 bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
288 {
289 // we first process the package, then the descriptions
290 // (for deb this package processing is in fact a no-op)
291 pkgCache::VerIterator Ver(Cache);
292 Dynamic<pkgCache::VerIterator> DynVer(Ver);
293 if (List.UsePackage(Pkg, Ver) == false)
294 return _error->Error(_("Error occurred while processing %s (%s%d)"),
295 Pkg.Name(), "UsePackage", 1);
296
297 // Find the right version to write the description
298 MD5SumValue CurMd5 = List.Description_md5();
299 if (CurMd5.Value().empty() == true || List.Description().empty() == true)
300 return true;
301 std::string CurLang = List.DescriptionLanguage();
302
303 for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
304 {
305 pkgCache::DescIterator VerDesc = Ver.DescriptionList();
306
307 // a version can only have one md5 describing it
308 if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
309 continue;
310
311 // don't add a new description if we have one for the given
312 // md5 && language
313 if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
314 continue;
315
316 pkgCache::DescIterator Desc;
317 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
318
319 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
320 if (unlikely(descindex == 0 && _error->PendingError()))
321 return _error->Error(_("Error occurred while processing %s (%s%d)"),
322 Pkg.Name(), "NewDescription", 1);
323
324 Desc->ParentPkg = Pkg.Index();
325
326 // we add at the end, so that the start is constant as we need
327 // that to be able to efficiently share these lists
328 VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
329 for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
330 map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
331 *LastNextDesc = descindex;
332
333 if (NewFileDesc(Desc,List) == false)
334 return _error->Error(_("Error occurred while processing %s (%s%d)"),
335 Pkg.Name(), "NewFileDesc", 1);
336
337 // we can stop here as all "same" versions will share the description
338 break;
339 }
340
341 return true;
342 }
343 /*}}}*/
344 // CacheGenerator::MergeListVersion /*{{{*/
345 bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
346 std::string const &Version, pkgCache::VerIterator* &OutVer)
347 {
348 pkgCache::VerIterator Ver = Pkg.VersionList();
349 Dynamic<pkgCache::VerIterator> DynVer(Ver);
350 map_ptrloc *LastVer = &Pkg->VersionList;
351 void const * oldMap = Map.Data();
352
353 unsigned long const Hash = List.VersionHash();
354 if (Ver.end() == false)
355 {
356 /* We know the list is sorted so we use that fact in the search.
357 Insertion of new versions is done with correct sorting */
358 int Res = 1;
359 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
360 {
361 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
362 // Version is higher than the current version - insert here
363 if (Res > 0)
364 break;
365 // Version strings are equal - is the hash also equal?
366 if (Res == 0 && Ver->Hash == Hash)
367 break;
368 // proceed to the next one until we find either the right
369 // version or another version (which will be lower)
370 }
371
372 /* We already have a version for this item, record that we saw it */
373 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
374 {
375 if (List.UsePackage(Pkg,Ver) == false)
376 return _error->Error(_("Error occurred while processing %s (%s%d)"),
377 Pkg.Name(), "UsePackage", 2);
378
379 if (NewFileVer(Ver,List) == false)
380 return _error->Error(_("Error occurred while processing %s (%s%d)"),
381 Pkg.Name(), "NewFileVer", 1);
382
383 // Read only a single record and return
384 if (OutVer != 0)
385 {
386 *OutVer = Ver;
387 return true;
388 }
389
390 return true;
391 }
392 }
393
394 // Add a new version
395 map_ptrloc const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
396 if (verindex == 0 && _error->PendingError())
397 return _error->Error(_("Error occurred while processing %s (%s%d)"),
398 Pkg.Name(), "NewVersion", 1);
399
400 if (oldMap != Map.Data())
401 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
402 *LastVer = verindex;
403
404 if (unlikely(List.NewVersion(Ver) == false))
405 return _error->Error(_("Error occurred while processing %s (%s%d)"),
406 Pkg.Name(), "NewVersion", 2);
407
408 if (unlikely(List.UsePackage(Pkg,Ver) == false))
409 return _error->Error(_("Error occurred while processing %s (%s%d)"),
410 Pkg.Name(), "UsePackage", 3);
411
412 if (unlikely(NewFileVer(Ver,List) == false))
413 return _error->Error(_("Error occurred while processing %s (%s%d)"),
414 Pkg.Name(), "NewFileVer", 2);
415
416 pkgCache::GrpIterator Grp = Pkg.Group();
417 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
418
419 /* If it is the first version of this package we need to add implicit
420 Multi-Arch dependencies to all other package versions in the group now -
421 otherwise we just add them for this new version */
422 if (Pkg.VersionList()->NextVer == 0)
423 {
424 pkgCache::PkgIterator P = Grp.PackageList();
425 Dynamic<pkgCache::PkgIterator> DynP(P);
426 for (; P.end() != true; P = Grp.NextPkg(P))
427 {
428 if (P->ID == Pkg->ID)
429 continue;
430 pkgCache::VerIterator V = P.VersionList();
431 Dynamic<pkgCache::VerIterator> DynV(V);
432 for (; V.end() != true; ++V)
433 if (unlikely(AddImplicitDepends(V, Pkg) == false))
434 return _error->Error(_("Error occurred while processing %s (%s%d)"),
435 Pkg.Name(), "AddImplicitDepends", 1);
436 }
437 /* :none packages are packages without an architecture. They are forbidden by
438 debian-policy, so usually they will only be in (old) dpkg status files -
439 and dpkg will complain about them - and are pretty rare. We therefore usually
440 do not create conflicts while the parent is created, but only if a :none
441 package (= the target) appears. This creates incorrect dependencies on :none
442 for architecture-specific dependencies on the package we copy from, but we
443 will ignore this bug as architecture-specific dependencies are only allowed
444 in jessie and until then the :none packages should be extinct (hopefully).
445 In other words: This should work long enough to allow graceful removal of
446 these packages, it is not supposed to allow users to keep using them … */
447 if (strcmp(Pkg.Arch(), "none") == 0)
448 {
449 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
450 if (M.end() == false && Pkg != M)
451 {
452 pkgCache::DepIterator D = M.RevDependsList();
453 Dynamic<pkgCache::DepIterator> DynD(D);
454 for (; D.end() == false; ++D)
455 {
456 if ((D->Type != pkgCache::Dep::Conflicts &&
457 D->Type != pkgCache::Dep::DpkgBreaks &&
458 D->Type != pkgCache::Dep::Replaces) ||
459 D.ParentPkg().Group() == Grp)
460 continue;
461
462 map_ptrloc *OldDepLast = NULL;
463 pkgCache::VerIterator ConVersion = D.ParentVer();
464 Dynamic<pkgCache::VerIterator> DynV(ConVersion);
465 // duplicate the Conflicts/Breaks/Replaces for :none arch
466 NewDepends(Pkg, ConVersion, D->Version,
467 D->CompareOp, D->Type, OldDepLast);
468 }
469 }
470 }
471 }
472 if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
473 return _error->Error(_("Error occurred while processing %s (%s%d)"),
474 Pkg.Name(), "AddImplicitDepends", 2);
475
476 // Read only a single record and return
477 if (OutVer != 0)
478 {
479 *OutVer = Ver;
480 return true;
481 }
482
483 /* Record the Description (it is not translated) */
484 MD5SumValue CurMd5 = List.Description_md5();
485 if (CurMd5.Value().empty() == true || List.Description().empty() == true)
486 return true;
487 std::string CurLang = List.DescriptionLanguage();
488
489 /* Before we add a new description we first search in the group for
490 a version with a description of the same MD5 - if so we reuse this
491 description group instead of creating our own for this version */
492 for (pkgCache::PkgIterator P = Grp.PackageList();
493 P.end() == false; P = Grp.NextPkg(P))
494 {
495 for (pkgCache::VerIterator V = P.VersionList();
496 V.end() == false; ++V)
497 {
498 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
499 continue;
500 Ver->DescriptionList = V->DescriptionList;
501 return true;
502 }
503 }
504
505 // We haven't found reusable descriptions, so add the first description
506 pkgCache::DescIterator Desc = Ver.DescriptionList();
507 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
508
509 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
510 if (unlikely(descindex == 0 && _error->PendingError()))
511 return _error->Error(_("Error occurred while processing %s (%s%d)"),
512 Pkg.Name(), "NewDescription", 2);
513
514 Desc->ParentPkg = Pkg.Index();
515 Ver->DescriptionList = descindex;
516
517 if (NewFileDesc(Desc,List) == false)
518 return _error->Error(_("Error occurred while processing %s (%s%d)"),
519 Pkg.Name(), "NewFileDesc", 2);
520
521 return true;
522 }
523 /*}}}*/
524 /*}}}*/
525 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
526 // ---------------------------------------------------------------------
527 /* If we found any file depends while parsing the main list we need to
528 resolve them. Since it is undesired to load the entire list of files
529 into the cache as virtual packages we do a two-stage effort. MergeList
530 identifies the file depends and this creates Provides for them by
531 re-parsing all the indexes. */
532 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
533 {
534 List.Owner = this;
535
536 unsigned int Counter = 0;
537 while (List.Step() == true)
538 {
539 string PackageName = List.Package();
540 if (PackageName.empty() == true)
541 return false;
542 string Version = List.Version();
543 if (Version.empty() == true)
544 continue;
545
546 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
547 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
548 if (Pkg.end() == true)
549 return _error->Error(_("Error occurred while processing %s (%s%d)"),
550 PackageName.c_str(), "FindPkg", 1);
551 Counter++;
552 if (Counter % 100 == 0 && Progress != 0)
553 Progress->Progress(List.Offset());
554
555 unsigned long Hash = List.VersionHash();
556 pkgCache::VerIterator Ver = Pkg.VersionList();
557 Dynamic<pkgCache::VerIterator> DynVer(Ver);
558 for (; Ver.end() == false; ++Ver)
559 {
560 if (Ver->Hash == Hash && Version == Ver.VerStr())
561 {
562 if (List.CollectFileProvides(Cache,Ver) == false)
563 return _error->Error(_("Error occurred while processing %s (%s%d)"),
564 PackageName.c_str(), "CollectFileProvides", 1);
565 break;
566 }
567 }
568
569 if (Ver.end() == true)
570 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
571 }
572
573 return true;
574 }
575 /*}}}*/
576 // CacheGenerator::NewGroup - Add a new group /*{{{*/
577 // ---------------------------------------------------------------------
578 /* This creates a new group structure and adds it to the hash table */
579 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
580 {
581 Grp = Cache.FindGrp(Name);
582 if (Grp.end() == false)
583 return true;
584
585 // Get a structure
586 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
587 if (unlikely(Group == 0))
588 return false;
589
590 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
591 map_ptrloc const idxName = WriteStringInMap(Name);
592 if (unlikely(idxName == 0))
593 return false;
594 Grp->Name = idxName;
595
596 // Insert it into the hash table
597 unsigned long const Hash = Cache.Hash(Name);
598 map_ptrloc *insertAt = &Cache.HeaderP->GrpHashTable[Hash];
599 while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + *insertAt)->Name) > 0)
600 insertAt = &(Cache.GrpP + *insertAt)->Next;
601 Grp->Next = *insertAt;
602 *insertAt = Group;
603
604 Grp->ID = Cache.HeaderP->GroupCount++;
605 return true;
606 }
607 /*}}}*/
608 // CacheGenerator::NewPackage - Add a new package /*{{{*/
609 // ---------------------------------------------------------------------
610 /* This creates a new package structure and adds it to the hash table */
611 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
612 const string &Arch) {
613 pkgCache::GrpIterator Grp;
614 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
615 if (unlikely(NewGroup(Grp, Name) == false))
616 return false;
617
618 Pkg = Grp.FindPkg(Arch);
619 if (Pkg.end() == false)
620 return true;
621
622 // Get a structure
623 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
624 if (unlikely(Package == 0))
625 return false;
626 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
627
628 // Insert the package into our package list
629 if (Grp->FirstPackage == 0) // the group is new
630 {
631 Grp->FirstPackage = Package;
632 // Insert it into the hash table
633 unsigned long const Hash = Cache.Hash(Name);
634 map_ptrloc *insertAt = &Cache.HeaderP->PkgHashTable[Hash];
635 while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.PkgP + *insertAt)->Name) > 0)
636 insertAt = &(Cache.PkgP + *insertAt)->NextPackage;
637 Pkg->NextPackage = *insertAt;
638 *insertAt = Package;
639 }
640 else // Group the Packages together
641 {
642 // this package is the new last package
643 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
644 Pkg->NextPackage = LastPkg->NextPackage;
645 LastPkg->NextPackage = Package;
646 }
647 Grp->LastPackage = Package;
648
649 // Set the name, arch and the ID
650 Pkg->Name = Grp->Name;
651 Pkg->Group = Grp.Index();
652 // arch 'all' is mapped to the native architecture
653 map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
654 if (unlikely(idxArch == 0))
655 return false;
656 Pkg->Arch = idxArch;
657 Pkg->ID = Cache.HeaderP->PackageCount++;
658
659 return true;
660 }
661 /*}}}*/
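// Illustrative note (not part of the original file): packages of one group are kept
// adjacent on the hash chain by the splicing above. For a hypothetical group "foo"
// with an amd64 and an i386 package, the links set up in NewPackage() look roughly
// like this (field names as used above):
//
//   Grp(foo): FirstPackage -> foo:amd64, LastPackage -> foo:i386
//   PkgHashTable[Hash("foo")] -> ... -> foo:amd64 --NextPackage--> foo:i386 --NextPackage--> (next chain entry)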
662 // CacheGenerator::AddImplicitDepends /*{{{*/
663 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
664 pkgCache::PkgIterator &P,
665 pkgCache::VerIterator &V)
666 {
667 // copy P.Arch() into a string here as a cache remap
668 // in NewDepends() later may alter the pointer location
669 string Arch = P.Arch() == NULL ? "" : P.Arch();
670 map_ptrloc *OldDepLast = NULL;
671 /* MultiArch handling introduces a lot of implicit Dependencies:
672 - MultiArch: same → Co-Installable if they have the same version
673 - All others conflict with all other group members */
674 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
675 pkgCache::PkgIterator D = G.PackageList();
676 Dynamic<pkgCache::PkgIterator> DynD(D);
677 map_ptrloc const VerStrIdx = V->VerStr;
678 for (; D.end() != true; D = G.NextPkg(D))
679 {
680 if (Arch == D.Arch() || D->VersionList == 0)
681 continue;
682 /* We allow only one installed arch at a time
683 per group, therefore each group member conflicts
684 with all other group members */
685 if (coInstall == true)
686 {
687 // Replaces: ${self}:other ( << ${binary:Version})
688 NewDepends(D, V, VerStrIdx,
689 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
690 OldDepLast);
691 // Breaks: ${self}:other (!= ${binary:Version})
692 NewDepends(D, V, VerStrIdx,
693 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
694 OldDepLast);
695 } else {
696 // Conflicts: ${self}:other
697 NewDepends(D, V, 0,
698 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
699 OldDepLast);
700 }
701 }
702 return true;
703 }
704 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
705 pkgCache::PkgIterator &D)
706 {
707 /* MultiArch handling introduces a lot of implicit Dependencies:
708 - MultiArch: same → Co-Installable if they have the same version
709 - All others conflict with all other group members */
710 map_ptrloc *OldDepLast = NULL;
711 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
712 if (coInstall == true)
713 {
714 map_ptrloc const VerStrIdx = V->VerStr;
715 // Replaces: ${self}:other ( << ${binary:Version})
716 NewDepends(D, V, VerStrIdx,
717 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
718 OldDepLast);
719 // Breaks: ${self}:other (!= ${binary:Version})
720 NewDepends(D, V, VerStrIdx,
721 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
722 OldDepLast);
723 } else {
724 // Conflicts: ${self}:other
725 NewDepends(D, V, 0,
726 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
727 OldDepLast);
728 }
729 return true;
730 }
731
732 /*}}}*/
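// Illustrative sketch (not part of the original file): for a made-up package "libfoo"
// at version 1.0 present as libfoo:amd64 and libfoo:i386, the calls above generate
// roughly the following implicit relations on the amd64 version:
//
//   Multi-Arch: same   ->  Replaces: libfoo:i386 (<< 1.0)
//                          Breaks:   libfoo:i386 (!= 1.0)
//   everything else    ->  Conflicts: libfoo:i386
//
// i.e. "Multi-Arch: same" packages are co-installable only at identical versions,
// while all other packages allow only one architecture per group at a time.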
733 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
734 // ---------------------------------------------------------------------
735 /* */
736 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
737 ListParser &List)
738 {
739 if (CurrentFile == 0)
740 return true;
741
742 // Get a structure
743 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
744 if (VerFile == 0)
745 return false;
746
747 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
748 VF->File = CurrentFile - Cache.PkgFileP;
749
750 // Link it to the end of the list
751 map_ptrloc *Last = &Ver->FileList;
752 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
753 Last = &V->NextFile;
754 VF->NextFile = *Last;
755 *Last = VF.Index();
756
757 VF->Offset = List.Offset();
758 VF->Size = List.Size();
759 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
760 Cache.HeaderP->MaxVerFileSize = VF->Size;
761 Cache.HeaderP->VerFileCount++;
762
763 return true;
764 }
765 /*}}}*/
766 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
767 // ---------------------------------------------------------------------
768 /* This puts a version structure in the linked list */
769 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
770 const string &VerStr,
771 map_ptrloc const ParentPkg,
772 unsigned long const Hash,
773 unsigned long Next)
774 {
775 // Get a structure
776 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
777 if (Version == 0)
778 return 0;
779
780 // Fill it in
781 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
782 //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
783 Ver->NextVer = Next;
784 Ver->ParentPkg = ParentPkg;
785 Ver->Hash = Hash;
786 Ver->ID = Cache.HeaderP->VersionCount++;
787
788 // try to find the version string in the group for reuse
789 pkgCache::PkgIterator Pkg = Ver.ParentPkg();
790 pkgCache::GrpIterator Grp = Pkg.Group();
791 if (Pkg.end() == false && Grp.end() == false)
792 {
793 for (pkgCache::PkgIterator P = Grp.PackageList(); P.end() == false; P = Grp.NextPkg(P))
794 {
795 if (Pkg == P)
796 continue;
797 for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; ++V)
798 {
799 int const cmp = strcmp(V.VerStr(), VerStr.c_str());
800 if (cmp == 0)
801 {
802 Ver->VerStr = V->VerStr;
803 return Version;
804 }
805 else if (cmp < 0)
806 break;
807 }
808 }
809 }
810 // haven't found the version string, so create it
811 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
812 if (unlikely(idxVerStr == 0))
813 return 0;
814 Ver->VerStr = idxVerStr;
815 return Version;
816 }
817 /*}}}*/
818 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
819 // ---------------------------------------------------------------------
820 /* */
821 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
822 ListParser &List)
823 {
824 if (CurrentFile == 0)
825 return true;
826
827 // Get a structure
828 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
829 if (DescFile == 0)
830 return false;
831
832 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
833 DF->File = CurrentFile - Cache.PkgFileP;
834
835 // Link it to the end of the list
836 map_ptrloc *Last = &Desc->FileList;
837 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
838 Last = &D->NextFile;
839
840 DF->NextFile = *Last;
841 *Last = DF.Index();
842
843 DF->Offset = List.Offset();
844 DF->Size = List.Size();
845 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
846 Cache.HeaderP->MaxDescFileSize = DF->Size;
847 Cache.HeaderP->DescFileCount++;
848
849 return true;
850 }
851 /*}}}*/
852 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
853 // ---------------------------------------------------------------------
854 /* This puts a description structure in the linked list */
855 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
856 const string &Lang,
857 const MD5SumValue &md5sum,
858 map_ptrloc idxmd5str)
859 {
860 // Get a structure
861 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
862 if (Description == 0)
863 return 0;
864
865 // Fill it in
866 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
867 Desc->ID = Cache.HeaderP->DescriptionCount++;
868 map_ptrloc const idxlanguage_code = WriteUniqString(Lang);
869 if (unlikely(idxlanguage_code == 0))
870 return 0;
871 Desc->language_code = idxlanguage_code;
872
873 if (idxmd5str != 0)
874 Desc->md5sum = idxmd5str;
875 else
876 {
877 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
878 if (unlikely(idxmd5sum == 0))
879 return 0;
880 Desc->md5sum = idxmd5sum;
881 }
882
883 return Description;
884 }
885 /*}}}*/
886 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
887 // ---------------------------------------------------------------------
888 /* This creates a dependency element in the tree. It is linked to the
889 version and to the package that it is pointing to. */
890 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
891 pkgCache::VerIterator &Ver,
892 string const &Version,
893 unsigned int const &Op,
894 unsigned int const &Type,
895 map_ptrloc* &OldDepLast)
896 {
897 map_ptrloc index = 0;
898 if (Version.empty() == false)
899 {
900 int const CmpOp = Op & 0x0F;
901 // =-deps are used (79:1) for lockstep on same-source packages (e.g. data-packages)
902 if (CmpOp == pkgCache::Dep::Equals && strcmp(Version.c_str(), Ver.VerStr()) == 0)
903 index = Ver->VerStr;
904
905 if (index == 0)
906 {
907 void const * const oldMap = Map.Data();
908 index = WriteStringInMap(Version);
909 if (unlikely(index == 0))
910 return false;
911 if (OldDepLast != 0 && oldMap != Map.Data())
912 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
913 }
914 }
915 return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast);
916 }
917 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
918 pkgCache::VerIterator &Ver,
919 map_ptrloc const Version,
920 unsigned int const &Op,
921 unsigned int const &Type,
922 map_ptrloc* &OldDepLast)
923 {
924 void const * const oldMap = Map.Data();
925 // Get a structure
926 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
927 if (unlikely(Dependency == 0))
928 return false;
929
930 // Fill it in
931 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
932 Dynamic<pkgCache::DepIterator> DynDep(Dep);
933 Dep->ParentVer = Ver.Index();
934 Dep->Type = Type;
935 Dep->CompareOp = Op;
936 Dep->Version = Version;
937 Dep->ID = Cache.HeaderP->DependsCount++;
938
939 // Link it to the package
940 Dep->Package = Pkg.Index();
941 Dep->NextRevDepends = Pkg->RevDepends;
942 Pkg->RevDepends = Dep.Index();
943
944 // Do we know where to link the Dependency to?
945 if (OldDepLast == NULL)
946 {
947 OldDepLast = &Ver->DependsList;
948 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
949 OldDepLast = &D->NextDepends;
950 } else if (oldMap != Map.Data())
951 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
952
953 Dep->NextDepends = *OldDepLast;
954 *OldDepLast = Dep.Index();
955 OldDepLast = &Dep->NextDepends;
956
957 return true;
958 }
959 /*}}}*/
960 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
961 // ---------------------------------------------------------------------
962 /* This creates a Group and the Package to link this dependency to if
963 needed and also handles the caching of the old endpoint */
964 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
965 const string &PackageName,
966 const string &Arch,
967 const string &Version,
968 unsigned int Op,
969 unsigned int Type)
970 {
971 pkgCache::GrpIterator Grp;
972 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
973 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
974 return false;
975
976 // Locate the target package
977 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
978 // we don't create 'none' packages and their dependencies if we can avoid it …
979 if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
980 return true;
981 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
982 if (Pkg.end() == true) {
983 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
984 return false;
985 }
986
987 // Is it a file dependency?
988 if (unlikely(PackageName[0] == '/'))
989 FoundFileDeps = true;
990
991 /* Caching the old end point speeds up generation substantially */
992 if (OldDepVer != Ver) {
993 OldDepLast = NULL;
994 OldDepVer = Ver;
995 }
996
997 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
998 }
999 /*}}}*/
1000 // ListParser::NewProvides - Create a Provides element /*{{{*/
1001 // ---------------------------------------------------------------------
1002 /* */
1003 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
1004 const string &PkgName,
1005 const string &PkgArch,
1006 const string &Version)
1007 {
1008 pkgCache &Cache = Owner->Cache;
1009
1010 // We do not add self referencing provides
1011 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
1012 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
1013 return true;
1014
1015 // Get a structure
1016 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
1017 if (unlikely(Provides == 0))
1018 return false;
1019 Cache.HeaderP->ProvidesCount++;
1020
1021 // Fill it in
1022 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
1023 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
1024 Prv->Version = Ver.Index();
1025 Prv->NextPkgProv = Ver->ProvidesList;
1026 Ver->ProvidesList = Prv.Index();
1027 if (Version.empty() == false) {
1028 map_ptrloc const idxProvideVersion = WriteString(Version);
1029 Prv->ProvideVersion = idxProvideVersion;
1030 if (unlikely(idxProvideVersion == 0))
1031 return false;
1032 }
1033
1034 // Locate the target package
1035 pkgCache::PkgIterator Pkg;
1036 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
1037 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
1038 return false;
1039
1040 // Link it to the package
1041 Prv->ParentPkg = Pkg.Index();
1042 Prv->NextProvides = Pkg->ProvidesList;
1043 Pkg->ProvidesList = Prv.Index();
1044
1045 return true;
1046 }
1047 /*}}}*/
1048 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
1049 // ---------------------------------------------------------------------
1050 /* This is used to select which file is to be associated with all newly
1051 added versions. The caller is responsible for setting the IMS fields. */
1052 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
1053 const pkgIndexFile &Index,
1054 unsigned long Flags)
1055 {
1056 // Get some space for the structure
1057 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
1058 if (unlikely(idxFile == 0))
1059 return false;
1060 CurrentFile = Cache.PkgFileP + idxFile;
1061
1062 // Fill it in
1063 map_ptrloc const idxFileName = WriteStringInMap(File);
1064 map_ptrloc const idxSite = WriteUniqString(Site);
1065 if (unlikely(idxFileName == 0 || idxSite == 0))
1066 return false;
1067 CurrentFile->FileName = idxFileName;
1068 CurrentFile->Site = idxSite;
1069 CurrentFile->NextFile = Cache.HeaderP->FileList;
1070 CurrentFile->Flags = Flags;
1071 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
1072 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
1073 if (unlikely(idxIndexType == 0))
1074 return false;
1075 CurrentFile->IndexType = idxIndexType;
1076 PkgFileName = File;
1077 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
1078 Cache.HeaderP->PackageFileCount++;
1079
1080 if (Progress != 0)
1081 Progress->SubProgress(Index.Size());
1082 return true;
1083 }
1084 /*}}}*/
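// Illustrative sketch (not part of the original file): an index file is merged by
// first selecting it as the current file and then feeding its parser to MergeList().
// The parser type below (debListParser) is how the deb system does it; Gen,
// PackagesFileName, Site and Index are placeholders and the exact constructor
// arguments are assumptions.
#if 0
FileFd Pkgs(PackagesFileName, FileFd::ReadOnly);
debListParser Parser(&Pkgs);
if (Gen.SelectFile(PackagesFileName, Site, Index, 0) == false)
   return _error->Error("Problem with SelectFile %s", PackagesFileName.c_str());
if (Gen.MergeList(Parser) == false)
   return _error->Error("Problem with MergeList %s", PackagesFileName.c_str());
#endif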
1085 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
1086 // ---------------------------------------------------------------------
1087 /* This is used to create handles to strings. Given the same text it
1088 always returns the same number */
1089 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
1090 unsigned int Size)
1091 {
1092 /* We use a very small transient hash table here; this speeds up generation
1093 by a fair amount on slower machines */
1094 pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
1095 if (Bucket != 0 &&
1096 stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
1097 return Bucket->String;
1098
1099 // Search for an insertion point
1100 pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
1101 int Res = 1;
1102 map_ptrloc *Last = &Cache.HeaderP->StringList;
1103 for (; I != Cache.StringItemP; Last = &I->NextItem,
1104 I = Cache.StringItemP + I->NextItem)
1105 {
1106 Res = stringcmp(S,S+Size,Cache.StrP + I->String);
1107 if (Res >= 0)
1108 break;
1109 }
1110
1111 // Match
1112 if (Res == 0)
1113 {
1114 Bucket = I;
1115 return I->String;
1116 }
1117
1118 // Get a structure
1119 void const * const oldMap = Map.Data();
1120 map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
1121 if (Item == 0)
1122 return 0;
1123
1124 map_ptrloc const idxString = WriteStringInMap(S,Size);
1125 if (unlikely(idxString == 0))
1126 return 0;
1127 if (oldMap != Map.Data()) {
1128 Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
1129 I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
1130 }
1131 *Last = Item;
1132
1133 // Fill in the structure
1134 pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
1135 ItemP->NextItem = I - Cache.StringItemP;
1136 ItemP->String = idxString;
1137
1138 Bucket = ItemP;
1139 return ItemP->String;
1140 }
1141 /*}}}*/
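// Illustrative sketch (not part of the original file): WriteUniqString is used for
// strings that repeat a lot (architectures, sites, language codes). Called from
// inside the generator, two calls with the same text yield the same offset:
#if 0
map_ptrloc const a = WriteUniqString("amd64");
map_ptrloc const b = WriteUniqString("amd64");
// a == b - both refer to the single pkgCache::StringItem created above
#endif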
1142 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1143 // ---------------------------------------------------------------------
1144 /* This just verifies that each file in the list of index files exists,
1145 has attributes matching the cache, and that the cache does not have
1146 any extra files. */
1147 static bool CheckValidity(const string &CacheFile,
1148 pkgSourceList &List,
1149 FileIterator Start,
1150 FileIterator End,
1151 MMap **OutMap = 0)
1152 {
1153 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1154 // No file, certainly invalid
1155 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1156 {
1157 if (Debug == true)
1158 std::clog << "CacheFile doesn't exist" << std::endl;
1159 return false;
1160 }
1161
1162 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1163 {
1164 if (Debug == true)
1165 std::clog << "sources.list is newer than the cache" << std::endl;
1166 return false;
1167 }
1168
1169 // Map it
1170 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1171 SPtr<MMap> Map = new MMap(CacheF,0);
1172 pkgCache Cache(Map);
1173 if (_error->PendingError() == true || Map->Size() == 0)
1174 {
1175 if (Debug == true)
1176 std::clog << "Errors are pending or Map is empty" << std::endl;
1177 _error->Discard();
1178 return false;
1179 }
1180
1181 /* Now we check every index file, see if it is in the cache,
1182 verify the IMS data and check that it is on the disk too.. */
1183 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1184 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1185 for (; Start != End; ++Start)
1186 {
1187 if (Debug == true)
1188 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1189 if ((*Start)->HasPackages() == false)
1190 {
1191 if (Debug == true)
1192 std::clog << "Has NO packages" << std::endl;
1193 continue;
1194 }
1195
1196 if ((*Start)->Exists() == false)
1197 {
1198 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1199 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1200 (*Start)->Describe().c_str());
1201 #endif
1202 if (Debug == true)
1203 std::clog << "file doesn't exist" << std::endl;
1204 continue;
1205 }
1206
1207 // FindInCache is also expected to do an IMS check.
1208 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1209 if (File.end() == true)
1210 {
1211 if (Debug == true)
1212 std::clog << "FindInCache returned end-Pointer" << std::endl;
1213 return false;
1214 }
1215
1216 Visited[File->ID] = true;
1217 if (Debug == true)
1218 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1219 }
1220
1221 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1222 if (Visited[I] == false)
1223 {
1224 if (Debug == true)
1225 std::clog << "File with ID " << I << " wasn't visited" << std::endl;
1226 return false;
1227 }
1228
1229 if (_error->PendingError() == true)
1230 {
1231 if (Debug == true)
1232 {
1233 std::clog << "Validity failed because of pending errors:" << std::endl;
1234 _error->DumpErrors();
1235 }
1236 _error->Discard();
1237 return false;
1238 }
1239
1240 if (OutMap != 0)
1241 *OutMap = Map.UnGuard();
1242 return true;
1243 }
1244 /*}}}*/
1245 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1246 // ---------------------------------------------------------------------
1247 /* Size is kind of an abstract notion that is only used for the progress
1248 meter */
1249 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1250 {
1251 unsigned long TotalSize = 0;
1252 for (; Start != End; ++Start)
1253 {
1254 if ((*Start)->HasPackages() == false)
1255 continue;
1256 TotalSize += (*Start)->Size();
1257 }
1258 return TotalSize;
1259 }
1260 /*}}}*/
1261 // BuildCache - Merge the list of index files into the cache /*{{{*/
1262 // ---------------------------------------------------------------------
1263 /* */
1264 static bool BuildCache(pkgCacheGenerator &Gen,
1265 OpProgress *Progress,
1266 unsigned long &CurrentSize,unsigned long TotalSize,
1267 FileIterator Start, FileIterator End)
1268 {
1269 FileIterator I;
1270 for (I = Start; I != End; ++I)
1271 {
1272 if ((*I)->HasPackages() == false)
1273 continue;
1274
1275 if ((*I)->Exists() == false)
1276 continue;
1277
1278 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1279 {
1280 _error->Warning("Duplicate sources.list entry %s",
1281 (*I)->Describe().c_str());
1282 continue;
1283 }
1284
1285 unsigned long Size = (*I)->Size();
1286 if (Progress != NULL)
1287 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1288 CurrentSize += Size;
1289
1290 if ((*I)->Merge(Gen,Progress) == false)
1291 return false;
1292 }
1293
1294 if (Gen.HasFileDeps() == true)
1295 {
1296 if (Progress != NULL)
1297 Progress->Done();
1298 TotalSize = ComputeSize(Start, End);
1299 CurrentSize = 0;
1300 for (I = Start; I != End; ++I)
1301 {
1302 unsigned long Size = (*I)->Size();
1303 if (Progress != NULL)
1304 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1305 CurrentSize += Size;
1306 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1307 return false;
1308 }
1309 }
1310
1311 return true;
1312 }
1313 /*}}}*/
1314 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1315 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1316 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1317 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1318 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1319 Flags |= MMap::Moveable;
1320 if (_config->FindB("APT::Cache-Fallback", false) == true)
1321 Flags |= MMap::Fallback;
1322 if (CacheF != NULL)
1323 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1324 else
1325 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1326 }
1327 /*}}}*/
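// Illustrative note (not part of the original file): the configuration knobs read
// above can be set from apt.conf to tune the cache mmap; the values below are
// examples only, the defaults are the ones in the code above:
//
//   APT::Cache-Start "50331648";    // initial allocation passed as MapStart
//   APT::Cache-Grow "2097152";      // grow step passed as MapGrow
//   APT::Cache-Limit "0";           // upper bound passed as MapLimit (default 0)
//   APT::Cache-Fallback "false";    // if true, adds MMap::Fallback to the flags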
1328 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1329 // ---------------------------------------------------------------------
1330 /* This makes sure that the status cache (the cache that has all
1331 index files from the sources list and all local ones) is ready
1332 to be mmapped. If OutMap is not zero then a MMap object representing
1333 the cache will be stored there. This is pretty much mandatory if you
1334 are using AllowMem. AllowMem lets the function be run as non-root
1335 where it builds the cache 'fast' into a memory buffer. */
1336 __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1337 MMap **OutMap, bool AllowMem)
1338 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
1339 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
1340 MMap **OutMap,bool AllowMem)
1341 {
1342 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1343
1344 std::vector<pkgIndexFile *> Files;
1345 for (std::vector<metaIndex *>::const_iterator i = List.begin();
1346 i != List.end();
1347 ++i)
1348 {
1349 std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
1350 for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
1351 j != Indexes->end();
1352 ++j)
1353 Files.push_back (*j);
1354 }
1355
1356 unsigned long const EndOfSource = Files.size();
1357 if (_system->AddStatusFiles(Files) == false)
1358 return false;
1359
1360 // Decide if we can write to the files..
1361 string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
1362 string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
1363
1364 // ensure the cache directory exists
1365 if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
1366 {
1367 string dir = _config->FindDir("Dir::Cache");
1368 size_t const len = dir.size();
1369 if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
1370 dir = dir.substr(0, len - 5);
1371 if (CacheFile.empty() == false)
1372 CreateDirectory(dir, flNotFile(CacheFile));
1373 if (SrcCacheFile.empty() == false)
1374 CreateDirectory(dir, flNotFile(SrcCacheFile));
1375 }
1376
1377 // Decide if we can write to the cache
1378 bool Writeable = false;
1379 if (CacheFile.empty() == false)
1380 Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
1381 else
1382 if (SrcCacheFile.empty() == false)
1383 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
1384 if (Debug == true)
1385 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
1386
1387 if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
1388 return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
1389
1390 if (Progress != NULL)
1391 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1392
1393 // Cache is OK, Fin.
1394 if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
1395 {
1396 if (Progress != NULL)
1397 Progress->OverallProgress(1,1,1,_("Reading package lists"));
1398 if (Debug == true)
1399 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
1400 return true;
1401 }
1402 else if (Debug == true)
1403 std::clog << "pkgcache.bin is NOT valid" << std::endl;
1404
1405 /* At this point we know we need to reconstruct the package cache,
1406 begin. */
1407 SPtr<FileFd> CacheF;
1408 SPtr<DynamicMMap> Map;
1409 if (Writeable == true && CacheFile.empty() == false)
1410 {
1411 _error->PushToStack();
1412 unlink(CacheFile.c_str());
1413 CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
1414 fchmod(CacheF->Fd(),0644);
1415 Map = CreateDynamicMMap(CacheF, MMap::Public);
1416 if (_error->PendingError() == true)
1417 {
1418 delete CacheF.UnGuard();
1419 delete Map.UnGuard();
1420 if (Debug == true)
1421 std::clog << "Open filebased MMap FAILED" << std::endl;
1422 Writeable = false;
1423 if (AllowMem == false)
1424 {
1425 _error->MergeWithStack();
1426 return false;
1427 }
1428 _error->RevertToStack();
1429 }
1430 else
1431 {
1432 _error->MergeWithStack();
1433 if (Debug == true)
1434 std::clog << "Open filebased MMap" << std::endl;
1435 }
1436 }
1437 if (Writeable == false || CacheFile.empty() == true)
1438 {
1439 // Just build it in memory..
1440 Map = CreateDynamicMMap(NULL);
1441 if (Debug == true)
1442 std::clog << "Open memory Map (not filebased)" << std::endl;
1443 }
1444
1445 // Let's try the source cache.
1446 unsigned long CurrentSize = 0;
1447 unsigned long TotalSize = 0;
1448 if (CheckValidity(SrcCacheFile, List, Files.begin(),
1449 Files.begin()+EndOfSource) == true)
1450 {
1451 if (Debug == true)
1452 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
1453 // Preload the map with the source cache
1454 FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
1455 unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
1456 if ((alloc == 0 && _error->PendingError())
1457 || SCacheF.Read((unsigned char *)Map->Data() + alloc,
1458 SCacheF.Size()) == false)
1459 return false;
1460
1461 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1462
1463 // Build the status cache
1464 pkgCacheGenerator Gen(Map.Get(),Progress);
1465 if (_error->PendingError() == true)
1466 return false;
1467 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1468 Files.begin()+EndOfSource,Files.end()) == false)
1469 return false;
1470 }
1471 else
1472 {
1473 if (Debug == true)
1474 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
1475 TotalSize = ComputeSize(Files.begin(),Files.end());
1476
1477 // Build the source cache
1478 pkgCacheGenerator Gen(Map.Get(),Progress);
1479 if (_error->PendingError() == true)
1480 return false;
1481 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1482 Files.begin(),Files.begin()+EndOfSource) == false)
1483 return false;
1484
1485 // Write it back
1486 if (Writeable == true && SrcCacheFile.empty() == false)
1487 {
1488 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
1489 if (_error->PendingError() == true)
1490 return false;
1491
1492 fchmod(SCacheF.Fd(),0644);
1493
1494 // Write out the main data
1495 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
1496 return _error->Error(_("IO Error saving source cache"));
1497 SCacheF.Sync();
1498
1499 // Write out the proper header
1500 Gen.GetCache().HeaderP->Dirty = false;
1501 if (SCacheF.Seek(0) == false ||
1502 SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
1503 return _error->Error(_("IO Error saving source cache"));
1504 Gen.GetCache().HeaderP->Dirty = true;
1505 SCacheF.Sync();
1506 }
1507
1508 // Build the status cache
1509 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1510 Files.begin()+EndOfSource,Files.end()) == false)
1511 return false;
1512 }
1513 if (Debug == true)
1514 std::clog << "Caches are ready for shipping" << std::endl;
1515
1516 if (_error->PendingError() == true)
1517 return false;
1518 if (OutMap != 0)
1519 {
1520 if (CacheF != 0)
1521 {
1522 delete Map.UnGuard();
1523 *OutMap = new MMap(*CacheF,0);
1524 }
1525 else
1526 {
1527 *OutMap = Map.UnGuard();
1528 }
1529 }
1530
1531 return true;
1532 }
1533 /*}}}*/
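// Illustrative sketch (not part of the original file): a typical caller builds the
// status cache from the configured sources and then maps it, roughly along these
// lines (error handling trimmed, names as assumptions):
#if 0
pkgSourceList List;
if (List.ReadMainList() == false)
   return false;
OpTextProgress Prog(*_config);
MMap *OutMap = NULL;
if (pkgCacheGenerator::MakeStatusCache(List, &Prog, &OutMap, true) == false)
   return false;
pkgCache Cache(OutMap);   // the returned map is owned by the caller from here on
#endif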
1534 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1535 // ---------------------------------------------------------------------
1536 /* */
1537 __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1538 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1539 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1540 {
1541 std::vector<pkgIndexFile *> Files;
1542 unsigned long EndOfSource = Files.size();
1543 if (_system->AddStatusFiles(Files) == false)
1544 return false;
1545
1546 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1547 unsigned long CurrentSize = 0;
1548 unsigned long TotalSize = 0;
1549
1550 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1551
1552 // Build the status cache
1553 if (Progress != NULL)
1554 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1555 pkgCacheGenerator Gen(Map.Get(),Progress);
1556 if (_error->PendingError() == true)
1557 return false;
1558 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1559 Files.begin()+EndOfSource,Files.end()) == false)
1560 return false;
1561
1562 if (_error->PendingError() == true)
1563 return false;
1564 *OutMap = Map.UnGuard();
1565
1566 return true;
1567 }
1568 /*}}}*/
1569 // IsDuplicateDescription /*{{{*/
1570 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1571 MD5SumValue const &CurMd5, std::string const &CurLang)
1572 {
1573 // Descriptions in the same link-list have all the same md5
1574 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1575 return false;
1576 for (; Desc.end() == false; ++Desc)
1577 if (Desc.LanguageCode() == CurLang)
1578 return true;
1579 return false;
1580 }
1581 /*}}}*/
1582 // CacheGenerator::FinishCache /*{{{*/
1583 bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
1584 {
1585 return true;
1586 }
1587 /*}}}*/