// Source file: apt-pkg/pkgcachegen.cc (from apt.git, git.saurik.com mirror)
// Branch state: merge of remote-tracking branch 'upstream/debian/sid' into debian/sid
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/strutl.h>
22 #include <apt-pkg/sptr.h>
23 #include <apt-pkg/pkgsystem.h>
24 #include <apt-pkg/macros.h>
25 #include <apt-pkg/metaindex.h>
26 #include <apt-pkg/fileutl.h>
27 #include <apt-pkg/hashsum_template.h>
28 #include <apt-pkg/indexfile.h>
29 #include <apt-pkg/md5.h>
30 #include <apt-pkg/mmap.h>
31 #include <apt-pkg/pkgcache.h>
32 #include <apt-pkg/cacheiterators.h>
33
34 #include <stddef.h>
35 #include <string.h>
36 #include <iostream>
37 #include <string>
38 #include <vector>
39 #include <sys/stat.h>
40 #include <unistd.h>
41
42 #include <apti18n.h>
43 /*}}}*/
// Shorthand for iterating over the index files handed to the generator.
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
// Per-iterator-type registry of live iterators; pkgCacheGenerator::ReMap
// walks these lists to rebase every registered iterator when the
// underlying mmap grows and therefore may move.
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

// Forward declaration (defined later in this file): does Desc's list
// already contain a description with this md5 and language?
static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
			MD5SumValue const &CurMd5, std::string const &CurLang);

using std::string;
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that is written to the disk.
   An empty map is initialized as a fresh cache (header, version system
   label, architecture); a non-empty map is re-attached to an existing
   cache file instead. */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
		    Map(*pMap), Cache(pMap,false), Progress(Prog),
		    FoundFileDeps(0)
{
   // No index file is being merged yet, no uniq-string buckets filled
   CurrentFile = 0;
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      return;

   if (Map.Size() == 0)
   {
      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
	 return;

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      // Starting header
      *Cache.HeaderP = pkgCache::Header();
      map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->VerSysName = idxVerSysName;
      // this pointer is set in ReMap, but we need it now for WriteUniqString
      Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
      map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
      Cache.HeaderP->Architecture = idxArchitecture;
      // a zero offset means the string write failed
      if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
	 return;
      Cache.ReMap();
   }
   else
   {
      // Map directly from the existing file
      Cache.ReMap();
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
      {
	 _error->Error(_("Cache has an incompatible versioning system"));
	 return;
      }
   }

   // Mark the cache dirty on disk; the destructor clears this flag only
   // after a successful full sync, so a crash leaves it visibly dirty.
   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
									/*}}}*/
// CacheGenerator::~pkgCacheGenerator - Destructor			/*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   avoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
{
   // If an error is pending or the full sync fails, leave the dirty
   // flag set so a later run knows the on-disk cache is unusable
   if (_error->PendingError() == true)
      return;
   if (Map.Sync() == false)
      return;

   // All data reached the disk: record the final size, clear the dirty
   // marker and sync just the header to make that state durable
   Cache.HeaderP->Dirty = false;
   Cache.HeaderP->CacheFileSize = Map.Size();
   Map.Sync(0,sizeof(pkgCache::Header));
}
									/*}}}*/
// CacheGenerator::ReMap - rebase all raw pointers after a map move	/*{{{*/
// The DynamicMMap can relocate when it grows; every raw pointer into the
// old mapping must then be shifted by the base-address delta. This
// patches the generator's own pointers (CurrentFile, UniqHash) and every
// iterator registered through Dynamic<>.
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {
   // nothing to do if the mapping did not actually move
   if (oldMap == newMap)
      return;

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;

   // refresh the cache's own structure pointers (no consistency check)
   Cache.ReMap(false);

   // shift raw pointers by the typed base-address difference
   CurrentFile += (pkgCache::PackageFile const * const) newMap - (pkgCache::PackageFile const * const) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
	 UniqHash[i] += (pkgCache::StringItem const * const) newMap - (pkgCache::StringItem const * const) oldMap;

   // rebase every live iterator registered per iterator type
   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
	i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
	i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
}									/*}}}*/
155 // CacheGenerator::WriteStringInMap /*{{{*/
156 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
157 const unsigned long &Len) {
158 void const * const oldMap = Map.Data();
159 map_ptrloc const index = Map.WriteString(String, Len);
160 if (index != 0)
161 ReMap(oldMap, Map.Data());
162 return index;
163 }
164 /*}}}*/
165 // CacheGenerator::WriteStringInMap /*{{{*/
166 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
167 void const * const oldMap = Map.Data();
168 map_ptrloc const index = Map.WriteString(String);
169 if (index != 0)
170 ReMap(oldMap, Map.Data());
171 return index;
172 }
173 /*}}}*/
174 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
175 void const * const oldMap = Map.Data();
176 map_ptrloc const index = Map.Allocate(size);
177 if (index != 0)
178 ReMap(oldMap, Map.Data());
179 return index;
180 }
181 /*}}}*/
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine.
   If OutVer is non-NULL only the first record is merged and the created
   version is returned through it. */
bool pkgCacheGenerator::MergeList(ListParser &List,
				  pkgCache::VerIterator *OutVer)
{
   List.Owner = this;

   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string const PackageName = List.Package();
      if (PackageName.empty() == true)
	 return false;

      // throttle progress updates to every 100 stanzas
      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      string Arch = List.Architecture();
      string const Version = List.Version();
      // stanzas with neither version nor architecture carry only
      // descriptions (Translation- files): merge them per group
      if (Version.empty() == true && Arch.empty() == true)
      {
	 // package descriptions
	 if (MergeListGroup(List, PackageName) == false)
	    return false;
	 continue;
      }

      if (Arch.empty() == true)
      {
	 // use the pseudo arch 'none' for arch-less packages
	 Arch = "none";
	 /* We might built a SingleArchCache here, which we don't want to blow up
	    just for these :none packages to a proper MultiArchCache, so just ensure
	    that we have always a native package structure first for SingleArch */
	 pkgCache::PkgIterator NP;
	 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
	 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
	    // TRANSLATOR: The first placeholder is a package name,
	    // the other two should be copied verbatim as they include debug info
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 PackageName.c_str(), "NewPackage", 0);
      }

      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (NewPackage(Pkg, PackageName, Arch) == false)
	 // TRANSLATOR: The first placeholder is a package name,
	 // the other two should be copied verbatim as they include debug info
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "NewPackage", 1);


      // a versionless stanza only updates package-level data, otherwise
      // merge the full version record
      if (Version.empty() == true)
      {
	 if (MergeListPackage(List, Pkg) == false)
	    return false;
      }
      else
      {
	 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
	    return false;
      }

      // single-record mode: hand the version back after the first stanza
      if (OutVer != 0)
      {
	 FoundFileDeps |= List.HasFileDeps();
	 return true;
      }
   }

   // the ID fields are fixed-width; refuse to continue once a counter
   // could no longer be represented
   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
			     "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
			     "this APT is capable of."));

   FoundFileDeps |= List.HasFileDeps();
   return true;
}
272 // CacheGenerator::MergeListGroup /*{{{*/
273 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
274 {
275 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
276 // a group has no data on it's own, only packages have it but these
277 // stanzas like this come from Translation- files to add descriptions,
278 // but without a version we don't need a description for it…
279 if (Grp.end() == true)
280 return true;
281 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
282
283 pkgCache::PkgIterator Pkg;
284 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
285 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
286 if (MergeListPackage(List, Pkg) == false)
287 return false;
288
289 return true;
290 }
291 /*}}}*/
// CacheGenerator::MergeListPackage					/*{{{*/
// Merge a versionless stanza into an existing package: update package
// data and attach the stanza's description to the matching version.
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
{
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   // nothing further to do for stanzas without a (hashed) description
   if (CurMd5.Value().empty() == true || List.Description().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
   {
      pkgCache::DescIterator VerDesc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
	 continue;

      // don't add a new description if we have one for the given
      // md5 && language
      if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
	 continue;

      pkgCache::DescIterator Desc;
      Dynamic<pkgCache::DescIterator> DynDesc(Desc);

      // share the md5 string of the already-stored description
      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
      if (unlikely(descindex == 0 && _error->PendingError()))
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewDescription", 1);

      Desc->ParentPkg = Pkg.Index();

      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
      for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
      map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
      *LastNextDesc = descindex;

      if (NewFileDesc(Desc,List) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
      break;
   }

   return true;
}
									/*}}}*/
// CacheGenerator::MergeListVersion					/*{{{*/
// Merge a full versioned stanza into the package Pkg: find or create
// the version structure, wire up implicit Multi-Arch dependencies and
// record (or share) the description. OutVer, when non-NULL, receives
// the version and makes this a single-record merge.
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
					 std::string const &Version, pkgCache::VerIterator* &OutVer)
{
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   // insertion point into the (sorted) version list
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned short const Hash = List.VersionHash();
   if (Ver.end() == false)
   {
      /* We know the list is sorted so we use that fact in the search.
         Insertion of new versions is done with correct sorting */
      int Res = 1;
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
	 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
	 // Version is higher as current version - insert here
	 if (Res > 0)
	    break;
	 // Versionstrings are equal - is hash also equal?
	 if (Res == 0 && List.SameVersion(Hash, Ver) == true)
	    break;
	 // proceed with the next till we have either the right
	 // or we found another version (which will be lower)
      }

      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
	 if (List.UsePackage(Pkg,Ver) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "UsePackage", 2);

	 if (NewFileVer(Ver,List) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "NewFileVer", 1);

	 // Read only a single record and return
	 if (OutVer != 0)
	 {
	    *OutVer = Ver;
	    return true;
	 }

	 return true;
      }
   }

   // Add a new version
   map_ptrloc const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 1);

   // NewVersion may have grown & moved the map: rebase the raw insertion
   // pointer before writing through it
   if (oldMap != Map.Data())
	 LastVer += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
   *LastVer = verindex;

   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileVer", 2);

   pkgCache::GrpIterator Grp = Pkg.Group();
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   /* If it is the first version of this package we need to add implicit
      Multi-Arch dependencies to all other package versions in the group now -
      otherwise we just add them for this new version */
   if (Pkg.VersionList()->NextVer == 0)
   {
      pkgCache::PkgIterator P = Grp.PackageList();
      Dynamic<pkgCache::PkgIterator> DynP(P);
      for (; P.end() != true; P = Grp.NextPkg(P))
      {
	 if (P->ID == Pkg->ID)
	    continue;
	 pkgCache::VerIterator V = P.VersionList();
	 Dynamic<pkgCache::VerIterator> DynV(V);
	 for (; V.end() != true; ++V)
	    if (unlikely(AddImplicitDepends(V, Pkg) == false))
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    Pkg.Name(), "AddImplicitDepends", 1);
      }
      /* :none packages are packages without an architecture. They are forbidden by
	 debian-policy, so usually they will only be in (old) dpkg status files -
	 and dpkg will complain about them - and are pretty rare. We therefore do
	 usually not create conflicts while the parent is created, but only if a :none
	 package (= the target) appears. This creates incorrect dependencies on :none
	 for architecture-specific dependencies on the package we copy from, but we
	 will ignore this bug as architecture-specific dependencies are only allowed
	 in jessie and until then the :none packages should be extinct (hopefully).
	 In other words: This should work long enough to allow graceful removal of
	 these packages, it is not supposed to allow users to keep using them … */
      if (strcmp(Pkg.Arch(), "none") == 0)
      {
	 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
	 if (M.end() == false && Pkg != M)
	 {
	    pkgCache::DepIterator D = M.RevDependsList();
	    Dynamic<pkgCache::DepIterator> DynD(D);
	    for (; D.end() == false; ++D)
	    {
	       // only negative relations from other groups are copied
	       if ((D->Type != pkgCache::Dep::Conflicts &&
		    D->Type != pkgCache::Dep::DpkgBreaks &&
		    D->Type != pkgCache::Dep::Replaces) ||
		   D.ParentPkg().Group() == Grp)
		  continue;

	       map_ptrloc *OldDepLast = NULL;
	       pkgCache::VerIterator ConVersion = D.ParentVer();
	       Dynamic<pkgCache::VerIterator> DynV(ConVersion);
	       // duplicate the Conflicts/Breaks/Replaces for :none arch
	       NewDepends(Pkg, ConVersion, D->Version,
			  D->CompareOp, D->Type, OldDepLast);
	    }
	 }
      }
   }
   if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "AddImplicitDepends", 2);

   // Read only a single record and return
   if (OutVer != 0)
   {
      *OutVer = Ver;
      return true;
   }

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true || List.Description().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   for (pkgCache::PkgIterator P = Grp.PackageList();
	P.end() == false; P = Grp.NextPkg(P))
   {
      for (pkgCache::VerIterator V = P.VersionList();
	   V.end() == false; ++V)
      {
	 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
	    continue;
	 Ver->DescriptionList = V->DescriptionList;
	 return true;
      }
   }

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);

   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewDescription", 2);

   Desc->ParentPkg = Pkg.Index();
   Ver->DescriptionList = descindex;

   if (NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileDesc", 2);

   return true;
}
									/*}}}*/
									/*}}}*/
// CacheGenerator::MergeFileProvides - Merge file provides   		/*{{{*/
// ---------------------------------------------------------------------
/* If we found any file depends while parsing the main list we need to
   resolve them. Since it is undesired to load the entire list of files
   into the cache as virtual packages we do a two stage effort. MergeList
   identifies the file depends and this creates Provdies for them by
   re-parsing all the indexs. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
{
   List.Owner = this;

   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string PackageName = List.Package();
      if (PackageName.empty() == true)
	 return false;
      // stanzas without a version cannot match a cached version
      string Version = List.Version();
      if (Version.empty() == true)
	 continue;

      // the package must already exist from the first (MergeList) pass
      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (Pkg.end() == true)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
				PackageName.c_str(), "FindPkg", 1);
      // throttle progress updates to every 100 stanzas
      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      // locate the exact version (hash and string must both match)
      unsigned short Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      Dynamic<pkgCache::VerIterator> DynVer(Ver);
      for (; Ver.end() == false; ++Ver)
      {
	 if (List.SameVersion(Hash, Ver) == true && Version == Ver.VerStr())
	 {
	    if (List.CollectFileProvides(Cache,Ver) == false)
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    PackageName.c_str(), "CollectFileProvides", 1);
	    break;
	 }
      }

      // a missing version is unexpected but not fatal here
      if (Ver.end() == true)
	 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
   }

   return true;
}
									/*}}}*/
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table.
   Returns the existing group if the name is already known. */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
{
   // reuse an existing group of the same name
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)
      return true;

   // Get a structure
   map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))
      return false;

   // NOTE: WriteStringInMap may remap; Grp is kept valid through the
   // Dynamic<> registration done by our callers
   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   map_ptrloc const idxName = WriteStringInMap(Name);
   if (unlikely(idxName == 0))
      return false;
   Grp->Name = idxName;

   // Insert it into the hash table: the chain is kept sorted by
   // case-insensitive name so lookups can stop early
   unsigned long const Hash = Cache.Hash(Name);
   map_ptrloc *insertAt = &Cache.HeaderP->GrpHashTable[Hash];
   while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + *insertAt)->Name) > 0)
      insertAt = &(Cache.GrpP + *insertAt)->Next;
   Grp->Next = *insertAt;
   *insertAt = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
   return true;
}
									/*}}}*/
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table.
   The package is attached to its (possibly freshly created) group;
   an existing package of this name+arch is simply returned. */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
					const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))
      return false;

   // reuse an existing package for this architecture
   Pkg = Grp.FindPkg(Arch);
      if (Pkg.end() == false)
	 return true;

   // Get a structure
   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      return false;
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      Grp->FirstPackage = Package;
      // Insert it into the hash table: the chain is kept sorted by
      // case-insensitive name so lookups can stop early
      unsigned long const Hash = Cache.Hash(Name);
      map_ptrloc *insertAt = &Cache.HeaderP->PkgHashTable[Hash];
      while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.PkgP + *insertAt)->Name) > 0)
	 insertAt = &(Cache.PkgP + *insertAt)->NextPackage;
      Pkg->NextPackage = *insertAt;
      *insertAt = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // all is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))
      return false;
   Pkg->Arch = idxArch;
   Pkg->ID = Cache.HeaderP->PackageCount++;

   return true;
}
									/*}}}*/
// CacheGenerator::AddImplicitDepends					/*{{{*/
// Add the implicit Multi-Arch relations from every other member of the
// group G towards the new version V of package P.
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
					   pkgCache::PkgIterator &P,
					   pkgCache::VerIterator &V)
{
   // copy P.Arch() into a string here as a cache remap
   // in NewDepends() later may alter the pointer location
   string Arch = P.Arch() == NULL ? "" : P.Arch();
   map_ptrloc *OldDepLast = NULL;
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   pkgCache::PkgIterator D = G.PackageList();
   Dynamic<pkgCache::PkgIterator> DynD(D);
   // cache the map offset of the version string; offsets stay valid
   // across remaps, unlike raw pointers
   map_ptrloc const VerStrIdx = V->VerStr;
   for (; D.end() != true; D = G.NextPkg(D))
   {
      // skip ourselves and members without any version
      if (Arch == D.Arch() || D->VersionList == 0)
	 continue;
      /* We allow only one installed arch at the time
	 per group, therefore each group member conflicts
	 with all other group members */
      if (coInstall == true)
      {
	 // Replaces: ${self}:other ( << ${binary:Version})
	 NewDepends(D, V, VerStrIdx,
		    pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		    OldDepLast);
	 // Breaks: ${self}:other (!= ${binary:Version})
	 NewDepends(D, V, VerStrIdx,
		    pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		    OldDepLast);
      } else {
	 // Conflicts: ${self}:other
	 NewDepends(D, V, 0,
		    pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		    OldDepLast);
      }
   }
   return true;
}
710 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
711 pkgCache::PkgIterator &D)
712 {
713 /* MultiArch handling introduces a lot of implicit Dependencies:
714 - MultiArch: same → Co-Installable if they have the same version
715 - All others conflict with all other group members */
716 map_ptrloc *OldDepLast = NULL;
717 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
718 if (coInstall == true)
719 {
720 map_ptrloc const VerStrIdx = V->VerStr;
721 // Replaces: ${self}:other ( << ${binary:Version})
722 NewDepends(D, V, VerStrIdx,
723 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
724 OldDepLast);
725 // Breaks: ${self}:other (!= ${binary:Version})
726 NewDepends(D, V, VerStrIdx,
727 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
728 OldDepLast);
729 } else {
730 // Conflicts: ${self}:other
731 NewDepends(D, V, 0,
732 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
733 OldDepLast);
734 }
735 return true;
736 }
737
738 /*}}}*/
739 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
740 // ---------------------------------------------------------------------
741 /* */
742 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
743 ListParser &List)
744 {
745 if (CurrentFile == 0)
746 return true;
747
748 // Get a structure
749 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
750 if (VerFile == 0)
751 return 0;
752
753 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
754 VF->File = CurrentFile - Cache.PkgFileP;
755
756 // Link it to the end of the list
757 map_ptrloc *Last = &Ver->FileList;
758 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
759 Last = &V->NextFile;
760 VF->NextFile = *Last;
761 *Last = VF.Index();
762
763 VF->Offset = List.Offset();
764 VF->Size = List.Size();
765 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
766 Cache.HeaderP->MaxVerFileSize = VF->Size;
767 Cache.HeaderP->VerFileCount++;
768
769 return true;
770 }
771 /*}}}*/
// CacheGenerator::NewVersion - Create a new Version 			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list. Returns the map
   offset of the new version, or 0 on allocation/write failure. */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
					    const string &VerStr,
					    map_ptrloc const ParentPkg,
					    unsigned long const Hash,
					    unsigned long Next)
{
   // Get a structure
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
   if (Version == 0)
      return 0;
   
   // Fill it in
   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
   Ver->NextVer = Next;
   Ver->ParentPkg = ParentPkg;
   Ver->Hash = Hash;
   Ver->ID = Cache.HeaderP->VersionCount++;

   // try to find the version string in the group for reuse
   pkgCache::PkgIterator Pkg = Ver.ParentPkg();
   pkgCache::GrpIterator Grp = Pkg.Group();
   if (Pkg.end() == false && Grp.end() == false)
   {
      for (pkgCache::PkgIterator P = Grp.PackageList(); P.end() == false; P = Grp.NextPkg(P))
      {
	 if (Pkg == P)
	    continue;
	 for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; ++V)
	 {
	    int const cmp = strcmp(V.VerStr(), VerStr.c_str());
	    if (cmp == 0)
	    {
	       // share the identical string instead of storing a copy
	       Ver->VerStr = V->VerStr;
	       return Version;
	    }
	    else if (cmp < 0)
	       break; // no exact match possible further down this list
	 }
      }
   }
   // haven't found the version string, so create
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))
      return 0;
   Ver->VerStr = idxVerStr;
   return Version;
}
									/*}}}*/
824 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
825 // ---------------------------------------------------------------------
826 /* */
827 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
828 ListParser &List)
829 {
830 if (CurrentFile == 0)
831 return true;
832
833 // Get a structure
834 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
835 if (DescFile == 0)
836 return false;
837
838 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
839 DF->File = CurrentFile - Cache.PkgFileP;
840
841 // Link it to the end of the list
842 map_ptrloc *Last = &Desc->FileList;
843 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
844 Last = &D->NextFile;
845
846 DF->NextFile = *Last;
847 *Last = DF.Index();
848
849 DF->Offset = List.Offset();
850 DF->Size = List.Size();
851 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
852 Cache.HeaderP->MaxDescFileSize = DF->Size;
853 Cache.HeaderP->DescFileCount++;
854
855 return true;
856 }
857 /*}}}*/
// CacheGenerator::NewDescription - Create a new Description		/*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list. If idxmd5str is
   non-zero the stored md5 string is shared from that offset instead of
   writing md5sum again. Returns the map offset of the description, or 0
   on failure. */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
					    const string &Lang,
					    const MD5SumValue &md5sum,
					    map_ptrloc idxmd5str)
{
   // Get a structure
   map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)
      return 0;

   // Fill it in
   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   // language codes repeat constantly, so store them uniq'd
   map_ptrloc const idxlanguage_code = WriteUniqString(Lang);
   if (unlikely(idxlanguage_code == 0))
      return 0;
   Desc->language_code = idxlanguage_code;

   if (idxmd5str != 0)
      Desc->md5sum = idxmd5str;
   else
   {
      map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
      if (unlikely(idxmd5sum == 0))
	 return 0;
      Desc->md5sum = idxmd5sum;
   }

   return Description;
}
									/*}}}*/
// CacheGenerator::NewDepends - Create a dependency element		/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. This overload
   resolves the version string to a map offset (sharing the parent's
   string for strict-equal dependencies) and forwards to the offset-based
   overload below. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   string const &Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
{
   map_ptrloc index = 0;
   if (Version.empty() == false)
   {
      int const CmpOp = Op & 0x0F;
      // =-deps are used (79:1) for lockstep on same-source packages (e.g. data-packages)
      // so in that common case the parent's version string can be shared
      if (CmpOp == pkgCache::Dep::Equals && strcmp(Version.c_str(), Ver.VerStr()) == 0)
	 index = Ver->VerStr;

      if (index == 0)
      {
	 // writing the string can grow and move the mmap; rebase the
	 // caller-cached insertion point so it stays valid
	 void const * const oldMap = Map.Data();
	 index = WriteStringInMap(Version);
	 if (unlikely(index == 0))
	    return false;
	 if (OldDepLast != 0 && oldMap != Map.Data())
	    OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
      }
   }
   return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast);
}
/* Offset-based worker: allocates the Dependency record, links it into
   the package's reverse-depends list and appends it to the version's
   dependency list. OldDepLast caches the list tail across calls; it is
   rebased here if the allocation moved the mmap. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   map_ptrloc const Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
{
   void const * const oldMap = Map.Data();
   // Get a structure
   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      return false;

   // Fill it in
   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   // register for automatic re-pointing should the map move again
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->Type = Type;
   Dep->CompareOp = Op;
   Dep->Version = Version;
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Link it to the package (prepend to the reverse-depends list)
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      // first dependency for this version: walk to the list tail once
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
	 OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      // the allocation above moved the map; rebase the cached tail pointer
      OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;

   return true;
}
									/*}}}*/
// ListParser::NewDepends - Create the environment for a new dependency	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and handles also the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
					       const string &PackageName,
					       const string &Arch,
					       const string &Version,
					       unsigned int Op,
					       unsigned int Type)
{
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   // we don't create 'none' packages and their dependencies if we can avoid it …
   if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
      return true;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (Pkg.end() == true) {
      // target is unknown so far - create a placeholder package for it
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
	 return false;
   }

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially;
      reset it whenever we switch to a different version */
   if (OldDepVer != Ver) {
      OldDepLast = NULL;
      OldDepVer = Ver;
   }

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
}
									/*}}}*/
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
/* Records that Ver provides PkgName:PkgArch (optionally versioned),
   creating the providing target package if it does not exist yet and
   linking the new record into both the version's and the package's
   provides lists. */
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
					        const string &PkgName,
						const string &PkgArch,
						const string &Version)
{
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
      return true;

   // Get a structure
   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
      return false;
   Cache.HeaderP->ProvidesCount++;

   // Fill it in
   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   // Dynamic registration keeps Prv valid if later writes move the mmap
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false) {
      map_ptrloc const idxProvideVersion = WriteString(Version);
      Prv->ProvideVersion = idxProvideVersion;
      if (unlikely(idxProvideVersion == 0))
	 return false;
   }

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
      return false;

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();

   return true;
}
									/*}}}*/
1054 bool pkgCacheGenerator::ListParser::SameVersion(unsigned short const Hash,/*{{{*/
1055 pkgCache::VerIterator const &Ver)
1056 {
1057 return Hash == Ver->Hash;
1058 }
1059 /*}}}*/
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields.
   Allocates a new PkgFile record, fills in its identifying strings and
   prepends it to the header's file list. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
				   const pkgIndexFile &Index,
				   unsigned long Flags)
{
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))
      return false;
   CurrentFile = Cache.PkgFileP + idxFile;

   // Fill it in
   // NOTE(review): the writes below may grow and move the mmap;
   // CurrentFile appears to be rebased by the generator's ReMap
   // handling - confirm before reordering anything here.
   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))
      return false;
   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))
      return false;
   CurrentFile->IndexType = idxIndexType;
   PkgFileName = File;
   // prepend to the global file list
   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

   if (Progress != 0)
      Progress->SubProgress(Index.Size());
   return true;
}
									/*}}}*/
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number. Strings live in a sorted linked list
   fronted by a tiny hash of recently used entries. Returns the string's
   map offset, or 0 on allocation failure. */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point in the sorted list
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
	I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
	 break;
   }

   // Match
   if (Res == 0)
   {
      Bucket = I;
      return I->String;
   }

   // Get a structure
   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
      return 0;
   // the allocations above may have moved the mmap; rebase the raw
   // pointers computed against the old mapping before dereferencing them
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
      I += (pkgCache::StringItem const * const) Map.Data() - (pkgCache::StringItem const * const) oldMap;
   }
   *Last = Item;

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   // remember this entry in the transient hash for the next lookup
   Bucket = ItemP;
   return ItemP->String;
}
									/*}}}*/
// CheckValidity - Check that a cache is up-to-date			/*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
   has matching attributes with the cache and the cache does not have
   any extra files. Returns true (and optionally hands the mapped cache
   to the caller via OutMap) only if the cache can be reused as-is. */
static bool CheckValidity(const string &CacheFile,
			  pkgSourceList &List,
			  FileIterator Start,
			  FileIterator End,
			  MMap **OutMap = 0)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
   {
      if (Debug == true)
	 std::clog << "CacheFile doesn't exist" << std::endl;
      return false;
   }

   // an edited sources.list invalidates the cache regardless of content
   if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
   {
      if (Debug == true)
	 std::clog << "sources.list is newer than the cache" << std::endl;
      return false;
   }

   // Map it
   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
   {
      if (Debug == true)
	 std::clog << "Errors are pending or Map is empty()" << std::endl;
      // discard: an unusable cache is merely "invalid", not a hard error
      _error->Discard();
      return false;
   }

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; ++Start)
   {
      if (Debug == true)
	 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      if ((*Start)->HasPackages() == false)
      {
	 if (Debug == true)
	    std::clog << "Has NO packages" << std::endl;
	 continue;
      }

      if ((*Start)->Exists() == false)
      {
#if 0 // mvo: we no longer give a message here (Default Sources spec)
	 _error->WarningE("stat",_("Couldn't stat source package list %s"),
			  (*Start)->Describe().c_str());
#endif
	 if (Debug == true)
	    std::clog << "file doesn't exist" << std::endl;
	 continue;
      }

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
      {
	 if (Debug == true)
	    std::clog << "FindInCache returned end-Pointer" << std::endl;
	 return false;
      }

      Visited[File->ID] = true;
      if (Debug == true)
	 std::clog << "with ID " << File->ID << " is valid" << std::endl;
   }

   // every file recorded in the cache must also appear in the index list,
   // otherwise the cache contains stale entries
   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
      {
	 if (Debug == true)
	    std::clog << "File with ID" << I << " wasn't visited" << std::endl;
	 return false;
      }

   if (_error->PendingError() == true)
   {
      if (Debug == true)
      {
	 std::clog << "Validity failed because of pending errors:" << std::endl;
	 _error->DumpErrors();
      }
      _error->Discard();
      return false;
   }

   // hand the (still guarded) map to the caller on success
   if (OutMap != 0)
      *OutMap = Map.UnGuard();
   return true;
}
									/*}}}*/
1257 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1258 // ---------------------------------------------------------------------
1259 /* Size is kind of an abstract notion that is only used for the progress
1260 meter */
1261 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1262 {
1263 unsigned long TotalSize = 0;
1264 for (; Start < End; ++Start)
1265 {
1266 if ((*Start)->HasPackages() == false)
1267 continue;
1268 TotalSize += (*Start)->Size();
1269 }
1270 return TotalSize;
1271 }
1272 /*}}}*/
1273 // BuildCache - Merge the list of index files into the cache /*{{{*/
1274 // ---------------------------------------------------------------------
1275 /* */
1276 static bool BuildCache(pkgCacheGenerator &Gen,
1277 OpProgress *Progress,
1278 unsigned long &CurrentSize,unsigned long TotalSize,
1279 FileIterator Start, FileIterator End)
1280 {
1281 FileIterator I;
1282 for (I = Start; I != End; ++I)
1283 {
1284 if ((*I)->HasPackages() == false)
1285 continue;
1286
1287 if ((*I)->Exists() == false)
1288 continue;
1289
1290 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1291 {
1292 _error->Warning("Duplicate sources.list entry %s",
1293 (*I)->Describe().c_str());
1294 continue;
1295 }
1296
1297 unsigned long Size = (*I)->Size();
1298 if (Progress != NULL)
1299 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1300 CurrentSize += Size;
1301
1302 if ((*I)->Merge(Gen,Progress) == false)
1303 return false;
1304 }
1305
1306 if (Gen.HasFileDeps() == true)
1307 {
1308 if (Progress != NULL)
1309 Progress->Done();
1310 TotalSize = ComputeSize(Start, End);
1311 CurrentSize = 0;
1312 for (I = Start; I != End; ++I)
1313 {
1314 unsigned long Size = (*I)->Size();
1315 if (Progress != NULL)
1316 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1317 CurrentSize += Size;
1318 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1319 return false;
1320 }
1321 }
1322
1323 return true;
1324 }
1325 /*}}}*/
1326 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1327 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1328 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1329 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1330 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1331 Flags |= MMap::Moveable;
1332 if (_config->FindB("APT::Cache-Fallback", false) == true)
1333 Flags |= MMap::Fallback;
1334 if (CacheF != NULL)
1335 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1336 else
1337 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1338 }
1339 /*}}}*/
// CacheGenerator::MakeStatusCache - Construct the status cache		/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmaped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandetory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
APT_DEPRECATED bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
			MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   // Flatten the index files of every source into one list
   std::vector<pkgIndexFile *> Files;
   for (std::vector<metaIndex *>::const_iterator i = List.begin();
        i != List.end();
        ++i)
   {
      std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end();
	   ++j)
         Files.push_back (*j);
   }

   // files before this index come from sources.list, files after it are
   // the system status files added below
   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      // strip a trailing "apt/" component so the parent dir is created too
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
	 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
	 std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      // try a file-backed map first; fall back to memory on failure
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
      {
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	 if (Debug == true)
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 Writeable = false;
	 if (AllowMem == false)
	 {
	    // not allowed to build in memory: surface the errors and fail
	    _error->MergeWithStack();
	    return false;
	 }
	 // drop the errors, we will retry with a memory map below
	 _error->RevertToStack();
      }
      else
      {
	 _error->MergeWithStack();
	 if (Debug == true)
	    std::clog << "Open filebased MMap" << std::endl;
      }
   }
   if (Writeable == false || CacheFile.empty() == true)
   {
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      if (Debug == true)
	 std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Lets try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
		|| SCacheF.Read((unsigned char *)Map->Data() + alloc,
				SCacheF.Size()) == false)
	 return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache on top of the preloaded source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   else
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)
	 return false;

      // Write it back
      if (Writeable == true && SrcCacheFile.empty() == false)
      {
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	    return false;

	 fchmod(SCacheF.Fd(),0644);

	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 SCacheF.Sync();

	 // Write out the proper header: the Dirty flag is cleared only
	 // for the on-disk copy, the in-memory map stays marked dirty
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;
	 SCacheF.Sync();
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
	 // re-open the finished file read-only for the caller
	 delete Map.UnGuard();
	 *OutMap = new MMap(*CacheF,0);
      }
      else
      {
	 *OutMap = Map.UnGuard();
      }
   }

   return true;
}
									/*}}}*/
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
/* Build an in-memory cache containing only the system status files (no
   sources.list indexes). On success *OutMap receives the DynamicMMap
   holding the cache; ownership passes to the caller. */
APT_DEPRECATED bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
{
   std::vector<pkgIndexFile *> Files;
   // Files is still empty here, so EndOfSource is always 0: everything
   // added by AddStatusFiles below counts as a status file
   unsigned long EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;

   TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

   // Build the status cache
   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));
   pkgCacheGenerator Gen(Map.Get(),Progress);
   if (_error->PendingError() == true)
      return false;
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		  Files.begin()+EndOfSource,Files.end()) == false)
      return false;

   if (_error->PendingError() == true)
      return false;
   // release the guarded map to the caller
   *OutMap = Map.UnGuard();

   return true;
}
									/*}}}*/
1581 // IsDuplicateDescription /*{{{*/
1582 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1583 MD5SumValue const &CurMd5, std::string const &CurLang)
1584 {
1585 // Descriptions in the same link-list have all the same md5
1586 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1587 return false;
1588 for (; Desc.end() == false; ++Desc)
1589 if (Desc.LanguageCode() == CurLang)
1590 return true;
1591 return false;
1592 }
1593 /*}}}*/
1594 // CacheGenerator::FinishCache /*{{{*/
1595 bool pkgCacheGenerator::FinishCache(OpProgress * /*Progress*/)
1596 {
1597 return true;
1598 }
1599 /*}}}*/