git.saurik.com Git - apt.git / blob - apt-pkg/pkgcachegen.cc (commit 9615b4c22303347cf6095f659c76a195103b28bb)
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/strutl.h>
22 #include <apt-pkg/sptr.h>
23 #include <apt-pkg/pkgsystem.h>
24 #include <apt-pkg/macros.h>
25 #include <apt-pkg/metaindex.h>
26 #include <apt-pkg/fileutl.h>
27 #include <apt-pkg/hashsum_template.h>
28 #include <apt-pkg/indexfile.h>
29 #include <apt-pkg/md5.h>
30 #include <apt-pkg/mmap.h>
31 #include <apt-pkg/pkgcache.h>
32 #include <apt-pkg/cacheiterators.h>
33
34 #include <stddef.h>
35 #include <string.h>
36 #include <iostream>
37 #include <string>
38 #include <vector>
39 #include <sys/stat.h>
40 #include <unistd.h>
41
42 #include <apti18n.h>
43 /*}}}*/
44 typedef std::vector<pkgIndexFile *>::iterator FileIterator;
45 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
46
47 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
48 MD5SumValue const &CurMd5, std::string const &CurLang);
49
50 using std::string;
51
52 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
53 // ---------------------------------------------------------------------
54 /* We set the dirty flag and make sure that it is written to disk */
55 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
56 Map(*pMap), Cache(pMap,false), Progress(Prog),
57 FoundFileDeps(0)
58 {
59 CurrentFile = 0;
60 memset(UniqHash,0,sizeof(UniqHash));
61
62 if (_error->PendingError() == true)
63 return;
64
65 if (Map.Size() == 0)
66 {
67       // Set up the map interface.
68 Cache.HeaderP = (pkgCache::Header *)Map.Data();
69 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
70 return;
71
72 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
73
74 // Starting header
75 *Cache.HeaderP = pkgCache::Header();
76 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
77 if (unlikely(idxVerSysName == 0))
78 return;
79 Cache.HeaderP->VerSysName = idxVerSysName;
80 // this pointer is set in ReMap, but we need it now for WriteUniqString
81 Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
82 map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
83 if (unlikely(idxArchitecture == 0))
84 return;
85 Cache.HeaderP->Architecture = idxArchitecture;
86
87 std::vector<std::string> archs = APT::Configuration::getArchitectures();
88 if (archs.size() > 1)
89 {
90 std::vector<std::string>::const_iterator a = archs.begin();
91 std::string list = *a;
92 for (++a; a != archs.end(); ++a)
93 list.append(",").append(*a);
94 map_ptrloc const idxArchitectures = WriteStringInMap(list);
95 if (unlikely(idxArchitectures == 0))
96 return;
97 Cache.HeaderP->Architectures = idxArchitectures;
98 }
99 else
100 Cache.HeaderP->Architectures = idxArchitecture;
101
102 Cache.ReMap();
103 }
104 else
105 {
106 // Map directly from the existing file
107 Cache.ReMap();
108 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
109 if (Cache.VS != _system->VS)
110 {
111 _error->Error(_("Cache has an incompatible versioning system"));
112 return;
113 }
114 }
115
116 Cache.HeaderP->Dirty = true;
117 Map.Sync(0,sizeof(pkgCache::Header));
118 }
119 /*}}}*/
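// A minimal construction sketch (assumptions: an OpProgress object named Progress;
// both helpers used here are defined further down in this file):
//    SPtr<DynamicMMap> Map = pkgCacheGenerator::CreateDynamicMMap(NULL); // memory-only map
//    pkgCacheGenerator Gen(Map.Get(), &Progress);  // writes the Header, the pools and VerSysName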
120 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
121 // ---------------------------------------------------------------------
122 /* We sync the data and then unset the dirty flag in two steps so as to
123    avoid a problem during a crash */
124 pkgCacheGenerator::~pkgCacheGenerator()
125 {
126 if (_error->PendingError() == true)
127 return;
128 if (Map.Sync() == false)
129 return;
130
131 Cache.HeaderP->Dirty = false;
132 Cache.HeaderP->CacheFileSize = Map.Size();
133 Map.Sync(0,sizeof(pkgCache::Header));
134 }
135 /*}}}*/
136 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
137 if (oldMap == newMap)
138 return;
139
140 if (_config->FindB("Debug::pkgCacheGen", false))
141       std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;
142
143 Cache.ReMap(false);
144
145 CurrentFile += (pkgCache::PackageFile const * const) newMap - (pkgCache::PackageFile const * const) oldMap;
146
147 for (size_t i = 0; i < _count(UniqHash); ++i)
148 if (UniqHash[i] != 0)
149 UniqHash[i] += (pkgCache::StringItem const * const) newMap - (pkgCache::StringItem const * const) oldMap;
150
151 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
152 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
153 (*i)->ReMap(oldMap, newMap);
154 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
155 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
156 (*i)->ReMap(oldMap, newMap);
157 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
158 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
159 (*i)->ReMap(oldMap, newMap);
160 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
161 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
162 (*i)->ReMap(oldMap, newMap);
163 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
164 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
165 (*i)->ReMap(oldMap, newMap);
166 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
167 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
168 (*i)->ReMap(oldMap, newMap);
169 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
170 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
171 (*i)->ReMap(oldMap, newMap);
172 } /*}}}*/
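// Caller-side sketch of why iterators must be registered with Dynamic<> (the same
// pattern is used throughout this file): without it an iterator would keep pointing
// into the old mapping after a grow-triggered remap.
//    pkgCache::PkgIterator Pkg;
//    Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);   // registers &Pkg in toReMap
//    NewPackage(Pkg, Name, Arch);                  // may grow and move the map; ReMap() rebases Pkg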
173 // CacheGenerator::WriteStringInMap /*{{{*/
174 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
175 const unsigned long &Len) {
176 void const * const oldMap = Map.Data();
177 map_ptrloc const index = Map.WriteString(String, Len);
178 if (index != 0)
179 ReMap(oldMap, Map.Data());
180 return index;
181 }
182 /*}}}*/
183 // CacheGenerator::WriteStringInMap /*{{{*/
184 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
185 void const * const oldMap = Map.Data();
186 map_ptrloc const index = Map.WriteString(String);
187 if (index != 0)
188 ReMap(oldMap, Map.Data());
189 return index;
190 }
191 /*}}}*/
192 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
193 void const * const oldMap = Map.Data();
194 map_ptrloc const index = Map.Allocate(size);
195 if (index != 0)
196 ReMap(oldMap, Map.Data());
197 return index;
198 }
199 /*}}}*/
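// All three writers above share one pattern: allocate, then rebase. A caller that
// holds raw offsets into the map has to rebase them itself, as NewDepends does:
//    void const * const oldMap = Map.Data();
//    map_ptrloc const idx = WriteStringInMap(Version);
//    if (oldMap != Map.Data())   // the write grew and therefore moved the map
//       OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;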
200 // CacheGenerator::MergeList - Merge the package list /*{{{*/
201 // ---------------------------------------------------------------------
202 /* This provides the generation of the entries in the cache. Each loop
203    iteration goes through a single package record from the underlying parse engine. */
204 bool pkgCacheGenerator::MergeList(ListParser &List,
205 pkgCache::VerIterator *OutVer)
206 {
207 List.Owner = this;
208
209 unsigned int Counter = 0;
210 while (List.Step() == true)
211 {
212 string const PackageName = List.Package();
213 if (PackageName.empty() == true)
214 return false;
215
216 Counter++;
217 if (Counter % 100 == 0 && Progress != 0)
218 Progress->Progress(List.Offset());
219
220 string Arch = List.Architecture();
221 string const Version = List.Version();
222 if (Version.empty() == true && Arch.empty() == true)
223 {
224 // package descriptions
225 if (MergeListGroup(List, PackageName) == false)
226 return false;
227 continue;
228 }
229
230 if (Arch.empty() == true)
231 {
232 // use the pseudo arch 'none' for arch-less packages
233 Arch = "none";
234 	 /* We might be building a SingleArchCache here, which we don't want to blow up
235 	    into a proper MultiArchCache just for these :none packages, so ensure
236 	    that we always have a native package structure first for SingleArch */
237 pkgCache::PkgIterator NP;
238 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
239 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
240 // TRANSLATOR: The first placeholder is a package name,
241 // the other two should be copied verbatim as they include debug info
242 return _error->Error(_("Error occurred while processing %s (%s%d)"),
243 PackageName.c_str(), "NewPackage", 0);
244 }
245
246 // Get a pointer to the package structure
247 pkgCache::PkgIterator Pkg;
248 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
249 if (NewPackage(Pkg, PackageName, Arch) == false)
250 // TRANSLATOR: The first placeholder is a package name,
251 // the other two should be copied verbatim as they include debug info
252 return _error->Error(_("Error occurred while processing %s (%s%d)"),
253 PackageName.c_str(), "NewPackage", 1);
254
255
256 if (Version.empty() == true)
257 {
258 if (MergeListPackage(List, Pkg) == false)
259 return false;
260 }
261 else
262 {
263 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
264 return false;
265 }
266
267 if (OutVer != 0)
268 {
269 FoundFileDeps |= List.HasFileDeps();
270 return true;
271 }
272 }
273
274 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
275 return _error->Error(_("Wow, you exceeded the number of package "
276 "names this APT is capable of."));
277 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
278 return _error->Error(_("Wow, you exceeded the number of versions "
279 "this APT is capable of."));
280 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
281 return _error->Error(_("Wow, you exceeded the number of descriptions "
282 "this APT is capable of."));
283 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
284 return _error->Error(_("Wow, you exceeded the number of dependencies "
285 "this APT is capable of."));
286
287 FoundFileDeps |= List.HasFileDeps();
288 return true;
289 }
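// How MergeList is driven in practice, as a rough sketch (the real caller is the
// format-specific pkgIndexFile::Merge implementation; file and variable names here
// are assumptions):
//    FileFd Fd(PackagesFile, FileFd::ReadOnly);
//    debListParser Parser(&Fd);
//    if (Gen.SelectFile(PackagesFile, Site, Index) == false) return false;
//    if (Gen.MergeList(Parser) == false) return false;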
290 // CacheGenerator::MergeListGroup /*{{{*/
291 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
292 {
293 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
294 	// a group has no data on its own, only packages have it, but
295 	// stanzas like this come from Translation- files to add descriptions,
296 	// and without a version we don't need a description for it…
297 if (Grp.end() == true)
298 return true;
299 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
300
301 pkgCache::PkgIterator Pkg;
302 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
303 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
304 if (MergeListPackage(List, Pkg) == false)
305 return false;
306
307 return true;
308 }
309 /*}}}*/
310 // CacheGenerator::MergeListPackage /*{{{*/
311 bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
312 {
313 // we first process the package, then the descriptions
314 // (for deb this package processing is in fact a no-op)
315 pkgCache::VerIterator Ver(Cache);
316 Dynamic<pkgCache::VerIterator> DynVer(Ver);
317 if (List.UsePackage(Pkg, Ver) == false)
318 return _error->Error(_("Error occurred while processing %s (%s%d)"),
319 Pkg.Name(), "UsePackage", 1);
320
321 // Find the right version to write the description
322 MD5SumValue CurMd5 = List.Description_md5();
323 if (CurMd5.Value().empty() == true && List.Description("").empty() == true)
324 return true;
325 std::vector<std::string> availDesc = List.AvailableDescriptionLanguages();
326 for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
327 {
328 pkgCache::DescIterator VerDesc = Ver.DescriptionList();
329
330 // a version can only have one md5 describing it
331 if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
332 continue;
333
334 map_ptrloc md5idx = VerDesc->md5sum;
335 for (std::vector<std::string>::const_iterator CurLang = availDesc.begin(); CurLang != availDesc.end(); ++CurLang)
336 {
337 // don't add a new description if we have one for the given
338 // md5 && language
339 if (IsDuplicateDescription(VerDesc, CurMd5, *CurLang) == true)
340 continue;
341
342 AddNewDescription(List, Ver, *CurLang, CurMd5, md5idx);
343 }
344
345 // we can stop here as all "same" versions will share the description
346 break;
347 }
348
349 return true;
350 }
351 /*}}}*/
352 // CacheGenerator::MergeListVersion /*{{{*/
353 bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
354 std::string const &Version, pkgCache::VerIterator* &OutVer)
355 {
356 pkgCache::VerIterator Ver = Pkg.VersionList();
357 Dynamic<pkgCache::VerIterator> DynVer(Ver);
358 map_ptrloc *LastVer = &Pkg->VersionList;
359 void const * oldMap = Map.Data();
360
361 unsigned short const Hash = List.VersionHash();
362 if (Ver.end() == false)
363 {
364 /* We know the list is sorted so we use that fact in the search.
365 Insertion of new versions is done with correct sorting */
366 int Res = 1;
367 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
368 {
369 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
370 	 // Version is higher than the current version - insert here
371 if (Res > 0)
372 break;
373 	 // Version strings are equal - is the hash also equal?
374 if (Res == 0 && List.SameVersion(Hash, Ver) == true)
375 break;
376 	 // proceed with the next one until we have either found the right
377 	 // version or another version (which will be lower)
378 }
379
380 /* We already have a version for this item, record that we saw it */
381 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
382 {
383 if (List.UsePackage(Pkg,Ver) == false)
384 return _error->Error(_("Error occurred while processing %s (%s%d)"),
385 Pkg.Name(), "UsePackage", 2);
386
387 if (NewFileVer(Ver,List) == false)
388 return _error->Error(_("Error occurred while processing %s (%s%d)"),
389 Pkg.Name(), "NewFileVer", 1);
390
391 // Read only a single record and return
392 if (OutVer != 0)
393 {
394 *OutVer = Ver;
395 return true;
396 }
397
398 return true;
399 }
400 }
401
402 // Add a new version
403 map_ptrloc const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
404 if (verindex == 0 && _error->PendingError())
405 return _error->Error(_("Error occurred while processing %s (%s%d)"),
406 Pkg.Name(), "NewVersion", 1);
407
408 if (oldMap != Map.Data())
409 LastVer += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
410 *LastVer = verindex;
411
412 if (unlikely(List.NewVersion(Ver) == false))
413 return _error->Error(_("Error occurred while processing %s (%s%d)"),
414 Pkg.Name(), "NewVersion", 2);
415
416 if (unlikely(List.UsePackage(Pkg,Ver) == false))
417 return _error->Error(_("Error occurred while processing %s (%s%d)"),
418 Pkg.Name(), "UsePackage", 3);
419
420 if (unlikely(NewFileVer(Ver,List) == false))
421 return _error->Error(_("Error occurred while processing %s (%s%d)"),
422 Pkg.Name(), "NewFileVer", 2);
423
424 pkgCache::GrpIterator Grp = Pkg.Group();
425 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
426
427 /* If it is the first version of this package we need to add implicit
428 Multi-Arch dependencies to all other package versions in the group now -
429 otherwise we just add them for this new version */
430 if (Pkg.VersionList()->NextVer == 0)
431 {
432 pkgCache::PkgIterator P = Grp.PackageList();
433 Dynamic<pkgCache::PkgIterator> DynP(P);
434 for (; P.end() != true; P = Grp.NextPkg(P))
435 {
436 if (P->ID == Pkg->ID)
437 continue;
438 pkgCache::VerIterator V = P.VersionList();
439 Dynamic<pkgCache::VerIterator> DynV(V);
440 for (; V.end() != true; ++V)
441 if (unlikely(AddImplicitDepends(V, Pkg) == false))
442 return _error->Error(_("Error occurred while processing %s (%s%d)"),
443 Pkg.Name(), "AddImplicitDepends", 1);
444 }
445       /* :none packages are packages without an architecture. They are forbidden by
446 	 debian-policy, so usually they will only be found in (old) dpkg status files -
447 	 and dpkg will complain about them - and they are pretty rare. We therefore
448 	 usually do not create conflicts when the parent is created, but only if a :none
449 	 package (= the target) appears. This creates incorrect dependencies on :none
450 	 for architecture-specific dependencies on the package we copy from, but we
451 	 will ignore this bug as architecture-specific dependencies are only allowed
452 	 from jessie onwards, and until then the :none packages should be extinct (hopefully).
453 	 In other words: this should work long enough to allow graceful removal of
454 	 these packages; it is not supposed to allow users to keep using them … */
455 if (strcmp(Pkg.Arch(), "none") == 0)
456 {
457 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
458 if (M.end() == false && Pkg != M)
459 {
460 pkgCache::DepIterator D = M.RevDependsList();
461 Dynamic<pkgCache::DepIterator> DynD(D);
462 for (; D.end() == false; ++D)
463 {
464 if ((D->Type != pkgCache::Dep::Conflicts &&
465 D->Type != pkgCache::Dep::DpkgBreaks &&
466 D->Type != pkgCache::Dep::Replaces) ||
467 D.ParentPkg().Group() == Grp)
468 continue;
469
470 map_ptrloc *OldDepLast = NULL;
471 pkgCache::VerIterator ConVersion = D.ParentVer();
472 Dynamic<pkgCache::VerIterator> DynV(ConVersion);
473 // duplicate the Conflicts/Breaks/Replaces for :none arch
474 NewDepends(Pkg, ConVersion, D->Version,
475 D->CompareOp, D->Type, OldDepLast);
476 }
477 }
478 }
479 }
480 if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
481 return _error->Error(_("Error occurred while processing %s (%s%d)"),
482 Pkg.Name(), "AddImplicitDepends", 2);
483
484 // Read only a single record and return
485 if (OutVer != 0)
486 {
487 *OutVer = Ver;
488 return true;
489 }
490
491 /* Record the Description(s) based on their master md5sum */
492 MD5SumValue CurMd5 = List.Description_md5();
493 if (CurMd5.Value().empty() == true && List.Description("").empty() == true)
494 return true;
495
496    /* Before we add a new description we first search in the group for
497       a version with a description of the same MD5 - if we find one we reuse its
498       description group instead of creating our own for this version */
499 for (pkgCache::PkgIterator P = Grp.PackageList();
500 P.end() == false; P = Grp.NextPkg(P))
501 {
502 for (pkgCache::VerIterator V = P.VersionList();
503 V.end() == false; ++V)
504 {
505 if (V->DescriptionList == 0 || MD5SumValue(V.DescriptionList().md5()) != CurMd5)
506 continue;
507 Ver->DescriptionList = V->DescriptionList;
508 }
509 }
510
511 // We haven't found reusable descriptions, so add the first description(s)
512 map_ptrloc md5idx = Ver->DescriptionList == 0 ? 0 : Ver.DescriptionList()->md5sum;
513 std::vector<std::string> availDesc = List.AvailableDescriptionLanguages();
514 for (std::vector<std::string>::const_iterator CurLang = availDesc.begin(); CurLang != availDesc.end(); ++CurLang)
515 if (AddNewDescription(List, Ver, *CurLang, CurMd5, md5idx) == false)
516 return false;
517 return true;
518 }
519 /*}}}*/
520 bool pkgCacheGenerator::AddNewDescription(ListParser &List, pkgCache::VerIterator &Ver, std::string const &lang, MD5SumValue const &CurMd5, map_ptrloc &md5idx) /*{{{*/
521 {
522 pkgCache::DescIterator Desc;
523 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
524
525 map_ptrloc const descindex = NewDescription(Desc, lang, CurMd5, md5idx);
526 if (unlikely(descindex == 0 && _error->PendingError()))
527 return _error->Error(_("Error occurred while processing %s (%s%d)"),
528 Ver.ParentPkg().Name(), "NewDescription", 1);
529
530 md5idx = Desc->md5sum;
531 Desc->ParentPkg = Ver.ParentPkg().Index();
532
533    // we add at the end so that the start stays constant, which we need
534    // to be able to efficiently share these lists
535 pkgCache::DescIterator VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
536 for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
537 map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
538 *LastNextDesc = descindex;
539
540 if (NewFileDesc(Desc,List) == false)
541 return _error->Error(_("Error occurred while processing %s (%s%d)"),
542 Ver.ParentPkg().Name(), "NewFileDesc", 1);
543
544 return true;
545 }
546 /*}}}*/
547 /*}}}*/
548 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
549 // ---------------------------------------------------------------------
550 /* If we found any file depends while parsing the main list we need to
551    resolve them. Since it is undesirable to load the entire list of files
552    into the cache as virtual packages we do a two-stage effort. MergeList
553    identifies the file depends and this creates Provides for them by
554    re-parsing all the indexes. */
555 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
556 {
557 List.Owner = this;
558
559 unsigned int Counter = 0;
560 while (List.Step() == true)
561 {
562 string PackageName = List.Package();
563 if (PackageName.empty() == true)
564 return false;
565 string Version = List.Version();
566 if (Version.empty() == true)
567 continue;
568
569 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
570 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
571 if (Pkg.end() == true)
572 return _error->Error(_("Error occurred while processing %s (%s%d)"),
573 PackageName.c_str(), "FindPkg", 1);
574 Counter++;
575 if (Counter % 100 == 0 && Progress != 0)
576 Progress->Progress(List.Offset());
577
578 unsigned short Hash = List.VersionHash();
579 pkgCache::VerIterator Ver = Pkg.VersionList();
580 Dynamic<pkgCache::VerIterator> DynVer(Ver);
581 for (; Ver.end() == false; ++Ver)
582 {
583 if (List.SameVersion(Hash, Ver) == true && Version == Ver.VerStr())
584 {
585 if (List.CollectFileProvides(Cache,Ver) == false)
586 return _error->Error(_("Error occurred while processing %s (%s%d)"),
587 PackageName.c_str(), "CollectFileProvides", 1);
588 break;
589 }
590 }
591
592 if (Ver.end() == true)
593 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
594 }
595
596 return true;
597 }
598 /*}}}*/
599 // CacheGenerator::NewGroup - Add a new group /*{{{*/
600 // ---------------------------------------------------------------------
601 /* This creates a new group structure and adds it to the hash table */
602 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
603 {
604 Grp = Cache.FindGrp(Name);
605 if (Grp.end() == false)
606 return true;
607
608 // Get a structure
609 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
610 if (unlikely(Group == 0))
611 return false;
612
613 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
614 map_ptrloc const idxName = WriteStringInMap(Name);
615 if (unlikely(idxName == 0))
616 return false;
617 Grp->Name = idxName;
618
619 // Insert it into the hash table
620 unsigned long const Hash = Cache.Hash(Name);
621 map_ptrloc *insertAt = &Cache.HeaderP->GrpHashTable[Hash];
622 while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + *insertAt)->Name) > 0)
623 insertAt = &(Cache.GrpP + *insertAt)->Next;
624 Grp->Next = *insertAt;
625 *insertAt = Group;
626
627 Grp->ID = Cache.HeaderP->GroupCount++;
628 return true;
629 }
630 /*}}}*/
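// Resulting layout sketch: GrpHashTable[Hash] heads a singly linked chain which the
// insertion above keeps sorted case-insensitively (group names below are illustrative
// and merely assumed to share a bucket):
//    GrpHashTable[h] -> "apt" -> "aptitude" -> "zsh" -> 0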
631 // CacheGenerator::NewPackage - Add a new package /*{{{*/
632 // ---------------------------------------------------------------------
633 /* This creates a new package structure and adds it to the hash table */
634 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
635 const string &Arch) {
636 pkgCache::GrpIterator Grp;
637 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
638 if (unlikely(NewGroup(Grp, Name) == false))
639 return false;
640
641 Pkg = Grp.FindPkg(Arch);
642 if (Pkg.end() == false)
643 return true;
644
645 // Get a structure
646 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
647 if (unlikely(Package == 0))
648 return false;
649 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
650
651 // Insert the package into our package list
652 if (Grp->FirstPackage == 0) // the group is new
653 {
654 Grp->FirstPackage = Package;
655 // Insert it into the hash table
656 unsigned long const Hash = Cache.Hash(Name);
657 map_ptrloc *insertAt = &Cache.HeaderP->PkgHashTable[Hash];
658 while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.PkgP + *insertAt)->Name) > 0)
659 insertAt = &(Cache.PkgP + *insertAt)->Next;
660 Pkg->Next = *insertAt;
661 *insertAt = Package;
662 }
663 else // Group the Packages together
664 {
665 // this package is the new last package
666 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
667 Pkg->Next = LastPkg->Next;
668 LastPkg->Next = Package;
669 }
670 Grp->LastPackage = Package;
671
672 // Set the name, arch and the ID
673 Pkg->Name = Grp->Name;
674 Pkg->Group = Grp.Index();
675    // 'all' is mapped to the native architecture
676 map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
677 if (unlikely(idxArch == 0))
678 return false;
679 Pkg->Arch = idxArch;
680 Pkg->ID = Cache.HeaderP->PackageCount++;
681
682 return true;
683 }
684 /*}}}*/
685 // CacheGenerator::AddImplicitDepends /*{{{*/
686 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
687 pkgCache::PkgIterator &P,
688 pkgCache::VerIterator &V)
689 {
690 // copy P.Arch() into a string here as a cache remap
691 // in NewDepends() later may alter the pointer location
692 string Arch = P.Arch() == NULL ? "" : P.Arch();
693 map_ptrloc *OldDepLast = NULL;
694 /* MultiArch handling introduces a lot of implicit Dependencies:
695 - MultiArch: same → Co-Installable if they have the same version
696 - All others conflict with all other group members */
697 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
698 pkgCache::PkgIterator D = G.PackageList();
699 Dynamic<pkgCache::PkgIterator> DynD(D);
700 map_ptrloc const VerStrIdx = V->VerStr;
701 for (; D.end() != true; D = G.NextPkg(D))
702 {
703 if (Arch == D.Arch() || D->VersionList == 0)
704 continue;
705       /* We allow only one installed arch at a time
706 per group, therefore each group member conflicts
707 with all other group members */
708 if (coInstall == true)
709 {
710 // Replaces: ${self}:other ( << ${binary:Version})
711 NewDepends(D, V, VerStrIdx,
712 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
713 OldDepLast);
714 // Breaks: ${self}:other (!= ${binary:Version})
715 NewDepends(D, V, VerStrIdx,
716 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
717 OldDepLast);
718 } else {
719 // Conflicts: ${self}:other
720 NewDepends(D, V, 0,
721 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
722 OldDepLast);
723 }
724 }
725 return true;
726 }
727 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
728 pkgCache::PkgIterator &D)
729 {
730 /* MultiArch handling introduces a lot of implicit Dependencies:
731 - MultiArch: same → Co-Installable if they have the same version
732 - All others conflict with all other group members */
733 map_ptrloc *OldDepLast = NULL;
734 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
735 if (coInstall == true)
736 {
737 map_ptrloc const VerStrIdx = V->VerStr;
738 // Replaces: ${self}:other ( << ${binary:Version})
739 NewDepends(D, V, VerStrIdx,
740 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
741 OldDepLast);
742 // Breaks: ${self}:other (!= ${binary:Version})
743 NewDepends(D, V, VerStrIdx,
744 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
745 OldDepLast);
746 } else {
747 // Conflicts: ${self}:other
748 NewDepends(D, V, 0,
749 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
750 OldDepLast);
751 }
752 return true;
753 }
754
755 /*}}}*/
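// Effect of the two overloads above, as a sketch for a group with the illustrative
// members foo:amd64 and foo:i386 at version 1.0:
//    Multi-Arch: same  ->  each version gets  Replaces: foo:<otherarch> (<< 1.0)
//                                             Breaks:   foo:<otherarch> (!= 1.0)
//    otherwise         ->  each version gets  Conflicts: foo:<otherarch>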
756 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
757 // ---------------------------------------------------------------------
758 /* */
759 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
760 ListParser &List)
761 {
762 if (CurrentFile == 0)
763 return true;
764
765 // Get a structure
766 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
767 if (VerFile == 0)
768       return false;
769
770 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
771 VF->File = CurrentFile - Cache.PkgFileP;
772
773 // Link it to the end of the list
774 map_ptrloc *Last = &Ver->FileList;
775 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
776 Last = &V->NextFile;
777 VF->NextFile = *Last;
778 *Last = VF.Index();
779
780 VF->Offset = List.Offset();
781 VF->Size = List.Size();
782 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
783 Cache.HeaderP->MaxVerFileSize = VF->Size;
784 Cache.HeaderP->VerFileCount++;
785
786 return true;
787 }
788 /*}}}*/
789 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
790 // ---------------------------------------------------------------------
791 /* This puts a version structure in the linked list */
792 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
793 const string &VerStr,
794 map_ptrloc const ParentPkg,
795 unsigned long const Hash,
796 unsigned long Next)
797 {
798 // Get a structure
799 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
800 if (Version == 0)
801 return 0;
802
803 // Fill it in
804 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
805 //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
806 Ver->NextVer = Next;
807 Ver->ParentPkg = ParentPkg;
808 Ver->Hash = Hash;
809 Ver->ID = Cache.HeaderP->VersionCount++;
810
811 // try to find the version string in the group for reuse
812 pkgCache::PkgIterator Pkg = Ver.ParentPkg();
813 pkgCache::GrpIterator Grp = Pkg.Group();
814 if (Pkg.end() == false && Grp.end() == false)
815 {
816 for (pkgCache::PkgIterator P = Grp.PackageList(); P.end() == false; P = Grp.NextPkg(P))
817 {
818 if (Pkg == P)
819 continue;
820 for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; ++V)
821 {
822 int const cmp = strcmp(V.VerStr(), VerStr.c_str());
823 if (cmp == 0)
824 {
825 Ver->VerStr = V->VerStr;
826 return Version;
827 }
828 else if (cmp < 0)
829 break;
830 }
831 }
832 }
833    // haven't found the version string, so create a new one
834 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
835 if (unlikely(idxVerStr == 0))
836 return 0;
837 Ver->VerStr = idxVerStr;
838 return Version;
839 }
840 /*}}}*/
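// String-sharing sketch for the reuse loop above (package and version names are
// illustrative): the second call returns a Version whose VerStr offset equals the
// first one's, so the string "1.0-1" is stored in the map only once.
//    NewVersion(V1, "1.0-1", PkgAmd64.Index(), Hash, 0);
//    NewVersion(V2, "1.0-1", PkgI386.Index(),  Hash, 0);   // V2->VerStr == V1->VerStr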
841 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
842 // ---------------------------------------------------------------------
843 /* */
844 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
845 ListParser &List)
846 {
847 if (CurrentFile == 0)
848 return true;
849
850 // Get a structure
851 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
852 if (DescFile == 0)
853 return false;
854
855 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
856 DF->File = CurrentFile - Cache.PkgFileP;
857
858 // Link it to the end of the list
859 map_ptrloc *Last = &Desc->FileList;
860 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
861 Last = &D->NextFile;
862
863 DF->NextFile = *Last;
864 *Last = DF.Index();
865
866 DF->Offset = List.Offset();
867 DF->Size = List.Size();
868 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
869 Cache.HeaderP->MaxDescFileSize = DF->Size;
870 Cache.HeaderP->DescFileCount++;
871
872 return true;
873 }
874 /*}}}*/
875 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
876 // ---------------------------------------------------------------------
877 /* This puts a description structure in the linked list */
878 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
879 const string &Lang,
880 const MD5SumValue &md5sum,
881 map_ptrloc idxmd5str)
882 {
883 // Get a structure
884 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
885 if (Description == 0)
886 return 0;
887
888 // Fill it in
889 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
890 Desc->ID = Cache.HeaderP->DescriptionCount++;
891 map_ptrloc const idxlanguage_code = WriteUniqString(Lang);
892 if (unlikely(idxlanguage_code == 0))
893 return 0;
894 Desc->language_code = idxlanguage_code;
895
896 if (idxmd5str != 0)
897 Desc->md5sum = idxmd5str;
898 else
899 {
900 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
901 if (unlikely(idxmd5sum == 0))
902 return 0;
903 Desc->md5sum = idxmd5sum;
904 }
905
906 return Description;
907 }
908 /*}}}*/
909 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
910 // ---------------------------------------------------------------------
911 /* This creates a dependency element in the tree. It is linked to the
912 version and to the package that it is pointing to. */
913 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
914 pkgCache::VerIterator &Ver,
915 string const &Version,
916 unsigned int const &Op,
917 unsigned int const &Type,
918 map_ptrloc* &OldDepLast)
919 {
920 map_ptrloc index = 0;
921 if (Version.empty() == false)
922 {
923 int const CmpOp = Op & 0x0F;
924 // =-deps are used (79:1) for lockstep on same-source packages (e.g. data-packages)
925 if (CmpOp == pkgCache::Dep::Equals && strcmp(Version.c_str(), Ver.VerStr()) == 0)
926 index = Ver->VerStr;
927
928 if (index == 0)
929 {
930 void const * const oldMap = Map.Data();
931 index = WriteStringInMap(Version);
932 if (unlikely(index == 0))
933 return false;
934 if (OldDepLast != 0 && oldMap != Map.Data())
935 OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
936 }
937 }
938 return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast);
939 }
940 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
941 pkgCache::VerIterator &Ver,
942 map_ptrloc const Version,
943 unsigned int const &Op,
944 unsigned int const &Type,
945 map_ptrloc* &OldDepLast)
946 {
947 void const * const oldMap = Map.Data();
948 // Get a structure
949 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
950 if (unlikely(Dependency == 0))
951 return false;
952
953 // Fill it in
954 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
955 Dynamic<pkgCache::DepIterator> DynDep(Dep);
956 Dep->ParentVer = Ver.Index();
957 Dep->Type = Type;
958 Dep->CompareOp = Op;
959 Dep->Version = Version;
960 Dep->ID = Cache.HeaderP->DependsCount++;
961
962 // Link it to the package
963 Dep->Package = Pkg.Index();
964 Dep->NextRevDepends = Pkg->RevDepends;
965 Pkg->RevDepends = Dep.Index();
966
967 // Do we know where to link the Dependency to?
968 if (OldDepLast == NULL)
969 {
970 OldDepLast = &Ver->DependsList;
971 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
972 OldDepLast = &D->NextDepends;
973 } else if (oldMap != Map.Data())
974 OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
975
976 Dep->NextDepends = *OldDepLast;
977 *OldDepLast = Dep.Index();
978 OldDepLast = &Dep->NextDepends;
979
980 return true;
981 }
982 /*}}}*/
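// OldDepLast caching in use, as a sketch (this mirrors ListParser::NewDepends below):
// the first call walks Ver's dependency list once, every later call for the same
// version appends in constant time while keeping the list in parse order.
//    map_ptrloc *OldDepLast = NULL;
//    NewDepends(PkgA, Ver, idxA, Op, Type, OldDepLast);
//    NewDepends(PkgB, Ver, idxB, Op, Type, OldDepLast);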
983 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
984 // ---------------------------------------------------------------------
985 /* This creates a Group and the Package to link this dependency to if
986    needed and also handles the caching of the old endpoint */
987 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
988 const string &PackageName,
989 const string &Arch,
990 const string &Version,
991 unsigned int Op,
992 unsigned int Type)
993 {
994 pkgCache::GrpIterator Grp;
995 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
996 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
997 return false;
998
999 // Locate the target package
1000 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
1001 // we don't create 'none' packages and their dependencies if we can avoid it …
1002 if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
1003 return true;
1004 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
1005 if (Pkg.end() == true) {
1006 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
1007 return false;
1008 }
1009
1010 // Is it a file dependency?
1011 if (unlikely(PackageName[0] == '/'))
1012 FoundFileDeps = true;
1013
1014 /* Caching the old end point speeds up generation substantially */
1015 if (OldDepVer != Ver) {
1016 OldDepLast = NULL;
1017 OldDepVer = Ver;
1018 }
1019
1020 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
1021 }
1022 /*}}}*/
1023 // ListParser::NewProvides - Create a Provides element /*{{{*/
1024 // ---------------------------------------------------------------------
1025 /* */
1026 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
1027 const string &PkgName,
1028 const string &PkgArch,
1029 const string &Version)
1030 {
1031 pkgCache &Cache = Owner->Cache;
1032
1033    // We do not add self-referencing provides
1034 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
1035 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
1036 return true;
1037
1038 // Get a structure
1039 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
1040 if (unlikely(Provides == 0))
1041 return false;
1042 Cache.HeaderP->ProvidesCount++;
1043
1044 // Fill it in
1045 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
1046 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
1047 Prv->Version = Ver.Index();
1048 Prv->NextPkgProv = Ver->ProvidesList;
1049 Ver->ProvidesList = Prv.Index();
1050 if (Version.empty() == false) {
1051 map_ptrloc const idxProvideVersion = WriteString(Version);
1052 Prv->ProvideVersion = idxProvideVersion;
1053 if (unlikely(idxProvideVersion == 0))
1054 return false;
1055 }
1056
1057 // Locate the target package
1058 pkgCache::PkgIterator Pkg;
1059 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
1060 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
1061 return false;
1062
1063 // Link it to the package
1064 Prv->ParentPkg = Pkg.Index();
1065 Prv->NextProvides = Pkg->ProvidesList;
1066 Pkg->ProvidesList = Prv.Index();
1067
1068 return true;
1069 }
1070 /*}}}*/
1071 bool pkgCacheGenerator::ListParser::SameVersion(unsigned short const Hash,/*{{{*/
1072 pkgCache::VerIterator const &Ver)
1073 {
1074 return Hash == Ver->Hash;
1075 }
1076 /*}}}*/
1077 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
1078 // ---------------------------------------------------------------------
1079 /* This is used to select which file is to be associated with all newly
1080 added versions. The caller is responsible for setting the IMS fields. */
1081 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
1082 const pkgIndexFile &Index,
1083 unsigned long Flags)
1084 {
1085 // Get some space for the structure
1086 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
1087 if (unlikely(idxFile == 0))
1088 return false;
1089 CurrentFile = Cache.PkgFileP + idxFile;
1090
1091 // Fill it in
1092 map_ptrloc const idxFileName = WriteStringInMap(File);
1093 map_ptrloc const idxSite = WriteUniqString(Site);
1094 if (unlikely(idxFileName == 0 || idxSite == 0))
1095 return false;
1096 CurrentFile->FileName = idxFileName;
1097 CurrentFile->Site = idxSite;
1098 CurrentFile->NextFile = Cache.HeaderP->FileList;
1099 CurrentFile->Flags = Flags;
1100 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
1101 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
1102 if (unlikely(idxIndexType == 0))
1103 return false;
1104 CurrentFile->IndexType = idxIndexType;
1105 PkgFileName = File;
1106 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
1107 Cache.HeaderP->PackageFileCount++;
1108
1109 if (Progress != 0)
1110 Progress->SubProgress(Index.Size());
1111 return true;
1112 }
1113 /*}}}*/
1114 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
1115 // ---------------------------------------------------------------------
1116 /* This is used to create handles to strings. Given the same text it
1117 always returns the same number */
1118 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
1119 unsigned int Size)
1120 {
1121    /* We use a very small transient hash table here; this speeds up generation
1122 by a fair amount on slower machines */
1123 pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
1124 if (Bucket != 0 &&
1125 stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
1126 return Bucket->String;
1127
1128 // Search for an insertion point
1129 pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
1130 int Res = 1;
1131 map_ptrloc *Last = &Cache.HeaderP->StringList;
1132 for (; I != Cache.StringItemP; Last = &I->NextItem,
1133 I = Cache.StringItemP + I->NextItem)
1134 {
1135 Res = stringcmp(S,S+Size,Cache.StrP + I->String);
1136 if (Res >= 0)
1137 break;
1138 }
1139
1140 // Match
1141 if (Res == 0)
1142 {
1143 Bucket = I;
1144 return I->String;
1145 }
1146
1147 // Get a structure
1148 void const * const oldMap = Map.Data();
1149 map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
1150 if (Item == 0)
1151 return 0;
1152
1153 map_ptrloc const idxString = WriteStringInMap(S,Size);
1154 if (unlikely(idxString == 0))
1155 return 0;
1156 if (oldMap != Map.Data()) {
1157 Last += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
1158 I += (pkgCache::StringItem const * const) Map.Data() - (pkgCache::StringItem const * const) oldMap;
1159 }
1160 *Last = Item;
1161
1162 // Fill in the structure
1163 pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
1164 ItemP->NextItem = I - Cache.StringItemP;
1165 ItemP->String = idxString;
1166
1167 Bucket = ItemP;
1168 return ItemP->String;
1169 }
1170 /*}}}*/
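// Bucket behaviour sketch: a repeated lookup of the same string is answered from the
// small transient UniqHash bucket and never rescans the sorted StringItem list.
//    unsigned long const a = WriteUniqString("amd64");
//    unsigned long const b = WriteUniqString("amd64");   // b == a, served from the bucket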
1171 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1172 // ---------------------------------------------------------------------
1173 /* This just verifies that each file in the list of index files exists,
1174 has matching attributes with the cache and the cache does not have
1175 any extra files. */
1176 static bool CheckValidity(const string &CacheFile,
1177 pkgSourceList &List,
1178 FileIterator Start,
1179 FileIterator End,
1180 MMap **OutMap = 0)
1181 {
1182 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1183 // No file, certainly invalid
1184 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1185 {
1186 if (Debug == true)
1187 std::clog << "CacheFile doesn't exist" << std::endl;
1188 return false;
1189 }
1190
1191 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1192 {
1193 if (Debug == true)
1194 std::clog << "sources.list is newer than the cache" << std::endl;
1195 return false;
1196 }
1197
1198 // Map it
1199 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1200 SPtr<MMap> Map = new MMap(CacheF,0);
1201 pkgCache Cache(Map);
1202 if (_error->PendingError() == true || Map->Size() == 0)
1203 {
1204 if (Debug == true)
1205 std::clog << "Errors are pending or Map is empty()" << std::endl;
1206 _error->Discard();
1207 return false;
1208 }
1209
1210 /* Now we check every index file, see if it is in the cache,
1211 verify the IMS data and check that it is on the disk too.. */
1212 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1213 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1214 for (; Start != End; ++Start)
1215 {
1216 if (Debug == true)
1217 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1218 if ((*Start)->HasPackages() == false)
1219 {
1220 if (Debug == true)
1221 std::clog << "Has NO packages" << std::endl;
1222 continue;
1223 }
1224
1225 if ((*Start)->Exists() == false)
1226 {
1227 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1228 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1229 (*Start)->Describe().c_str());
1230 #endif
1231 if (Debug == true)
1232 std::clog << "file doesn't exist" << std::endl;
1233 continue;
1234 }
1235
1236 // FindInCache is also expected to do an IMS check.
1237 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1238 if (File.end() == true)
1239 {
1240 if (Debug == true)
1241 std::clog << "FindInCache returned end-Pointer" << std::endl;
1242 return false;
1243 }
1244
1245 Visited[File->ID] = true;
1246 if (Debug == true)
1247 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1248 }
1249
1250 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1251 if (Visited[I] == false)
1252 {
1253 if (Debug == true)
1254 	    std::clog << "File with ID " << I << " wasn't visited" << std::endl;
1255 return false;
1256 }
1257
1258 if (_error->PendingError() == true)
1259 {
1260 if (Debug == true)
1261 {
1262 std::clog << "Validity failed because of pending errors:" << std::endl;
1263 _error->DumpErrors();
1264 }
1265 _error->Discard();
1266 return false;
1267 }
1268
1269 if (OutMap != 0)
1270 *OutMap = Map.UnGuard();
1271 return true;
1272 }
1273 /*}}}*/
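// The checks above explain their verdict when the debug switch already used in this
// file is turned on, e.g. via apt.conf (sketch):
//    Debug::pkgCacheGen "true";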
1274 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1275 // ---------------------------------------------------------------------
1276 /* Size is kind of an abstract notion that is only used for the progress
1277 meter */
1278 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1279 {
1280 unsigned long TotalSize = 0;
1281 for (; Start < End; ++Start)
1282 {
1283 if ((*Start)->HasPackages() == false)
1284 continue;
1285 TotalSize += (*Start)->Size();
1286 }
1287 return TotalSize;
1288 }
1289 /*}}}*/
1290 // BuildCache - Merge the list of index files into the cache /*{{{*/
1291 // ---------------------------------------------------------------------
1292 /* */
1293 static bool BuildCache(pkgCacheGenerator &Gen,
1294 OpProgress *Progress,
1295 unsigned long &CurrentSize,unsigned long TotalSize,
1296 FileIterator Start, FileIterator End)
1297 {
1298 FileIterator I;
1299 for (I = Start; I != End; ++I)
1300 {
1301 if ((*I)->HasPackages() == false)
1302 continue;
1303
1304 if ((*I)->Exists() == false)
1305 continue;
1306
1307 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1308 {
1309 _error->Warning("Duplicate sources.list entry %s",
1310 (*I)->Describe().c_str());
1311 continue;
1312 }
1313
1314 unsigned long Size = (*I)->Size();
1315 if (Progress != NULL)
1316 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1317 CurrentSize += Size;
1318
1319 if ((*I)->Merge(Gen,Progress) == false)
1320 return false;
1321 }
1322
1323 if (Gen.HasFileDeps() == true)
1324 {
1325 if (Progress != NULL)
1326 Progress->Done();
1327 TotalSize = ComputeSize(Start, End);
1328 CurrentSize = 0;
1329 for (I = Start; I != End; ++I)
1330 {
1331 unsigned long Size = (*I)->Size();
1332 if (Progress != NULL)
1333 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1334 CurrentSize += Size;
1335 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1336 return false;
1337 }
1338 }
1339
1340 return true;
1341 }
1342 /*}}}*/
1343 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1344 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1345 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1346 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1347 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1348 Flags |= MMap::Moveable;
1349 if (_config->FindB("APT::Cache-Fallback", false) == true)
1350 Flags |= MMap::Fallback;
1351 if (CacheF != NULL)
1352 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1353 else
1354 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1355 }
1356 /*}}}*/
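// The sizing knobs read above, shown as a hedged apt.conf sketch with the default
// values from this function (all sizes in bytes):
//    APT::Cache-Start "25165824";   // 24*1024*1024 initial allocation
//    APT::Cache-Grow  "1048576";    // grow in 1 MiB steps
//    APT::Cache-Limit "0";          // 0 = no upper bound
//    APT::Cache-Fallback "false";   // true enables the plain-memory fallback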
1357 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1358 // ---------------------------------------------------------------------
1359 /* This makes sure that the status cache (the cache that has all
1360 index files from the sources list and all local ones) is ready
1361    to be mmapped. If OutMap is not zero then an MMap object representing
1362    the cache will be stored there. This is pretty much mandatory if you
1363 are using AllowMem. AllowMem lets the function be run as non-root
1364 where it builds the cache 'fast' into a memory buffer. */
1365 APT_DEPRECATED bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1366 MMap **OutMap, bool AllowMem)
1367 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
1368 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
1369 MMap **OutMap,bool AllowMem)
1370 {
1371 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1372
1373 std::vector<pkgIndexFile *> Files;
1374 for (std::vector<metaIndex *>::const_iterator i = List.begin();
1375 i != List.end();
1376 ++i)
1377 {
1378 std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
1379 for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
1380 j != Indexes->end();
1381 ++j)
1382 Files.push_back (*j);
1383 }
1384
1385 unsigned long const EndOfSource = Files.size();
1386 if (_system->AddStatusFiles(Files) == false)
1387 return false;
1388
1389 // Decide if we can write to the files..
1390 string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
1391 string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
1392
1393 // ensure the cache directory exists
1394 if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
1395 {
1396 string dir = _config->FindDir("Dir::Cache");
1397 size_t const len = dir.size();
1398 if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
1399 dir = dir.substr(0, len - 5);
1400 if (CacheFile.empty() == false)
1401 CreateDirectory(dir, flNotFile(CacheFile));
1402 if (SrcCacheFile.empty() == false)
1403 CreateDirectory(dir, flNotFile(SrcCacheFile));
1404 }
1405
1406 // Decide if we can write to the cache
1407 bool Writeable = false;
1408 if (CacheFile.empty() == false)
1409 Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
1410 else
1411 if (SrcCacheFile.empty() == false)
1412 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
1413 if (Debug == true)
1414 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
1415
1416 if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
1417 return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
1418
1419 if (Progress != NULL)
1420 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1421
1422 // Cache is OK, Fin.
1423 if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
1424 {
1425 if (Progress != NULL)
1426 Progress->OverallProgress(1,1,1,_("Reading package lists"));
1427 if (Debug == true)
1428 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
1429 return true;
1430 }
1431 else if (Debug == true)
1432 std::clog << "pkgcache.bin is NOT valid" << std::endl;
1433
1434 /* At this point we know we need to reconstruct the package cache,
1435 begin. */
1436 SPtr<FileFd> CacheF;
1437 SPtr<DynamicMMap> Map;
1438 if (Writeable == true && CacheFile.empty() == false)
1439 {
1440 _error->PushToStack();
1441 unlink(CacheFile.c_str());
1442 CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
1443 fchmod(CacheF->Fd(),0644);
1444 Map = CreateDynamicMMap(CacheF, MMap::Public);
1445 if (_error->PendingError() == true)
1446 {
1447 delete CacheF.UnGuard();
1448 delete Map.UnGuard();
1449 if (Debug == true)
1450 std::clog << "Open filebased MMap FAILED" << std::endl;
1451 Writeable = false;
1452 if (AllowMem == false)
1453 {
1454 _error->MergeWithStack();
1455 return false;
1456 }
1457 _error->RevertToStack();
1458 }
1459 else
1460 {
1461 _error->MergeWithStack();
1462 if (Debug == true)
1463 std::clog << "Open filebased MMap" << std::endl;
1464 }
1465 }
1466 if (Writeable == false || CacheFile.empty() == true)
1467 {
1468 // Just build it in memory..
1469 Map = CreateDynamicMMap(NULL);
1470 if (Debug == true)
1471 std::clog << "Open memory Map (not filebased)" << std::endl;
1472 }
1473
1474    // Let's try the source cache.
1475 unsigned long CurrentSize = 0;
1476 unsigned long TotalSize = 0;
1477 if (CheckValidity(SrcCacheFile, List, Files.begin(),
1478 Files.begin()+EndOfSource) == true)
1479 {
1480 if (Debug == true)
1481 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
1482 // Preload the map with the source cache
1483 FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
1484 unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
1485 if ((alloc == 0 && _error->PendingError())
1486 || SCacheF.Read((unsigned char *)Map->Data() + alloc,
1487 SCacheF.Size()) == false)
1488 return false;
1489
1490 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1491
1492 // Build the status cache
1493 pkgCacheGenerator Gen(Map.Get(),Progress);
1494 if (_error->PendingError() == true)
1495 return false;
1496 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1497 Files.begin()+EndOfSource,Files.end()) == false)
1498 return false;
1499 }
1500 else
1501 {
1502 if (Debug == true)
1503 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
1504 TotalSize = ComputeSize(Files.begin(),Files.end());
1505
1506 // Build the source cache
1507 pkgCacheGenerator Gen(Map.Get(),Progress);
1508 if (_error->PendingError() == true)
1509 return false;
1510 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1511 Files.begin(),Files.begin()+EndOfSource) == false)
1512 return false;
1513
1514 // Write it back
1515 if (Writeable == true && SrcCacheFile.empty() == false)
1516 {
1517 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
1518 if (_error->PendingError() == true)
1519 return false;
1520
1521 fchmod(SCacheF.Fd(),0644);
1522
1523 // Write out the main data
1524 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
1525 return _error->Error(_("IO Error saving source cache"));
1526 SCacheF.Sync();
1527
1528 // Write out the proper header
1529 Gen.GetCache().HeaderP->Dirty = false;
1530 if (SCacheF.Seek(0) == false ||
1531 SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
1532 return _error->Error(_("IO Error saving source cache"));
1533 Gen.GetCache().HeaderP->Dirty = true;
1534 SCacheF.Sync();
1535 }
1536
1537 // Build the status cache
1538 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1539 Files.begin()+EndOfSource,Files.end()) == false)
1540 return false;
1541 }
1542 if (Debug == true)
1543 std::clog << "Caches are ready for shipping" << std::endl;
1544
1545 if (_error->PendingError() == true)
1546 return false;
1547 if (OutMap != 0)
1548 {
1549 if (CacheF != 0)
1550 {
1551 delete Map.UnGuard();
1552 *OutMap = new MMap(*CacheF,0);
1553 }
1554 else
1555 {
1556 *OutMap = Map.UnGuard();
1557 }
1558 }
1559
1560 return true;
1561 }
1562 /*}}}*/
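// Typical call as a sketch (roughly what the higher-level cache users do; variable
// names are assumptions):
//    MMap *OutMap = NULL;
//    OpProgress Progress;
//    if (pkgCacheGenerator::MakeStatusCache(List, &Progress, &OutMap, true) == false)
//       return false;
//    pkgCache Cache(OutMap);   // backed by pkgcache.bin on disk or by memory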
1563 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1564 // ---------------------------------------------------------------------
1565 /* */
1566 APT_DEPRECATED bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1567 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1568 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1569 {
1570 std::vector<pkgIndexFile *> Files;
1571 unsigned long EndOfSource = Files.size();
1572 if (_system->AddStatusFiles(Files) == false)
1573 return false;
1574
1575 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1576 unsigned long CurrentSize = 0;
1577 unsigned long TotalSize = 0;
1578
1579 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1580
1581 // Build the status cache
1582 if (Progress != NULL)
1583 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1584 pkgCacheGenerator Gen(Map.Get(),Progress);
1585 if (_error->PendingError() == true)
1586 return false;
1587 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1588 Files.begin()+EndOfSource,Files.end()) == false)
1589 return false;
1590
1591 if (_error->PendingError() == true)
1592 return false;
1593 *OutMap = Map.UnGuard();
1594
1595 return true;
1596 }
1597 /*}}}*/
1598 // IsDuplicateDescription /*{{{*/
1599 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1600 MD5SumValue const &CurMd5, std::string const &CurLang)
1601 {
1602    // Descriptions in the same linked list all have the same md5
1603 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1604 return false;
1605 for (; Desc.end() == false; ++Desc)
1606 if (Desc.LanguageCode() == CurLang)
1607 return true;
1608 return false;
1609 }
1610 /*}}}*/
1611 // CacheGenerator::FinishCache /*{{{*/
1612 bool pkgCacheGenerator::FinishCache(OpProgress * /*Progress*/)
1613 {
1614 return true;
1615 }
1616 /*}}}*/