apt-pkg/pkgcachegen.cc
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
38 typedef std::vector<pkgIndexFile *>::iterator FileIterator;
39 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
40
41 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
42 MD5SumValue const &CurMd5, std::string const &CurLang);
43
44 using std::string;
45
46 // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
47 // ---------------------------------------------------------------------
48 /* We set the dirty flag and make sure that is written to the disk */
49 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
50 Map(*pMap), Cache(pMap,false), Progress(Prog),
51 FoundFileDeps(0)
52 {
53 CurrentFile = 0;
54 memset(UniqHash,0,sizeof(UniqHash));
55
56 if (_error->PendingError() == true)
57 return;
58
59 if (Map.Size() == 0)
60 {
61 // Setup the map interface..
62 Cache.HeaderP = (pkgCache::Header *)Map.Data();
63 if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
64 return;
65
66 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
67
68 // Starting header
69 *Cache.HeaderP = pkgCache::Header();
70 map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
71 Cache.HeaderP->VerSysName = idxVerSysName;
72 // this pointer is set in ReMap, but we need it now for WriteUniqString
73 Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
74 map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
75 Cache.HeaderP->Architecture = idxArchitecture;
76 if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
77 return;
78 Cache.ReMap();
79 }
80 else
81 {
82 // Map directly from the existing file
83 Cache.ReMap();
84 Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
85 if (Cache.VS != _system->VS)
86 {
87 _error->Error(_("Cache has an incompatible versioning system"));
88 return;
89 }
90 }
91
92 Cache.HeaderP->Dirty = true;
93 Map.Sync(0,sizeof(pkgCache::Header));
94 }
95 /*}}}*/
96 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
97 // ---------------------------------------------------------------------
98 /* We sync the data then unset the dirty flag in two steps so as to
99 avoid a problem during a crash */
100 pkgCacheGenerator::~pkgCacheGenerator()
101 {
102 if (_error->PendingError() == true)
103 return;
104 if (Map.Sync() == false)
105 return;
106
107 Cache.HeaderP->Dirty = false;
108 Cache.HeaderP->CacheFileSize = Map.Size();
109 Map.Sync(0,sizeof(pkgCache::Header));
110 }
111 /*}}}*/
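// Editorial note: the constructor/destructor pair above forms a simple crash
// guard: Dirty is set (and synced) before any writing starts and only cleared
// after a successful full Sync. A minimal usage sketch of that lifecycle,
// assuming an anonymous in-memory map (illustrative only, not part of apt):
#if 0
static bool ExampleGenerateIntoMemory(OpProgress *Progress)
{
   // build the cache into a moveable, memory-backed map
   DynamicMMap Map(MMap::Moveable, 24*1024*1024, 1024*1024, 0);
   {
      pkgCacheGenerator Gen(&Map, Progress);     // sets HeaderP->Dirty = true
      if (_error->PendingError() == true)
         return false;
      // ... call Gen.MergeList(Parser) for each index file here ...
   }                                             // dtor syncs, clears Dirty
   return _error->PendingError() == false;
}
#endif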
112 void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
113 if (oldMap == newMap)
114 return;
115
116 if (_config->FindB("Debug::pkgCacheGen", false))
117 std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;
118
119 Cache.ReMap(false);
120
121 CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;
122
123 for (size_t i = 0; i < _count(UniqHash); ++i)
124 if (UniqHash[i] != 0)
125 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;
126
127 for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
128 i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
129 (*i)->ReMap(oldMap, newMap);
130 for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
131 i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
132 (*i)->ReMap(oldMap, newMap);
133 for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
134 i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
135 (*i)->ReMap(oldMap, newMap);
136 for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
137 i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
138 (*i)->ReMap(oldMap, newMap);
139 for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
140 i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
141 (*i)->ReMap(oldMap, newMap);
142 for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
143 i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
144 (*i)->ReMap(oldMap, newMap);
145 for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
146 i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
147 (*i)->ReMap(oldMap, newMap);
148 } /*}}}*/
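// Editorial note: every allocation below can move the underlying mmap, which
// would leave raw iterators pointing into released memory. Dynamic<Iter> is a
// small RAII guard: its constructor registers the iterator in toReMap so that
// ReMap() above can re-base all live iterators. The pattern, as used
// throughout this file (illustrative sketch):
#if 0
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);   // keep Pkg valid across remaps
   if (NewPackage(Pkg, "example", "amd64") == false)  // may grow/move the map
      return false;
   // Pkg can still be dereferenced here even if the map was relocated
#endif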
149 // CacheGenerator::WriteStringInMap /*{{{*/
150 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
151 const unsigned long &Len) {
152 void const * const oldMap = Map.Data();
153 map_ptrloc const index = Map.WriteString(String, Len);
154 if (index != 0)
155 ReMap(oldMap, Map.Data());
156 return index;
157 }
158 /*}}}*/
159 // CacheGenerator::WriteStringInMap /*{{{*/
160 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
161 void const * const oldMap = Map.Data();
162 map_ptrloc const index = Map.WriteString(String);
163 if (index != 0)
164 ReMap(oldMap, Map.Data());
165 return index;
166 }
167 /*}}}*/
168 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
169 void const * const oldMap = Map.Data();
170 map_ptrloc const index = Map.Allocate(size);
171 if (index != 0)
172 ReMap(oldMap, Map.Data());
173 return index;
174 }
175 /*}}}*/
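// Editorial note: all three helpers above remember the old base address and
// call ReMap() when a write or allocation moved the map. Raw map_ptrloc
// pointers obtained *before* such a call must be re-based by the caller; the
// idiom used later in this file (e.g. MergeListVersion, NewDepends) is
// (illustrative sketch):
#if 0
   void const * const oldMap = Map.Data();
   map_ptrloc const idx = WriteStringInMap(VerStr);    // may relocate the map
   if (oldMap != Map.Data())                           // base moved:
      LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;   // re-base
   *LastVer = idx;
#endif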
176 // CacheGenerator::MergeList - Merge the package list /*{{{*/
177 // ---------------------------------------------------------------------
178 /* This provides the generation of the entries in the cache. Each loop
179 goes through a single package record from the underlying parse engine. */
180 bool pkgCacheGenerator::MergeList(ListParser &List,
181 pkgCache::VerIterator *OutVer)
182 {
183 List.Owner = this;
184
185 unsigned int Counter = 0;
186 while (List.Step() == true)
187 {
188 string const PackageName = List.Package();
189 if (PackageName.empty() == true)
190 return false;
191
192 Counter++;
193 if (Counter % 100 == 0 && Progress != 0)
194 Progress->Progress(List.Offset());
195
196 string Arch = List.Architecture();
197 string const Version = List.Version();
198 if (Version.empty() == true && Arch.empty() == true)
199 {
200 // package descriptions
201 if (MergeListGroup(List, PackageName) == false)
202 return false;
203 continue;
204 }
205
206 if (Arch.empty() == true)
207 {
208 // use the pseudo arch 'none' for arch-less packages
209 Arch = "none";
210 /* We might be building a SingleArchCache here, which we don't want to blow
211 up into a full MultiArchCache just for these :none packages, so just ensure
212 that we always have a native package structure first for SingleArch */
213 pkgCache::PkgIterator NP;
214 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
215 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
216 // TRANSLATOR: The first placeholder is a package name,
217 // the other two should be copied verbatim as they include debug info
218 return _error->Error(_("Error occurred while processing %s (%s%d)"),
219 PackageName.c_str(), "NewPackage", 0);
220 }
221
222 // Get a pointer to the package structure
223 pkgCache::PkgIterator Pkg;
224 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
225 if (NewPackage(Pkg, PackageName, Arch) == false)
226 // TRANSLATOR: The first placeholder is a package name,
227 // the other two should be copied verbatim as they include debug info
228 return _error->Error(_("Error occurred while processing %s (%s%d)"),
229 PackageName.c_str(), "NewPackage", 1);
230
231
232 if (Version.empty() == true)
233 {
234 if (MergeListPackage(List, Pkg) == false)
235 return false;
236 }
237 else
238 {
239 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
240 return false;
241 }
242
243 if (OutVer != 0)
244 {
245 FoundFileDeps |= List.HasFileDeps();
246 return true;
247 }
248 }
249
250 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
251 return _error->Error(_("Wow, you exceeded the number of package "
252 "names this APT is capable of."));
253 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
254 return _error->Error(_("Wow, you exceeded the number of versions "
255 "this APT is capable of."));
256 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
257 return _error->Error(_("Wow, you exceeded the number of descriptions "
258 "this APT is capable of."));
259 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
260 return _error->Error(_("Wow, you exceeded the number of dependencies "
261 "this APT is capable of."));
262
263 FoundFileDeps |= List.HasFileDeps();
264 return true;
265 }
266 // CacheGenerator::MergeListGroup /*{{{*/
267 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
268 {
269 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
270 // a group has no data of its own, only packages have it, but stanzas
271 // like this one come from Translation- files to add descriptions,
272 // and without a version we don't need a description for it…
273 if (Grp.end() == true)
274 return true;
275 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
276
277 pkgCache::PkgIterator Pkg;
278 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
279 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
280 if (MergeListPackage(List, Pkg) == false)
281 return false;
282
283 return true;
284 }
285 /*}}}*/
286 // CacheGenerator::MergeListPackage /*{{{*/
287 bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
288 {
289 // we first process the package, then the descriptions
290 // (for deb this package processing is in fact a no-op)
291 pkgCache::VerIterator Ver(Cache);
292 Dynamic<pkgCache::VerIterator> DynVer(Ver);
293 if (List.UsePackage(Pkg, Ver) == false)
294 return _error->Error(_("Error occurred while processing %s (%s%d)"),
295 Pkg.Name(), "UsePackage", 1);
296
297 // Find the right version to write the description
298 MD5SumValue CurMd5 = List.Description_md5();
299 std::string CurLang = List.DescriptionLanguage();
300
301 for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
302 {
303 pkgCache::DescIterator VerDesc = Ver.DescriptionList();
304
305 // a version can only have one md5 describing it
306 if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
307 continue;
308
309 // don't add a new description if we have one for the given
310 // md5 && language
311 if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
312 continue;
313
314 pkgCache::DescIterator Desc;
315 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
316
317 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
318 if (unlikely(descindex == 0 && _error->PendingError()))
319 return _error->Error(_("Error occurred while processing %s (%s%d)"),
320 Pkg.Name(), "NewDescription", 1);
321
322 Desc->ParentPkg = Pkg.Index();
323
324 // we add at the end, so that the start is constant as we need
325 // that to be able to efficiently share these lists
326 VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
327 for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
328 map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
329 *LastNextDesc = descindex;
330
331 if (NewFileDesc(Desc,List) == false)
332 return _error->Error(_("Error occurred while processing %s (%s%d)"),
333 Pkg.Name(), "NewFileDesc", 1);
334
335 // we can stop here as all "same" versions will share the description
336 break;
337 }
338
339 return true;
340 }
341 /*}}}*/
342 // CacheGenerator::MergeListVersion /*{{{*/
343 bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
344 std::string const &Version, pkgCache::VerIterator* &OutVer)
345 {
346 pkgCache::VerIterator Ver = Pkg.VersionList();
347 Dynamic<pkgCache::VerIterator> DynVer(Ver);
348 map_ptrloc *LastVer = &Pkg->VersionList;
349 void const * oldMap = Map.Data();
350
351 unsigned long const Hash = List.VersionHash();
352 if (Ver.end() == false)
353 {
354 /* We know the list is sorted so we use that fact in the search.
355 Insertion of new versions is done with correct sorting */
356 int Res = 1;
357 for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
358 {
359 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
360 // Version is higher than the current version - insert here
361 if (Res > 0)
362 break;
363 // Version strings are equal - is the hash also equal?
364 if (Res == 0 && Ver->Hash == Hash)
365 break;
366 // proceed to the next one till we either have the right one
367 // or have found another version (which will be lower)
368 }
369
370 /* We already have a version for this item, record that we saw it */
371 if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
372 {
373 if (List.UsePackage(Pkg,Ver) == false)
374 return _error->Error(_("Error occurred while processing %s (%s%d)"),
375 Pkg.Name(), "UsePackage", 2);
376
377 if (NewFileVer(Ver,List) == false)
378 return _error->Error(_("Error occurred while processing %s (%s%d)"),
379 Pkg.Name(), "NewFileVer", 1);
380
381 // Read only a single record and return
382 if (OutVer != 0)
383 {
384 *OutVer = Ver;
385 return true;
386 }
387
388 return true;
389 }
390 }
391
392 // Add a new version
393 map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
394 if (verindex == 0 && _error->PendingError())
395 return _error->Error(_("Error occurred while processing %s (%s%d)"),
396 Pkg.Name(), "NewVersion", 1);
397
398 if (oldMap != Map.Data())
399 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
400 *LastVer = verindex;
401 Ver->ParentPkg = Pkg.Index();
402 Ver->Hash = Hash;
403
404 if (unlikely(List.NewVersion(Ver) == false))
405 return _error->Error(_("Error occurred while processing %s (%s%d)"),
406 Pkg.Name(), "NewVersion", 2);
407
408 if (unlikely(List.UsePackage(Pkg,Ver) == false))
409 return _error->Error(_("Error occurred while processing %s (%s%d)"),
410 Pkg.Name(), "UsePackage", 3);
411
412 if (unlikely(NewFileVer(Ver,List) == false))
413 return _error->Error(_("Error occurred while processing %s (%s%d)"),
414 Pkg.Name(), "NewFileVer", 2);
415
416 pkgCache::GrpIterator Grp = Pkg.Group();
417 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
418
419 /* If it is the first version of this package we need to add implicit
420 Multi-Arch dependencies to all other package versions in the group now -
421 otherwise we just add them for this new version */
422 if (Pkg.VersionList()->NextVer == 0)
423 {
424 pkgCache::PkgIterator P = Grp.PackageList();
425 Dynamic<pkgCache::PkgIterator> DynP(P);
426 for (; P.end() != true; P = Grp.NextPkg(P))
427 {
428 if (P->ID == Pkg->ID)
429 continue;
430 pkgCache::VerIterator V = P.VersionList();
431 Dynamic<pkgCache::VerIterator> DynV(V);
432 for (; V.end() != true; ++V)
433 if (unlikely(AddImplicitDepends(V, Pkg) == false))
434 return _error->Error(_("Error occurred while processing %s (%s%d)"),
435 Pkg.Name(), "AddImplicitDepends", 1);
436 }
437 /* :none packages are packages without an architecture. They are forbidden by
438 debian-policy, so usually they will only be in (old) dpkg status files -
439 and dpkg will complain about them - and they are pretty rare. We therefore
440 usually do not create conflicts while the parent is created, but only if a :none
441 package (= the target) appears. This creates incorrect dependencies on :none
442 for architecture-specific dependencies on the package we copy from, but we
443 will ignore this bug as architecture-specific dependencies are only allowed
444 in jessie and until then the :none packages should be extinct (hopefully).
445 In other words: This should work long enough to allow graceful removal of
446 these packages, it is not supposed to allow users to keep using them … */
447 if (strcmp(Pkg.Arch(), "none") == 0)
448 {
449 pkgCache::PkgIterator M = Grp.FindPreferredPkg();
450 if (M.end() == false && Pkg != M)
451 {
452 pkgCache::DepIterator D = M.RevDependsList();
453 Dynamic<pkgCache::DepIterator> DynD(D);
454 for (; D.end() == false; ++D)
455 {
456 if ((D->Type != pkgCache::Dep::Conflicts &&
457 D->Type != pkgCache::Dep::DpkgBreaks &&
458 D->Type != pkgCache::Dep::Replaces) ||
459 D.ParentPkg().Group() == Grp)
460 continue;
461
462 map_ptrloc *OldDepLast = NULL;
463 pkgCache::VerIterator ConVersion = D.ParentVer();
464 Dynamic<pkgCache::VerIterator> DynV(ConVersion);
465 // duplicate the Conflicts/Breaks/Replaces for :none arch
466 NewDepends(Pkg, ConVersion, D->Version,
467 D->CompareOp, D->Type, OldDepLast);
468 }
469 }
470 }
471 }
472 if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
473 return _error->Error(_("Error occurred while processing %s (%s%d)"),
474 Pkg.Name(), "AddImplicitDepends", 2);
475
476 // Read only a single record and return
477 if (OutVer != 0)
478 {
479 *OutVer = Ver;
480 return true;
481 }
482
483 /* Record the Description (it is not translated) */
484 MD5SumValue CurMd5 = List.Description_md5();
485 if (CurMd5.Value().empty() == true)
486 return true;
487 std::string CurLang = List.DescriptionLanguage();
488
489 /* Before we add a new description we first search in the group for
490 a version with a description of the same MD5 - if so we reuse this
491 description group instead of creating our own for this version */
492 for (pkgCache::PkgIterator P = Grp.PackageList();
493 P.end() == false; P = Grp.NextPkg(P))
494 {
495 for (pkgCache::VerIterator V = P.VersionList();
496 V.end() == false; ++V)
497 {
498 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
499 continue;
500 Ver->DescriptionList = V->DescriptionList;
501 return true;
502 }
503 }
504
505 // We haven't found reusable descriptions, so add the first description
506 pkgCache::DescIterator Desc = Ver.DescriptionList();
507 Dynamic<pkgCache::DescIterator> DynDesc(Desc);
508
509 map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
510 if (unlikely(descindex == 0 && _error->PendingError()))
511 return _error->Error(_("Error occurred while processing %s (%s%d)"),
512 Pkg.Name(), "NewDescription", 2);
513
514 Desc->ParentPkg = Pkg.Index();
515 Ver->DescriptionList = descindex;
516
517 if (NewFileDesc(Desc,List) == false)
518 return _error->Error(_("Error occurred while processing %s (%s%d)"),
519 Pkg.Name(), "NewFileDesc", 2);
520
521 return true;
522 }
523 /*}}}*/
524 /*}}}*/
525 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
526 // ---------------------------------------------------------------------
527 /* If we found any file depends while parsing the main list we need to
528 resolve them. Since it is undesirable to load the entire list of files
529 into the cache as virtual packages we do a two-stage effort. MergeList
530 identifies the file depends and this creates Provides for them by
531 re-parsing all the indexes. */
532 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
533 {
534 List.Owner = this;
535
536 unsigned int Counter = 0;
537 while (List.Step() == true)
538 {
539 string PackageName = List.Package();
540 if (PackageName.empty() == true)
541 return false;
542 string Version = List.Version();
543 if (Version.empty() == true)
544 continue;
545
546 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
547 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
548 if (Pkg.end() == true)
549 return _error->Error(_("Error occurred while processing %s (%s%d)"),
550 PackageName.c_str(), "FindPkg", 1);
551 Counter++;
552 if (Counter % 100 == 0 && Progress != 0)
553 Progress->Progress(List.Offset());
554
555 unsigned long Hash = List.VersionHash();
556 pkgCache::VerIterator Ver = Pkg.VersionList();
557 Dynamic<pkgCache::VerIterator> DynVer(Ver);
558 for (; Ver.end() == false; ++Ver)
559 {
560 if (Ver->Hash == Hash && Version == Ver.VerStr()) // compare contents, not pointers
561 {
562 if (List.CollectFileProvides(Cache,Ver) == false)
563 return _error->Error(_("Error occurred while processing %s (%s%d)"),
564 PackageName.c_str(), "CollectFileProvides", 1);
565 break;
566 }
567 }
568
569 if (Ver.end() == true)
570 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
571 }
572
573 return true;
574 }
575 /*}}}*/
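// Editorial note: the two-stage approach described above is driven by
// BuildCache() further down: a first pass calls Merge()/MergeList() on every
// index, and only if HasFileDeps() reports file dependencies a second pass
// calls MergeFileProvides() on the same indexes. A condensed sketch of that
// driver (illustrative only; the real loop is in BuildCache below):
#if 0
   for (FileIterator I = Start; I != End; ++I)
      if ((*I)->Merge(Gen, Progress) == false)          // pass 1: MergeList
         return false;
   if (Gen.HasFileDeps() == true)
      for (FileIterator I = Start; I != End; ++I)
         if ((*I)->MergeFileProvides(Gen, Progress) == false)   // pass 2
            return false;
#endif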
576 // CacheGenerator::NewGroup - Add a new group /*{{{*/
577 // ---------------------------------------------------------------------
578 /* This creates a new group structure and adds it to the hash table */
579 bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
580 {
581 Grp = Cache.FindGrp(Name);
582 if (Grp.end() == false)
583 return true;
584
585 // Get a structure
586 map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
587 if (unlikely(Group == 0))
588 return false;
589
590 Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
591 map_ptrloc const idxName = WriteStringInMap(Name);
592 if (unlikely(idxName == 0))
593 return false;
594 Grp->Name = idxName;
595
596 // Insert it into the hash table
597 unsigned long const Hash = Cache.Hash(Name);
598 Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
599 Cache.HeaderP->GrpHashTable[Hash] = Group;
600
601 Grp->ID = Cache.HeaderP->GroupCount++;
602 return true;
603 }
604 /*}}}*/
605 // CacheGenerator::NewPackage - Add a new package /*{{{*/
606 // ---------------------------------------------------------------------
607 /* This creates a new package structure and adds it to the hash table */
608 bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
609 const string &Arch) {
610 pkgCache::GrpIterator Grp;
611 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
612 if (unlikely(NewGroup(Grp, Name) == false))
613 return false;
614
615 Pkg = Grp.FindPkg(Arch);
616 if (Pkg.end() == false)
617 return true;
618
619 // Get a structure
620 map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
621 if (unlikely(Package == 0))
622 return false;
623 Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
624
625 // Insert the package into our package list
626 if (Grp->FirstPackage == 0) // the group is new
627 {
628 // Insert it into the hash table
629 unsigned long const Hash = Cache.Hash(Name);
630 Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
631 Cache.HeaderP->PkgHashTable[Hash] = Package;
632 Grp->FirstPackage = Package;
633 }
634 else // Group the Packages together
635 {
636 // this package is the new last package
637 pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
638 Pkg->NextPackage = LastPkg->NextPackage;
639 LastPkg->NextPackage = Package;
640 }
641 Grp->LastPackage = Package;
642
643 // Set the name, arch and the ID
644 Pkg->Name = Grp->Name;
645 Pkg->Group = Grp.Index();
646 // 'all' packages are mapped to the native architecture
647 map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
648 if (unlikely(idxArch == 0))
649 return false;
650 Pkg->Arch = idxArch;
651 Pkg->ID = Cache.HeaderP->PackageCount++;
652
653 return true;
654 }
655 /*}}}*/
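// Editorial note: all packages of one name share a single PkgHashTable chain;
// Grp->FirstPackage and Grp->LastPackage bracket this group's members inside
// that chain, so iterating a group is just following NextPackage between the
// two. A typical lookup, using helpers seen elsewhere in this file
// (illustrative sketch, "libc6" is an arbitrary example name):
#if 0
   pkgCache::GrpIterator Grp = Cache.FindGrp("libc6");
   for (pkgCache::PkgIterator P = Grp.PackageList();
        P.end() == false; P = Grp.NextPkg(P))
      std::clog << P.Name() << ":" << P.Arch() << std::endl;
#endif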
656 // CacheGenerator::AddImplicitDepends /*{{{*/
657 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
658 pkgCache::PkgIterator &P,
659 pkgCache::VerIterator &V)
660 {
661 // copy P.Arch() into a string here as a cache remap
662 // in NewDepends() later may alter the pointer location
663 string Arch = P.Arch() == NULL ? "" : P.Arch();
664 map_ptrloc *OldDepLast = NULL;
665 /* MultiArch handling introduces a lot of implicit Dependencies:
666 - MultiArch: same → Co-Installable if they have the same version
667 - All others conflict with all other group members */
668 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
669 pkgCache::PkgIterator D = G.PackageList();
670 Dynamic<pkgCache::PkgIterator> DynD(D);
671 map_ptrloc const VerStrIdx = V->VerStr;
672 for (; D.end() != true; D = G.NextPkg(D))
673 {
674 if (Arch == D.Arch() || D->VersionList == 0)
675 continue;
676 /* We allow only one installed arch at a time
677 per group, therefore each group member conflicts
678 with all other group members */
679 if (coInstall == true)
680 {
681 // Replaces: ${self}:other ( << ${binary:Version})
682 NewDepends(D, V, VerStrIdx,
683 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
684 OldDepLast);
685 // Breaks: ${self}:other (!= ${binary:Version})
686 NewDepends(D, V, VerStrIdx,
687 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
688 OldDepLast);
689 } else {
690 // Conflicts: ${self}:other
691 NewDepends(D, V, 0,
692 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
693 OldDepLast);
694 }
695 }
696 return true;
697 }
698 bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
699 pkgCache::PkgIterator &D)
700 {
701 /* MultiArch handling introduces a lot of implicit Dependencies:
702 - MultiArch: same → Co-Installable if they have the same version
703 - All others conflict with all other group members */
704 map_ptrloc *OldDepLast = NULL;
705 bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
706 if (coInstall == true)
707 {
708 map_ptrloc const VerStrIdx = V->VerStr;
709 // Replaces: ${self}:other ( << ${binary:Version})
710 NewDepends(D, V, VerStrIdx,
711 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
712 OldDepLast);
713 // Breaks: ${self}:other (!= ${binary:Version})
714 NewDepends(D, V, VerStrIdx,
715 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
716 OldDepLast);
717 } else {
718 // Conflicts: ${self}:other
719 NewDepends(D, V, 0,
720 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
721 OldDepLast);
722 }
723 return true;
724 }
725
726 /*}}}*/
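// Editorial note: spelled out in control-file syntax, the implicit relations
// created above for a version V of pkg:arch against every other architecture
// "other" present in the same group are:
//
//    Multi-Arch: same  ->  Replaces: pkg:other (<< V)
//                          Breaks:   pkg:other (!= V)
//    everything else   ->  Conflicts: pkg:other
//
// i.e. co-installation is only possible for Multi-Arch: same packages at
// exactly the same version; all other combinations exclude each other inside
// the group.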
727 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
728 // ---------------------------------------------------------------------
729 /* */
730 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
731 ListParser &List)
732 {
733 if (CurrentFile == 0)
734 return true;
735
736 // Get a structure
737 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
738 if (VerFile == 0)
739 return false;
740
741 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
742 VF->File = CurrentFile - Cache.PkgFileP;
743
744 // Link it to the end of the list
745 map_ptrloc *Last = &Ver->FileList;
746 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
747 Last = &V->NextFile;
748 VF->NextFile = *Last;
749 *Last = VF.Index();
750
751 VF->Offset = List.Offset();
752 VF->Size = List.Size();
753 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
754 Cache.HeaderP->MaxVerFileSize = VF->Size;
755 Cache.HeaderP->VerFileCount++;
756
757 return true;
758 }
759 /*}}}*/
760 // CacheGenerator::NewVersion - Create a new Version /*{{{*/
761 // ---------------------------------------------------------------------
762 /* This puts a version structure in the linked list */
763 unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
764 const string &VerStr,
765 unsigned long Next)
766 {
767 // Get a structure
768 map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
769 if (Version == 0)
770 return 0;
771
772 // Fill it in
773 Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
774 //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
775 Ver->NextVer = Next;
776 Ver->ID = Cache.HeaderP->VersionCount++;
777 map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
778 if (unlikely(idxVerStr == 0))
779 return 0;
780 Ver->VerStr = idxVerStr;
781
782 return Version;
783 }
784 /*}}}*/
785 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
786 // ---------------------------------------------------------------------
787 /* */
788 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
789 ListParser &List)
790 {
791 if (CurrentFile == 0)
792 return true;
793
794 // Get a structure
795 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
796 if (DescFile == 0)
797 return false;
798
799 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
800 DF->File = CurrentFile - Cache.PkgFileP;
801
802 // Link it to the end of the list
803 map_ptrloc *Last = &Desc->FileList;
804 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
805 Last = &D->NextFile;
806
807 DF->NextFile = *Last;
808 *Last = DF.Index();
809
810 DF->Offset = List.Offset();
811 DF->Size = List.Size();
812 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
813 Cache.HeaderP->MaxDescFileSize = DF->Size;
814 Cache.HeaderP->DescFileCount++;
815
816 return true;
817 }
818 /*}}}*/
819 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
820 // ---------------------------------------------------------------------
821 /* This puts a description structure in the linked list */
822 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
823 const string &Lang,
824 const MD5SumValue &md5sum,
825 map_ptrloc idxmd5str)
826 {
827 // Get a structure
828 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
829 if (Description == 0)
830 return 0;
831
832 // Fill it in
833 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
834 Desc->ID = Cache.HeaderP->DescriptionCount++;
835 map_ptrloc const idxlanguage_code = WriteUniqString(Lang);
836 if (unlikely(idxlanguage_code == 0))
837 return 0;
838 Desc->language_code = idxlanguage_code;
839
840 if (idxmd5str != 0)
841 Desc->md5sum = idxmd5str;
842 else
843 {
844 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
845 if (unlikely(idxmd5sum == 0))
846 return 0;
847 Desc->md5sum = idxmd5sum;
848 }
849
850 return Description;
851 }
852 /*}}}*/
853 // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
854 // ---------------------------------------------------------------------
855 /* This creates a dependency element in the tree. It is linked to the
856 version and to the package that it is pointing to. */
857 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
858 pkgCache::VerIterator &Ver,
859 string const &Version,
860 unsigned int const &Op,
861 unsigned int const &Type,
862 map_ptrloc* &OldDepLast)
863 {
864 map_ptrloc index = 0;
865 if (Version.empty() == false)
866 {
867 int const CmpOp = Op & 0x0F;
868 // =-deps are used (79:1) for lockstep on same-source packages (e.g. data-packages)
869 if (CmpOp == pkgCache::Dep::Equals && strcmp(Version.c_str(), Ver.VerStr()) == 0)
870 index = Ver->VerStr;
871
872 if (index == 0)
873 {
874 void const * const oldMap = Map.Data();
875 index = WriteStringInMap(Version);
876 if (unlikely(index == 0))
877 return false;
878 if (oldMap != Map.Data())
879 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
880 }
881 }
882 return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast);
883 }
884 bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
885 pkgCache::VerIterator &Ver,
886 map_ptrloc const Version,
887 unsigned int const &Op,
888 unsigned int const &Type,
889 map_ptrloc* &OldDepLast)
890 {
891 void const * const oldMap = Map.Data();
892 // Get a structure
893 map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
894 if (unlikely(Dependency == 0))
895 return false;
896
897 // Fill it in
898 pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
899 Dynamic<pkgCache::DepIterator> DynDep(Dep);
900 Dep->ParentVer = Ver.Index();
901 Dep->Type = Type;
902 Dep->CompareOp = Op;
903 Dep->Version = Version;
904 Dep->ID = Cache.HeaderP->DependsCount++;
905
906 // Link it to the package
907 Dep->Package = Pkg.Index();
908 Dep->NextRevDepends = Pkg->RevDepends;
909 Pkg->RevDepends = Dep.Index();
910
911 // Do we know where to link the Dependency to?
912 if (OldDepLast == NULL)
913 {
914 OldDepLast = &Ver->DependsList;
915 for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
916 OldDepLast = &D->NextDepends;
917 } else if (oldMap != Map.Data())
918 OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
919
920 Dep->NextDepends = *OldDepLast;
921 *OldDepLast = Dep.Index();
922 OldDepLast = &Dep->NextDepends;
923
924 return true;
925 }
926 /*}}}*/
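// Editorial note: OldDepLast caches the tail of the version's dependency list
// between calls. Without it every NewDepends() would have to walk the whole
// DependsList again to find the end, making dependency parsing quadratic;
// with it, appending the next dependency of the same version is O(1). The
// cache is reset in ListParser::NewDepends() below whenever the target
// version changes, and re-based above whenever an allocation moved the map.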
927 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
928 // ---------------------------------------------------------------------
929 /* This creates a Group and the Package to link this dependency to if
930 needed and also handles the caching of the old endpoint */
931 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
932 const string &PackageName,
933 const string &Arch,
934 const string &Version,
935 unsigned int Op,
936 unsigned int Type)
937 {
938 pkgCache::GrpIterator Grp;
939 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
940 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
941 return false;
942
943 // Locate the target package
944 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
945 // we don't create 'none' packages and their dependencies if we can avoid it …
946 if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
947 return true;
948 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
949 if (Pkg.end() == true) {
950 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
951 return false;
952 }
953
954 // Is it a file dependency?
955 if (unlikely(PackageName[0] == '/'))
956 FoundFileDeps = true;
957
958 /* Caching the old end point speeds up generation substantially */
959 if (OldDepVer != Ver) {
960 OldDepLast = NULL;
961 OldDepVer = Ver;
962 }
963
964 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
965 }
966 /*}}}*/
967 // ListParser::NewProvides - Create a Provides element /*{{{*/
968 // ---------------------------------------------------------------------
969 /* */
970 bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
971 const string &PkgName,
972 const string &PkgArch,
973 const string &Version)
974 {
975 pkgCache &Cache = Owner->Cache;
976
977 // We do not add self-referencing provides
978 if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
979 (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
980 return true;
981
982 // Get a structure
983 map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
984 if (unlikely(Provides == 0))
985 return false;
986 Cache.HeaderP->ProvidesCount++;
987
988 // Fill it in
989 pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
990 Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
991 Prv->Version = Ver.Index();
992 Prv->NextPkgProv = Ver->ProvidesList;
993 Ver->ProvidesList = Prv.Index();
994 if (Version.empty() == false) {
995 map_ptrloc const idxProvideVersion = WriteString(Version);
996 Prv->ProvideVersion = idxProvideVersion;
997 if (unlikely(idxProvideVersion == 0))
998 return false;
999 }
1000
1001 // Locate the target package
1002 pkgCache::PkgIterator Pkg;
1003 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
1004 if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
1005 return false;
1006
1007 // Link it to the package
1008 Prv->ParentPkg = Pkg.Index();
1009 Prv->NextProvides = Pkg->ProvidesList;
1010 Pkg->ProvidesList = Prv.Index();
1011
1012 return true;
1013 }
1014 /*}}}*/
1015 // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
1016 // ---------------------------------------------------------------------
1017 /* This is used to select which file is to be associated with all newly
1018 added versions. The caller is responsible for setting the IMS fields. */
1019 bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
1020 const pkgIndexFile &Index,
1021 unsigned long Flags)
1022 {
1023 // Get some space for the structure
1024 map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
1025 if (unlikely(idxFile == 0))
1026 return false;
1027 CurrentFile = Cache.PkgFileP + idxFile;
1028
1029 // Fill it in
1030 map_ptrloc const idxFileName = WriteStringInMap(File);
1031 map_ptrloc const idxSite = WriteUniqString(Site);
1032 if (unlikely(idxFileName == 0 || idxSite == 0))
1033 return false;
1034 CurrentFile->FileName = idxFileName;
1035 CurrentFile->Site = idxSite;
1036 CurrentFile->NextFile = Cache.HeaderP->FileList;
1037 CurrentFile->Flags = Flags;
1038 CurrentFile->ID = Cache.HeaderP->PackageFileCount;
1039 map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
1040 if (unlikely(idxIndexType == 0))
1041 return false;
1042 CurrentFile->IndexType = idxIndexType;
1043 PkgFileName = File;
1044 Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
1045 Cache.HeaderP->PackageFileCount++;
1046
1047 if (Progress != 0)
1048 Progress->SubProgress(Index.Size());
1049 return true;
1050 }
1051 /*}}}*/
1052 // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
1053 // ---------------------------------------------------------------------
1054 /* This is used to create handles to strings. Given the same text it
1055 always returns the same number */
1056 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
1057 unsigned int Size)
1058 {
1059 /* We use a very small transient hash table here; this speeds up generation
1060 by a fair amount on slower machines */
1061 pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
1062 if (Bucket != 0 &&
1063 stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
1064 return Bucket->String;
1065
1066 // Search for an insertion point
1067 pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
1068 int Res = 1;
1069 map_ptrloc *Last = &Cache.HeaderP->StringList;
1070 for (; I != Cache.StringItemP; Last = &I->NextItem,
1071 I = Cache.StringItemP + I->NextItem)
1072 {
1073 Res = stringcmp(S,S+Size,Cache.StrP + I->String);
1074 if (Res >= 0)
1075 break;
1076 }
1077
1078 // Match
1079 if (Res == 0)
1080 {
1081 Bucket = I;
1082 return I->String;
1083 }
1084
1085 // Get a structure
1086 void const * const oldMap = Map.Data();
1087 map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
1088 if (Item == 0)
1089 return 0;
1090
1091 map_ptrloc const idxString = WriteStringInMap(S,Size);
1092 if (unlikely(idxString == 0))
1093 return 0;
1094 if (oldMap != Map.Data()) {
1095 Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
1096 I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
1097 }
1098 *Last = Item;
1099
1100 // Fill in the structure
1101 pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
1102 ItemP->NextItem = I - Cache.StringItemP;
1103 ItemP->String = idxString;
1104
1105 Bucket = ItemP;
1106 return ItemP->String;
1107 }
1108 /*}}}*/
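// Editorial note: unique strings are resolved in two stages. A tiny transient
// table (UniqHash), keyed on the first two characters, short-circuits the
// common case of the same string being written over and over (architecture
// names, index type labels, ...). On a miss the sorted, cache-resident
// StringItem list is walked; because the list is kept sorted, the walk stops
// as soon as stringcmp() reports a match or an overshoot, and a new item is
// spliced in at exactly that point so the list stays sorted.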
1109 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1110 // ---------------------------------------------------------------------
1111 /* This just verifies that each file in the list of index files exists,
1112 has matching attributes with the cache and the cache does not have
1113 any extra files. */
1114 static bool CheckValidity(const string &CacheFile,
1115 pkgSourceList &List,
1116 FileIterator Start,
1117 FileIterator End,
1118 MMap **OutMap = 0)
1119 {
1120 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1121 // No file, certainly invalid
1122 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1123 {
1124 if (Debug == true)
1125 std::clog << "CacheFile doesn't exist" << std::endl;
1126 return false;
1127 }
1128
1129 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1130 {
1131 if (Debug == true)
1132 std::clog << "sources.list is newer than the cache" << std::endl;
1133 return false;
1134 }
1135
1136 // Map it
1137 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1138 SPtr<MMap> Map = new MMap(CacheF,0);
1139 pkgCache Cache(Map);
1140 if (_error->PendingError() == true || Map->Size() == 0)
1141 {
1142 if (Debug == true)
1143 std::clog << "Errors are pending or Map is empty()" << std::endl;
1144 _error->Discard();
1145 return false;
1146 }
1147
1148 /* Now we check every index file, see if it is in the cache,
1149 verify the IMS data and check that it is on the disk too.. */
1150 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1151 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1152 for (; Start != End; ++Start)
1153 {
1154 if (Debug == true)
1155 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1156 if ((*Start)->HasPackages() == false)
1157 {
1158 if (Debug == true)
1159 std::clog << "Has NO packages" << std::endl;
1160 continue;
1161 }
1162
1163 if ((*Start)->Exists() == false)
1164 {
1165 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1166 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1167 (*Start)->Describe().c_str());
1168 #endif
1169 if (Debug == true)
1170 std::clog << "file doesn't exist" << std::endl;
1171 continue;
1172 }
1173
1174 // FindInCache is also expected to do an IMS check.
1175 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1176 if (File.end() == true)
1177 {
1178 if (Debug == true)
1179 std::clog << "FindInCache returned end-Pointer" << std::endl;
1180 return false;
1181 }
1182
1183 Visited[File->ID] = true;
1184 if (Debug == true)
1185 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1186 }
1187
1188 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1189 if (Visited[I] == false)
1190 {
1191 if (Debug == true)
1192 std::clog << "File with ID " << I << " wasn't visited" << std::endl;
1193 return false;
1194 }
1195
1196 if (_error->PendingError() == true)
1197 {
1198 if (Debug == true)
1199 {
1200 std::clog << "Validity failed because of pending errors:" << std::endl;
1201 _error->DumpErrors();
1202 }
1203 _error->Discard();
1204 return false;
1205 }
1206
1207 if (OutMap != 0)
1208 *OutMap = Map.UnGuard();
1209 return true;
1210 }
1211 /*}}}*/
1212 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1213 // ---------------------------------------------------------------------
1214 /* Size is kind of an abstract notion that is only used for the progress
1215 meter */
1216 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1217 {
1218 unsigned long TotalSize = 0;
1219 for (; Start != End; ++Start)
1220 {
1221 if ((*Start)->HasPackages() == false)
1222 continue;
1223 TotalSize += (*Start)->Size();
1224 }
1225 return TotalSize;
1226 }
1227 /*}}}*/
1228 // BuildCache - Merge the list of index files into the cache /*{{{*/
1229 // ---------------------------------------------------------------------
1230 /* */
1231 static bool BuildCache(pkgCacheGenerator &Gen,
1232 OpProgress *Progress,
1233 unsigned long &CurrentSize,unsigned long TotalSize,
1234 FileIterator Start, FileIterator End)
1235 {
1236 FileIterator I;
1237 for (I = Start; I != End; ++I)
1238 {
1239 if ((*I)->HasPackages() == false)
1240 continue;
1241
1242 if ((*I)->Exists() == false)
1243 continue;
1244
1245 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1246 {
1247 _error->Warning("Duplicate sources.list entry %s",
1248 (*I)->Describe().c_str());
1249 continue;
1250 }
1251
1252 unsigned long Size = (*I)->Size();
1253 if (Progress != NULL)
1254 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1255 CurrentSize += Size;
1256
1257 if ((*I)->Merge(Gen,Progress) == false)
1258 return false;
1259 }
1260
1261 if (Gen.HasFileDeps() == true)
1262 {
1263 if (Progress != NULL)
1264 Progress->Done();
1265 TotalSize = ComputeSize(Start, End);
1266 CurrentSize = 0;
1267 for (I = Start; I != End; ++I)
1268 {
1269 unsigned long Size = (*I)->Size();
1270 if (Progress != NULL)
1271 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1272 CurrentSize += Size;
1273 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1274 return false;
1275 }
1276 }
1277
1278 return true;
1279 }
1280 /*}}}*/
1281 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1282 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1283 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1284 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1285 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1286 Flags |= MMap::Moveable;
1287 if (_config->FindB("APT::Cache-Fallback", false) == true)
1288 Flags |= MMap::Fallback;
1289 if (CacheF != NULL)
1290 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1291 else
1292 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1293 }
1294 /*}}}*/
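// Editorial note: the mmap geometry is entirely configuration driven. An
// apt.conf fragment tuning it could look like this (values are illustrative;
// the defaults are the ones hard-coded above):
//
//    APT::Cache-Start "25165824";    // initial size of the cache mmap (bytes)
//    APT::Cache-Grow  "1048576";     // grow step when the cache runs full
//    APT::Cache-Limit "0";           // 0 = no upper bound on the cache size
//    APT::Cache-Fallback "false";    // allow falling back to a plain buffer
//
// Cache-Fallback additionally sets MMap::Fallback, which lets the map fall
// back to a simple memory buffer if a moveable mmap cannot be used.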
1295 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1296 // ---------------------------------------------------------------------
1297 /* This makes sure that the status cache (the cache that has all
1298 index files from the sources list and all local ones) is ready
1299 to be mmapped. If OutMap is not zero then a MMap object representing
1300 the cache will be stored there. This is pretty much mandatory if you
1301 are using AllowMem. AllowMem lets the function be run as non-root
1302 where it builds the cache 'fast' into a memory buffer. */
1303 __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
1304 MMap **OutMap, bool AllowMem)
1305 { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
1306 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
1307 MMap **OutMap,bool AllowMem)
1308 {
1309 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1310
1311 std::vector<pkgIndexFile *> Files;
1312 for (std::vector<metaIndex *>::const_iterator i = List.begin();
1313 i != List.end();
1314 ++i)
1315 {
1316 std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
1317 for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
1318 j != Indexes->end();
1319 ++j)
1320 Files.push_back (*j);
1321 }
1322
1323 unsigned long const EndOfSource = Files.size();
1324 if (_system->AddStatusFiles(Files) == false)
1325 return false;
1326
1327 // Decide if we can write to the files..
1328 string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
1329 string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
1330
1331 // ensure the cache directory exists
1332 if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
1333 {
1334 string dir = _config->FindDir("Dir::Cache");
1335 size_t const len = dir.size();
1336 if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
1337 dir = dir.substr(0, len - 5);
1338 if (CacheFile.empty() == false)
1339 CreateDirectory(dir, flNotFile(CacheFile));
1340 if (SrcCacheFile.empty() == false)
1341 CreateDirectory(dir, flNotFile(SrcCacheFile));
1342 }
1343
1344 // Decide if we can write to the cache
1345 bool Writeable = false;
1346 if (CacheFile.empty() == false)
1347 Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
1348 else
1349 if (SrcCacheFile.empty() == false)
1350 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
1351 if (Debug == true)
1352 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
1353
1354 if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
1355 return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
1356
1357 if (Progress != NULL)
1358 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1359
1360 // Cache is OK, Fin.
1361 if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
1362 {
1363 if (Progress != NULL)
1364 Progress->OverallProgress(1,1,1,_("Reading package lists"));
1365 if (Debug == true)
1366 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
1367 return true;
1368 }
1369 else if (Debug == true)
1370 std::clog << "pkgcache.bin is NOT valid" << std::endl;
1371
1372 /* At this point we know we need to reconstruct the package cache,
1373 begin. */
1374 SPtr<FileFd> CacheF;
1375 SPtr<DynamicMMap> Map;
1376 if (Writeable == true && CacheFile.empty() == false)
1377 {
1378 _error->PushToStack();
1379 unlink(CacheFile.c_str());
1380 CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
1381 fchmod(CacheF->Fd(),0644);
1382 Map = CreateDynamicMMap(CacheF, MMap::Public);
1383 if (_error->PendingError() == true)
1384 {
1385 delete CacheF.UnGuard();
1386 delete Map.UnGuard();
1387 if (Debug == true)
1388 std::clog << "Open filebased MMap FAILED" << std::endl;
1389 Writeable = false;
1390 if (AllowMem == false)
1391 {
1392 _error->MergeWithStack();
1393 return false;
1394 }
1395 _error->RevertToStack();
1396 }
1397 else
1398 {
1399 _error->MergeWithStack();
1400 if (Debug == true)
1401 std::clog << "Open filebased MMap" << std::endl;
1402 }
1403 }
1404 if (Writeable == false || CacheFile.empty() == true)
1405 {
1406 // Just build it in memory..
1407 Map = CreateDynamicMMap(NULL);
1408 if (Debug == true)
1409 std::clog << "Open memory Map (not filebased)" << std::endl;
1410 }
1411
1412 // Let's try the source cache.
1413 unsigned long CurrentSize = 0;
1414 unsigned long TotalSize = 0;
1415 if (CheckValidity(SrcCacheFile, List, Files.begin(),
1416 Files.begin()+EndOfSource) == true)
1417 {
1418 if (Debug == true)
1419 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
1420 // Preload the map with the source cache
1421 FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
1422 unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
1423 if ((alloc == 0 && _error->PendingError())
1424 || SCacheF.Read((unsigned char *)Map->Data() + alloc,
1425 SCacheF.Size()) == false)
1426 return false;
1427
1428 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1429
1430 // Build the status cache
1431 pkgCacheGenerator Gen(Map.Get(),Progress);
1432 if (_error->PendingError() == true)
1433 return false;
1434 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1435 Files.begin()+EndOfSource,Files.end()) == false)
1436 return false;
1437 }
1438 else
1439 {
1440 if (Debug == true)
1441 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
1442 TotalSize = ComputeSize(Files.begin(),Files.end());
1443
1444 // Build the source cache
1445 pkgCacheGenerator Gen(Map.Get(),Progress);
1446 if (_error->PendingError() == true)
1447 return false;
1448 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1449 Files.begin(),Files.begin()+EndOfSource) == false)
1450 return false;
1451
1452 // Write it back
1453 if (Writeable == true && SrcCacheFile.empty() == false)
1454 {
1455 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
1456 if (_error->PendingError() == true)
1457 return false;
1458
1459 fchmod(SCacheF.Fd(),0644);
1460
1461 // Write out the main data
1462 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
1463 return _error->Error(_("IO Error saving source cache"));
1464 SCacheF.Sync();
1465
1466 // Write out the proper header
1467 Gen.GetCache().HeaderP->Dirty = false;
1468 if (SCacheF.Seek(0) == false ||
1469 SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
1470 return _error->Error(_("IO Error saving source cache"));
1471 Gen.GetCache().HeaderP->Dirty = true;
1472 SCacheF.Sync();
1473 }
1474
1475 // Build the status cache
1476 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1477 Files.begin()+EndOfSource,Files.end()) == false)
1478 return false;
1479 }
1480 if (Debug == true)
1481 std::clog << "Caches are ready for shipping" << std::endl;
1482
1483 if (_error->PendingError() == true)
1484 return false;
1485 if (OutMap != 0)
1486 {
1487 if (CacheF != 0)
1488 {
1489 delete Map.UnGuard();
1490 *OutMap = new MMap(*CacheF,0);
1491 }
1492 else
1493 {
1494 *OutMap = Map.UnGuard();
1495 }
1496 }
1497
1498 return true;
1499 }
1500 /*}}}*/
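// Editorial note: MakeStatusCache() is the usual entry point for building or
// validating pkgcache.bin. A hedged caller sketch, assuming the standard
// pkgSourceList and OpTextProgress helpers (not part of this file):
#if 0
static pkgCache *ExampleOpenCache()
{
   pkgSourceList List;
   if (List.ReadMainList() == false)
      return NULL;
   OpTextProgress Prog(*_config);
   MMap *Map = NULL;
   if (pkgCacheGenerator::MakeStatusCache(List, &Prog, &Map, true) == false)
      return NULL;
   return new pkgCache(Map);          // caller owns both Map and the pkgCache
}
#endif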
1501 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
1502 // ---------------------------------------------------------------------
1503 /* */
1504 __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
1505 { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1506 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1507 {
1508 std::vector<pkgIndexFile *> Files;
1509 unsigned long EndOfSource = Files.size();
1510 if (_system->AddStatusFiles(Files) == false)
1511 return false;
1512
1513 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1514 unsigned long CurrentSize = 0;
1515 unsigned long TotalSize = 0;
1516
1517 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1518
1519 // Build the status cache
1520 if (Progress != NULL)
1521 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1522 pkgCacheGenerator Gen(Map.Get(),Progress);
1523 if (_error->PendingError() == true)
1524 return false;
1525 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1526 Files.begin()+EndOfSource,Files.end()) == false)
1527 return false;
1528
1529 if (_error->PendingError() == true)
1530 return false;
1531 *OutMap = Map.UnGuard();
1532
1533 return true;
1534 }
1535 /*}}}*/
1536 // IsDuplicateDescription /*{{{*/
1537 static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1538 MD5SumValue const &CurMd5, std::string const &CurLang)
1539 {
1540 // Descriptions in the same linked list all have the same md5
1541 if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
1542 return false;
1543 for (; Desc.end() == false; ++Desc)
1544 if (Desc.LanguageCode() == CurLang)
1545 return true;
1546 return false;
1547 }
1548 /*}}}*/
1549 // CacheGenerator::FinishCache /*{{{*/
1550 bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
1551 {
1552 return true;
1553 }
1554 /*}}}*/