// -*- mode: cpp; mode: fold -*-
// $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
/* ######################################################################

   Package Cache Generator - Generator for the cache structure.

   This builds the cache structure from the abstract package list parser.

   ##################################################################### */
// Include Files							/*{{{*/
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
#include <apt-pkg/macros.h>
#include <apt-pkg/metaindex.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashsum_template.h>
#include <apt-pkg/indexfile.h>
#include <apt-pkg/md5.h>
#include <apt-pkg/mmap.h>
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/cacheiterators.h>

#include <apti18n.h>
									/*}}}*/
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
			MD5SumValue const &CurMd5, std::string const &CurLang);

using std::string;
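// Note (editorial summary, inferred from this file and pkgcachegen.h): the
// Dynamic<Iter> helper keeps a static registry (toReMap) of all currently live
// cache iterators.  Whenever the underlying DynamicMMap has to grow and may
// therefore move in memory, ReMap() below walks this registry and re-bases
// every registered iterator, so code in this file can hold iterators across
// allocations without them silently going stale.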
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that it is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
		    Map(*pMap), Cache(pMap,false), Progress(Prog),
		    FoundFileDeps(0)
{
   CurrentFile = 0;
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      return;

   if (Map.Size() == 0)
   {
      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
	 return;

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      *Cache.HeaderP = pkgCache::Header();
      map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->VerSysName = idxVerSysName;
      // this pointer is set in ReMap, but we need it now for WriteUniqString
      Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
      map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
      Cache.HeaderP->Architecture = idxArchitecture;
      if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
	 return;
   }
   else
   {
      // Map directly from the existing file
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
      {
	 _error->Error(_("Cache has an incompatible versioning system"));
	 return;
      }
   }

   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
// CacheGenerator::~pkgCacheGenerator - Destructor			/*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   avoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
{
   if (_error->PendingError() == true)
      return;
   if (Map.Sync() == false)
      return;

   Cache.HeaderP->Dirty = false;
   Cache.HeaderP->CacheFileSize = Map.Size();
   Map.Sync(0,sizeof(pkgCache::Header));
}
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
   if (oldMap == newMap)
      return;

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;

   CurrentFile += (pkgCache::PackageFile const * const) newMap - (pkgCache::PackageFile const * const) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
	 UniqHash[i] += (pkgCache::StringItem const * const) newMap - (pkgCache::StringItem const * const) oldMap;

   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
	i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
	i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
}
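// Note (editorial, describing the code above): the fix-up arithmetic relies on
// both casts using the same element type, so an expression of the form
//    Ptr += (T const * const) newMap - (T const * const) oldMap;
// adds the distance between the old and the new mapping measured in whole T
// elements.  Since every pointer into the map keeps its offset relative to the
// start of the mapping, this shifts it to the equivalent spot in the new map.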
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
					const unsigned long &Len) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String, Len);
   if (index != 0)
      ReMap(oldMap, Map.Data());
   return index;
}
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String);
   if (index != 0)
      ReMap(oldMap, Map.Data());
   return index;
}
map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.Allocate(size);
   if (index != 0)
      ReMap(oldMap, Map.Data());
   return index;
}
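// Note (editorial, describing usage in this file): these three helpers are the
// only places that can grow the map, and each calls ReMap() after a successful
// write.  Callers that keep raw map_ptrloc* pointers of their own across such a
// call (for example OldDepLast in NewDepends or Last in WriteUniqString)
// additionally compare Map.Data() against the remembered old base and shift
// their pointers by the same delta, because ReMap() only knows about the
// registered Dynamic<> iterators, CurrentFile and the UniqHash buckets.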
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine. */
bool pkgCacheGenerator::MergeList(ListParser &List,
				  pkgCache::VerIterator *OutVer)
{
   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string const PackageName = List.Package();
      if (PackageName.empty() == true)
	 return false;

      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      string Arch = List.Architecture();
      string const Version = List.Version();
      if (Version.empty() == true && Arch.empty() == true)
      {
	 // package descriptions
	 if (MergeListGroup(List, PackageName) == false)
	    return false;
	 continue;
      }

      if (Arch.empty() == true)
      {
	 // use the pseudo arch 'none' for arch-less packages
	 Arch = "none";
	 /* We might build a SingleArchCache here, which we don't want to blow up
	    just for these :none packages to a proper MultiArchCache, so just ensure
	    that we always have a native package structure first for SingleArch */
	 pkgCache::PkgIterator NP;
	 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
	 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
	    // TRANSLATOR: The first placeholder is a package name,
	    // the other two should be copied verbatim as they include debug info
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 PackageName.c_str(), "NewPackage", 0);
      }

      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (NewPackage(Pkg, PackageName, Arch) == false)
	 // TRANSLATOR: The first placeholder is a package name,
	 // the other two should be copied verbatim as they include debug info
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "NewPackage", 1);

      if (Version.empty() == true)
      {
	 if (MergeListPackage(List, Pkg) == false)
	    return false;
      }
      else
      {
	 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
	    return false;
      }

      if (OutVer != 0)
      {
	 FoundFileDeps |= List.HasFileDeps();
	 return true;
      }
   }

   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
			     "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
			     "this APT is capable of."));

   FoundFileDeps |= List.HasFileDeps();
   return true;
}
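// Note (editorial, describing the checks above): the ID members of Package,
// Version, Description and Dependency are fixed-width fields in the mmap'ed
// cache structures, so (1ULL << sizeof(field)*8) - 1 is simply the largest
// value such a field can hold; MergeList refuses to continue once a counter
// would no longer fit into its on-disk ID field.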
// CacheGenerator::MergeListGroup					/*{{{*/
bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
{
   pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
   // a group has no data of its own, only packages have it, but stanzas
   // like this one come from Translation- files to add descriptions,
   // and without a version we don't need a description for it…
   if (Grp.end() == true)
      return true;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
      if (MergeListPackage(List, Pkg) == false)
	 return false;

   return true;
}
// CacheGenerator::MergeListPackage					/*{{{*/
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
{
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true || List.Description().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
   {
      pkgCache::DescIterator VerDesc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)
	 continue;

      // don't add a new description if we have one for the given lang-code
      if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)
	 continue;

      pkgCache::DescIterator Desc;
      Dynamic<pkgCache::DescIterator> DynDesc(Desc);

      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
      if (unlikely(descindex == 0 && _error->PendingError()))
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewDescription", 1);

      Desc->ParentPkg = Pkg.Index();

      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
      for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
      map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
      *LastNextDesc = descindex;

      if (NewFileDesc(Desc,List) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
      break;
   }

   return true;
}
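// Note (editorial, describing the loop above): descriptions are keyed by the
// MD5 of the untranslated text.  A translated description from a Translation-
// file is attached to the first version whose description list carries that
// MD5; since versions with an identical description share one list, a single
// append is enough, and appending at the tail keeps the list head stable for
// all the sharing versions.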
// CacheGenerator::MergeListVersion					/*{{{*/
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
					 std::string const &Version, pkgCache::VerIterator* &OutVer)
{
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
   {
      /* We know the list is sorted so we use that fact in the search.
	 Insertion of new versions is done with correct sorting */
      int Res = 1;
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
	 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
	 // Version is higher than current version - insert here
	 if (Res > 0)
	    break;
	 // Versionstrings are equal - is hash also equal?
	 if (Res == 0 && Ver->Hash == Hash)
	    break;
	 // proceed with the next till we have either the right
	 // or we found another version (which will be lower)
      }

      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
	 if (List.UsePackage(Pkg,Ver) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "UsePackage", 2);

	 if (NewFileVer(Ver,List) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "NewFileVer", 1);

	 // Read only a single record and return
	 if (OutVer != 0)
	 {
	    *OutVer = Ver;
	    FoundFileDeps |= List.HasFileDeps();
	    return true;
	 }

	 return true;
      }
   }

   // Add a new version
   map_ptrloc const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 1);

   if (oldMap != Map.Data())
      LastVer += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
   *LastVer = verindex;

   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileVer", 2);

   pkgCache::GrpIterator Grp = Pkg.Group();
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   /* If it is the first version of this package we need to add implicit
      Multi-Arch dependencies to all other package versions in the group now -
      otherwise we just add them for this new version */
   if (Pkg.VersionList()->NextVer == 0)
   {
      pkgCache::PkgIterator P = Grp.PackageList();
      Dynamic<pkgCache::PkgIterator> DynP(P);
      for (; P.end() != true; P = Grp.NextPkg(P))
      {
	 if (P->ID == Pkg->ID)
	    continue;
	 pkgCache::VerIterator V = P.VersionList();
	 Dynamic<pkgCache::VerIterator> DynV(V);
	 for (; V.end() != true; ++V)
	    if (unlikely(AddImplicitDepends(V, Pkg) == false))
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    Pkg.Name(), "AddImplicitDepends", 1);
      }
   }
   /* :none packages are packages without an architecture. They are forbidden by
      debian-policy, so usually they will only be in (old) dpkg status files -
      and dpkg will complain about them - and are pretty rare. We therefore do
      usually not create conflicts while the parent is created, but only if a :none
      package (= the target) appears. This creates incorrect dependencies on :none
      for architecture-specific dependencies on the package we copy from, but we
      will ignore this bug as architecture-specific dependencies are only allowed
      in jessie and until then the :none packages should be extinct (hopefully).
      In other words: This should work long enough to allow graceful removal of
      these packages, it is not supposed to allow users to keep using them … */
   if (strcmp(Pkg.Arch(), "none") == 0)
   {
      pkgCache::PkgIterator M = Grp.FindPreferredPkg();
      if (M.end() == false && Pkg != M)
      {
	 pkgCache::DepIterator D = M.RevDependsList();
	 Dynamic<pkgCache::DepIterator> DynD(D);
	 for (; D.end() == false; ++D)
	 {
	    if ((D->Type != pkgCache::Dep::Conflicts &&
		 D->Type != pkgCache::Dep::DpkgBreaks &&
		 D->Type != pkgCache::Dep::Replaces) ||
		D.ParentPkg().Group() == Grp)
	       continue;

	    map_ptrloc *OldDepLast = NULL;
	    pkgCache::VerIterator ConVersion = D.ParentVer();
	    Dynamic<pkgCache::VerIterator> DynV(ConVersion);
	    // duplicate the Conflicts/Breaks/Replaces for :none arch
	    NewDepends(Pkg, ConVersion, D->Version,
		       D->CompareOp, D->Type, OldDepLast);
	 }
      }
   }

   if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "AddImplicitDepends", 2);

   // Read only a single record and return
   if (OutVer != 0)
   {
      *OutVer = Ver;
      FoundFileDeps |= List.HasFileDeps();
      return true;
   }

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true || List.Description().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   for (pkgCache::PkgIterator P = Grp.PackageList();
	P.end() == false; P = Grp.NextPkg(P))
   {
      for (pkgCache::VerIterator V = P.VersionList();
	   V.end() == false; ++V)
      {
	 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
	    continue;
	 Ver->DescriptionList = V->DescriptionList;
	 return true;
      }
   }

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);

   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewDescription", 2);

   Desc->ParentPkg = Pkg.Index();
   Ver->DescriptionList = descindex;

   if (NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileDesc", 2);

   return true;
}
// CacheGenerator::MergeFileProvides - Merge file provides		/*{{{*/
// ---------------------------------------------------------------------
/* If we found any file depends while parsing the main list we need to
   resolve them. Since it is undesired to load the entire list of files
   into the cache as virtual packages we do a two-stage effort. MergeList
   identifies the file depends and this creates Provides for them by
   re-parsing all the indexes. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
{
   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string PackageName = List.Package();
      if (PackageName.empty() == true)
	 continue;
      string Version = List.Version();
      if (Version.empty() == true)
	 continue;

      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (Pkg.end() == true)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "FindPkg", 1);
      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      unsigned long Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      Dynamic<pkgCache::VerIterator> DynVer(Ver);
      for (; Ver.end() == false; ++Ver)
      {
	 if (Ver->Hash == Hash && Version == Ver.VerStr())
	 {
	    if (List.CollectFileProvides(Cache,Ver) == false)
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    PackageName.c_str(), "CollectFileProvides", 1);
	    break;
	 }
      }

      if (Ver.end() == true)
	 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
   }

   return true;
}
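// Note (editorial, describing the control flow): this second pass is driven by
// BuildCache() further below - after the regular MergeList() passes it checks
// Gen.HasFileDeps() and, only if file dependencies were actually seen, re-runs
// every index through MergeFileProvides() so the matching versions gain the
// required Provides entries.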
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
{
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)
      return true;

   map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))
      return false;

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   map_ptrloc const idxName = WriteStringInMap(Name);
   if (unlikely(idxName == 0))
      return false;
   Grp->Name = idxName;

   // Insert it into the hash table
   unsigned long const Hash = Cache.Hash(Name);
   map_ptrloc *insertAt = &Cache.HeaderP->GrpHashTable[Hash];
   while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + *insertAt)->Name) > 0)
      insertAt = &(Cache.GrpP + *insertAt)->Next;
   Grp->Next = *insertAt;
   *insertAt = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
   return true;
}
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
					const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))
      return false;

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)
      return true;

   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      return false;
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      Grp->FirstPackage = Package;
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      map_ptrloc *insertAt = &Cache.HeaderP->PkgHashTable[Hash];
      while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.PkgP + *insertAt)->Name) > 0)
	 insertAt = &(Cache.PkgP + *insertAt)->NextPackage;
      Pkg->NextPackage = *insertAt;
      *insertAt = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // all is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))
      return false;
   Pkg->Arch = idxArch;
   Pkg->ID = Cache.HeaderP->PackageCount++;

   return true;
}
// CacheGenerator::AddImplicitDepends					/*{{{*/
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
					   pkgCache::PkgIterator &P,
					   pkgCache::VerIterator &V)
{
   // copy P.Arch() into a string here as a cache remap
   // in NewDepends() later may alter the pointer location
   string Arch = P.Arch() == NULL ? "" : P.Arch();
   map_ptrloc *OldDepLast = NULL;
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   pkgCache::PkgIterator D = G.PackageList();
   Dynamic<pkgCache::PkgIterator> DynD(D);
   map_ptrloc const VerStrIdx = V->VerStr;
   for (; D.end() != true; D = G.NextPkg(D))
   {
      if (Arch == D.Arch() || D->VersionList == 0)
	 continue;
      /* We allow only one installed arch at a time
	 per group, therefore each group member conflicts
	 with all other group members */
      if (coInstall == true)
      {
	 // Replaces: ${self}:other ( << ${binary:Version})
	 NewDepends(D, V, VerStrIdx,
		    pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		    OldDepLast);
	 // Breaks: ${self}:other (!= ${binary:Version})
	 NewDepends(D, V, VerStrIdx,
		    pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		    OldDepLast);
      } else {
	 // Conflicts: ${self}:other
	 NewDepends(D, V, 0,
		    pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		    OldDepLast);
      }
   }
   return true;
}

bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
					   pkgCache::PkgIterator &D)
{
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   map_ptrloc *OldDepLast = NULL;
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   if (coInstall == true)
   {
      map_ptrloc const VerStrIdx = V->VerStr;
      // Replaces: ${self}:other ( << ${binary:Version})
      NewDepends(D, V, VerStrIdx,
		 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		 OldDepLast);
      // Breaks: ${self}:other (!= ${binary:Version})
      NewDepends(D, V, VerStrIdx,
		 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		 OldDepLast);
   } else {
      // Conflicts: ${self}:other
      NewDepends(D, V, 0,
		 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		 OldDepLast);
   }
   return true;
}
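// Note (editorial illustration, not upstream text): for a group "libfoo" this
// means roughly the following.  If libfoo is "Multi-Arch: same", every
// architecture sibling gets
//    Replaces: libfoo:<otherarch> (<< ${binary:Version})
//    Breaks:   libfoo:<otherarch> (!= ${binary:Version})
// so that only identical versions can be co-installed; a package that is not
// "Multi-Arch: same" instead gets a plain Conflicts against its siblings, so
// only one architecture of it can be installed at a time.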
// CacheGenerator::NewFileVer - Create a new File<->Version association	/*{{{*/
// ---------------------------------------------------------------------
/* */
bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
				   ListParser &List)
{
   if (CurrentFile == 0)
      return true;

   // Get a structure
   map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
   if (VerFile == 0)
      return false;

   pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
   VF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Ver->FileList;
   for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
      Last = &V->NextFile;
   VF->NextFile = *Last;
   *Last = VF.Index();

   VF->Offset = List.Offset();
   VF->Size = List.Size();
   if (Cache.HeaderP->MaxVerFileSize < VF->Size)
      Cache.HeaderP->MaxVerFileSize = VF->Size;
   Cache.HeaderP->VerFileCount++;

   return true;
}
// CacheGenerator::NewVersion - Create a new Version 			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
					    const string &VerStr,
					    map_ptrloc const ParentPkg,
					    unsigned long const Hash,
					    unsigned long const Next)
{
   // Get a structure
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
   if (Version == 0)
      return 0;

   // Fill it in
   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
   Ver->NextVer = Next;
   Ver->ParentPkg = ParentPkg;
   Ver->Hash = Hash;
   Ver->ID = Cache.HeaderP->VersionCount++;

   // try to find the version string in the group for reuse
   pkgCache::PkgIterator Pkg = Ver.ParentPkg();
   pkgCache::GrpIterator Grp = Pkg.Group();
   if (Pkg.end() == false && Grp.end() == false)
   {
      for (pkgCache::PkgIterator P = Grp.PackageList(); P.end() == false; P = Grp.NextPkg(P))
      {
	 for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; ++V)
	 {
	    int const cmp = strcmp(V.VerStr(), VerStr.c_str());
	    if (cmp == 0)
	    {
	       Ver->VerStr = V->VerStr;
	       return Version;
	    }
	    else if (cmp < 0)
	       break;
	 }
      }
   }
   // haven't found the version string, so create
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))
      return 0;
   Ver->VerStr = idxVerStr;
   return Version;
}
// CacheGenerator::NewFileDesc - Create a new File<->Desc association	/*{{{*/
// ---------------------------------------------------------------------
/* */
bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
				    ListParser &List)
{
   if (CurrentFile == 0)
      return true;

   // Get a structure
   map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
   if (DescFile == 0)
      return false;

   pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
   DF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Desc->FileList;
   for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
      Last = &D->NextFile;
   DF->NextFile = *Last;
   *Last = DF.Index();

   DF->Offset = List.Offset();
   DF->Size = List.Size();
   if (Cache.HeaderP->MaxDescFileSize < DF->Size)
      Cache.HeaderP->MaxDescFileSize = DF->Size;
   Cache.HeaderP->DescFileCount++;

   return true;
}
// CacheGenerator::NewDescription - Create a new Description		/*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
					     const string &Lang,
					     const MD5SumValue &md5sum,
					     map_ptrloc idxmd5str)
{
   // Get a structure
   map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)
      return 0;

   // Fill it in
   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   map_ptrloc const idxlanguage_code = WriteUniqString(Lang);
   if (unlikely(idxlanguage_code == 0))
      return 0;
   Desc->language_code = idxlanguage_code;

   if (idxmd5str != 0)
      Desc->md5sum = idxmd5str;
   else
   {
      map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
      if (unlikely(idxmd5sum == 0))
	 return 0;
      Desc->md5sum = idxmd5sum;
   }

   return Description;
}
// CacheGenerator::NewDepends - Create a dependency element		/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   string const &Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
{
   map_ptrloc index = 0;
   if (Version.empty() == false)
   {
      int const CmpOp = Op & 0x0F;
      // =-deps are used (79:1) for lockstep on same-source packages (e.g. data-packages)
      if (CmpOp == pkgCache::Dep::Equals && strcmp(Version.c_str(), Ver.VerStr()) == 0)
	 index = Ver->VerStr;
      else
      {
	 void const * const oldMap = Map.Data();
	 index = WriteStringInMap(Version);
	 if (unlikely(index == 0))
	    return false;
	 if (OldDepLast != 0 && oldMap != Map.Data())
	    OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
      }
   }
   return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast);
}
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   map_ptrloc const Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
{
   void const * const oldMap = Map.Data();
   // Get a structure
   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      return false;

   // Fill it in
   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->Type = Type;
   Dep->CompareOp = Op;
   Dep->Version = Version;
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
	 OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;

   return true;
}
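// Note (editorial, describing the code above): OldDepLast always points at the
// map_ptrloc that terminates the version's dependency list (initially
// &Ver->DependsList, afterwards &<last dep>->NextDepends).  Appending a new
// dependency is therefore O(1):
//    Dep->NextDepends = *OldDepLast;   // tail terminator, i.e. 0
//    *OldDepLast = Dep.Index();        // hook the new element in
//    OldDepLast = &Dep->NextDepends;   // remember the new tail pointer
// The caller caches OldDepLast across calls, which is why it must be re-based
// whenever the map moved (see the oldMap comparison above).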
// ListParser::NewDepends - Create the environment for a new dependency	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and also handles the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
					       const string &PackageName,
					       const string &Arch,
					       const string &Version,
					       unsigned int Op,
					       unsigned int Type)
{
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   // we don't create 'none' packages and their dependencies if we can avoid it …
   if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
      return true;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
	 return false;
   }

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially */
   if (OldDepVer != Ver) {
      OldDepLast = NULL;
      OldDepVer = Ver;
   }

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
}
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
/* */
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
						const string &PkgName,
						const string &PkgArch,
						const string &Version)
{
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
      return true;

   // Get a structure
   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
      return false;
   Cache.HeaderP->ProvidesCount++;

   // Fill it in
   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false) {
      map_ptrloc const idxProvideVersion = WriteString(Version);
      Prv->ProvideVersion = idxProvideVersion;
      if (unlikely(idxProvideVersion == 0))
	 return false;
   }

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
      return false;

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();

   return true;
}
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
				   const pkgIndexFile &Index,
				   unsigned long Flags)
{
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))
      return false;
   CurrentFile = Cache.PkgFileP + idxFile;

   // Fill it in
   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))
      return false;
   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))
      return false;
   CurrentFile->IndexType = idxIndexType;

   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

   if (Progress != 0)
      Progress->SubProgress(Index.Size());
   return true;
}
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
	I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
	 break;
   }

   // Match
   if (Res == 0)
   {
      Bucket = I;
      return I->String;
   }

   // Get a structure
   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
      return 0;
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap;
      I += (pkgCache::StringItem const * const) Map.Data() - (pkgCache::StringItem const * const) oldMap;
   }

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   *Last = Item;
   ItemP->String = idxString;

   Bucket = ItemP;
   return ItemP->String;
}
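// Note (editorial, describing the code above): the "unique string" store is a
// sorted singly linked list inside the cache, plus a tiny in-memory cache
// (UniqHash) keyed only on the first two characters of the string via
// (S[0]*5 + S[1]) % _count(UniqHash).  The hash merely short-circuits repeated
// lookups of recently used strings; correctness always falls back to the full
// list walk and sorted insertion.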
// CheckValidity - Check that a cache is up-to-date			/*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
   has matching attributes with the cache and the cache does not have
   any extra files. */
static bool CheckValidity(const string &CacheFile,
			  pkgSourceList &List,
			  FileIterator Start,
			  FileIterator End,
			  MMap **OutMap = 0)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
   {
      if (Debug == true)
	 std::clog << "CacheFile doesn't exist" << std::endl;
      return false;
   }

   if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
   {
      if (Debug == true)
	 std::clog << "sources.list is newer than the cache" << std::endl;
      return false;
   }

   // Map it
   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
   {
      if (Debug == true)
	 std::clog << "Errors are pending or Map is empty()" << std::endl;
      _error->Discard();
      return false;
   }

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; ++Start)
   {
      if (Debug == true)
	 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      if ((*Start)->HasPackages() == false)
      {
	 if (Debug == true)
	    std::clog << "Has NO packages" << std::endl;
	 continue;
      }

      if ((*Start)->Exists() == false)
      {
#if 0 // mvo: we no longer give a message here (Default Sources spec)
	 _error->WarningE("stat",_("Couldn't stat source package list %s"),
			  (*Start)->Describe().c_str());
#endif
	 if (Debug == true)
	    std::clog << "file doesn't exist" << std::endl;
	 continue;
      }

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
      {
	 if (Debug == true)
	    std::clog << "FindInCache returned end-Pointer" << std::endl;
	 return false;
      }

      Visited[File->ID] = true;
      if (Debug == true)
	 std::clog << "with ID " << File->ID << " is valid" << std::endl;
   }

   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
      {
	 if (Debug == true)
	    std::clog << "File with ID" << I << " wasn't visited" << std::endl;
	 return false;
      }

   if (_error->PendingError() == true)
   {
      if (Debug == true)
      {
	 std::clog << "Validity failed because of pending errors:" << std::endl;
	 _error->DumpErrors();
      }
      _error->Discard();
      return false;
   }

   if (OutMap != 0)
      *OutMap = Map.UnGuard();
   return true;
}
// ComputeSize - Compute the total size of a bunch of files		/*{{{*/
// ---------------------------------------------------------------------
/* Size is kind of an abstract notion that is only used for the progress
   meter */
static unsigned long ComputeSize(FileIterator Start,FileIterator End)
{
   unsigned long TotalSize = 0;
   for (; Start < End; ++Start)
   {
      if ((*Start)->HasPackages() == false)
	 continue;
      TotalSize += (*Start)->Size();
   }
   return TotalSize;
}
// BuildCache - Merge the list of index files into the cache		/*{{{*/
// ---------------------------------------------------------------------
/* */
static bool BuildCache(pkgCacheGenerator &Gen,
		       OpProgress *Progress,
		       unsigned long &CurrentSize,unsigned long TotalSize,
		       FileIterator Start, FileIterator End)
{
   FileIterator I;
   for (I = Start; I != End; ++I)
   {
      if ((*I)->HasPackages() == false)
	 continue;

      if ((*I)->Exists() == false)
	 continue;

      if ((*I)->FindInCache(Gen.GetCache()).end() == false)
      {
	 _error->Warning("Duplicate sources.list entry %s",
			 (*I)->Describe().c_str());
	 continue;
      }

      unsigned long Size = (*I)->Size();
      if (Progress != NULL)
	 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
      CurrentSize += Size;

      if ((*I)->Merge(Gen,Progress) == false)
	 return false;
   }

   if (Gen.HasFileDeps() == true)
   {
      if (Progress != NULL)
	 Progress->Done();
      TotalSize = ComputeSize(Start, End);
      CurrentSize = 0;
      for (I = Start; I != End; ++I)
      {
	 unsigned long Size = (*I)->Size();
	 if (Progress != NULL)
	    Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
	 CurrentSize += Size;
	 if ((*I)->MergeFileProvides(Gen,Progress) == false)
	    return false;
      }
   }

   return true;
}
// CacheGenerator::CreateDynamicMMap - load an mmap with configuration options	/*{{{*/
DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
   unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
   unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
   unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
   Flags |= MMap::Moveable;
   if (_config->FindB("APT::Cache-Fallback", false) == true)
      Flags |= MMap::Fallback;
   if (CacheF != NULL)
      return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
   else
      return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
}
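// Note (editorial illustration): the sizing knobs read above correspond to the
// usual apt.conf options, e.g.
//    APT::Cache-Start "25165824";   // initial mmap size (default here: 24 MiB)
//    APT::Cache-Grow  "1048576";    // grow step when the map runs full
//    APT::Cache-Limit "0";          // 0 = no upper bound
//    APT::Cache-Fallback "false";   // force the non-mmap fallback implementation
// MMap::Moveable is always set so the map may be relocated while growing, which
// is exactly what makes the ReMap() machinery above necessary.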
// CacheGenerator::MakeStatusCache - Construct the status cache	/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmapped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
APT_DEPRECATED bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
			MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   std::vector<pkgIndexFile *> Files;
   for (std::vector<metaIndex *>::const_iterator i = List.begin();
	i != List.end();
	++i)
   {
      std::vector<pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end();
	   ++j)
	 Files.push_back (*j);
   }

   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
	 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
      {
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	 if (Debug == true)
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 Writeable = false;
	 if (AllowMem == false)
	 {
	    _error->MergeWithStack();
	    return false;
	 }
	 _error->RevertToStack();
      }
      else
      {
	 _error->MergeWithStack();
	 if (Debug == true)
	    std::clog << "Open filebased MMap" << std::endl;
      }
   }
   if (Writeable == false || CacheFile.empty() == true)
   {
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      if (Debug == true)
	 std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Let's try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
	  || SCacheF.Read((unsigned char *)Map->Data() + alloc,
			  SCacheF.Size()) == false)
	 return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   else
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)
	 return false;

      // Write it back
      if (Writeable == true && SrcCacheFile.empty() == false)
      {
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	    return false;

	 fchmod(SCacheF.Fd(),0644);

	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));

	 // Write out the proper header
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
	 delete Map.UnGuard();
	 *OutMap = new MMap(*CacheF,0);
      }
      else
      {
	 *OutMap = Map.UnGuard();
      }
   }

   return true;
}
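// Example (editorial illustration, not part of the upstream file): a typical
// caller builds and opens the cache roughly like this:
//
//    pkgSourceList List;
//    if (List.ReadMainList() == false)
//       return false;
//    OpTextProgress Prog(*_config);
//    MMap *OutMap = NULL;
//    if (pkgCacheGenerator::MakeStatusCache(List, &Prog, &OutMap, true) == false)
//       return false;                 // errors are on the global _error stack
//    pkgCache Cache(OutMap);          // ready to iterate groups/packages/versions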
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
/* */
APT_DEPRECATED bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
{
   std::vector<pkgIndexFile *> Files;
   unsigned long EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;

   TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

   // Build the status cache
   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));
   pkgCacheGenerator Gen(Map.Get(),Progress);
   if (_error->PendingError() == true)
      return false;
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		  Files.begin()+EndOfSource,Files.end()) == false)
      return false;

   if (_error->PendingError() == true)
      return false;
   *OutMap = Map.UnGuard();

   return true;
}
// IsDuplicateDescription						/*{{{*/
static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
				   MD5SumValue const &CurMd5, std::string const &CurLang)
{
   // Descriptions in the same linked list all have the same md5
   if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
      return false;
   for (; Desc.end() == false; ++Desc)
      if (Desc.LanguageCode() == CurLang)
	 return true;
   return false;
}
// CacheGenerator::FinishCache						/*{{{*/
bool pkgCacheGenerator::FinishCache(OpProgress * /*Progress*/)
{
   return true;
}