// -*- mode: cpp; mode: fold -*-
// $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
/* ######################################################################

   Package Cache Generator - Generator for the cache structure.

   This builds the cache structure from the abstract package list parser.

   ##################################################################### */
// Include Files							/*{{{*/
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
#include <apt-pkg/macros.h>
#include <apt-pkg/tagfile.h>
#include <apt-pkg/metaindex.h>
#include <apt-pkg/fileutl.h>
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
			MD5SumValue const &CurMd5, std::string const &CurLang);
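
// Dynamic<Iter> (declared in pkgcachegen.h) is a small RAII helper: its
// constructor stores the address of a live iterator in the static toReMap
// vector instantiated above and its destructor removes it again. ReMap()
// below walks these vectors so that every iterator still sitting on a
// caller's stack is rebased when the mmap grows and moves. A minimal usage
// sketch (illustrative only):
//
//   pkgCache::PkgIterator Pkg;
//   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);  // tracked until end of scope
//   // ... any allocation that may move the map is now safe for Pkg ...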
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that it is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
		    Map(*pMap), Cache(pMap,false), Progress(Prog),
		    FoundFileDeps(0)
{
   CurrentFile = 0;
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      return;

   if (Map.Size() == 0)
   {
      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
	 return;

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      // Starting header
      *Cache.HeaderP = pkgCache::Header();
      map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->VerSysName = idxVerSysName;
      // this pointer is set in ReMap, but we need it now for WriteUniqString
      Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
      map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
      Cache.HeaderP->Architecture = idxArchitecture;
      if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
	 return;
   }
   else
   {
      // Map directly from the existing file
      Cache.ReMap();
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
      {
	 _error->Error(_("Cache has an incompatible versioning system"));
	 return;
      }
   }

   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
									/*}}}*/
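// A rough usage sketch (illustrative, not part of this file): the generator
// is normally driven through MakeStatusCache() further down, which does
// roughly the following with the pieces defined here:
//
//   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);     // in-memory cache
//   pkgCacheGenerator Gen(Map.Get(), Progress);          // writes the header
//   // ... BuildCache() then calls Index->Merge(Gen, Progress) per index file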
// CacheGenerator::~pkgCacheGenerator - Destructor			/*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   avoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
{
   if (_error->PendingError() == true)
      return;
   if (Map.Sync() == false)
      return;

   Cache.HeaderP->Dirty = false;
   Cache.HeaderP->CacheFileSize = Map.Size();
   Map.Sync(0,sizeof(pkgCache::Header));
}
									/*}}}*/
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
   if (oldMap == newMap)
      return;

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;

   Cache.ReMap(false);

   CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
	 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;

   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
	i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
	i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
}									/*}}}*/
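// The rebasing above works because all these raw pointers point into the
// mmap'd region: when DynamicMMap grows (and possibly moves) the mapping,
// adding the distance between the new and the old base, expressed in units
// of the pointed-to type, shifts each pointer to the same object in the new
// mapping. Illustrative:
//
//   pkgCache::StringItem *p = ...;                 // into the old mapping
//   p += (pkgCache::StringItem*) newMap
//      - (pkgCache::StringItem*) oldMap;           // same item, new mapping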
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
					const unsigned long &Len) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String, Len);
   if (index != 0)
      ReMap(oldMap, Map.Data());
   return index;
}
									/*}}}*/
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String);
   if (index != 0)
      ReMap(oldMap, Map.Data());
   return index;
}
									/*}}}*/
map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.Allocate(size);
   if (index != 0)
      ReMap(oldMap, Map.Data());
   return index;
}
									/*}}}*/
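// All allocations funnel through the three helpers above so that a grow (and
// therefore a possible move) of the DynamicMMap immediately triggers ReMap().
// Code that keeps a raw map_ptrloc* across such a call still has to rebase it
// by hand, which is the pattern repeated throughout this file (illustrative):
//
//   void const * const oldMap = Map.Data();
//   map_ptrloc const idx = WriteStringInMap(VerStr);
//   if (oldMap != Map.Data())
//      LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;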
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine. */
bool pkgCacheGenerator::MergeList(ListParser &List,
				  pkgCache::VerIterator *OutVer)
{
   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string const PackageName = List.Package();
      if (PackageName.empty() == true)
	 return false;

      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      string Arch = List.Architecture();
      string const Version = List.Version();
      if (Version.empty() == true && Arch.empty() == true)
      {
	 // package descriptions
	 if (MergeListGroup(List, PackageName) == false)
	    return false;
	 continue;
      }

      if (Arch.empty() == true)
      {
	 // use the pseudo arch 'none' for arch-less packages
	 Arch = "none";
	 /* We might build a SingleArchCache here, which we don't want to blow up
	    just for these :none packages to a proper MultiArchCache, so just ensure
	    that we always have a native package structure first for SingleArch */
	 pkgCache::PkgIterator NP;
	 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
	 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
	    // TRANSLATOR: The first placeholder is a package name,
	    // the other two should be copied verbatim as they include debug info
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 PackageName.c_str(), "NewPackage", 0);
      }

      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (NewPackage(Pkg, PackageName, Arch) == false)
	 // TRANSLATOR: The first placeholder is a package name,
	 // the other two should be copied verbatim as they include debug info
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "NewPackage", 1);

      if (Version.empty() == true)
      {
	 if (MergeListPackage(List, Pkg) == false)
	    return false;
      }
      else
      {
	 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
	    return false;
      }

      if (OutVer != 0)
      {
	 FoundFileDeps |= List.HasFileDeps();
	 return true;
      }
   }

   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
			     "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
			     "this APT is capable of."));

   FoundFileDeps |= List.HasFileDeps();
   return true;
}
									/*}}}*/
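// The four guards above protect the width of the on-disk ID counters: each
// structure stores its ID in a fixed-size field, so the generator refuses to
// create more objects than that field can number. The arithmetic, with a
// hypothetical 16-bit ID field, would be (illustrative):
//
//   sizeof(ID) * 8          // 16 bits
//   (1ULL << 16) - 1        // 65535 == largest usable count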
// CacheGenerator::MergeListGroup					/*{{{*/
bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
{
   pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
   // a group has no data of its own; only packages have it, but stanzas
   // like this come from Translation- files to add descriptions, and
   // without a version we don't need a description for it…
   if (Grp.end() == true)
      return true;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
      if (MergeListPackage(List, Pkg) == false)
	 return false;

   return true;
}
									/*}}}*/
// CacheGenerator::MergeListPackage					/*{{{*/
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
{
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
   {
      pkgCache::DescIterator Desc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
	 continue;

      // don't add a new description if we have one for the given
      // MD5 and language already
      if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)
	 continue;

      Dynamic<pkgCache::DescIterator> DynDesc(Desc);
      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      map_ptrloc *LastDesc = &Ver->DescriptionList;
      for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc);
      if (Desc.end() == false)
	 LastDesc = &Desc->NextDesc;

      void const * const oldMap = Map.Data();
      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
      if (unlikely(descindex == 0 && _error->PendingError()))
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewDescription", 1);
      if (oldMap != Map.Data())
	 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      *LastDesc = descindex;
      Desc->ParentPkg = Pkg.Index();

      if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
      break;
   }

   return true;
}
									/*}}}*/
// CacheGenerator::MergeListVersion					/*{{{*/
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
					 std::string const &Version, pkgCache::VerIterator* &OutVer)
{
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
   {
      /* We know the list is sorted so we use that fact in the search.
         Insertion of new versions is done with correct sorting */
      int Res = 1;
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
	 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
	 // Version is higher than the current version - insert here
	 if (Res > 0)
	    break;
	 // Versionstrings are equal - is the hash also equal?
	 if (Res == 0 && Ver->Hash == Hash)
	    break;
	 // proceed with the next till we have either the right
	 // or we found another version (which will be lower)
      }

      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
	 if (List.UsePackage(Pkg,Ver) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "UsePackage", 2);

	 if (NewFileVer(Ver,List) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "NewFileVer", 1);

	 // Read only a single record and return
	 if (OutVer != 0)
	 {
	    *OutVer = Ver;
	    return true;
	 }

	 return true;
      }
   }

   // Add a new version
   map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 1);

   if (oldMap != Map.Data())
      LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastVer = verindex;
   Ver->ParentPkg = Pkg.Index();
   Ver->Hash = Hash;

   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileVer", 2);
   pkgCache::GrpIterator Grp = Pkg.Group();
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   /* If it is the first version of this package we need to add implicit
      Multi-Arch dependencies to all other package versions in the group now -
      otherwise we just add them for this new version */
   if (Pkg.VersionList()->NextVer == 0)
   {
      pkgCache::PkgIterator P = Grp.PackageList();
      Dynamic<pkgCache::PkgIterator> DynP(P);
      for (; P.end() != true; P = Grp.NextPkg(P))
      {
	 if (P->ID == Pkg->ID)
	    continue;
	 pkgCache::VerIterator V = P.VersionList();
	 Dynamic<pkgCache::VerIterator> DynV(V);
	 for (; V.end() != true; ++V)
	    if (unlikely(AddImplicitDepends(V, Pkg) == false))
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    Pkg.Name(), "AddImplicitDepends", 1);
      }
   }
   /* :none packages are packages without an architecture. They are forbidden by
      debian-policy, so usually they will only be in (old) dpkg status files -
      and dpkg will complain about them - and are pretty rare. We therefore do
      usually not create conflicts while the parent is created, but only if a :none
      package (= the target) appears. This creates incorrect dependencies on :none
      for architecture-specific dependencies on the package we copy from, but we
      will ignore this bug as architecture-specific dependencies are only allowed
      in jessie and until then the :none packages should be extinct (hopefully).
      In other words: This should work long enough to allow graceful removal of
      these packages, it is not supposed to allow users to keep using them … */
   if (strcmp(Pkg.Arch(), "none") == 0)
   {
      pkgCache::PkgIterator M = Grp.FindPreferredPkg();
      if (M.end() == false && Pkg != M)
      {
	 pkgCache::DepIterator D = M.RevDependsList();
	 Dynamic<pkgCache::DepIterator> DynD(D);
	 for (; D.end() == false; ++D)
	 {
	    if ((D->Type != pkgCache::Dep::Conflicts &&
		 D->Type != pkgCache::Dep::DpkgBreaks &&
		 D->Type != pkgCache::Dep::Replaces) ||
		D.ParentPkg().Group() == Grp)
	       continue;

	    map_ptrloc *OldDepLast = NULL;
	    pkgCache::VerIterator ConVersion = D.ParentVer();
	    Dynamic<pkgCache::VerIterator> DynV(ConVersion);
	    // duplicate the Conflicts/Breaks/Replaces for the :none arch
	    if (D->Version == 0)
	       NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast);
	    else
	       NewDepends(Pkg, ConVersion, D.TargetVer(),
			  D->CompareOp, D->Type, OldDepLast);
	 }
      }
   }

   if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "AddImplicitDepends", 2);
   // Read only a single record and return
   if (OutVer != 0)
   {
      *OutVer = Ver;
      return true;
   }

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   for (pkgCache::PkgIterator P = Grp.PackageList();
	P.end() == false; P = Grp.NextPkg(P))
   {
      for (pkgCache::VerIterator V = P.VersionList();
	   V.end() == false; ++V)
      {
	 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
	    continue;
	 Ver->DescriptionList = V->DescriptionList;
	 return true;
      }
   }

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);
   map_ptrloc *LastDesc = &Ver->DescriptionList;

   oldMap = Map.Data();
   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewDescription", 2);
   if (oldMap != Map.Data())
      LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastDesc = descindex;
   Desc->ParentPkg = Pkg.Index();

   if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileDesc", 2);

   return true;
}
									/*}}}*/
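// Description sharing: every version whose Description-md5 matches an
// existing description in the same group simply points its DescriptionList
// at the already stored list (see the reuse loop above), so the English text
// and all Translation-<lang> entries are stored only once per group no matter
// how many architectures carry the package. The translated descriptions
// themselves arrive later as version-less stanzas and are attached through
// MergeListGroup()/MergeListPackage() above.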
// CacheGenerator::MergeFileProvides - Merge file provides		/*{{{*/
// ---------------------------------------------------------------------
/* If we found any file depends while parsing the main list we need to
   resolve them. Since it is undesired to load the entire list of files
   into the cache as virtual packages we do a two stage effort. MergeList
   identifies the file depends and this creates Provides for them by
   re-parsing all the indexes. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
{
   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string PackageName = List.Package();
      if (PackageName.empty() == true)
	 continue;
      string Version = List.Version();
      if (Version.empty() == true)
	 continue;

      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (Pkg.end() == true)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "FindPkg", 1);
      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      unsigned long Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      Dynamic<pkgCache::VerIterator> DynVer(Ver);
      for (; Ver.end() == false; ++Ver)
      {
	 if (Ver->Hash == Hash && strcmp(Version.c_str(), Ver.VerStr()) == 0)
	 {
	    if (List.CollectFileProvides(Cache,Ver) == false)
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    PackageName.c_str(), "CollectFileProvides", 1);
	    break;
	 }
      }

      if (Ver.end() == true)
	 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
   }

   return true;
}
									/*}}}*/
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
{
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)
      return true;

   map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))
      return false;

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   map_ptrloc const idxName = WriteStringInMap(Name);
   if (unlikely(idxName == 0))
      return false;
   Grp->Name = idxName;

   // Insert it into the hash table
   unsigned long const Hash = Cache.Hash(Name);
   Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
   Cache.HeaderP->GrpHashTable[Hash] = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
   return true;
}
									/*}}}*/
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
					const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))
      return false;

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)
      return true;

   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      return false;
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // 'all' is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))
      return false;
   Pkg->Arch = idxArch;
   Pkg->ID = Cache.HeaderP->PackageCount++;

   return true;
}
									/*}}}*/
// CacheGenerator::AddImplicitDepends					/*{{{*/
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
					   pkgCache::PkgIterator &P,
					   pkgCache::VerIterator &V)
{
   // copy P.Arch() into a string here as a cache remap
   // in NewDepends() later may alter the pointer location
   string Arch = P.Arch() == NULL ? "" : P.Arch();
   map_ptrloc *OldDepLast = NULL;
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   pkgCache::PkgIterator D = G.PackageList();
   Dynamic<pkgCache::PkgIterator> DynD(D);
   for (; D.end() != true; D = G.NextPkg(D))
   {
      if (Arch == D.Arch() || D->VersionList == 0)
	 continue;
      /* We allow only one installed arch at the time
	 per group, therefore each group member conflicts
	 with all other group members */
      if (coInstall == true)
      {
	 // Replaces: ${self}:other ( << ${binary:Version})
	 NewDepends(D, V, V.VerStr(),
		    pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		    OldDepLast);
	 // Breaks: ${self}:other (!= ${binary:Version})
	 NewDepends(D, V, V.VerStr(),
		    pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		    OldDepLast);
      } else {
	 // Conflicts: ${self}:other
	 NewDepends(D, V, "",
		    pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		    OldDepLast);
      }
   }
   return true;
}

bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
					   pkgCache::PkgIterator &D)
{
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   map_ptrloc *OldDepLast = NULL;
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   if (coInstall == true)
   {
      // Replaces: ${self}:other ( << ${binary:Version})
      NewDepends(D, V, V.VerStr(),
		 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		 OldDepLast);
      // Breaks: ${self}:other (!= ${binary:Version})
      NewDepends(D, V, V.VerStr(),
		 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		 OldDepLast);
   } else {
      // Conflicts: ${self}:other
      NewDepends(D, V, "",
		 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		 OldDepLast);
   }
   return true;
}
									/*}}}*/
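// Spelled out in dependency syntax, the implicit relations created above for
// a package 'foo' are roughly (illustrative):
//
//   Multi-Arch: same   ->  Replaces: foo:other (<< ${binary:Version})
//                          Breaks:   foo:other (!= ${binary:Version})
//   everything else    ->  Conflicts: foo:other
//
// which makes equal versions of a Multi-Arch: same package co-installable
// across architectures while any other combination excludes itself.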
// CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
// ---------------------------------------------------------------------
/* */
bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
				   ListParser &List)
{
   if (CurrentFile == 0)
      return true;

   // Get a structure
   map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
   if (VerFile == 0)
      return false;

   pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
   VF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Ver->FileList;
   for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
      Last = &V->NextFile;
   VF->NextFile = *Last;
   *Last = VF.Index();

   VF->Offset = List.Offset();
   VF->Size = List.Size();
   if (Cache.HeaderP->MaxVerFileSize < VF->Size)
      Cache.HeaderP->MaxVerFileSize = VF->Size;
   Cache.HeaderP->VerFileCount++;

   return true;
}
									/*}}}*/
// CacheGenerator::NewVersion - Create a new Version			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
					    const string &VerStr,
					    unsigned long Next)
{
   // Get a structure
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
   if (Version == 0)
      return 0;

   // Fill it in
   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
   Ver->NextVer = Next;
   Ver->ID = Cache.HeaderP->VersionCount++;
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))
      return 0;
   Ver->VerStr = idxVerStr;

   return Version;
}
									/*}}}*/
// CacheGenerator::NewFileDesc - Create a new File<->Desc association	/*{{{*/
// ---------------------------------------------------------------------
/* */
bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
				    ListParser &List)
{
   if (CurrentFile == 0)
      return true;

   // Get a structure
   map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
   if (DescFile == 0)
      return false;

   pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
   DF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Desc->FileList;
   for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
      Last = &D->NextFile;

   DF->NextFile = *Last;
   *Last = DF.Index();

   DF->Offset = List.Offset();
   DF->Size = List.Size();
   if (Cache.HeaderP->MaxDescFileSize < DF->Size)
      Cache.HeaderP->MaxDescFileSize = DF->Size;
   Cache.HeaderP->DescFileCount++;

   return true;
}
									/*}}}*/
// CacheGenerator::NewDescription - Create a new Description		/*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
					     const string &Lang,
					     const MD5SumValue &md5sum,
					     map_ptrloc Next)
{
   // Get a structure
   map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)
      return 0;

   // Fill it in
   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->NextDesc = Next;
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
   map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
   if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
      return 0;
   Desc->language_code = idxlanguage_code;
   Desc->md5sum = idxmd5sum;

   return Description;
}
									/*}}}*/
// CacheGenerator::NewDepends - Create a dependency element		/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   string const &Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc * &OldDepLast)
{
   void const * const oldMap = Map.Data();
   // Get a structure
   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      return false;

   // Fill it in
   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->Type = Type;
   Dep->CompareOp = Op;
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
   {
/*    for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
	 if (I->Version != 0 && I.TargetVer() == Version)
	    Dep->Version = I->Version;*/
      if (Dep->Version == 0) {
	 map_ptrloc const index = WriteStringInMap(Version);
	 if (unlikely(index == 0))
	    return false;
	 Dep->Version = index;
      }
   }

   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
	 OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;

   return true;
}
									/*}}}*/
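// OldDepLast caching: the first NewDepends() call for a version walks the
// existing dependency list once to find its tail; the caller then passes the
// same pointer back in, so every further dependency is appended in O(1).
// The 'else if (oldMap != Map.Data())' branch rebases that cached pointer if
// an allocation above moved the map. Illustrative call sequence:
//
//   map_ptrloc *OldDepLast = NULL;
//   NewDepends(Pkg, Ver, "1.0", pkgCache::Dep::GreaterEq,
//              pkgCache::Dep::Depends, OldDepLast);     // walks the list once
//   NewDepends(Pkg, Ver, "", pkgCache::Dep::NoOp,
//              pkgCache::Dep::Suggests, OldDepLast);    // appends directly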
// ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and also handles the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
					       const string &PackageName,
					       const string &Arch,
					       const string &Version,
					       unsigned int Op,
					       unsigned int Type)
{
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   // we don't create 'none' packages and their dependencies if we can avoid it …
   if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
      return true;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
	 return false;
   }

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially */
   if (OldDepVer != Ver) {
      OldDepLast = NULL;
      OldDepVer = Ver;
   }

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
}
									/*}}}*/
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
/* */
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
						const string &PkgName,
						const string &PkgArch,
						const string &Version)
{
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
      return true;

   // Get a structure
   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
      return false;
   Cache.HeaderP->ProvidesCount++;

   // Fill it in
   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false) {
      map_ptrloc const idxProvideVersion = WriteString(Version);
      Prv->ProvideVersion = idxProvideVersion;
      if (unlikely(idxProvideVersion == 0))
	 return false;
   }

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
      return false;

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();

   return true;
}
									/*}}}*/
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
				   const pkgIndexFile &Index,
				   unsigned long Flags)
{
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))
      return false;
   CurrentFile = Cache.PkgFileP + idxFile;

   // Fill it in
   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))
      return false;
   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))
      return false;
   CurrentFile->IndexType = idxIndexType;

   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

   if (Progress != 0)
      Progress->SubProgress(Index.Size());
   return true;
}
									/*}}}*/
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
	I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
	 break;
   }

   // Match
   if (Res == 0)
   {
      Bucket = I;
      return I->String;
   }

   // Get a structure
   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
      return 0;
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
   }
   *Last = Item;

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   Bucket = ItemP;
   return ItemP->String;
}
									/*}}}*/
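// The UniqHash table used above is a purely transient accelerator: the
// persistent structure is the sorted StringItem list rooted in the header,
// while the small bucket array only short-circuits repeated writes of the
// same handful of strings (sections, priorities, architectures) during one
// generator run. The bucket is picked from the first two characters only,
// e.g. (illustrative):
//
//   UniqHash[('o'*5 + 'p') % _count(UniqHash)]   // bucket for "optional"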
// CheckValidity - Check that a cache is up-to-date			/*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
   has matching attributes with the cache and the cache does not have
   any extra files. */
static bool CheckValidity(const string &CacheFile,
			  pkgSourceList &List,
			  FileIterator Start,
			  FileIterator End,
			  MMap **OutMap = 0)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
   {
      if (Debug == true)
	 std::clog << "CacheFile doesn't exist" << std::endl;
      return false;
   }

   if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
   {
      if (Debug == true)
	 std::clog << "sources.list is newer than the cache" << std::endl;
      return false;
   }

   // Map it
   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
   {
      if (Debug == true)
	 std::clog << "Errors are pending or Map is empty()" << std::endl;
      _error->Discard();
      return false;
   }

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; ++Start)
   {
      if (Debug == true)
	 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      if ((*Start)->HasPackages() == false)
      {
	 if (Debug == true)
	    std::clog << "Has NO packages" << std::endl;
	 continue;
      }

      if ((*Start)->Exists() == false)
      {
#if 0 // mvo: we no longer give a message here (Default Sources spec)
	 _error->WarningE("stat",_("Couldn't stat source package list %s"),
			  (*Start)->Describe().c_str());
#endif
	 if (Debug == true)
	    std::clog << "file doesn't exist" << std::endl;
	 continue;
      }

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
      {
	 if (Debug == true)
	    std::clog << "FindInCache returned end-Pointer" << std::endl;
	 return false;
      }

      Visited[File->ID] = true;
      if (Debug == true)
	 std::clog << "with ID " << File->ID << " is valid" << std::endl;
   }

   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
      {
	 if (Debug == true)
	    std::clog << "File with ID " << I << " wasn't visited" << std::endl;
	 return false;
      }

   if (_error->PendingError() == true)
   {
      if (Debug == true)
      {
	 std::clog << "Validity failed because of pending errors:" << std::endl;
	 _error->DumpErrors();
      }
      _error->Discard();
      return false;
   }

   if (OutMap != 0)
      *OutMap = Map.UnGuard();
   return true;
}
									/*}}}*/
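// CheckValidity() is the fast path of MakeStatusCache() below: when every
// index file from sources.list is found in the cache with matching IMS data
// and no stale package files remain, the existing map is handed back through
// OutMap and nothing is re-parsed. With the debug scope enabled the decision
// is logged, roughly like (illustrative):
//
//   $ apt-get check -o Debug::pkgCacheGen=1
//   Checking PkgFile /var/lib/apt/lists/..._Packages: with ID 0 is valid
//   pkgcache.bin is valid - no need to build anything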
// ComputeSize - Compute the total size of a bunch of files		/*{{{*/
// ---------------------------------------------------------------------
/* Size is kind of an abstract notion that is only used for the progress
   meter */
static unsigned long ComputeSize(FileIterator Start,FileIterator End)
{
   unsigned long TotalSize = 0;
   for (; Start != End; ++Start)
   {
      if ((*Start)->HasPackages() == false)
	 continue;
      TotalSize += (*Start)->Size();
   }
   return TotalSize;
}
									/*}}}*/
// BuildCache - Merge the list of index files into the cache		/*{{{*/
// ---------------------------------------------------------------------
/* */
static bool BuildCache(pkgCacheGenerator &Gen,
		       OpProgress *Progress,
		       unsigned long &CurrentSize,unsigned long TotalSize,
		       FileIterator Start, FileIterator End)
{
   FileIterator I;
   for (I = Start; I != End; ++I)
   {
      if ((*I)->HasPackages() == false)
	 continue;

      if ((*I)->Exists() == false)
	 continue;

      if ((*I)->FindInCache(Gen.GetCache()).end() == false)
      {
	 _error->Warning("Duplicate sources.list entry %s",
			 (*I)->Describe().c_str());
	 continue;
      }

      unsigned long Size = (*I)->Size();
      if (Progress != NULL)
	 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
      CurrentSize += Size;

      if ((*I)->Merge(Gen,Progress) == false)
	 return false;
   }

   if (Gen.HasFileDeps() == true)
   {
      if (Progress != NULL)
	 Progress->Done();
      TotalSize = ComputeSize(Start, End);
      CurrentSize = 0;
      for (I = Start; I != End; ++I)
      {
	 unsigned long Size = (*I)->Size();
	 if (Progress != NULL)
	    Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
	 CurrentSize += Size;
	 if ((*I)->MergeFileProvides(Gen,Progress) == false)
	    return false;
      }
   }

   return true;
}
									/*}}}*/
// CacheGenerator::CreateDynamicMMap - load an mmap with configuration options	/*{{{*/
DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
   unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
   unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
   unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
   Flags |= MMap::Moveable;
   if (_config->FindB("APT::Cache-Fallback", false) == true)
      Flags |= MMap::Fallback;
   if (CacheF != NULL)
      return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
   else
      return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
}
									/*}}}*/
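// The APT::Cache-* options read above size the dynamic mmap: Cache-Start is
// the initial allocation (24 MiB by default here), Cache-Grow the step used
// whenever the map runs full, and Cache-Limit an optional hard ceiling where
// 0 means unlimited. They can be set like any other option, e.g. in an
// apt.conf snippet (illustrative):
//
//   APT::Cache-Start "50331648";   // start with 48 MiB
//   APT::Cache-Grow  "2097152";    // grow in 2 MiB steps
//   APT::Cache-Limit "0";          // no hard limit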
// CacheGenerator::MakeStatusCache - Construct the status cache	/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmaped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
__deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
			MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   std::vector<pkgIndexFile *> Files;
   for (std::vector<metaIndex *>::const_iterator i = List.begin();
	i != List.end();
	++i)
   {
      std::vector<pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end();
	   ++j)
	 Files.push_back (*j);
   }

   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
	 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;
   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
      {
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	 if (Debug == true)
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 Writeable = false;
	 if (AllowMem == false)
	 {
	    _error->MergeWithStack();
	    return false;
	 }
	 _error->RevertToStack();
      }
      else
      {
	 _error->MergeWithStack();
	 if (Debug == true)
	    std::clog << "Open filebased MMap" << std::endl;
      }
   }
   if (Writeable == false || CacheFile.empty() == true)
   {
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      if (Debug == true)
	 std::clog << "Open memory Map (not filebased)" << std::endl;
   }
   // Let's try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
	  || SCacheF.Read((unsigned char *)Map->Data() + alloc,
			  SCacheF.Size()) == false)
	 return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   else
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)
	 return false;

      // Write it back
      if (Writeable == true && SrcCacheFile.empty() == false)
      {
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	    return false;

	 fchmod(SCacheF.Fd(),0644);

	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));

	 // Write out the proper header
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
	 delete Map.UnGuard();
	 *OutMap = new MMap(*CacheF,0);
      }
      else
      {
	 *OutMap = Map.UnGuard();
      }
   }

   return true;
}
									/*}}}*/
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
/* */
__deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
{
   std::vector<pkgIndexFile *> Files;
   unsigned long EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;

   TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

   // Build the status cache
   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));
   pkgCacheGenerator Gen(Map.Get(),Progress);
   if (_error->PendingError() == true)
      return false;
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		  Files.begin()+EndOfSource,Files.end()) == false)
      return false;

   if (_error->PendingError() == true)
      return false;
   *OutMap = Map.UnGuard();

   return true;
}
									/*}}}*/
// IsDuplicateDescription						/*{{{*/
static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
				   MD5SumValue const &CurMd5, std::string const &CurLang)
{
   // Descriptions in the same link-list all have the same md5
   if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
      return false;
   for (; Desc.end() == false; ++Desc)
      if (Desc.LanguageCode() == CurLang)
	 return true;
   return false;
}
									/*}}}*/
// CacheGenerator::FinishCache						/*{{{*/
bool pkgCacheGenerator::FinishCache(OpProgress *Progress)