// -*- mode: cpp; mode: fold -*-
// $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
/* ######################################################################

   Package Cache Generator - Generator for the cache structure.

   This builds the cache structure from the abstract package list parser.

   ##################################################################### */
// Include Files							/*{{{*/
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
#include <apt-pkg/macros.h>
#include <apt-pkg/tagfile.h>
#include <apt-pkg/metaindex.h>
#include <apt-pkg/fileutl.h>
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
			MD5SumValue const &CurMd5, std::string const &CurLang);
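// The Dynamic<Iter> helper above keeps one static vector of pointers to all
// currently live iterators of a given type, so that ReMap() below can walk
// that vector and rebase every iterator whenever the DynamicMMap is moved by
// a reallocation.  A minimal standalone sketch of that RAII-registry idea
// (plain C++, nothing APT-specific; names are illustrative only):
#if 0
#include <vector>

template <typename T>
struct TrackedPtr
{
   static std::vector<T **> live;        // every pointer that must be fixed up
   T **ptr;
   explicit TrackedPtr(T *&p) : ptr(&p) { live.push_back(ptr); }
   ~TrackedPtr() { live.pop_back(); }    // guards are destroyed in LIFO order
};
template <typename T> std::vector<T **> TrackedPtr<T>::live;

// After the backing buffer moved from oldBase to newBase, rebase all
// registered pointers by the distance the buffer travelled.
template <typename T>
void RebaseAll(char const *oldBase, char *newBase)
{
   for (typename std::vector<T **>::const_iterator p = TrackedPtr<T>::live.begin();
	p != TrackedPtr<T>::live.end(); ++p)
      if (**p != 0)
	 **p = reinterpret_cast<T *>(newBase + (reinterpret_cast<char const *>(**p) - oldBase));
}
#endif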
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
		    Map(*pMap), Cache(pMap,false), Progress(Prog),
   // ...
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      // ...

   // Setup the map interface..
   Cache.HeaderP = (pkgCache::Header *)Map.Data();
   if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
      // ...

   Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
   // ...
   *Cache.HeaderP = pkgCache::Header();
   map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
   Cache.HeaderP->VerSysName = idxVerSysName;
   // this pointer is set in ReMap, but we need it now for WriteUniqString
   Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
   map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
   Cache.HeaderP->Architecture = idxArchitecture;
   if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
      // ...

   // Map directly from the existing file
   // ...
   Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
   if (Cache.VS != _system->VS)
      _error->Error(_("Cache has an incompatible versioning system"));
   // ...
   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
// CacheGenerator::~pkgCacheGenerator - Destructor			/*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   avoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
{
   if (_error->PendingError() == true)
      // ...
   if (Map.Sync() == false)
      // ...

   Cache.HeaderP->Dirty = false;
   Cache.HeaderP->CacheFileSize = Map.Size();
   Map.Sync(0,sizeof(pkgCache::Header));
}
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
   if (oldMap == newMap)
      // ...

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;
   // ...
   CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
	 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;

   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
	i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
	i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
}
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
					const unsigned long &Len) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String, Len);
   // ...
   ReMap(oldMap, Map.Data());
   // ...
}
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String);
   // ...
   ReMap(oldMap, Map.Data());
   // ...
}
map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.Allocate(size);
   // ...
   ReMap(oldMap, Map.Data());
   // ...
}
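// Every allocation above may grow the DynamicMMap and therefore move it, so
// callers that keep raw pointers into the map (for example the Last*
// link pointers used while appending to linked lists further below) capture
// Map.Data() before the write and rebase those pointers afterwards.  A tiny
// standalone sketch of that capture-and-rebase arithmetic (plain C++, two
// local buffers standing in for the old and the new mapping):
#if 0
#include <cstring>

static void Example()
{
   char oldBuf[64];
   char newBuf[64];
   std::memset(oldBuf, 0, sizeof(oldBuf));

   char *p = oldBuf + 12;                        // raw pointer into the "map"
   std::memcpy(newBuf, oldBuf, sizeof(oldBuf));  // the map has moved

   // keep the offset, swap the base -- the same arithmetic as
   // 'LastDesc += (map_ptrloc *) Map.Data() - (map_ptrloc *) oldMap' below
   p = newBuf + (p - oldBuf);
}
#endif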
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine. */
bool pkgCacheGenerator::MergeList(ListParser &List,
				  pkgCache::VerIterator *OutVer)
{
   // ...
   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string const PackageName = List.Package();
      if (PackageName.empty() == true)
	 // ...

      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());
      // ...
      string Arch = List.Architecture();
      string const Version = List.Version();
      if (Version.empty() == true && Arch.empty() == true)
      {
	 // package descriptions
	 if (MergeListGroup(List, PackageName) == false)
	    // ...
      }
      // ...
      if (Arch.empty() == true)
      {
	 // use the pseudo arch 'none' for arch-less packages
	 // ...
	 /* We might build a SingleArchCache here, which we don't want to blow up
	    just for these :none packages to a proper MultiArchCache, so just ensure
	    that we always have a native package structure first for SingleArch */
	 pkgCache::PkgIterator NP;
	 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
	 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
	    // TRANSLATOR: The first placeholder is a package name,
	    // the other two should be copied verbatim as they include debug info
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 PackageName.c_str(), "NewPackage", 0);
      }
      // ...
      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (NewPackage(Pkg, PackageName, Arch) == false)
	 // TRANSLATOR: The first placeholder is a package name,
	 // the other two should be copied verbatim as they include debug info
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "NewPackage", 1);
      // ...
      if (Version.empty() == true)
      {
	 if (MergeListPackage(List, Pkg) == false)
	    // ...
      }
      // ...
      if (MergeListVersion(List, Pkg, Version, OutVer) == false)
	 // ...

      FoundFileDeps |= List.HasFileDeps();
      // ...
   }

   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
			     "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
			     "this APT is capable of."));

   FoundFileDeps |= List.HasFileDeps();
   // ...
}
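// MergeList() is driven entirely by the abstract parser interface: Step()
// advances to the next stanza and accessors such as Package(), Architecture()
// and Version() describe it.  A minimal standalone sketch of that kind of
// driver loop, against a deliberately simplified parser interface (this is an
// illustration, not APT's real ListParser contract):
#if 0
#include <string>

struct SimpleParser
{
   virtual bool Step() = 0;                    // advance to the next stanza
   virtual std::string Package() const = 0;    // fields of the current stanza
   virtual std::string Version() const = 0;
   virtual ~SimpleParser() {}
};

static bool Merge(SimpleParser &List)
{
   while (List.Step() == true)
   {
      std::string const Name = List.Package();
      if (Name.empty() == true)
	 continue;                             // skip unusable stanzas
      std::string const Version = List.Version();
      // a record without a version only carries extra data (descriptions);
      // otherwise group, package and version entries would be created here
      (void) Version;
   }
   return true;
}
#endif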
// CacheGenerator::MergeListGroup					/*{{{*/
bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
{
   pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
   // a group has no data of its own, only packages have it; stanzas like
   // this come from Translation- files to add descriptions, and without a
   // version we don't need a description for it…
   if (Grp.end() == true)
      // ...
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
      if (MergeListPackage(List, Pkg) == false)
	 // ...
   // ...
}
// CacheGenerator::MergeListPackage					/*{{{*/
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
{
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   std::string CurLang = List.DescriptionLanguage();
   // ...
   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
   {
      pkgCache::DescIterator Desc = Ver.DescriptionList();
      // ...
      // a version can only have one md5 describing it
      if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
	 // ...

      // don't add a new description if we already have one for the
      // given md5 and language
      if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)
	 // ...

      Dynamic<pkgCache::DescIterator> DynDesc(Desc);
      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      map_ptrloc *LastDesc = &Ver->DescriptionList;
      for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc);
      if (Desc.end() == false)
	 LastDesc = &Desc->NextDesc;

      void const * const oldMap = Map.Data();
      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
      if (unlikely(descindex == 0 && _error->PendingError()))
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewDescription", 1);
      if (oldMap != Map.Data())
	 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      *LastDesc = descindex;
      Desc->ParentPkg = Pkg.Index();

      if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
      // ...
   }
   // ...
}
// CacheGenerator::MergeListVersion					/*{{{*/
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
					 std::string const &Version, pkgCache::VerIterator* &OutVer)
{
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();
   // ...
   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
   {
      /* We know the list is sorted so we use that fact in the search.
	 Insertion of new versions is done with correct sorting */
      // ...
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
	 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
	 // Version is higher than the current version - insert here
	 // ...
	 // Version strings are equal - is the hash also equal?
	 if (Res == 0 && Ver->Hash == Hash)
	    // ...
	 // proceed with the next till we have either the right
	 // or we found another version (which will be lower)
      }
      // ...
      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
	 if (List.UsePackage(Pkg,Ver) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "UsePackage", 2);

	 if (NewFileVer(Ver,List) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "NewFileVer", 1);

	 // Read only a single record and return
	 // ...
      }
   }
   // ...
   map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 1);
   // ...
   if (oldMap != Map.Data())
      LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   // ...
   Ver->ParentPkg = Pkg.Index();
   // ...
   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileVer", 2);

   pkgCache::GrpIterator Grp = Pkg.Group();
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   /* If it is the first version of this package we need to add implicit
      Multi-Arch dependencies to all other package versions in the group now -
      otherwise we just add them for this new version */
   if (Pkg.VersionList()->NextVer == 0)
   {
      pkgCache::PkgIterator P = Grp.PackageList();
      Dynamic<pkgCache::PkgIterator> DynP(P);
      for (; P.end() != true; P = Grp.NextPkg(P))
      {
	 if (P->ID == Pkg->ID)
	    // ...
	 pkgCache::VerIterator V = P.VersionList();
	 Dynamic<pkgCache::VerIterator> DynV(V);
	 for (; V.end() != true; ++V)
	    if (unlikely(AddImplicitDepends(V, Pkg) == false))
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    Pkg.Name(), "AddImplicitDepends", 1);
      }
   }
   /* :none packages are packages without an architecture. They are forbidden by
      debian-policy, so usually they will only be in (old) dpkg status files -
      and dpkg will complain about them - and are pretty rare. We therefore do
      usually not create conflicts while the parent is created, but only if a :none
      package (= the target) appears. This creates incorrect dependencies on :none
      for architecture-specific dependencies on the package we copy from, but we
      will ignore this bug as architecture-specific dependencies are only allowed
      in jessie and until then the :none packages should be extinct (hopefully).
      In other words: This should work long enough to allow graceful removal of
      these packages, it is not supposed to allow users to keep using them … */
   if (strcmp(Pkg.Arch(), "none") == 0)
   {
      pkgCache::PkgIterator M = Grp.FindPreferredPkg();
      if (M.end() == false && Pkg != M)
      {
	 pkgCache::DepIterator D = M.RevDependsList();
	 Dynamic<pkgCache::DepIterator> DynD(D);
	 for (; D.end() == false; ++D)
	 {
	    if ((D->Type != pkgCache::Dep::Conflicts &&
		 D->Type != pkgCache::Dep::DpkgBreaks &&
		 D->Type != pkgCache::Dep::Replaces) ||
		D.ParentPkg().Group() == Grp)
	       // ...

	    map_ptrloc *OldDepLast = NULL;
	    pkgCache::VerIterator ConVersion = D.ParentVer();
	    Dynamic<pkgCache::VerIterator> DynV(ConVersion);
	    // duplicate the Conflicts/Breaks/Replaces for :none arch
	    // ...
	    NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast);
	    // ...
	    NewDepends(Pkg, ConVersion, D.TargetVer(),
		       D->CompareOp, D->Type, OldDepLast);
	 }
      }
   }
   // ...
   if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "AddImplicitDepends", 2);

   // Read only a single record and return
   // ...

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true)
      // ...
   std::string CurLang = List.DescriptionLanguage();
   // ...
   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   for (pkgCache::PkgIterator P = Grp.PackageList();
	P.end() == false; P = Grp.NextPkg(P))
   {
      for (pkgCache::VerIterator V = P.VersionList();
	   V.end() == false; ++V)
      {
	 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
	    // ...
	 Ver->DescriptionList = V->DescriptionList;
	 // ...
      }
   }
   // ...
   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);
   map_ptrloc *LastDesc = &Ver->DescriptionList;
   // ...
   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewDescription", 2);
   if (oldMap != Map.Data())
      LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastDesc = descindex;
   Desc->ParentPkg = Pkg.Index();

   if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileDesc", 2);
   // ...
}
// CacheGenerator::MergeFileProvides - Merge file provides		/*{{{*/
// ---------------------------------------------------------------------
/* If we found any file depends while parsing the main list we need to
   resolve them. Since it is undesirable to load the entire list of files
   into the cache as virtual packages, we do a two stage effort. MergeList
   identifies the file depends and this creates Provides for them by
   re-parsing all the indexes. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
{
   // ...
   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string PackageName = List.Package();
      if (PackageName.empty() == true)
	 // ...
      string Version = List.Version();
      if (Version.empty() == true)
	 // ...

      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (Pkg.end() == true)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "FindPkg", 1);
      // ...
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());
      // ...
      unsigned long Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      Dynamic<pkgCache::VerIterator> DynVer(Ver);
      for (; Ver.end() == false; ++Ver)
      {
	 if (Ver->Hash == Hash && Version == Ver.VerStr())
	 {
	    if (List.CollectFileProvides(Cache,Ver) == false)
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    PackageName.c_str(), "CollectFileProvides", 1);
	    // ...
	 }
      }
      // ...
      if (Ver.end() == true)
	 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
   }
   // ...
}
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
{
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)
      // ...

   map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))
      // ...

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   map_ptrloc const idxName = WriteStringInMap(Name);
   if (unlikely(idxName == 0))
      // ...
   // ...
   // Insert it into the hash table
   unsigned long const Hash = Cache.Hash(Name);
   Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
   Cache.HeaderP->GrpHashTable[Hash] = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
   // ...
}
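// Groups (and packages, below) are looked up through fixed-size hash tables
// whose buckets are intrusive singly linked chains: the new record is linked
// in at the head of its bucket, exactly like Grp->Next / GrpHashTable above.
// A standalone sketch of that head-insertion scheme (plain C++, all names and
// sizes are illustrative only):
#if 0
#include <string>

struct Record
{
   std::string Name;
   Record *Next;                                   // intrusive chain link
};

static Record *HashTable[4096];                    // fixed number of buckets

static unsigned long HashOf(std::string const &Name)
{
   unsigned long Hash = 0;
   for (std::string::const_iterator I = Name.begin(); I != Name.end(); ++I)
      Hash = 41 * Hash + static_cast<unsigned char>(*I);   // any cheap string hash
   return Hash % (sizeof(HashTable)/sizeof(HashTable[0]));
}

static Record *FindOrInsert(std::string const &Name)
{
   unsigned long const Hash = HashOf(Name);
   for (Record *R = HashTable[Hash]; R != 0; R = R->Next)
      if (R->Name == Name)
	 return R;                                 // already present
   Record *R = new Record;
   R->Name = Name;
   R->Next = HashTable[Hash];                      // head insertion
   HashTable[Hash] = R;
   return R;
}
#endif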
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
				   const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))
      // ...

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)
      // ...

   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      // ...
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // 'all' is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))
      // ...
   Pkg->ID = Cache.HeaderP->PackageCount++;
   // ...
}
// CacheGenerator::AddImplicitDepends					/*{{{*/
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
					   pkgCache::PkgIterator &P,
					   pkgCache::VerIterator &V)
{
   // copy P.Arch() into a string here as a cache remap
   // in NewDepends() later may alter the pointer location
   string Arch = P.Arch() == NULL ? "" : P.Arch();
   map_ptrloc *OldDepLast = NULL;
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   pkgCache::PkgIterator D = G.PackageList();
   Dynamic<pkgCache::PkgIterator> DynD(D);
   for (; D.end() != true; D = G.NextPkg(D))
   {
      if (Arch == D.Arch() || D->VersionList == 0)
	 // ...
      /* We allow only one installed arch at a time
	 per group, therefore each group member conflicts
	 with all other group members */
      if (coInstall == true)
      {
	 // Replaces: ${self}:other ( << ${binary:Version})
	 NewDepends(D, V, V.VerStr(),
		    pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		    OldDepLast);
	 // Breaks: ${self}:other (!= ${binary:Version})
	 NewDepends(D, V, V.VerStr(),
		    pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		    OldDepLast);
      } else {
	 // Conflicts: ${self}:other
	 NewDepends(D, V, "",
		    pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		    OldDepLast);
      }
   }
   // ...
}
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
					   pkgCache::PkgIterator &D)
{
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   map_ptrloc *OldDepLast = NULL;
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   if (coInstall == true)
   {
      // Replaces: ${self}:other ( << ${binary:Version})
      NewDepends(D, V, V.VerStr(),
		 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		 OldDepLast);
      // Breaks: ${self}:other (!= ${binary:Version})
      NewDepends(D, V, V.VerStr(),
		 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		 OldDepLast);
   } else {
      // Conflicts: ${self}:other
      NewDepends(D, V, "",
		 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		 OldDepLast);
   }
   // ...
}
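// The implicit Multi-Arch relations created above follow a simple rule: a
// "Multi-Arch: same" version may be co-installed with its arch siblings only
// at exactly the same version (hence Replaces << version and Breaks !=
// version), while every other version simply Conflicts with its siblings.
// A standalone sketch of that decision (plain C++, illustrative types only):
#if 0
#include <vector>

enum DepType { Conflicts, DpkgBreaks, Replaces };
struct ImplicitDep { DepType Type; const char *VersionRestriction; };

// Which implicit relations does a version need against its arch siblings?
static std::vector<ImplicitDep> ImplicitDepsFor(bool multiArchSame, const char *verstr)
{
   std::vector<ImplicitDep> deps;
   if (multiArchSame == true)
   {
      ImplicitDep r = { Replaces, verstr };     // Replaces: self:other (<< version)
      ImplicitDep b = { DpkgBreaks, verstr };   // Breaks:   self:other (!= version)
      deps.push_back(r);
      deps.push_back(b);
   }
   else
   {
      ImplicitDep c = { Conflicts, 0 };         // Conflicts: self:other (unversioned)
      deps.push_back(c);
   }
   return deps;
}
#endif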
// CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
				   ListParser &List)
{
   if (CurrentFile == 0)
      // ...

   map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
   // ...
   pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
   VF->File = CurrentFile - Cache.PkgFileP;
   // ...
   // Link it to the end of the list
   map_ptrloc *Last = &Ver->FileList;
   for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
      // ...
   VF->NextFile = *Last;
   // ...
   VF->Offset = List.Offset();
   VF->Size = List.Size();
   if (Cache.HeaderP->MaxVerFileSize < VF->Size)
      Cache.HeaderP->MaxVerFileSize = VF->Size;
   Cache.HeaderP->VerFileCount++;
   // ...
}
// CacheGenerator::NewVersion - Create a new Version			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
					    const string &VerStr,
					    unsigned long Next)
{
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
   // ...
   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
   // ...
   Ver->ID = Cache.HeaderP->VersionCount++;
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))
      // ...
   Ver->VerStr = idxVerStr;
   // ...
}
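// Records live inside the mmap and are addressed by map_ptrloc offsets rather
// than raw pointers, so they stay valid when the file is mapped at a different
// address: AllocateInMap() returns an offset and an iterator is formed from
// "base pointer + offset" (Cache.VerP + Version above).  A standalone sketch
// of that offset-based arena idea (plain C++, illustrative only):
#if 0
#include <cstddef>
#include <vector>

typedef unsigned int map_ptrloc;

struct Version { map_ptrloc VerStr; map_ptrloc NextVer; unsigned int ID; };

struct Arena
{
   std::vector<unsigned char> Data;

   // Reserve Size bytes and return the offset of the new record.
   map_ptrloc Allocate(std::size_t Size)
   {
      map_ptrloc const Index = Data.size();
      Data.resize(Data.size() + Size);
      return Index;
   }
   // Offsets are turned into typed pointers only transiently.
   Version *VerP(map_ptrloc Index) { return reinterpret_cast<Version *>(&Data[0] + Index); }
};

static map_ptrloc NewVersionRecord(Arena &A, unsigned int &Count)
{
   map_ptrloc const Off = A.Allocate(sizeof(Version));
   A.VerP(Off)->ID = Count++;       // fill in via a freshly computed pointer
   return Off;                      // store only the offset
}
#endif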
// CacheGenerator::NewFileDesc - Create a new File<->Desc association	/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
				    ListParser &List)
{
   if (CurrentFile == 0)
      // ...

   map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
   // ...
   pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
   DF->File = CurrentFile - Cache.PkgFileP;
   // ...
   // Link it to the end of the list
   map_ptrloc *Last = &Desc->FileList;
   for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
      // ...
   DF->NextFile = *Last;
   // ...
   DF->Offset = List.Offset();
   DF->Size = List.Size();
   if (Cache.HeaderP->MaxDescFileSize < DF->Size)
      Cache.HeaderP->MaxDescFileSize = DF->Size;
   Cache.HeaderP->DescFileCount++;
   // ...
}
// CacheGenerator::NewDescription - Create a new Description		/*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
					     const string &Lang,
					     const MD5SumValue &md5sum,
					     map_ptrloc Next)
{
   map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)
      // ...

   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->NextDesc = Next;
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
   map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
   if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
      // ...
   Desc->language_code = idxlanguage_code;
   Desc->md5sum = idxmd5sum;
   // ...
}
// CacheGenerator::NewDepends - Create a dependency element		/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   string const &Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc * &OldDepLast)
{
   void const * const oldMap = Map.Data();
   // ...
   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      // ...

   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   // ...
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
   {
/*    for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
	 if (I->Version != 0 && I.TargetVer() == Version)
	    Dep->Version = I->Version;*/
      if (Dep->Version == 0) {
	 map_ptrloc const index = WriteStringInMap(Version);
	 if (unlikely(index == 0))
	    // ...
	 Dep->Version = index;
      }
   }
   // ...
   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
	 OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;
   // ...
}
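// OldDepLast caches the tail link of the version's dependency list across
// calls, so appending many dependencies does not re-walk the list each time;
// it is recomputed only when unset and rebased when the map moved.  A
// standalone sketch of that cached-tail append (plain C++, ordinary pointers):
#if 0
struct Dep { int Id; Dep *NextDepends; };

// Append Item to the list rooted at Head, reusing the cached tail link if the
// caller already has one from a previous append.
static void Append(Dep *&Head, Dep *Item, Dep **&OldDepLast)
{
   if (OldDepLast == 0)                       // first call: find the tail once
   {
      OldDepLast = &Head;
      for (Dep *D = Head; D != 0; D = D->NextDepends)
	 OldDepLast = &D->NextDepends;
   }
   Item->NextDepends = *OldDepLast;           // normally 0 at the tail
   *OldDepLast = Item;
   OldDepLast = &Item->NextDepends;           // next append is O(1)
}
#endif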
// ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and also handles the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
					       const string &PackageName,
					       const string &Arch,
					       const string &Version,
					       unsigned int Op,
					       unsigned int Type)
{
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
      // ...

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   // we don't create 'none' packages and their dependencies if we can avoid it …
   if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)
      // ...
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
	 // ...
   }

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially */
   if (OldDepVer != Ver) {
      // ...
   }

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
}
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
						const string &PkgName,
						const string &PkgArch,
						const string &Version)
{
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
      // ...

   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
      // ...
   Cache.HeaderP->ProvidesCount++;

   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
      // ...

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
      // ...

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();
   // ...
}
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
				   const pkgIndexFile &Index,
				   unsigned long Flags)
{
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))
      // ...
   CurrentFile = Cache.PkgFileP + idxFile;
   // ...
   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))
      // ...
   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))
      // ...
   CurrentFile->IndexType = idxIndexType;
   // ...
   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;
   // ...
   Progress->SubProgress(Index.Size());
   // ...
}
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   // ...
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
	I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      // ...
   }
   // ...
   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   // ...
   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
      // ...
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
   }
   // ...
   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;
   // ...
   return ItemP->String;
}
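// WriteUniqString() interns strings: identical text always maps to the same
// offset, found first via a tiny transient hash keyed on the first two
// characters and then via a sorted linked list of StringItems.  A standalone
// sketch of the same two-level lookup idea (plain C++, std::map standing in
// for the sorted list; all names are illustrative only):
#if 0
#include <map>
#include <string>

struct Interner
{
   std::map<std::string, unsigned long> Known;      // the "sorted list"
   unsigned long NextHandle;
   std::pair<std::string, unsigned long> Hot[256];  // tiny transient cache

   Interner() : NextHandle(1) {}

   unsigned long Handle(std::string const &S)
   {
      std::size_t const Bucket = S.empty() ? 0 : (static_cast<unsigned char>(S[0]) * 5) % 256;
      if (Hot[Bucket].second != 0 && Hot[Bucket].first == S)
	 return Hot[Bucket].second;                  // fast path, as UniqHash above

      std::map<std::string, unsigned long>::iterator I = Known.find(S);
      if (I == Known.end())
	 I = Known.insert(std::make_pair(S, NextHandle++)).first;
      Hot[Bucket] = *I;
      return I->second;
   }
};
#endif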
// CheckValidity - Check that a cache is up-to-date			/*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
   has matching attributes with the cache and the cache does not have
   any extra files. */
static bool CheckValidity(const string &CacheFile,
			  pkgSourceList &List,
			  FileIterator Start, FileIterator End,
			  MMap **OutMap = 0)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
   {
      // ...
	 std::clog << "CacheFile doesn't exist" << std::endl;
      // ...
   }

   if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
   {
      // ...
	 std::clog << "sources.list is newer than the cache" << std::endl;
      // ...
   }

   // Map it
   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
   {
      // ...
	 std::clog << "Errors are pending or Map is empty()" << std::endl;
      // ...
   }

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; ++Start)
   {
      // ...
	 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      if ((*Start)->HasPackages() == false)
      {
	 // ...
	    std::clog << "Has NO packages" << std::endl;
	 // ...
      }

      if ((*Start)->Exists() == false)
      {
#if 0 // mvo: we no longer give a message here (Default Sources spec)
	 _error->WarningE("stat",_("Couldn't stat source package list %s"),
			  (*Start)->Describe().c_str());
#endif
	 // ...
	    std::clog << "file doesn't exist" << std::endl;
	 // ...
      }

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
      {
	 // ...
	    std::clog << "FindInCache returned end-Pointer" << std::endl;
	 // ...
      }

      Visited[File->ID] = true;
      // ...
	 std::clog << "with ID " << File->ID << " is valid" << std::endl;
   }

   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
      {
	 // ...
	    std::clog << "File with ID " << I << " wasn't visited" << std::endl;
	 // ...
      }

   if (_error->PendingError() == true)
   {
      // ...
	 std::clog << "Validity failed because of pending errors:" << std::endl;
	 _error->DumpErrors();
      // ...
   }
   // ...
   *OutMap = Map.UnGuard();
   // ...
}
// ComputeSize - Compute the total size of a bunch of files		/*{{{*/
// ---------------------------------------------------------------------
/* Size is kind of an abstract notion that is only used for the progress
   reporting */
static unsigned long ComputeSize(FileIterator Start,FileIterator End)
{
   unsigned long TotalSize = 0;
   for (; Start != End; ++Start)
   {
      if ((*Start)->HasPackages() == false)
	 // ...
      TotalSize += (*Start)->Size();
   }
   // ...
}
// BuildCache - Merge the list of index files into the cache		/*{{{*/
// ---------------------------------------------------------------------
static bool BuildCache(pkgCacheGenerator &Gen,
		       OpProgress *Progress,
		       unsigned long &CurrentSize,unsigned long TotalSize,
		       FileIterator Start, FileIterator End)
{
   FileIterator I;
   for (I = Start; I != End; ++I)
   {
      if ((*I)->HasPackages() == false)
	 // ...
      if ((*I)->Exists() == false)
	 // ...

      if ((*I)->FindInCache(Gen.GetCache()).end() == false)
      {
	 _error->Warning("Duplicate sources.list entry %s",
			 (*I)->Describe().c_str());
	 // ...
      }
      // ...
      unsigned long Size = (*I)->Size();
      if (Progress != NULL)
	 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
      CurrentSize += Size;

      if ((*I)->Merge(Gen,Progress) == false)
	 // ...
   }

   if (Gen.HasFileDeps() == true)
   {
      if (Progress != NULL)
	 // ...
      TotalSize = ComputeSize(Start, End);
      // ...
      for (I = Start; I != End; ++I)
      {
	 unsigned long Size = (*I)->Size();
	 if (Progress != NULL)
	    Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
	 CurrentSize += Size;
	 if ((*I)->MergeFileProvides(Gen,Progress) == false)
	    // ...
      }
   }
   // ...
}
// CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
   unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
   unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
   unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
   Flags |= MMap::Moveable;
   if (_config->FindB("APT::Cache-Fallback", false) == true)
      Flags |= MMap::Fallback;
   if (CacheF != NULL)
      return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
   else
      return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
}
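// The sizing of the cache mmap can therefore be tuned from configuration; an
// apt.conf fragment using the options read above might look like this (the
// values shown are only examples, not recommendations):
//
//    APT::Cache-Start "25165824";     // initial size of the map in bytes
//    APT::Cache-Grow  "2097152";      // grow step when the map runs full
//    APT::Cache-Limit "0";            // 0 = no upper bound on the map size
//    APT::Cache-Fallback "false";     // set to true to request MMap::Fallback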
// CacheGenerator::MakeStatusCache - Construct the status cache	/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmapped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
__deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
			MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   std::vector<pkgIndexFile *> Files;
   for (std::vector<metaIndex *>::const_iterator i = List.begin();
	i != List.end(); ++i)
   {
      std::vector<pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end(); ++j)
	 Files.push_back (*j);
   }
   // ...
   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      // ...

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   // ...
   if (SrcCacheFile.empty() == false)
      Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   // ...
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
      // ...
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      // ...
   }
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
      … */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
      {
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	 // ...
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 // ...
	 if (AllowMem == false)
	    // ...
	    _error->MergeWithStack();
	 // ...
	 _error->RevertToStack();
      }
      // ...
      _error->MergeWithStack();
      // ...
	 std::clog << "Open filebased MMap" << std::endl;
   }
   if (Writeable == false || CacheFile.empty() == true)
   {
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      // ...
	 std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Let's try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
   {
      // ...
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
	  || SCacheF.Read((unsigned char *)Map->Data() + alloc,
			  SCacheF.Size()) == false)
	 // ...

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 // ...
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 // ...
   }
   else
   {
      // ...
	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 // ...
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)
	 // ...

      if (Writeable == true && SrcCacheFile.empty() == false)
      {
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	    // ...
	 fchmod(SCacheF.Fd(),0644);

	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 // ...
	 // Write out the proper header
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;
	 // ...
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 // ...
   }
   // ...
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      // ...
      delete Map.UnGuard();
      *OutMap = new MMap(*CacheF,0);
   // ...
      *OutMap = Map.UnGuard();
   // ...
}
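// A typical caller builds the cache through this entry point and then
// constructs a pkgCache on top of the returned MMap.  A minimal usage sketch
// (assumes an already configured APT environment via the usual pkgInitConfig/
// pkgInitSystem setup; error reporting trimmed):
#if 0
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/mmap.h>

static pkgCache *BuildCacheForReading()
{
   pkgSourceList List;
   if (List.ReadMainList() == false)        // parse sources.list
      return 0;

   OpTextProgress Progress;
   MMap *OutMap = 0;
   // AllowMem == true: fall back to an in-memory cache when not writable
   if (pkgCacheGenerator::MakeStatusCache(List, &Progress, &OutMap, true) == false)
      return 0;

   return new pkgCache(OutMap);             // caller owns OutMap and the cache
}
#endif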
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
__deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
{
   std::vector<pkgIndexFile *> Files;
   unsigned long EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      // ...

   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   // ...
   TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

   // Build the status cache
   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));
   pkgCacheGenerator Gen(Map.Get(),Progress);
   if (_error->PendingError() == true)
      // ...
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		  Files.begin()+EndOfSource,Files.end()) == false)
      // ...

   if (_error->PendingError() == true)
      // ...
   *OutMap = Map.UnGuard();
   // ...
}
// IsDuplicateDescription						/*{{{*/
static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
				   MD5SumValue const &CurMd5, std::string const &CurLang)
{
   // Descriptions in the same linked list all have the same md5
   if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
      // ...
   for (; Desc.end() == false; ++Desc)
      if (Desc.LanguageCode() == CurLang)
	 // ...
   // ...
}
// CacheGenerator::FinishCache						/*{{{*/
bool pkgCacheGenerator::FinishCache(OpProgress *Progress)