// -*- mode: cpp; mode: fold -*-
// $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
/* ######################################################################

   Package Cache Generator - Generator for the cache structure.

   This builds the cache structure from the abstract package list parser.

   ##################################################################### */
// Include Files							/*{{{*/
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
#include <apt-pkg/macros.h>
#include <apt-pkg/tagfile.h>
#include <apt-pkg/metaindex.h>
#include <apt-pkg/fileutl.h>
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
                                   MD5SumValue const &CurMd5, std::string const &CurLang);
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that it is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
                    Map(*pMap), Cache(pMap,false), Progress(Prog),
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)

      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      *Cache.HeaderP = pkgCache::Header();
      map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->VerSysName = idxVerSysName;
      map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
      Cache.HeaderP->Architecture = idxArchitecture;
      if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))

      // Map directly from the existing file
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
         _error->Error(_("Cache has an incompatible versioning system"));

   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
// CacheGenerator::~pkgCacheGenerator - Destructor			/*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   avoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
   if (_error->PendingError() == true)

   if (Map.Sync() == false)

   Cache.HeaderP->Dirty = false;
   Cache.HeaderP->CacheFileSize = Map.Size();
   Map.Sync(0,sizeof(pkgCache::Header));
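
// CacheGenerator::ReMap - Adjust pointers after the mmap has moved	/*{{{*/
// ---------------------------------------------------------------------
/* The DynamicMMap can relocate in memory when it has to grow. Every raw
   pointer computed against the old base address (CurrentFile, the UniqHash
   buckets and all iterators registered through Dynamic<>) is therefore
   shifted by the distance between the old and the new mapping. */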
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
   if (oldMap == newMap)

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;

   CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
         UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;

   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
        i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
        i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
        i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
        i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
        i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
        i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
        i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
                                               const unsigned long &Len) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String, Len);
   ReMap(oldMap, Map.Data());

// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String);
   ReMap(oldMap, Map.Data());

map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.Allocate(size);
   ReMap(oldMap, Map.Data());
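
/* All three helpers above may grow the map and thereby move it, which is why
   they call ReMap() afterwards. Callers that keep raw pointers into the map
   across such a call re-anchor them against the new base address; the pattern
   (names here are placeholders) looks like:

      void const * const oldMap = Map.Data();
      map_ptrloc const idx = WriteStringInMap(SomeString);
      if (oldMap != Map.Data())
         SomePtr += (map_ptrloc *) Map.Data() - (map_ptrloc *) oldMap;
*/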
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine. */
bool pkgCacheGenerator::MergeList(ListParser &List,
                                  pkgCache::VerIterator *OutVer)
   unsigned int Counter = 0;
   while (List.Step() == true)
      string const PackageName = List.Package();
      if (PackageName.empty() == true)

      if (Counter % 100 == 0 && Progress != 0)
         Progress->Progress(List.Offset());

      string Arch = List.Architecture();
      string const Version = List.Version();
      if (Version.empty() == true && Arch.empty() == true)
         // package descriptions
         if (MergeListGroup(List, PackageName) == false)

      if (Arch.empty() == true)
         // use the pseudo arch 'none' for arch-less packages
         /* We might build a SingleArchCache here, which we don't want to blow up
            just for these :none packages to a proper MultiArchCache, so just ensure
            that we always have a native package structure first for SingleArch */
         pkgCache::PkgIterator NP;
         Dynamic<pkgCache::PkgIterator> DynPkg(NP);
         if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
            // TRANSLATOR: The first placeholder is a package name,
            // the other two should be copied verbatim as they include debug info
            return _error->Error(_("Error occurred while processing %s (%s%d)"),
                                 PackageName.c_str(), "NewPackage", 0);

      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (NewPackage(Pkg, PackageName, Arch) == false)
         // TRANSLATOR: The first placeholder is a package name,
         // the other two should be copied verbatim as they include debug info
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              PackageName.c_str(), "NewPackage", 1);

      if (Version.empty() == true)
         if (MergeListPackage(List, Pkg) == false)

      if (MergeListVersion(List, Pkg, Version, OutVer) == false)

      FoundFileDeps |= List.HasFileDeps();

   // Each counter is limited by the width of the corresponding ID field
   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
                             "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
                             "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
                             "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
                             "this APT is capable of."));

   FoundFileDeps |= List.HasFileDeps();
// CacheGenerator::MergeListGroup					/*{{{*/
bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
   pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
   // a group has no data on its own, only packages have it, but stanzas
   // like this one come from Translation- files to add descriptions -
   // and without a version we don't need a description for it…
   if (Grp.end() == true)

   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
      if (MergeListPackage(List, Pkg) == false)
// CacheGenerator::MergeListPackage					/*{{{*/
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
      pkgCache::DescIterator Desc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)

      // don't add a new description if we have one for the given
      // language
      if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)

      Dynamic<pkgCache::DescIterator> DynDesc(Desc);
      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      map_ptrloc *LastDesc = &Ver->DescriptionList;
      for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc);
      if (Desc.end() == false)
         LastDesc = &Desc->NextDesc;

      void const * const oldMap = Map.Data();
      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
      if (unlikely(descindex == 0 && _error->PendingError()))
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              Pkg.Name(), "NewDescription", 1);
      if (oldMap != Map.Data())
         LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      *LastDesc = descindex;
      Desc->ParentPkg = Pkg.Index();

      if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
// CacheGenerator::MergeListVersion					/*{{{*/
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
                                         std::string const &Version, pkgCache::VerIterator* &OutVer)
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
      /* We know the list is sorted so we use that fact in the search.
         Insertion of new versions is done with correct sorting */
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
         Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
         // Version is higher than the current version - insert here

         // Version strings are equal - is the hash also equal?
         if (Res == 0 && Ver->Hash == Hash)

         // proceed with the next till we have either the right version
         // or we found another version (which will be lower)

   /* We already have a version for this item, record that we saw it */
   if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      if (List.UsePackage(Pkg,Ver) == false)
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              Pkg.Name(), "UsePackage", 2);

      if (NewFileVer(Ver,List) == false)
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              Pkg.Name(), "NewFileVer", 1);

      // Read only a single record and return

   map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewVersion", 1);

   if (oldMap != Map.Data())
      LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Ver->ParentPkg = Pkg.Index();

   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewFileVer", 2);

   pkgCache::GrpIterator Grp = Pkg.Group();
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   /* If it is the first version of this package we need to add implicit
      Multi-Arch dependencies to all other package versions in the group now -
      otherwise we just add them for this new version */
   if (Pkg.VersionList()->NextVer == 0)
      pkgCache::PkgIterator P = Grp.PackageList();
      Dynamic<pkgCache::PkgIterator> DynP(P);
      for (; P.end() != true; P = Grp.NextPkg(P))
         if (P->ID == Pkg->ID)

         pkgCache::VerIterator V = P.VersionList();
         Dynamic<pkgCache::VerIterator> DynV(V);
         for (; V.end() != true; ++V)
            if (unlikely(AddImplicitDepends(V, Pkg) == false))
               return _error->Error(_("Error occurred while processing %s (%s%d)"),
                                    Pkg.Name(), "AddImplicitDepends", 1);
   /* :none packages are packages without an architecture. They are forbidden by
      debian-policy, so usually they will only be in (old) dpkg status files -
      and dpkg will complain about them - and are pretty rare. We therefore do
      usually not create conflicts while the parent is created, but only if a :none
      package (= the target) appears. This creates incorrect dependencies on :none
      for architecture-specific dependencies on the package we copy from, but we
      will ignore this bug as architecture-specific dependencies are only allowed
      in jessie and until then the :none packages should be extinct (hopefully).
      In other words: This should work long enough to allow graceful removal of
      these packages, it is not supposed to allow users to keep using them … */
   if (strcmp(Pkg.Arch(), "none") == 0)
      pkgCache::PkgIterator M = Grp.FindPreferredPkg();
      if (M.end() == false && Pkg != M)
         pkgCache::DepIterator D = M.RevDependsList();
         Dynamic<pkgCache::DepIterator> DynD(D);
         for (; D.end() == false; ++D)
            if ((D->Type != pkgCache::Dep::Conflicts &&
                 D->Type != pkgCache::Dep::DpkgBreaks &&
                 D->Type != pkgCache::Dep::Replaces) ||
                D.ParentPkg().Group() == Grp)

            map_ptrloc *OldDepLast = NULL;
            pkgCache::VerIterator ConVersion = D.ParentVer();
            Dynamic<pkgCache::VerIterator> DynV(ConVersion);
            // duplicate the Conflicts/Breaks/Replaces for :none arch
               NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast);
               NewDepends(Pkg, ConVersion, D.TargetVer(),
                          D->CompareOp, D->Type, OldDepLast);

   if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "AddImplicitDepends", 2);

   // Read only a single record and return

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true)

   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   for (pkgCache::PkgIterator P = Grp.PackageList();
        P.end() == false; P = Grp.NextPkg(P))
      for (pkgCache::VerIterator V = P.VersionList();
           V.end() == false; ++V)
         if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)

         Ver->DescriptionList = V->DescriptionList;

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);
   map_ptrloc *LastDesc = &Ver->DescriptionList;

   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewDescription", 2);
   if (oldMap != Map.Data())
      LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastDesc = descindex;
   Desc->ParentPkg = Pkg.Index();

   if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewFileDesc", 2);
// CacheGenerator::MergeFileProvides - Merge file provides		/*{{{*/
// ---------------------------------------------------------------------
/* If we found any file dependencies while parsing the main list we need to
   resolve them. Since it is undesirable to load the entire list of files
   into the cache as virtual packages, we do a two stage effort. MergeList
   identifies the file dependencies and this creates Provides for them by
   re-parsing all the indexes. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
   unsigned int Counter = 0;
   while (List.Step() == true)
      string PackageName = List.Package();
      if (PackageName.empty() == true)

      string Version = List.Version();
      if (Version.empty() == true)

      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (Pkg.end() == true)
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              PackageName.c_str(), "FindPkg", 1);
      if (Counter % 100 == 0 && Progress != 0)
         Progress->Progress(List.Offset());

      unsigned long Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      Dynamic<pkgCache::VerIterator> DynVer(Ver);
      for (; Ver.end() == false; ++Ver)
         if (Ver->Hash == Hash && Version.c_str() == Ver.VerStr())
            if (List.CollectFileProvides(Cache,Ver) == false)
               return _error->Error(_("Error occurred while processing %s (%s%d)"),
                                    PackageName.c_str(), "CollectFileProvides", 1);

      if (Ver.end() == true)
         _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)

   map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   map_ptrloc const idxName = WriteStringInMap(Name);
   if (unlikely(idxName == 0))

   // Insert it into the hash table
   unsigned long const Hash = Cache.Hash(Name);
   Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
   Cache.HeaderP->GrpHashTable[Hash] = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
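
/* New groups are pushed at the head of their hash bucket chain: the group
   inherits the old bucket head as its Next link and the bucket then points
   at the freshly allocated group, so lookups walk newest-to-oldest. */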
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
                                   const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)

   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))

   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   else // Group the Packages together
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;

   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // all is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))

   Pkg->ID = Cache.HeaderP->PackageCount++;
// CacheGenerator::AddImplicitDepends					/*{{{*/
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
                                           pkgCache::PkgIterator &P,
                                           pkgCache::VerIterator &V)
   // copy P.Arch() into a string here as a cache remap
   // in NewDepends() later may alter the pointer location
   string Arch = P.Arch() == NULL ? "" : P.Arch();
   map_ptrloc *OldDepLast = NULL;
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   pkgCache::PkgIterator D = G.PackageList();
   Dynamic<pkgCache::PkgIterator> DynD(D);
   for (; D.end() != true; D = G.NextPkg(D))
      if (Arch == D.Arch() || D->VersionList == 0)

      /* We allow only one installed arch at a time
         per group, therefore each group member conflicts
         with all other group members */
      if (coInstall == true)
         // Replaces: ${self}:other ( << ${binary:Version})
         NewDepends(D, V, V.VerStr(),
                    pkgCache::Dep::Less, pkgCache::Dep::Replaces,
         // Breaks: ${self}:other (!= ${binary:Version})
         NewDepends(D, V, V.VerStr(),
                    pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
      // Conflicts: ${self}:other
                    pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
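
/* Illustration (hypothetical example, not in the original source): for a
   "Multi-Arch: same" package foo at version 1.0, the loop above makes the new
   version declare, against each other architecture of foo in the group,
   roughly "Replaces: foo (<< 1.0)" and "Breaks: foo (!= 1.0)"; a version that
   is not co-installable declares "Conflicts: foo" against them instead. */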
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
                                           pkgCache::PkgIterator &D)
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   map_ptrloc *OldDepLast = NULL;
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   if (coInstall == true)
      // Replaces: ${self}:other ( << ${binary:Version})
      NewDepends(D, V, V.VerStr(),
                 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
      // Breaks: ${self}:other (!= ${binary:Version})
      NewDepends(D, V, V.VerStr(),
                 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
      // Conflicts: ${self}:other
                 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
// CacheGenerator::NewFileVer - Create a new File<->Version association	/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
   if (CurrentFile == 0)

   map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));

   pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
   VF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Ver->FileList;
   for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
   VF->NextFile = *Last;

   VF->Offset = List.Offset();
   VF->Size = List.Size();
   if (Cache.HeaderP->MaxVerFileSize < VF->Size)
      Cache.HeaderP->MaxVerFileSize = VF->Size;
   Cache.HeaderP->VerFileCount++;
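
/* NewFileVer and NewFileDesc (below) associate the version/description with
   the index file selected through SelectFile (CurrentFile) and remember the
   parser's Offset and Size so the record can be located again later. */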
// CacheGenerator::NewVersion - Create a new Version			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
                                            const string &VerStr,
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));

   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   Dynamic<pkgCache::VerIterator> DynV(Ver);

   Ver->ID = Cache.HeaderP->VersionCount++;
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))

   Ver->VerStr = idxVerStr;
// CacheGenerator::NewFileDesc - Create a new File<->Desc association	/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
   if (CurrentFile == 0)

   map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));

   pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
   DF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Desc->FileList;
   for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
   DF->NextFile = *Last;

   DF->Offset = List.Offset();
   DF->Size = List.Size();
   if (Cache.HeaderP->MaxDescFileSize < DF->Size)
      Cache.HeaderP->MaxDescFileSize = DF->Size;
   Cache.HeaderP->DescFileCount++;
// CacheGenerator::NewDescription - Create a new Description		/*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
                                             const MD5SumValue &md5sum,
   map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)

   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->NextDesc = Next;
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
   map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
   if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))

   Desc->language_code = idxlanguage_code;
   Desc->md5sum = idxmd5sum;
// CacheGenerator::NewDepends - Create a dependency element		/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
                                   pkgCache::VerIterator &Ver,
                                   string const &Version,
                                   unsigned int const &Op,
                                   unsigned int const &Type,
                                   map_ptrloc * &OldDepLast)
   void const * const oldMap = Map.Data();

   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))

   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
/*      for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
         if (I->Version != 0 && I.TargetVer() == Version)
            Dep->Version = I->Version;*/
      if (Dep->Version == 0) {
         map_ptrloc const index = WriteStringInMap(Version);
         if (unlikely(index == 0))

         Dep->Version = index;

   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
         OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;
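
/* OldDepLast caches the address of the tail pointer of the version's
   dependency list: the first call walks the list once, every further call
   for the same version can then append in constant time and the list keeps
   the order in which the dependencies were parsed. */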
// ListParser::NewDepends - Create the environment for a new dependency	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and also handles the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
                                               const string &PackageName,
                                               const string &Version,
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   // we don't create 'none' packages and their dependencies if we can avoid it …
   if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)

   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially */
   if (OldDepVer != Ver) {

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
                                                const string &PkgName,
                                                const string &PkgArch,
                                                const string &Version)
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
        (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))

   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))

   Cache.HeaderP->ProvidesCount++;

   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
                                   const pkgIndexFile &Index,
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))

   CurrentFile = Cache.PkgFileP + idxFile;

   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))

   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))

   CurrentFile->IndexType = idxIndexType;

   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

      Progress->SubProgress(Index.Size());
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
        I = Cache.StringItemP + I->NextItem)
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);

   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))

   if (oldMap != Map.Data()) {
      Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   return ItemP->String;
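
/* The UniqHash buckets only memoise the most recently used string per bucket;
   the authoritative store is the StringItem list inside the cache itself,
   which is why a bucket miss falls back to the list walk above. */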
// CheckValidity - Check that a cache is up-to-date			/*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
   has matching attributes with the cache and the cache does not have
   any extra files in it. */
static bool CheckValidity(const string &CacheFile,
                          pkgSourceList &List,
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
         std::clog << "CacheFile doesn't exist" << std::endl;

   if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
         std::clog << "sources.list is newer than the cache" << std::endl;

   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
         std::clog << "Errors are pending or Map is empty()" << std::endl;

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; ++Start)
         std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      if ((*Start)->HasPackages() == false)
            std::clog << "Has NO packages" << std::endl;

      if ((*Start)->Exists() == false)
#if 0 // mvo: we no longer give a message here (Default Sources spec)
         _error->WarningE("stat",_("Couldn't stat source package list %s"),
                          (*Start)->Describe().c_str());
            std::clog << "file doesn't exist" << std::endl;

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
            std::clog << "FindInCache returned end-Pointer" << std::endl;

      Visited[File->ID] = true;
         std::clog << "with ID " << File->ID << " is valid" << std::endl;

   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
            std::clog << "File with ID" << I << " wasn't visited" << std::endl;

   if (_error->PendingError() == true)
         std::clog << "Validity failed because of pending errors:" << std::endl;
         _error->DumpErrors();

      *OutMap = Map.UnGuard();
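
/* CheckValidity therefore fails (and a rebuild is triggered by the caller) if
   the cache file is missing, older than sources.list, refers to an index that
   vanished or changed (the IMS check in FindInCache), or contains package
   files that no longer appear in the configured sources. */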
// ComputeSize - Compute the total size of a bunch of files		/*{{{*/
// ---------------------------------------------------------------------
/* Size is kind of an abstract notion that is only used for the progress
   meter. */
static unsigned long ComputeSize(FileIterator Start,FileIterator End)
   unsigned long TotalSize = 0;
   for (; Start != End; ++Start)
      if ((*Start)->HasPackages() == false)

      TotalSize += (*Start)->Size();
// BuildCache - Merge the list of index files into the cache		/*{{{*/
// ---------------------------------------------------------------------
static bool BuildCache(pkgCacheGenerator &Gen,
                       OpProgress *Progress,
                       unsigned long &CurrentSize,unsigned long TotalSize,
                       FileIterator Start, FileIterator End)
   for (I = Start; I != End; ++I)
      if ((*I)->HasPackages() == false)

      if ((*I)->Exists() == false)

      if ((*I)->FindInCache(Gen.GetCache()).end() == false)
         _error->Warning("Duplicate sources.list entry %s",
                         (*I)->Describe().c_str());

      unsigned long Size = (*I)->Size();
      if (Progress != NULL)
         Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
      CurrentSize += Size;

      if ((*I)->Merge(Gen,Progress) == false)

   if (Gen.HasFileDeps() == true)
      if (Progress != NULL)
      TotalSize = ComputeSize(Start, End);
      for (I = Start; I != End; ++I)
         unsigned long Size = (*I)->Size();
         if (Progress != NULL)
            Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
         CurrentSize += Size;
         if ((*I)->MergeFileProvides(Gen,Progress) == false)
// CacheGenerator::CreateDynamicMMap - load an mmap with configuration options	/*{{{*/
DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
   unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
   unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
   unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
   Flags |= MMap::Moveable;
   if (_config->FindB("APT::Cache-Fallback", false) == true)
      Flags |= MMap::Fallback;
   if (CacheF != NULL)
      return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
   else
      return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
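
/* The defaults above can be overridden through configuration; a sketch of how
   a caller might tune them before the cache is (re)built (the values here are
   made up for illustration):

      _config->Set("APT::Cache-Start", 32*1024*1024);   // initial mmap size
      _config->Set("APT::Cache-Grow",   2*1024*1024);   // growth step
      _config->Set("APT::Cache-Limit",  0);             // 0 = no upper bound
*/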
// CacheGenerator::MakeStatusCache - Construct the status cache	/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmaped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
__deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
                        MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
                        MMap **OutMap,bool AllowMem)
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   std::vector<pkgIndexFile *> Files;
   for (std::vector<metaIndex *>::const_iterator i = List.begin();
      std::vector<pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
           j != Indexes->end();
         Files.push_back (*j);

   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
         dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
         CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
         CreateDirectory(dir, flNotFile(SrcCacheFile));

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
      if (SrcCacheFile.empty() == false)
         Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
      if (Progress != NULL)
         Progress->OverallProgress(1,1,1,_("Reading package lists"));
         std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;
   /* At this point we know we need to reconstruct the package cache,
      so build it from the index files. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
         delete CacheF.UnGuard();
         delete Map.UnGuard();
            std::clog << "Open filebased MMap FAILED" << std::endl;
         if (AllowMem == false)
            _error->MergeWithStack();

         _error->RevertToStack();

      _error->MergeWithStack();
         std::clog << "Open filebased MMap" << std::endl;

   if (Writeable == false || CacheFile.empty() == true)
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
         std::clog << "Open memory Map (not filebased)" << std::endl;

   // Lets try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
                     Files.begin()+EndOfSource) == true)
         std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
          || SCacheF.Read((unsigned char *)Map->Data() + alloc,
                          SCacheF.Size()) == false)

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin()+EndOfSource,Files.end()) == false)

         std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin(),Files.begin()+EndOfSource) == false)

      if (Writeable == true && SrcCacheFile.empty() == false)
         FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
         if (_error->PendingError() == true)
         fchmod(SCacheF.Fd(),0644);

         // Write out the main data
         if (SCacheF.Write(Map->Data(),Map->Size()) == false)
            return _error->Error(_("IO Error saving source cache"));

         // Write out the proper header
         Gen.GetCache().HeaderP->Dirty = false;
         if (SCacheF.Seek(0) == false ||
             SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
            return _error->Error(_("IO Error saving source cache"));
         Gen.GetCache().HeaderP->Dirty = true;

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin()+EndOfSource,Files.end()) == false)

      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      delete Map.UnGuard();
      *OutMap = new MMap(*CacheF,0);

      *OutMap = Map.UnGuard();
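
/* Usage sketch (illustrative only, not part of this file): a front-end that
   wants the fully merged cache would typically do something like

      pkgInitConfig(*_config);
      pkgInitSystem(*_config, _system);
      OpProgress Prog;
      pkgSourceList List;
      MMap *OutMap = 0;
      if (List.ReadMainList() == true &&
          pkgCacheGenerator::MakeStatusCache(List, &Prog, &OutMap, true) == true)
         pkgCache Cache(OutMap);   // mmap-backed cache, ready for iteration

   with error reporting left to the global _error stack as usual. */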
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
__deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
   std::vector<pkgIndexFile *> Files;
   unsigned long EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)

   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;

   TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

   // Build the status cache
   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));
   pkgCacheGenerator Gen(Map.Get(),Progress);
   if (_error->PendingError() == true)
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                  Files.begin()+EndOfSource,Files.end()) == false)

   if (_error->PendingError() == true)
   *OutMap = Map.UnGuard();
// IsDuplicateDescription						/*{{{*/
static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
                                   MD5SumValue const &CurMd5, std::string const &CurLang)
   // Descriptions in the same link-list all have the same md5
   if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)

   for (; Desc.end() == false; ++Desc)
      if (Desc.LanguageCode() == CurLang)

// CacheGenerator::FinishCache						/*{{{*/
bool pkgCacheGenerator::FinishCache(OpProgress *Progress)