// -*- mode: cpp; mode: fold -*-
// $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
/* ######################################################################

   Package Cache Generator - Generator for the cache structure.

   This builds the cache structure from the abstract package list parser.

   ##################################################################### */
// Include Files							/*{{{*/
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
#include <apt-pkg/macros.h>
#include <apt-pkg/tagfile.h>
#include <apt-pkg/metaindex.h>
#include <apt-pkg/fileutl.h>
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
                                   MD5SumValue const &CurMd5, std::string const &CurLang);
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that it is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
                   Map(*pMap), Cache(pMap,false), Progress(Prog),
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
   // Setup the map interface..
   Cache.HeaderP = (pkgCache::Header *)Map.Data();
   if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)

   Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

   *Cache.HeaderP = pkgCache::Header();
   map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
   Cache.HeaderP->VerSysName = idxVerSysName;
   map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
   Cache.HeaderP->Architecture = idxArchitecture;
   if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
   // Map directly from the existing file
   Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
   if (Cache.VS != _system->VS)
      _error->Error(_("Cache has an incompatible versioning system"));

   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
// CacheGenerator::~pkgCacheGenerator - Destructor			/*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   avoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
   if (_error->PendingError() == true)
   if (Map.Sync() == false)

   Cache.HeaderP->Dirty = false;
   Cache.HeaderP->CacheFileSize = Map.Size();
   Map.Sync(0,sizeof(pkgCache::Header));
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
   if (oldMap == newMap)

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl;

   CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
         UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;

   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
        i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
        i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
        i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
        i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
        i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
        i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
        i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
                                               const unsigned long &Len) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String, Len);
   ReMap(oldMap, Map.Data());
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String);
   ReMap(oldMap, Map.Data());
map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.Allocate(size);
   ReMap(oldMap, Map.Data());
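/* Illustrative sketch (not part of the original file): every allocation above may
   grow and therefore move the mmap, so callers that keep raw pointers into the map
   save the old base address first and shift their pointers afterwards.  The same
   pattern appears throughout this file, roughly:

      void const * const oldMap = Map.Data();
      map_ptrloc const idx = AllocateInMap(sizeof(pkgCache::Something)); // may ReMap()
      if (oldMap != Map.Data())                                          // map was moved
         SomePtr += (map_ptrloc *) Map.Data() - (map_ptrloc *) oldMap;

   pkgCache::Something and SomePtr are placeholders for whatever structure and
   cached pointer the caller is working with. */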
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine. */
bool pkgCacheGenerator::MergeList(ListParser &List,
                                  pkgCache::VerIterator *OutVer)
   unsigned int Counter = 0;
   while (List.Step() == true)
      string const PackageName = List.Package();
      if (PackageName.empty() == true)

      if (Counter % 100 == 0 && Progress != 0)
         Progress->Progress(List.Offset());

      string Arch = List.Architecture();
      string const Version = List.Version();
      if (Version.empty() == true && Arch.empty() == true)
         if (MergeListGroup(List, PackageName) == false)

      if (Arch.empty() == true)
         // use the pseudo arch 'none' for arch-less packages
         /* We might build a SingleArchCache here, which we don't want to blow up
            just for these :none packages to a proper MultiArchCache, so just ensure
            that we always have a native package structure first for SingleArch */
         pkgCache::PkgIterator NP;
         if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
            // TRANSLATOR: The first placeholder is a package name,
            // the other two should be copied verbatim as they include debug info
            return _error->Error(_("Error occurred while processing %s (%s%d)"),
                                 PackageName.c_str(), "NewPackage", 0);

      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (NewPackage(Pkg, PackageName, Arch) == false)
         // TRANSLATOR: The first placeholder is a package name,
         // the other two should be copied verbatim as they include debug info
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              PackageName.c_str(), "NewPackage", 1);

      if (Version.empty() == true)
         if (MergeListPackage(List, Pkg) == false)

      if (MergeListVersion(List, Pkg, Version, OutVer) == false)

      FoundFileDeps |= List.HasFileDeps();

   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
                             "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
                             "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
                             "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
                             "this APT is capable of."));

   FoundFileDeps |= List.HasFileDeps();
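/* Illustrative sketch (not from the original file): MergeList() is normally not
   called directly; an index file's Merge() implementation first registers itself
   with SelectFile() and then hands its parser to MergeList(), roughly:

      if (Gen.SelectFile(PackagesFile, Site, *this) == false)
         return _error->Error("Problem with SelectFile");
      debListParser Parser(&Pkgs);                 // parser type is just an example
      if (Gen.MergeList(Parser) == false)
         return _error->Error("Problem with MergeList");

   The exact arguments differ per index type; see BuildCache() below for the loop
   that calls Merge() on every index file. */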
// CacheGenerator::MergeListGroup					/*{{{*/
bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
   pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
   // a group has no data of its own, only packages have it, but
   // stanzas like this come from Translation- files to add descriptions;
   // without a version we don't need a description for it…
   if (Grp.end() == true)
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
      if (MergeListPackage(List, Pkg) == false)
// CacheGenerator::MergeListPackage					/*{{{*/
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
      pkgCache::DescIterator Desc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)

      // don't add a new description if we have one for the given
      if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)

      Dynamic<pkgCache::DescIterator> DynDesc(Desc);
      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      map_ptrloc *LastDesc = &Ver->DescriptionList;
      for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc);
      if (Desc.end() == false)
         LastDesc = &Desc->NextDesc;

      void const * const oldMap = Map.Data();
      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
      if (unlikely(descindex == 0 && _error->PendingError()))
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              Pkg.Name(), "NewDescription", 1);
      if (oldMap != Map.Data())
         LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      *LastDesc = descindex;
      Desc->ParentPkg = Pkg.Index();

      if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
// CacheGenerator::MergeListVersion					/*{{{*/
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
                                         std::string const &Version, pkgCache::VerIterator* &OutVer)
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
      /* We know the list is sorted so we use that fact in the search.
         Insertion of new versions is done with correct sorting */
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
         Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
         // Version is higher than the current version - insert here
         // Version strings are equal - is the hash also equal?
         if (Res == 0 && Ver->Hash == Hash)
         // proceed with the next till we have either the right
         // or we found another version (which will be lower)

   /* We already have a version for this item, record that we saw it */
   if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      if (List.UsePackage(Pkg,Ver) == false)
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              Pkg.Name(), "UsePackage", 2);

      if (NewFileVer(Ver,List) == false)
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              Pkg.Name(), "NewFileVer", 1);

      // Read only a single record and return

   map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewVersion", 1);

   if (oldMap != Map.Data())
      LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Ver->ParentPkg = Pkg.Index();

   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewFileVer", 2);
   pkgCache::GrpIterator Grp = Pkg.Group();
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   /* If it is the first version of this package we need to add implicit
      Multi-Arch dependencies to all other package versions in the group now -
      otherwise we just add them for this new version */
   if (Pkg.VersionList()->NextVer == 0)
      pkgCache::PkgIterator P = Grp.PackageList();
      Dynamic<pkgCache::PkgIterator> DynP(P);
      for (; P.end() != true; P = Grp.NextPkg(P))
         if (P->ID == Pkg->ID)
         pkgCache::VerIterator V = P.VersionList();
         Dynamic<pkgCache::VerIterator> DynV(V);
         for (; V.end() != true; ++V)
            if (unlikely(AddImplicitDepends(V, Pkg) == false))
               return _error->Error(_("Error occurred while processing %s (%s%d)"),
                                    Pkg.Name(), "AddImplicitDepends", 1);

   /* :none packages are packages without an architecture. They are forbidden by
      debian-policy, so usually they will only be in (old) dpkg status files -
      and dpkg will complain about them - and are pretty rare. We therefore do
      usually not create conflicts while the parent is created, but only if a :none
      package (= the target) appears. This creates incorrect dependencies on :none
      for architecture-specific dependencies on the package we copy from, but we
      will ignore this bug as architecture-specific dependencies are only allowed
      in jessie and until then the :none packages should be extinct (hopefully).
      In other words: This should work long enough to allow graceful removal of
      these packages, it is not supposed to allow users to keep using them … */
   if (strcmp(Pkg.Arch(), "none") == 0)
      pkgCache::PkgIterator M = Grp.FindPreferredPkg();
      if (M.end() == false && Pkg != M)
         pkgCache::DepIterator D = M.RevDependsList();
         Dynamic<pkgCache::DepIterator> DynD(D);
         for (; D.end() == false; ++D)
            if ((D->Type != pkgCache::Dep::Conflicts &&
                 D->Type != pkgCache::Dep::DpkgBreaks &&
                 D->Type != pkgCache::Dep::Replaces) ||
                D.ParentPkg().Group() == Grp)

            map_ptrloc *OldDepLast = NULL;
            pkgCache::VerIterator ConVersion = D.ParentVer();
            // duplicate the Conflicts/Breaks/Replaces for :none arch
               NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast);
               NewDepends(Pkg, ConVersion, D.TargetVer(),
                          D->CompareOp, D->Type, OldDepLast);

   if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "AddImplicitDepends", 2);

   // Read only a single record and return

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true)
   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   for (pkgCache::PkgIterator P = Grp.PackageList();
        P.end() == false; P = Grp.NextPkg(P))
      for (pkgCache::VerIterator V = P.VersionList();
           V.end() == false; ++V)
         if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
         Ver->DescriptionList = V->DescriptionList;

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);
   map_ptrloc *LastDesc = &Ver->DescriptionList;

   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewDescription", 2);
   if (oldMap != Map.Data())
      LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastDesc = descindex;
   Desc->ParentPkg = Pkg.Index();

   if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
                           Pkg.Name(), "NewFileDesc", 2);
// CacheGenerator::MergeFileProvides - Merge file provides		/*{{{*/
// ---------------------------------------------------------------------
/* If we found any file depends while parsing the main list we need to
   resolve them. Since it is undesired to load the entire list of files
   into the cache as virtual packages we do a two stage effort. MergeList
   identifies the file depends and this creates Provides for them by
   re-parsing all the indexes. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
   unsigned int Counter = 0;
   while (List.Step() == true)
      string PackageName = List.Package();
      if (PackageName.empty() == true)
      string Version = List.Version();
      if (Version.empty() == true)

      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (Pkg.end() == true)
         return _error->Error(_("Error occurred while processing %s (%s%d)"),
                              PackageName.c_str(), "FindPkg", 1);
      if (Counter % 100 == 0 && Progress != 0)
         Progress->Progress(List.Offset());

      unsigned long Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      Dynamic<pkgCache::VerIterator> DynVer(Ver);
      for (; Ver.end() == false; ++Ver)
         if (Ver->Hash == Hash && Version == Ver.VerStr())
            if (List.CollectFileProvides(Cache,Ver) == false)
               return _error->Error(_("Error occurred while processing %s (%s%d)"),
                                    PackageName.c_str(), "CollectFileProvides", 1);

      if (Ver.end() == true)
         _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
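/* Illustrative sketch (not from the original file): the "two stage effort" above is
   driven by BuildCache() further down, roughly:

      if ((*I)->Merge(Gen,Progress) == false)            // stage 1: MergeList() records file deps
         return false;
      ...
      if (Gen.HasFileDeps() == true)                     // stage 2: only if any were seen
         if ((*I)->MergeFileProvides(Gen,Progress) == false)
            return false;

   so MergeFileProvides() is only reached when a dependency on a path such as
   "/bin/sh" was encountered during the first pass. */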
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)

   map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   map_ptrloc const idxName = WriteStringInMap(Name);
   if (unlikely(idxName == 0))

   // Insert it into the hash table
   unsigned long const Hash = Cache.Hash(Name);
   Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
   Cache.HeaderP->GrpHashTable[Hash] = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
                                   const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)

   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   else // Group the Packages together
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // all is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))
   Pkg->ID = Cache.HeaderP->PackageCount++;
// CacheGenerator::AddImplicitDepends					/*{{{*/
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
                                           pkgCache::PkgIterator &P,
                                           pkgCache::VerIterator &V)
   // copy P.Arch() into a string here as a cache remap
   // in NewDepends() later may alter the pointer location
   string Arch = P.Arch() == NULL ? "" : P.Arch();
   map_ptrloc *OldDepLast = NULL;
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   pkgCache::PkgIterator D = G.PackageList();
   Dynamic<pkgCache::PkgIterator> DynD(D);
   for (; D.end() != true; D = G.NextPkg(D))
      if (Arch == D.Arch() || D->VersionList == 0)
      /* We allow only one installed arch at a time
         per group, therefore each group member conflicts
         with all other group members */
      if (coInstall == true)
         // Replaces: ${self}:other ( << ${binary:Version})
         NewDepends(D, V, V.VerStr(),
                    pkgCache::Dep::Less, pkgCache::Dep::Replaces,
         // Breaks: ${self}:other (!= ${binary:Version})
         NewDepends(D, V, V.VerStr(),
                    pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
      // Conflicts: ${self}:other
                    pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
                                           pkgCache::PkgIterator &D)
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   map_ptrloc *OldDepLast = NULL;
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   if (coInstall == true)
      // Replaces: ${self}:other ( << ${binary:Version})
      NewDepends(D, V, V.VerStr(),
                 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
      // Breaks: ${self}:other (!= ${binary:Version})
      NewDepends(D, V, V.VerStr(),
                 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
   // Conflicts: ${self}:other
                 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
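/* Illustrative example (not from the original file): for a "Multi-Arch: same"
   package libfoo present as libfoo:amd64 and libfoo:i386 at version 1.0, the
   implicit dependencies added above amount to roughly

      libfoo:i386 Replaces: libfoo:amd64 (<< 1.0)
      libfoo:i386 Breaks:   libfoo:amd64 (!= 1.0)

   i.e. the two instances stay co-installable only while they share the exact same
   version, while a package that is not "Multi-Arch: same" instead gets a plain
   Conflicts against the other architectures of its group.  Package name and
   version here are made up for the example. */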
// CacheGenerator::NewFileVer - Create a new File<->Version association	/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
   if (CurrentFile == 0)

   map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));

   pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
   VF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Ver->FileList;
   for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
   VF->NextFile = *Last;

   VF->Offset = List.Offset();
   VF->Size = List.Size();
   if (Cache.HeaderP->MaxVerFileSize < VF->Size)
      Cache.HeaderP->MaxVerFileSize = VF->Size;
   Cache.HeaderP->VerFileCount++;
// CacheGenerator::NewVersion - Create a new Version			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
                                            const string &VerStr,
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));

   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);

   Ver->ID = Cache.HeaderP->VersionCount++;
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))
   Ver->VerStr = idxVerStr;
// CacheGenerator::NewFileDesc - Create a new File<->Desc association	/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
   if (CurrentFile == 0)

   map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));

   pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
   DF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Desc->FileList;
   for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
   DF->NextFile = *Last;

   DF->Offset = List.Offset();
   DF->Size = List.Size();
   if (Cache.HeaderP->MaxDescFileSize < DF->Size)
      Cache.HeaderP->MaxDescFileSize = DF->Size;
   Cache.HeaderP->DescFileCount++;
// CacheGenerator::NewDescription - Create a new Description		/*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
                                             const MD5SumValue &md5sum,
   map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)

   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->NextDesc = Next;
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
   map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
   if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
   Desc->language_code = idxlanguage_code;
   Desc->md5sum = idxmd5sum;
// CacheGenerator::NewDepends - Create a dependency element		/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
                                   pkgCache::VerIterator &Ver,
                                   string const &Version,
                                   unsigned int const &Op,
                                   unsigned int const &Type,
                                   map_ptrloc* &OldDepLast)
   void const * const oldMap = Map.Data();
   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))

   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
      /* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
         if (I->Version != 0 && I.TargetVer() == Version)
            Dep->Version = I->Version;*/
      if (Dep->Version == 0) {
         map_ptrloc const index = WriteStringInMap(Version);
         if (unlikely(index == 0))
         Dep->Version = index;

   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
         OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;
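/* Illustrative note (not from the original file): OldDepLast lets a parser append a
   whole Depends line without rescanning the version's dependency list each time.
   A caller typically does something like

      map_ptrloc *OldDepLast = NULL;
      NewDepends(PkgA, Ver, "1.0", pkgCache::Dep::GreaterEq, pkgCache::Dep::Depends, OldDepLast);
      NewDepends(PkgB, Ver, "",    pkgCache::Dep::NoOp,      pkgCache::Dep::Depends, OldDepLast);

   where the first call walks to the end of Ver's list and every later call simply
   appends behind the previously created element (PkgA, PkgB and the version string
   are placeholders).  ListParser::NewDepends below additionally re-uses the cached
   pointer as long as it keeps working on the same version. */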
// ListParser::NewDepends - Create the environment for a new dependency	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and also handles the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
                                               const string &PackageName,
                                               const string &Version,
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   // we don't create 'none' packages and their dependencies if we can avoid it …
   if (Pkg.end() == true && Arch == "none")
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially */
   if (OldDepVer != Ver) {

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
                                                const string &PkgName,
                                                const string &PkgArch,
                                                const string &Version)
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
       (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))

   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
   Cache.HeaderP->ProvidesCount++;

   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
                                   const pkgIndexFile &Index,
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))
   CurrentFile = Cache.PkgFileP + idxFile;

   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))
   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))
   CurrentFile->IndexType = idxIndexType;

   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

   Progress->SubProgress(Index.Size());
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
        I = Cache.StringItemP + I->NextItem)
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);

   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   return ItemP->String;
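/* Illustrative example (not from the original file): because identical text always
   maps to the same offset, callers can compare interned strings by their map_ptrloc
   instead of byte-wise, e.g.

      map_ptrloc const a = WriteUniqString("amd64", 5);
      map_ptrloc const b = WriteUniqString("amd64", 5);
      // a == b holds, and Cache.StrP + a points at the single stored "amd64"

   which is exactly how architecture and site strings are shared above (the string
   "amd64" is just an example value). */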
// CheckValidity - Check that a cache is up-to-date			/*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
   has matching attributes with the cache and the cache does not have
   any extra files. */
static bool CheckValidity(const string &CacheFile,
                          pkgSourceList &List,
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
      std::clog << "CacheFile doesn't exist" << std::endl;

   if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
      std::clog << "sources.list is newer than the cache" << std::endl;

   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
      std::clog << "Errors are pending or Map is empty()" << std::endl;

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; ++Start)
      std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      if ((*Start)->HasPackages() == false)
         std::clog << "Has NO packages" << std::endl;
      if ((*Start)->Exists() == false)
#if 0 // mvo: we no longer give a message here (Default Sources spec)
         _error->WarningE("stat",_("Couldn't stat source package list %s"),
                          (*Start)->Describe().c_str());
         std::clog << "file doesn't exist" << std::endl;

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
         std::clog << "FindInCache returned end-Pointer" << std::endl;

      Visited[File->ID] = true;
      std::clog << "with ID " << File->ID << " is valid" << std::endl;

   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
         std::clog << "File with ID " << I << " wasn't visited" << std::endl;

   if (_error->PendingError() == true)
      std::clog << "Validity failed because of pending errors:" << std::endl;
      _error->DumpErrors();

   *OutMap = Map.UnGuard();
// ComputeSize - Compute the total size of a bunch of files		/*{{{*/
// ---------------------------------------------------------------------
/* Size is kind of an abstract notion that is only used for the progress
static unsigned long ComputeSize(FileIterator Start,FileIterator End)
   unsigned long TotalSize = 0;
   for (; Start != End; ++Start)
      if ((*Start)->HasPackages() == false)
      TotalSize += (*Start)->Size();
// BuildCache - Merge the list of index files into the cache		/*{{{*/
// ---------------------------------------------------------------------
static bool BuildCache(pkgCacheGenerator &Gen,
                       OpProgress *Progress,
                       unsigned long &CurrentSize,unsigned long TotalSize,
                       FileIterator Start, FileIterator End)
   for (I = Start; I != End; ++I)
      if ((*I)->HasPackages() == false)
      if ((*I)->Exists() == false)
      if ((*I)->FindInCache(Gen.GetCache()).end() == false)
         _error->Warning("Duplicate sources.list entry %s",
                         (*I)->Describe().c_str());

      unsigned long Size = (*I)->Size();
      if (Progress != NULL)
         Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
      CurrentSize += Size;

      if ((*I)->Merge(Gen,Progress) == false)

   if (Gen.HasFileDeps() == true)
      if (Progress != NULL)
      TotalSize = ComputeSize(Start, End);
      for (I = Start; I != End; ++I)
         unsigned long Size = (*I)->Size();
         if (Progress != NULL)
            Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
         CurrentSize += Size;
         if ((*I)->MergeFileProvides(Gen,Progress) == false)
// CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
   unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
   unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
   unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
   Flags |= MMap::Moveable;
   if (_config->FindB("APT::Cache-Fallback", false) == true)
      Flags |= MMap::Fallback;
   if (CacheF != NULL)
      return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
   return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
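/* Illustrative configuration sketch (not from the original file): the values read
   above can be tuned from apt.conf if the default 24 MB start size or 1 MB grow
   step is not enough, e.g.

      APT::Cache-Start "50331648";   // initial mmap size in bytes
      APT::Cache-Grow  "2097152";    // grow step once the map is full
      APT::Cache-Limit "0";          // 0 = no upper bound
      APT::Cache-Fallback "false";   // set true to force the non-mmap fallback

   The numeric values are example figures, not recommendations. */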
// CacheGenerator::MakeStatusCache - Construct the status cache		/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmaped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
__deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
                                     MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
                                        MMap **OutMap,bool AllowMem)
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   std::vector<pkgIndexFile *> Files;
   for (std::vector<metaIndex *>::const_iterator i = List.begin();
      std::vector<pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
           j != Indexes->end();
         Files.push_back (*j);

   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
         dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
         CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
         CreateDirectory(dir, flNotFile(SrcCacheFile));

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   if (SrcCacheFile.empty() == false)
      Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
      if (Progress != NULL)
         Progress->OverallProgress(1,1,1,_("Reading package lists"));
      std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
         delete CacheF.UnGuard();
         delete Map.UnGuard();
         std::clog << "Open filebased MMap FAILED" << std::endl;
         if (AllowMem == false)
            _error->MergeWithStack();
         _error->RevertToStack();
      _error->MergeWithStack();
      std::clog << "Open filebased MMap" << std::endl;
   if (Writeable == false || CacheFile.empty() == true)
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      std::clog << "Open memory Map (not filebased)" << std::endl;
   // Let's try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
                     Files.begin()+EndOfSource) == true)
      std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
          || SCacheF.Read((unsigned char *)Map->Data() + alloc,
                          SCacheF.Size()) == false)

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin()+EndOfSource,Files.end()) == false)

      std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin(),Files.begin()+EndOfSource) == false)

      if (Writeable == true && SrcCacheFile.empty() == false)
         FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
         if (_error->PendingError() == true)
         fchmod(SCacheF.Fd(),0644);

         // Write out the main data
         if (SCacheF.Write(Map->Data(),Map->Size()) == false)
            return _error->Error(_("IO Error saving source cache"));

         // Write out the proper header
         Gen.GetCache().HeaderP->Dirty = false;
         if (SCacheF.Seek(0) == false ||
             SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
            return _error->Error(_("IO Error saving source cache"));
         Gen.GetCache().HeaderP->Dirty = true;

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin()+EndOfSource,Files.end()) == false)

      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      delete Map.UnGuard();
      *OutMap = new MMap(*CacheF,0);

   *OutMap = Map.UnGuard();
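/* Illustrative caller sketch (not from the original file): a front-end that wants a
   ready-to-use cache typically goes through this entry point, roughly:

      OpTextProgress Prog(*_config);
      MMap *OutMap = NULL;
      if (pkgCacheGenerator::MakeStatusCache(SrcList, &Prog, &OutMap, true) == false)
         return false;                  // errors are already queued in _error
      pkgCache Cache(OutMap);           // the mmap now holds the merged cache

   SrcList stands for an already-read pkgSourceList; passing AllowMem == true lets
   the call succeed for non-root users by building the cache in memory only. */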
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
__deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
   std::vector<pkgIndexFile *> Files;
   unsigned long EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)

   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;

   TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

   // Build the status cache
   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));
   pkgCacheGenerator Gen(Map.Get(),Progress);
   if (_error->PendingError() == true)
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                  Files.begin()+EndOfSource,Files.end()) == false)

   if (_error->PendingError() == true)
   *OutMap = Map.UnGuard();
// IsDuplicateDescription						/*{{{*/
static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
                                   MD5SumValue const &CurMd5, std::string const &CurLang)
   // Descriptions in the same linked list all have the same md5
   if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
   for (; Desc.end() == false; ++Desc)
      if (Desc.LanguageCode() == CurLang)
// CacheGenerator::FinishCache						/*{{{*/
bool pkgCacheGenerator::FinishCache(OpProgress *Progress)