// -*- mode: cpp; mode: fold -*-
// $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
/* ######################################################################

   Package Cache Generator - Generator for the cache structure.

   This builds the cache structure from the abstract package list parser.

   ##################################################################### */
// Include Files							/*{{{*/
#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
#include <apt-pkg/macros.h>
#include <apt-pkg/tagfile.h>
#include <apt-pkg/metaindex.h>
#include <apt-pkg/fileutl.h>
typedef std::vector<pkgIndexFile *>::iterator FileIterator;
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
				   MD5SumValue const &CurMd5, std::string const &CurLang);
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that it is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
		    Map(*pMap), Cache(pMap,false), Progress(Prog),
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)

      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      *Cache.HeaderP = pkgCache::Header();
      map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->VerSysName = idxVerSysName;
      // this pointer is set in ReMap, but we need it now for WriteUniqString
      Cache.StringItemP = (pkgCache::StringItem *)Map.Data();
      map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture"));
      Cache.HeaderP->Architecture = idxArchitecture;
      if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))

      // Map directly from the existing file
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
	 _error->Error(_("Cache has an incompatible versioning system"));

   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
// CacheGenerator::~pkgCacheGenerator - Destructor			/*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   avoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
   if (_error->PendingError() == true)
   if (Map.Sync() == false)

   Cache.HeaderP->Dirty = false;
   Cache.HeaderP->CacheFileSize = Map.Size();
   Map.Sync(0,sizeof(pkgCache::Header));
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
   if (oldMap == newMap)

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;

   CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
	 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;

   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
	i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
	i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
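   // A minimal usage sketch (hypothetical caller, not part of the upstream
   // code) of the remap protection above: registering an iterator in a
   // Dynamic<> guard lets ReMap() rebase it when a later allocation moves the map.
   //
   //    pkgCache::PkgIterator Pkg = Cache.FindPkg("apt");   // "apt" is just an example name
   //    Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);         // now listed in toReMap
   //    map_ptrloc const idx = WriteStringInMap("example"); // may grow and move the mmap
   //    // Pkg is still usable here because ReMap() adjusted it by (newMap - oldMap)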
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
					const unsigned long &Len) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String, Len);
      ReMap(oldMap, Map.Data());
// CacheGenerator::WriteStringInMap					/*{{{*/
map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.WriteString(String);
      ReMap(oldMap, Map.Data());
map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
   void const * const oldMap = Map.Data();
   map_ptrloc const index = Map.Allocate(size);
      ReMap(oldMap, Map.Data());
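   // The three helpers above return map_ptrloc offsets rather than raw
   // pointers, so a stored result stays valid even when the allocation forces
   // the DynamicMMap to relocate; only raw pointers (CurrentFile, the UniqHash
   // buckets and the registered Dynamic<> iterators) need the ReMap() fix-up.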
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine. */
bool pkgCacheGenerator::MergeList(ListParser &List,
				  pkgCache::VerIterator *OutVer)
   unsigned int Counter = 0;
   while (List.Step() == true)
      string const PackageName = List.Package();
      if (PackageName.empty() == true)

      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      string Arch = List.Architecture();
      string const Version = List.Version();
      if (Version.empty() == true && Arch.empty() == true)
	 // package descriptions
	 if (MergeListGroup(List, PackageName) == false)

      if (Arch.empty() == true)
	 // use the pseudo arch 'none' for arch-less packages
	 /* We might build a SingleArchCache here, which we don't want to blow up
	    just for these :none packages into a proper MultiArchCache, so just ensure
	    that we always have a native package structure first for SingleArch */
	 pkgCache::PkgIterator NP;
	 Dynamic<pkgCache::PkgIterator> DynPkg(NP);
	 if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
	    // TRANSLATOR: The first placeholder is a package name,
	    // the other two should be copied verbatim as they include debug info
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 PackageName.c_str(), "NewPackage", 0);

      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (NewPackage(Pkg, PackageName, Arch) == false)
	 // TRANSLATOR: The first placeholder is a package name,
	 // the other two should be copied verbatim as they include debug info
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "NewPackage", 1);

      if (Version.empty() == true)
	 if (MergeListPackage(List, Pkg) == false)

      if (MergeListVersion(List, Pkg, Version, OutVer) == false)

      FoundFileDeps |= List.HasFileDeps();

   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
			     "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
			     "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
			     "this APT is capable of."));

   FoundFileDeps |= List.HasFileDeps();
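// A minimal driving sketch (hypothetical caller; Parser, IndexFileName, Site
// and Index stand in for a concrete ListParser subclass and its index file -
// none of them are defined in this file):
//
//    pkgCacheGenerator Gen(Map, Prog);   // Map: DynamicMMap*, Prog: OpProgress*
//    if (Gen.SelectFile(IndexFileName, Site, Index, 0) == false ||
//        Gen.MergeList(Parser, NULL) == false)
//       return false;                    // the parsed records are now in the cache
//    if (Gen.HasFileDeps() == true)
//       Gen.MergeFileProvides(Parser);   // second pass resolves file dependencies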
// CacheGenerator::MergeListGroup					/*{{{*/
bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
   pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
   // a group has no data on its own, only packages have it, but stanzas
   // like this come from Translation- files to add descriptions,
   // and without a version we don't need a description for it…
   if (Grp.end() == true)

   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
      if (MergeListPackage(List, Pkg) == false)
// CacheGenerator::MergeListPackage					/*{{{*/
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
      pkgCache::DescIterator VerDesc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5)

      // don't add a new description if we have one for the given
      if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true)

      pkgCache::DescIterator Desc;
      Dynamic<pkgCache::DescIterator> DynDesc(Desc);

      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum);
      if (unlikely(descindex == 0 && _error->PendingError()))
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewDescription", 1);

      Desc->ParentPkg = Pkg.Index();

      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap
      for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc);
      map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc;
      *LastNextDesc = descindex;

      if (NewFileDesc(Desc,List) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
// CacheGenerator::MergeListVersion					/*{{{*/
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
					 std::string const &Version, pkgCache::VerIterator* &OutVer)
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
      /* We know the list is sorted so we use that fact in the search.
	 Insertion of new versions is done with correct sorting */
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
	 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
	 // Version is higher than the current version - insert here

	 // Version strings are equal - is the hash also equal?
	 if (Res == 0 && Ver->Hash == Hash)

	 // proceed with the next one till we have either the right one
	 // or we found another version (which will be lower)

   /* We already have a version for this item, record that we saw it */
   if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      if (List.UsePackage(Pkg,Ver) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "UsePackage", 2);

      if (NewFileVer(Ver,List) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewFileVer", 1);

      // Read only a single record and return

   map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 1);

   if (oldMap != Map.Data())
      LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Ver->ParentPkg = Pkg.Index();

   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileVer", 2);

   pkgCache::GrpIterator Grp = Pkg.Group();
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   /* If it is the first version of this package we need to add implicit
      Multi-Arch dependencies to all other package versions in the group now -
      otherwise we just add them for this new version */
   if (Pkg.VersionList()->NextVer == 0)
      pkgCache::PkgIterator P = Grp.PackageList();
      Dynamic<pkgCache::PkgIterator> DynP(P);
      for (; P.end() != true; P = Grp.NextPkg(P))
	 if (P->ID == Pkg->ID)

	 pkgCache::VerIterator V = P.VersionList();
	 Dynamic<pkgCache::VerIterator> DynV(V);
	 for (; V.end() != true; ++V)
	    if (unlikely(AddImplicitDepends(V, Pkg) == false))
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    Pkg.Name(), "AddImplicitDepends", 1);

   /* :none packages are packages without an architecture. They are forbidden by
      debian-policy, so usually they will only be in (old) dpkg status files -
      and dpkg will complain about them - and they are pretty rare. We therefore
      usually do not create conflicts while the parent is created, but only if a :none
      package (= the target) appears. This creates incorrect dependencies on :none
      for architecture-specific dependencies on the package we copy from, but we
      will ignore this bug as architecture-specific dependencies are only allowed
      in jessie and until then the :none packages should be extinct (hopefully).
      In other words: This should work long enough to allow graceful removal of
      these packages; it is not supposed to allow users to keep using them … */
   if (strcmp(Pkg.Arch(), "none") == 0)
      pkgCache::PkgIterator M = Grp.FindPreferredPkg();
      if (M.end() == false && Pkg != M)
	 pkgCache::DepIterator D = M.RevDependsList();
	 Dynamic<pkgCache::DepIterator> DynD(D);
	 for (; D.end() == false; ++D)
	    if ((D->Type != pkgCache::Dep::Conflicts &&
		 D->Type != pkgCache::Dep::DpkgBreaks &&
		 D->Type != pkgCache::Dep::Replaces) ||
		D.ParentPkg().Group() == Grp)

	    map_ptrloc *OldDepLast = NULL;
	    pkgCache::VerIterator ConVersion = D.ParentVer();
	    Dynamic<pkgCache::VerIterator> DynV(ConVersion);
	    // duplicate the Conflicts/Breaks/Replaces for :none arch
	       NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast);
	       NewDepends(Pkg, ConVersion, D.TargetVer(),
			  D->CompareOp, D->Type, OldDepLast);

   if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "AddImplicitDepends", 2);

   // Read only a single record and return

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true)

   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   for (pkgCache::PkgIterator P = Grp.PackageList();
	P.end() == false; P = Grp.NextPkg(P))
      for (pkgCache::VerIterator V = P.VersionList();
	   V.end() == false; ++V)
	 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
	 Ver->DescriptionList = V->DescriptionList;

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);

   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0);
   if (unlikely(descindex == 0 && _error->PendingError()))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewDescription", 2);

   Desc->ParentPkg = Pkg.Index();
   Ver->DescriptionList = descindex;

   if (NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileDesc", 2);
// CacheGenerator::MergeFileProvides - Merge file provides		/*{{{*/
// ---------------------------------------------------------------------
/* If we found any file depends while parsing the main list we need to
   resolve them. Since it is undesired to load the entire list of files
   into the cache as virtual packages we do a two stage effort. MergeList
   identifies the file depends and this creates Provides for them by
   re-parsing all the indexes. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
   unsigned int Counter = 0;
   while (List.Step() == true)
      string PackageName = List.Package();
      if (PackageName.empty() == true)
      string Version = List.Version();
      if (Version.empty() == true)

      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
      if (Pkg.end() == true)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      PackageName.c_str(), "FindPkg", 1);
      if (Counter % 100 == 0 && Progress != 0)
	 Progress->Progress(List.Offset());

      unsigned long Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      Dynamic<pkgCache::VerIterator> DynVer(Ver);
      for (; Ver.end() == false; ++Ver)
	 if (Ver->Hash == Hash && Version.c_str() == Ver.VerStr())
	    if (List.CollectFileProvides(Cache,Ver) == false)
	       return _error->Error(_("Error occurred while processing %s (%s%d)"),
				    PackageName.c_str(), "CollectFileProvides", 1);

      if (Ver.end() == true)
	 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)

   map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   map_ptrloc const idxName = WriteStringInMap(Name);
   if (unlikely(idxName == 0))

   // Insert it into the hash table
   unsigned long const Hash = Cache.Hash(Name);
   Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
   Cache.HeaderP->GrpHashTable[Hash] = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
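   // The two hash-table statements above prepend the new group to its bucket:
   // Grp->Next takes over the previous bucket head and the bucket then stores
   // the new Group offset, so lookups walk the chain newest-first.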
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
					const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)

   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   else // Group the Packages together
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // all is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))

   Pkg->ID = Cache.HeaderP->PackageCount++;
// CacheGenerator::AddImplicitDepends					/*{{{*/
bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G,
					   pkgCache::PkgIterator &P,
					   pkgCache::VerIterator &V)
   // copy P.Arch() into a string here as a cache remap
   // in NewDepends() later may alter the pointer location
   string Arch = P.Arch() == NULL ? "" : P.Arch();
   map_ptrloc *OldDepLast = NULL;
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   pkgCache::PkgIterator D = G.PackageList();
   Dynamic<pkgCache::PkgIterator> DynD(D);
   for (; D.end() != true; D = G.NextPkg(D))
      if (Arch == D.Arch() || D->VersionList == 0)

      /* We allow only one installed arch at a time
	 per group, therefore each group member conflicts
	 with all other group members */
      if (coInstall == true)
      {
	 // Replaces: ${self}:other ( << ${binary:Version})
	 NewDepends(D, V, V.VerStr(),
		    pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		    OldDepLast);
	 // Breaks: ${self}:other (!= ${binary:Version})
	 NewDepends(D, V, V.VerStr(),
		    pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		    OldDepLast);
      } else {
	 // Conflicts: ${self}:other
	 NewDepends(D, V, "",
		    pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		    OldDepLast);
      }

bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V,
					   pkgCache::PkgIterator &D)
   /* MultiArch handling introduces a lot of implicit Dependencies:
      - MultiArch: same → Co-Installable if they have the same version
      - All others conflict with all other group members */
   map_ptrloc *OldDepLast = NULL;
   bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
   if (coInstall == true)
   {
      // Replaces: ${self}:other ( << ${binary:Version})
      NewDepends(D, V, V.VerStr(),
		 pkgCache::Dep::Less, pkgCache::Dep::Replaces,
		 OldDepLast);
      // Breaks: ${self}:other (!= ${binary:Version})
      NewDepends(D, V, V.VerStr(),
		 pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
		 OldDepLast);
   } else {
      // Conflicts: ${self}:other
      NewDepends(D, V, "",
		 pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
		 OldDepLast);
   }
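// Worked example (hypothetical package name, for illustration only): for a
// "Multi-Arch: same" libfoo at version 1.0 the calls above give every
// other-architecture sibling
//    Replaces: libfoo:<otherarch> (<< 1.0)
//    Breaks:   libfoo:<otherarch> (!= 1.0)
// so only identical binary versions can be co-installed, while a package
// without "Multi-Arch: same" simply gets an unversioned Conflicts on its
// other-architecture siblings.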
// CacheGenerator::NewFileVer - Create a new File<->Version association	/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
				   ListParser &List)
   if (CurrentFile == 0)

   map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));

   pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
   VF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Ver->FileList;
   for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)

   VF->NextFile = *Last;

   VF->Offset = List.Offset();
   VF->Size = List.Size();
   if (Cache.HeaderP->MaxVerFileSize < VF->Size)
      Cache.HeaderP->MaxVerFileSize = VF->Size;
   Cache.HeaderP->VerFileCount++;
// CacheGenerator::NewVersion - Create a new Version 			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
					    const string &VerStr,
					    unsigned long Next)
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));

   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it
   Ver->ID = Cache.HeaderP->VersionCount++;
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))

   Ver->VerStr = idxVerStr;
// CacheGenerator::NewFileDesc - Create a new File<->Desc association	/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
				    ListParser &List)
   if (CurrentFile == 0)

   map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));

   pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
   DF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Desc->FileList;
   for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)

   DF->NextFile = *Last;

   DF->Offset = List.Offset();
   DF->Size = List.Size();
   if (Cache.HeaderP->MaxDescFileSize < DF->Size)
      Cache.HeaderP->MaxDescFileSize = DF->Size;
   Cache.HeaderP->DescFileCount++;
// CacheGenerator::NewDescription - Create a new Description		/*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
					     const string &Lang,
					     const MD5SumValue &md5sum,
					     map_ptrloc idxmd5str)
   map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)

   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
   if (unlikely(idxlanguage_code == 0))

   Desc->language_code = idxlanguage_code;

      Desc->md5sum = idxmd5str;

      map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
      if (unlikely(idxmd5sum == 0))

      Desc->md5sum = idxmd5sum;
// CacheGenerator::NewDepends - Create a dependency element		/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   string const &Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
   void const * const oldMap = Map.Data();

   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))

   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
      /* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
	 if (I->Version != 0 && I.TargetVer() == Version)
	    Dep->Version = I->Version;*/
      if (Dep->Version == 0) {
	 map_ptrloc const index = WriteStringInMap(Version);
	 if (unlikely(index == 0))
	 Dep->Version = index;

   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
	 OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;
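   // OldDepLast acts as an append cursor: on the first call it is walked to
   // the tail of the version's DependsList, afterwards every new dependency is
   // linked in O(1) and the cursor is left pointing at the new tail. The "+="
   // adjustment above keeps the cursor valid if the allocation moved the mmap.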
// ListParser::NewDepends - Create the environment for a new dependency	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and also handles the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
					       const string &PackageName,
					       const string &Arch,
					       const string &Version,
					       unsigned int Op,
					       unsigned int Type)
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   // we don't create 'none' packages and their dependencies if we can avoid it …
   if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0)

   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially */
   if (OldDepVer != Ver) {

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
						const string &PkgName,
						const string &PkgArch,
						const string &Version)
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))

   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
   Cache.HeaderP->ProvidesCount++;

   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false) {
      map_ptrloc const idxProvideVersion = WriteString(Version);
      Prv->ProvideVersion = idxProvideVersion;
      if (unlikely(idxProvideVersion == 0))

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
				   const pkgIndexFile &Index,
				   unsigned long Flags)
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))
   CurrentFile = Cache.PkgFileP + idxFile;

   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))
   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))
   CurrentFile->IndexType = idxIndexType;

   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

      Progress->SubProgress(Index.Size());
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
	I = Cache.StringItemP + I->NextItem)
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);

   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))

   if (oldMap != Map.Data()) {
      Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   return ItemP->String;
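   // A minimal sketch of the contract stated in the header comment
   // (illustrative values only):
   //
   //    map_ptrloc const a = WriteUniqString("amd64", 5);
   //    map_ptrloc const b = WriteUniqString("amd64", 5);
   //    // a == b: the second call is answered from UniqHash / the sorted
   //    // StringItem list and no new string is written into the map.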
// CheckValidity - Check that a cache is up-to-date			/*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
   has matching attributes with the cache and the cache does not have
   any extra files. */
static bool CheckValidity(const string &CacheFile,
			  pkgSourceList &List,
			  FileIterator Start,
			  FileIterator End,
			  MMap **OutMap = 0)
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
	 std::clog << "CacheFile doesn't exist" << std::endl;

   if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
	 std::clog << "sources.list is newer than the cache" << std::endl;

   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
	 std::clog << "Errors are pending or Map is empty()" << std::endl;

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; ++Start)
	 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      if ((*Start)->HasPackages() == false)
	    std::clog << "Has NO packages" << std::endl;

      if ((*Start)->Exists() == false)
#if 0 // mvo: we no longer give a message here (Default Sources spec)
	 _error->WarningE("stat",_("Couldn't stat source package list %s"),
			  (*Start)->Describe().c_str());
#endif
	    std::clog << "file doesn't exist" << std::endl;

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
	    std::clog << "FindInCache returned end-Pointer" << std::endl;

      Visited[File->ID] = true;
	 std::clog << "with ID " << File->ID << " is valid" << std::endl;

   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
	    std::clog << "File with ID" << I << " wasn't visited" << std::endl;

   if (_error->PendingError() == true)
	 std::clog << "Validity failed because of pending errors:" << std::endl;
	 _error->DumpErrors();

      *OutMap = Map.UnGuard();
// ComputeSize - Compute the total size of a bunch of files		/*{{{*/
// ---------------------------------------------------------------------
/* Size is kind of an abstract notion that is only used for the progress
   meter */
static unsigned long ComputeSize(FileIterator Start,FileIterator End)
   unsigned long TotalSize = 0;
   for (; Start != End; ++Start)
      if ((*Start)->HasPackages() == false)
      TotalSize += (*Start)->Size();
// BuildCache - Merge the list of index files into the cache		/*{{{*/
// ---------------------------------------------------------------------
static bool BuildCache(pkgCacheGenerator &Gen,
		       OpProgress *Progress,
		       unsigned long &CurrentSize,unsigned long TotalSize,
		       FileIterator Start, FileIterator End)
   for (I = Start; I != End; ++I)
      if ((*I)->HasPackages() == false)
      if ((*I)->Exists() == false)
      if ((*I)->FindInCache(Gen.GetCache()).end() == false)
	 _error->Warning("Duplicate sources.list entry %s",
			 (*I)->Describe().c_str());

      unsigned long Size = (*I)->Size();
      if (Progress != NULL)
	 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
      CurrentSize += Size;

      if ((*I)->Merge(Gen,Progress) == false)

   if (Gen.HasFileDeps() == true)
      if (Progress != NULL)
      TotalSize = ComputeSize(Start, End);
      for (I = Start; I != End; ++I)
	 unsigned long Size = (*I)->Size();
	 if (Progress != NULL)
	    Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
	 CurrentSize += Size;
	 if ((*I)->MergeFileProvides(Gen,Progress) == false)
// CacheGenerator::CreateDynamicMMap - load an mmap with configuration options	/*{{{*/
DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
   unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
   unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
   unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
   Flags |= MMap::Moveable;
   if (_config->FindB("APT::Cache-Fallback", false) == true)
      Flags |= MMap::Fallback;
      return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
   return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
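// Example apt.conf snippet for the options read above (values illustrative;
// the in-code defaults are a 24 MB start, 1 MB grow steps and no limit):
//
//    APT::Cache-Start "50331648";
//    APT::Cache-Grow  "2097152";
//    APT::Cache-Limit "0";
//    APT::Cache-Fallback "false";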
// CacheGenerator::MakeStatusCache - Construct the status cache		/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmaped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
__deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
			MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   std::vector<pkgIndexFile *> Files;
   for (std::vector<metaIndex *>::const_iterator i = List.begin();
      std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end();
	 Files.push_back (*j);

   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   if (SrcCacheFile.empty() == false)
      Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache, … */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 if (AllowMem == false)
	    _error->MergeWithStack();
	 _error->RevertToStack();
      _error->MergeWithStack();
	 std::clog << "Open filebased MMap" << std::endl;

   if (Writeable == false || CacheFile.empty() == true)
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
	 std::clog << "Open memory Map (not filebased)" << std::endl;

   // Let's try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
	  || SCacheF.Read((unsigned char *)Map->Data() + alloc,
			  SCacheF.Size()) == false)

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)

	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)

      if (Writeable == true && SrcCacheFile.empty() == false)
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	 fchmod(SCacheF.Fd(),0644);

	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));

	 // Write out the proper header
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)

      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      delete Map.UnGuard();
      *OutMap = new MMap(*CacheF,0);

   *OutMap = Map.UnGuard();
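// A minimal caller sketch (hypothetical, error handling trimmed; it relies on
// pkgSourceList and OpProgress from the other apt-pkg headers):
//
//    pkgSourceList List;
//    List.ReadMainList();
//    OpProgress Prog;
//    MMap *OutMap = NULL;
//    if (pkgCacheGenerator::MakeStatusCache(List, &Prog, &OutMap, true) == false)
//       return false;
//    pkgCache Cache(OutMap);   // the mmap now holds the generated cache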
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
__deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
   std::vector<pkgIndexFile *> Files;
   unsigned long EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)

   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;

   TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

   // Build the status cache
   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));
   pkgCacheGenerator Gen(Map.Get(),Progress);
   if (_error->PendingError() == true)
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		  Files.begin()+EndOfSource,Files.end()) == false)

   if (_error->PendingError() == true)
   *OutMap = Map.UnGuard();
// IsDuplicateDescription						/*{{{*/
static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
				   MD5SumValue const &CurMd5, std::string const &CurLang)
   // Descriptions in the same linked list all have the same md5
   if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5)
   for (; Desc.end() == false; ++Desc)
      if (Desc.LanguageCode() == CurLang)

// CacheGenerator::FinishCache						/*{{{*/
bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
)