// -*- mode: cpp; mode: fold -*-
// $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
/* ######################################################################

   Package Cache Generator - Generator for the cache structure.

   This builds the cache structure from the abstract package list parser.

   ##################################################################### */
// Include Files /*{{{*/
#define APT_COMPATIBILITY 986

#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
#include <apt-pkg/macros.h>

#include <apt-pkg/tagfile.h>
typedef vector<pkgIndexFile *>::iterator FileIterator;
// CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that it is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
                    Map(*pMap), Cache(pMap,false), Progress(Prog),
                    FoundFileDeps(0)
{
   CurrentFile = 0;
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      return;

   if (Map.Size() == 0)
   {
      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
         return;

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      // Starting header
      *Cache.HeaderP = pkgCache::Header();
      Cache.HeaderP->VerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->Architecture = WriteStringInMap(_config->Find("APT::Architecture"));
   }
   else
   {
      // Map directly from the existing file
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
      {
         _error->Error(_("Cache has an incompatible versioning system"));
         return;
      }
   }

   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
// CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   avoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
{
   if (_error->PendingError() == true)
      return;
   if (Map.Sync() == false)
      return;

   Cache.HeaderP->Dirty = false;
   Map.Sync(0,sizeof(pkgCache::Header));
}
// CacheGenerator::WriteStringInMap /*{{{*/
unsigned long pkgCacheGenerator::WriteStringInMap(const char *String,
                                                  const unsigned long &Len) {
   return Map.WriteString(String, Len);
}

// CacheGenerator::WriteStringInMap /*{{{*/
unsigned long pkgCacheGenerator::WriteStringInMap(const char *String) {
   return Map.WriteString(String);
}

unsigned long pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
   return Map.Allocate(size);
}
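
// An illustrative sketch (not part of the upstream file): it shows how a
// caller can host a generator on a purely in-memory DynamicMMap, which is
// essentially what MakeOnlyStatusCache() does near the end of this file.
// The map size below is just an example value, not a recommended limit.
#if 0
static bool ExampleInMemoryGenerator(OpProgress *Progress)
{
   SPtr<DynamicMMap> Map = new DynamicMMap(0,24*1024*1024);
   pkgCacheGenerator Gen(Map.Get(),Progress);
   if (_error->PendingError() == true)
      return false;
   // At this point MergeList()/MergeFileProvides() can be used to fill the cache.
   return true;
}
#endif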
// CacheGenerator::MergeList - Merge the package list /*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine. */
bool pkgCacheGenerator::MergeList(ListParser &List,
                                  pkgCache::VerIterator *OutVer)
{
   List.Owner = this;

   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string const PackageName = List.Package();
      if (PackageName.empty() == true)
         return false;

      /* As we handle Arch: all packages as architecture-bound
         we add all information to every (simulated) arch package */
      std::vector<string> genArch;
      if (List.ArchitectureAll() == true) {
         genArch = APT::Configuration::getArchitectures();
         if (genArch.size() != 1)
            genArch.push_back("all");
      } else
         genArch.push_back(List.Architecture());

      for (std::vector<string>::const_iterator arch = genArch.begin();
           arch != genArch.end(); ++arch)
      {
      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      if (NewPackage(Pkg, PackageName, *arch) == false)
         return _error->Error(_("Error occurred while processing %s (NewPackage)"),PackageName.c_str());
      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
         Progress->Progress(List.Offset());

      /* Get a pointer to the version structure. We know the list is sorted
         so we use that fact in the search. Insertion of new versions is
         done with correct sorting */
      string Version = List.Version();
      if (Version.empty() == true)
      {
         // we first process the package, then the descriptions
         // (this has the bonus that we get a MMap error when we run out
         //  of MMap space)
         pkgCache::VerIterator Ver(Cache);
         if (List.UsePackage(Pkg, Ver) == false)
            return _error->Error(_("Error occurred while processing %s (UsePackage1)"),
                                 PackageName.c_str());

         // Find the right version to write the description
         MD5SumValue CurMd5 = List.Description_md5();
         Ver = Pkg.VersionList();
         map_ptrloc *LastVer = &Pkg->VersionList;

         for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
         {
            pkgCache::DescIterator Desc = Ver.DescriptionList();
            map_ptrloc *LastDesc = &Ver->DescriptionList;
            bool duplicate=false;

            // don't add a new description if we have one for the given
            // md5 && language
            for ( ; Desc.end() == false; Desc++)
               if (MD5SumValue(Desc.md5()) == CurMd5 &&
                   Desc.LanguageCode() == List.DescriptionLanguage())
                  duplicate=true;
            if (duplicate == true)
               continue;

            for (Desc = Ver.DescriptionList();
                 Desc.end() == false;
                 LastDesc = &Desc->NextDesc, Desc++)
            {
               if (MD5SumValue(Desc.md5()) == CurMd5)
               {
                  // Add new description
                  *LastDesc = NewDescription(Desc, List.DescriptionLanguage(), CurMd5, *LastDesc);
                  Desc->ParentPkg = Pkg.Index();

                  if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
                     return _error->Error(_("Error occurred while processing %s (NewFileDesc1)"),PackageName.c_str());
                  break;
               }
            }
         }

         continue;
      }

      pkgCache::VerIterator Ver = Pkg.VersionList();
      map_ptrloc *LastVer = &Pkg->VersionList;
      int Res = 1;
      unsigned long const Hash = List.VersionHash();
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
         Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
         // Version is higher than the current version - insert here
         if (Res > 0)
            break;
         // Version strings are equal - is the hash also equal?
         if (Res == 0 && Ver->Hash == Hash)
            break;
         // proceed with the next till we have either the right
         // or we found another version (which will be lower)
      }

      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
         if (List.UsePackage(Pkg,Ver) == false)
            return _error->Error(_("Error occurred while processing %s (UsePackage2)"),
                                 PackageName.c_str());

         if (NewFileVer(Ver,List) == false)
            return _error->Error(_("Error occurred while processing %s (NewFileVer1)"),
                                 PackageName.c_str());

         // Read only a single record and return
         if (OutVer != 0)
         {
            *OutVer = Ver;
            FoundFileDeps |= List.HasFileDeps();
            return true;
         }

         continue;
      }

      // Add a new version
      *LastVer = NewVersion(Ver,Version,*LastVer);
      Ver->ParentPkg = Pkg.Index();
      Ver->Hash = Hash;

      if ((*LastVer == 0 && _error->PendingError()) || List.NewVersion(Ver) == false)
         return _error->Error(_("Error occurred while processing %s (NewVersion1)"),
                              PackageName.c_str());

      if (List.UsePackage(Pkg,Ver) == false)
         return _error->Error(_("Error occurred while processing %s (UsePackage3)"),
                              PackageName.c_str());

      if (NewFileVer(Ver,List) == false)
         return _error->Error(_("Error occurred while processing %s (NewVersion2)"),
                              PackageName.c_str());

      // Read only a single record and return
      if (OutVer != 0)
      {
         *OutVer = Ver;
         FoundFileDeps |= List.HasFileDeps();
         return true;
      }

      /* Record the Description data. Description data always exist in
         Packages and Translation-* files. */
      pkgCache::DescIterator Desc = Ver.DescriptionList();
      map_ptrloc *LastDesc = &Ver->DescriptionList;

      // Skip to the end of description set
      for (; Desc.end() == false; LastDesc = &Desc->NextDesc, Desc++);

      // Add new description
      *LastDesc = NewDescription(Desc, List.DescriptionLanguage(), List.Description_md5(), *LastDesc);
      Desc->ParentPkg = Pkg.Index();

      if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
         return _error->Error(_("Error occurred while processing %s (NewFileDesc2)"),PackageName.c_str());
      } // end of the per-architecture loop
   }

   FoundFileDeps |= List.HasFileDeps();

   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
                             "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
                             "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
                             "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
                             "this APT is capable of."));

   return true;
}
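
// An illustrative sketch (not part of the upstream file): the OutVer parameter
// of MergeList() switches it into single-record mode, where the first merged
// version is handed back to the caller.  The parser type used below
// (debListParser) and its FileFd constructor are assumptions about the
// deb-specific index code, not something defined in this file.
#if 0
static bool ExampleSingleRecord(pkgCacheGenerator &Gen, FileFd &PkgFile)
{
   debListParser Parser(&PkgFile);
   pkgCache::VerIterator Ver(Gen.GetCache());
   if (Gen.MergeList(Parser, &Ver) == false)
      return false;
   return Ver.end() == false;
}
#endif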
// CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
// ---------------------------------------------------------------------
/* If we found any file depends while parsing the main list we need to
   resolve them. Since it is undesired to load the entire list of files
   into the cache as virtual packages we do a two stage effort. MergeList
   identifies the file depends and this creates Provides for them by
   re-parsing all the indexes. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
{
   List.Owner = this;

   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string PackageName = List.Package();
      if (PackageName.empty() == true)
         return false;
      string Version = List.Version();
      if (Version.empty() == true)
         continue;

      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      if (Pkg.end() == true)
         return _error->Error(_("Error occurred while processing %s (FindPkg)"),
                              PackageName.c_str());
      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
         Progress->Progress(List.Offset());

      unsigned long Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      for (; Ver.end() == false; Ver++)
      {
         if (Ver->Hash == Hash && Version == Ver.VerStr())
         {
            if (List.CollectFileProvides(Cache,Ver) == false)
               return _error->Error(_("Error occurred while processing %s (CollectFileProvides)"),PackageName.c_str());
            break;
         }
      }

      if (Ver.end() == true)
         _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
   }

   return true;
}
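
// An illustrative sketch (not part of the upstream file): it spells out the
// two stage effort described above - a first pass merges the lists and only
// notes that file dependencies were seen, a second pass over the same indexes
// then creates the matching Provides.  BuildCache() further down does exactly
// this through pkgIndexFile::Merge()/MergeFileProvides().
#if 0
static bool ExampleTwoStageMerge(pkgCacheGenerator &Gen, OpProgress *Progress,
                                 FileIterator Start, FileIterator End)
{
   for (FileIterator I = Start; I != End; I++)
      if ((*I)->Merge(Gen,Progress) == false)            // stage 1: packages
         return false;
   if (Gen.HasFileDeps() == true)
      for (FileIterator I = Start; I != End; I++)
         if ((*I)->MergeFileProvides(Gen,Progress) == false)  // stage 2: file provides
            return false;
   return true;
}
#endif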
// CacheGenerator::NewGroup - Add a new group /*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
{
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)
      return true;

   // Get a structure
   unsigned long const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))
      return false;

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   Grp->Name = WriteStringInMap(Name);
   if (unlikely(Grp->Name == 0))
      return false;

   // Insert it into the hash table
   unsigned long const Hash = Cache.Hash(Name);
   Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
   Cache.HeaderP->GrpHashTable[Hash] = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
   return true;
}
// CacheGenerator::NewPackage - Add a new package /*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
                                   const string &Arch) {
   pkgCache::GrpIterator Grp;
   if (unlikely(NewGroup(Grp, Name) == false))
      return false;

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)
      return true;

   // Get a structure
   unsigned long const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      return false;
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   Pkg->Arch = WriteUniqString(Arch.c_str());
   if (unlikely(Pkg->Arch == 0))
      return false;
   Pkg->ID = Cache.HeaderP->PackageCount++;

   return true;
}
// CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
                                   ListParser &List)
{
   if (CurrentFile == 0)
      return true;

   // Get a structure
   unsigned long VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
   if (VerFile == 0)
      return false;

   pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
   VF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Ver->FileList;
   for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; V++)
      Last = &V->NextFile;
   VF->NextFile = *Last;
   *Last = VF.Index();

   VF->Offset = List.Offset();
   VF->Size = List.Size();
   if (Cache.HeaderP->MaxVerFileSize < VF->Size)
      Cache.HeaderP->MaxVerFileSize = VF->Size;
   Cache.HeaderP->VerFileCount++;

   return true;
}
// CacheGenerator::NewVersion - Create a new Version /*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
                                            const string &VerStr,
                                            unsigned long Next)
{
   // Get a structure
   unsigned long Version = AllocateInMap(sizeof(pkgCache::Version));
   if (Version == 0)
      return 0;

   // Fill it in
   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   Ver->NextVer = Next;
   Ver->ID = Cache.HeaderP->VersionCount++;
   Ver->VerStr = WriteStringInMap(VerStr);
   if (Ver->VerStr == 0)
      return 0;

   return Version;
}
// CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
                                    ListParser &List)
{
   if (CurrentFile == 0)
      return true;

   // Get a structure
   unsigned long DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
   if (DescFile == 0)
      return false;

   pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
   DF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Desc->FileList;
   for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; D++)
      Last = &D->NextFile;

   DF->NextFile = *Last;
   *Last = DF.Index();

   DF->Offset = List.Offset();
   DF->Size = List.Size();
   if (Cache.HeaderP->MaxDescFileSize < DF->Size)
      Cache.HeaderP->MaxDescFileSize = DF->Size;
   Cache.HeaderP->DescFileCount++;

   return true;
}
// CacheGenerator::NewDescription - Create a new Description /*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
                                             const string &Lang,
                                             const MD5SumValue &md5sum,
                                             map_ptrloc Next)
{
   // Get a structure
   map_ptrloc Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)
      return 0;

   // Fill it in
   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->NextDesc = Next;
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   Desc->language_code = WriteStringInMap(Lang);
   Desc->md5sum = WriteStringInMap(md5sum.Value());
   if (Desc->language_code == 0 || Desc->md5sum == 0)
      return 0;

   return Description;
}
// CacheGenerator::FinishCache - do various finish operations /*{{{*/
// ---------------------------------------------------------------------
/* This prepares the Cache for delivery */
bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
{
   // FIXME: add progress reporting for this operation
   // Do we have different architectures in the groups?
   vector<string> archs = APT::Configuration::getArchitectures();
   if (archs.size() > 1)
   {
      // Create Conflicts in between the group
      for (pkgCache::GrpIterator G = GetCache().GrpBegin(); G.end() != true; G++)
      {
         string const PkgName = G.Name();
         for (pkgCache::PkgIterator P = G.PackageList(); P.end() != true; P = G.NextPkg(P))
         {
            if (strcmp(P.Arch(),"all") == 0)
               continue;
            pkgCache::PkgIterator allPkg;
            for (pkgCache::VerIterator V = P.VersionList(); V.end() != true; V++)
            {
               string const Arch = V.Arch(true);
               map_ptrloc *OldDepLast = NULL;
               /* MultiArch handling introduces a lot of implicit Dependencies:
                  - MultiArch: same → Co-Installable if they have the same version
                  - Architecture: all → Need to be Co-Installable for internal reasons
                  - All others conflict with all other group members */
               bool const coInstall = (V->MultiArch == pkgCache::Version::All ||
                                       V->MultiArch == pkgCache::Version::Same);
               if (V->MultiArch == pkgCache::Version::All && allPkg.end() == true)
                  allPkg = G.FindPkg("all");
               for (vector<string>::const_iterator A = archs.begin(); A != archs.end(); ++A)
               {
                  if (*A == Arch)
                     continue;
                  /* We allow only one installed arch at the time
                     per group, therefore each group member conflicts
                     with all other group members */
                  pkgCache::PkgIterator D = G.FindPkg(*A);
                  if (D.end() == true)
                     continue;
                  if (coInstall == true)
                  {
                     // Replaces: ${self}:other ( << ${binary:Version})
                     NewDepends(D, V, V.VerStr(),
                                pkgCache::Dep::Less, pkgCache::Dep::Replaces,
                                OldDepLast);
                     // Breaks: ${self}:other (!= ${binary:Version})
                     NewDepends(D, V, V.VerStr(),
                                pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
                                OldDepLast);
                     if (V->MultiArch == pkgCache::Version::All)
                     {
                        // Depend on ${self}:all which does depend on nothing
                        NewDepends(allPkg, V, V.VerStr(),
                                   pkgCache::Dep::Equals, pkgCache::Dep::Depends,
                                   OldDepLast);
                     }
                  } else {
                     // Conflicts: ${self}:other
                     NewDepends(D, V, "",
                                pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
                                OldDepLast);
                  }
               }
            }
         }
      }
   }
   return true;
}
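
// An illustrative sketch (not part of the upstream file): for a hypothetical
// "Multi-Arch: same" package libfoo at version 1.0 present on two
// architectures, the loop above effectively emits, per foreign group member D:
#if 0
   // co-installable case (MultiArch same/all)
   NewDepends(D, V, "1.0", pkgCache::Dep::Less,      pkgCache::Dep::Replaces,   OldDepLast);
   NewDepends(D, V, "1.0", pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks, OldDepLast);
   // a package without a co-installation promise instead conflicts with its siblings
   NewDepends(D, V, "",    pkgCache::Dep::NoOp,      pkgCache::Dep::Conflicts,  OldDepLast);
#endif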
// CacheGenerator::NewDepends - Create a dependency element /*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
                                   pkgCache::VerIterator &Ver,
                                   string const &Version,
                                   unsigned int const &Op,
                                   unsigned int const &Type,
                                   map_ptrloc *OldDepLast)
{
   // Get a structure
   unsigned long const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      return false;

   // Fill it in
   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dep->ParentVer = Ver.Index();
   Dep->Type = Type;
   Dep->CompareOp = Op;
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
   {
/*    for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
         if (I->Version != 0 && I.TargetVer() == Version)
            Dep->Version = I->Version;*/
      if (Dep->Version == 0)
         if (unlikely((Dep->Version = WriteStringInMap(Version)) == 0))
            return false;
   }

   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; D++)
         OldDepLast = &D->NextDepends;
   }

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;

   return true;
}
// ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and also handles the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
                                               const string &PackageName,
                                               const string &Arch,
                                               const string &Version,
                                               unsigned int Op,
                                               unsigned int Type)
{
   pkgCache::GrpIterator Grp;
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
         return false;
   }

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially */
   if (OldDepVer != Ver) {
      OldDepLast = NULL;
      OldDepVer = Ver;
   }

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
}
// ListParser::NewProvides - Create a Provides element /*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
                                                const string &PkgName,
                                                const string &PkgArch,
                                                const string &Version)
{
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && PkgArch == Ver.Arch(true))
      return true;

   // Get a structure
   unsigned long const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
      return false;
   Cache.HeaderP->ProvidesCount++;

   // Fill it in
   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
      return false;

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();

   return true;
}
// CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
                                   const pkgIndexFile &Index,
                                   unsigned long Flags)
{
   // Get some space for the structure
   CurrentFile = Cache.PkgFileP + AllocateInMap(sizeof(*CurrentFile));
   if (CurrentFile == Cache.PkgFileP)
      return false;

   // Fill it in
   CurrentFile->FileName = WriteStringInMap(File);
   CurrentFile->Site = WriteUniqString(Site);
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   CurrentFile->IndexType = WriteUniqString(Index.GetType()->Label);
   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

   if (CurrentFile->FileName == 0)
      return false;

   if (Progress != 0)
      Progress->SubProgress(Index.Size());
   return true;
}
// CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
                                                 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
        I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
         break;
   }

   // Match
   if (Res == 0)
   {
      Bucket = I;
      return I->String;
   }

   // Get a structure
   unsigned long Item = AllocateInMap(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   *Last = Item;
   ItemP->String = WriteStringInMap(S,Size);
   if (ItemP->String == 0)
      return 0;

   Bucket = ItemP;
   return ItemP->String;
}
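
// An illustrative sketch (not part of the upstream file): the transient cache
// above picks its bucket from the first two characters only; the real table
// size comes from the UniqHash declaration in pkgcachegen.h, so a plain
// parameter stands in for _count(UniqHash) here.
#if 0
static unsigned int ExampleBucketIndex(const char *S, unsigned int TableSize)
{
   // strings sharing their first two characters land in the same bucket
   return (S[0]*5 + S[1]) % TableSize;
}
#endif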
// CheckValidity - Check that a cache is up-to-date /*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
   has matching attributes with the cache and the cache does not have
   any extra files. */
static bool CheckValidity(const string &CacheFile, FileIterator Start,
                          FileIterator End,MMap **OutMap = 0)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
   {
      if (Debug == true)
         std::clog << "CacheFile doesn't exist" << std::endl;
      return false;
   }

   // Map it
   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
   {
      if (Debug == true)
         std::clog << "Errors are pending or Map is empty()" << std::endl;
      _error->Discard();
      return false;
   }

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; Start++)
   {
      if (Debug == true)
         std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      if ((*Start)->HasPackages() == false)
      {
         if (Debug == true)
            std::clog << "Has NO packages" << std::endl;
         continue;
      }

      if ((*Start)->Exists() == false)
      {
#if 0 // mvo: we no longer give a message here (Default Sources spec)
         _error->WarningE("stat",_("Couldn't stat source package list %s"),
                          (*Start)->Describe().c_str());
#endif
         if (Debug == true)
            std::clog << "file doesn't exist" << std::endl;
         continue;
      }

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
      {
         if (Debug == true)
            std::clog << "FindInCache returned end-Pointer" << std::endl;
         return false;
      }

      Visited[File->ID] = true;
      if (Debug == true)
         std::clog << "with ID " << File->ID << " is valid" << std::endl;
   }

   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
      {
         if (Debug == true)
            std::clog << "File with ID " << I << " wasn't visited" << std::endl;
         return false;
      }

   if (_error->PendingError() == true)
   {
      if (Debug == true)
      {
         std::clog << "Validity failed because of pending errors:" << std::endl;
         _error->DumpErrors();
      }
      _error->Discard();
      return false;
   }

   if (OutMap != 0)
      *OutMap = Map.UnGuard();
   return true;
}
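
// An illustrative sketch (not part of the upstream file): a caller that only
// wants to know whether the on-disk cache can be reused calls CheckValidity()
// and, on success, receives the ready-to-use MMap via OutMap, just as
// MakeStatusCache() does below.
#if 0
static bool ExampleReuseCache(string const &CacheFile,
                              FileIterator Start, FileIterator End, MMap **OutMap)
{
   if (CheckValidity(CacheFile, Start, End, OutMap) == false)
      return false;   // cache missing or stale - needs a rebuild
   return true;       // *OutMap now holds the mapped pkgcache.bin
}
#endif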
// ComputeSize - Compute the total size of a bunch of files /*{{{*/
// ---------------------------------------------------------------------
/* Size is kind of an abstract notion that is only used for the progress
   meter */
static unsigned long ComputeSize(FileIterator Start,FileIterator End)
{
   unsigned long TotalSize = 0;
   for (; Start != End; Start++)
   {
      if ((*Start)->HasPackages() == false)
         continue;
      TotalSize += (*Start)->Size();
   }
   return TotalSize;
}
// BuildCache - Merge the list of index files into the cache /*{{{*/
// ---------------------------------------------------------------------
static bool BuildCache(pkgCacheGenerator &Gen,
                       OpProgress *Progress,
                       unsigned long &CurrentSize,unsigned long TotalSize,
                       FileIterator Start, FileIterator End)
{
   FileIterator I;
   for (I = Start; I != End; I++)
   {
      if ((*I)->HasPackages() == false)
         continue;

      if ((*I)->Exists() == false)
         continue;

      if ((*I)->FindInCache(Gen.GetCache()).end() == false)
      {
         _error->Warning("Duplicate sources.list entry %s",
                         (*I)->Describe().c_str());
         continue;
      }

      unsigned long Size = (*I)->Size();
      if (Progress != NULL)
         Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
      CurrentSize += Size;

      if ((*I)->Merge(Gen,Progress) == false)
         return false;
   }

   if (Gen.HasFileDeps() == true)
   {
      if (Progress != NULL)
         Progress->Done();
      TotalSize = ComputeSize(Start, End);
      CurrentSize = 0;
      for (I = Start; I != End; I++)
      {
         unsigned long Size = (*I)->Size();
         if (Progress != NULL)
            Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
         CurrentSize += Size;
         if ((*I)->MergeFileProvides(Gen,Progress) == false)
            return false;
      }
   }

   return true;
}
// CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmaped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
__deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
                        MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
                        MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   unsigned long const MapSize = _config->FindI("APT::Cache-Limit",24*1024*1024);

   vector<pkgIndexFile *> Files;
   for (vector<metaIndex *>::const_iterator i = List.begin();
        i != List.end();
        i++)
   {
      vector<pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
           j != Indexes->end();
           j++)
         Files.push_back (*j);
   }

   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
         dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
         CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
         CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
         Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile,Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
         Progress->OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
         std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteEmpty);
      fchmod(CacheF->Fd(),0644);
      Map = new DynamicMMap(*CacheF,MMap::Public,MapSize);
      if (_error->PendingError() == true)
         return false;
      if (Debug == true)
         std::clog << "Open filebased MMap" << std::endl;
   }
   else
   {
      // Just build it in memory..
      Map = new DynamicMMap(0,MapSize);
      if (Debug == true)
         std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Lets try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile,Files.begin(),
                     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
         std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
          || SCacheF.Read((unsigned char *)Map->Data() + alloc,
                          SCacheF.Size()) == false)
         return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
         return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin()+EndOfSource,Files.end()) == false)
         return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   else
   {
      if (Debug == true)
         std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
         return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin(),Files.begin()+EndOfSource) == false)
         return false;

      // Write it back
      if (Writeable == true && SrcCacheFile.empty() == false)
      {
         FileFd SCacheF(SrcCacheFile,FileFd::WriteEmpty);
         if (_error->PendingError() == true)
            return false;

         fchmod(SCacheF.Fd(),0644);

         // Write out the main data
         if (SCacheF.Write(Map->Data(),Map->Size()) == false)
            return _error->Error(_("IO Error saving source cache"));

         // Write out the proper header
         Gen.GetCache().HeaderP->Dirty = false;
         if (SCacheF.Seek(0) == false ||
             SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
            return _error->Error(_("IO Error saving source cache"));
         Gen.GetCache().HeaderP->Dirty = true;
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin()+EndOfSource,Files.end()) == false)
         return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
         delete Map.UnGuard();
         *OutMap = new MMap(*CacheF,0);
      }
      else
      {
         *OutMap = Map.UnGuard();
      }
   }

   return true;
}
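
// An illustrative sketch (not part of the upstream file): a typical caller
// builds its source list first and then asks for the status cache.  The use of
// pkgSourceList::ReadMainList() is an assumption about code outside this file;
// only the MakeStatusCache() signature itself is taken from here.
#if 0
static MMap *ExampleGetStatusCache(OpProgress &Progress)
{
   pkgSourceList List;
   if (List.ReadMainList() == false)
      return NULL;
   MMap *OutMap = NULL;
   if (pkgCacheGenerator::MakeStatusCache(List, &Progress, &OutMap, true) == false)
      return NULL;
   return OutMap;
}
#endif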
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
__deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
{
   unsigned long MapSize = _config->FindI("APT::Cache-Limit",20*1024*1024);
   vector<pkgIndexFile *> Files;
   unsigned long EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   SPtr<DynamicMMap> Map = new DynamicMMap(0,MapSize);
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;

   TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

   // Build the status cache
   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));
   pkgCacheGenerator Gen(Map.Get(),Progress);
   if (_error->PendingError() == true)
      return false;
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                  Files.begin()+EndOfSource,Files.end()) == false)
      return false;

   // FIXME: move me to a better place
   Gen.FinishCache(Progress);

   if (_error->PendingError() == true)
      return false;
   *OutMap = Map.UnGuard();

   return true;
}