// -*- mode: cpp; mode: fold -*-
// $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
/* ######################################################################

   Package Cache Generator - Generator for the cache structure.

   This builds the cache structure from the abstract package list parser.

   ##################################################################### */
// Include Files							/*{{{*/
#define APT_COMPATIBILITY 986

#include <apt-pkg/pkgcachegen.h>
#include <apt-pkg/error.h>
#include <apt-pkg/version.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
#include <apt-pkg/macros.h>

#include <apt-pkg/tagfile.h>

typedef vector<pkgIndexFile *>::iterator FileIterator;
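/* Note on the generator below: the whole binary cache lives inside a
   DynamicMMap. Every "pointer" stored in the cache (map_ptrloc) is an offset
   into that map, which is why new records are created with Map.Allocate()
   and Map.WriteString() and then chained together through indexes rather
   than through real addresses. */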
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that it is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
                    Map(*pMap), Cache(pMap,false), Progress(Prog),
                    FoundFileDeps(0)
{
   CurrentFile = 0;
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      return;

   if (Map.Size() == 0)
   {
      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
         return;

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      *Cache.HeaderP = pkgCache::Header();
      Cache.HeaderP->VerSysName = Map.WriteString(_system->VS->Label);
      Cache.HeaderP->Architecture = Map.WriteString(_config->Find("APT::Architecture"));
   }
   else
   {
      // Map directly from the existing file
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
      {
         _error->Error(_("Cache has an incompatible versioning system"));
         return;
      }
   }

   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
// CacheGenerator::~pkgCacheGenerator - Destructor			/*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   avoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
{
   if (_error->PendingError() == true)
      return;
   if (Map.Sync() == false)
      return;

   Cache.HeaderP->Dirty = false;
   Map.Sync(0,sizeof(pkgCache::Header));
}
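/* Note on MergeList() below: for every record returned by the list parser it
   - creates (or finds) one package per relevant architecture ("Arch: all"
     records are duplicated into every configured architecture),
   - walks the version list, which is kept sorted via CmpVersion, to either
     reuse an existing version (same string and hash) or insert a new one, and
   - attaches a description, skipping the insert if one with the same md5 and
     language is already present. */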
// CacheGenerator::MergeList - Merge the package list			/*{{{*/
// ---------------------------------------------------------------------
/* This provides the generation of the entries in the cache. Each loop
   goes through a single package record from the underlying parse engine. */
bool pkgCacheGenerator::MergeList(ListParser &List,
                                  pkgCache::VerIterator *OutVer)
{
   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string const PackageName = List.Package();
      if (PackageName.empty() == true)
         return false;

      /* As we handle Arch all packages as architecture bounded
         we add all information to every (simulated) arch package */
      std::vector<string> genArch;
      if (List.ArchitectureAll() == true) {
         genArch = APT::Configuration::getArchitectures();
         if (genArch.size() != 1)
            genArch.push_back("all");
      } else
         genArch.push_back(List.Architecture());

      for (std::vector<string>::const_iterator arch = genArch.begin();
           arch != genArch.end(); ++arch)
      {
      // Get a pointer to the package structure
      pkgCache::PkgIterator Pkg;
      if (NewPackage(Pkg, PackageName, *arch) == false)
         return _error->Error(_("Error occurred while processing %s (NewPackage)"),PackageName.c_str());
      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
         Progress->Progress(List.Offset());

      /* Get a pointer to the version structure. We know the list is sorted
         so we use that fact in the search. Insertion of new versions is
         done with correct sorting */
      string Version = List.Version();
      if (Version.empty() == true)
      {
         // we first process the package, then the descriptions
         // (this has the bonus that we get MMap errors when we run out
         //  of MMap space)
         if (List.UsePackage(Pkg,pkgCache::VerIterator(Cache)) == false)
            return _error->Error(_("Error occurred while processing %s (UsePackage1)"),
                                 PackageName.c_str());

         // Find the right version to write the description
         MD5SumValue CurMd5 = List.Description_md5();
         pkgCache::VerIterator Ver = Pkg.VersionList();
         map_ptrloc *LastVer = &Pkg->VersionList;

         for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
         {
            pkgCache::DescIterator Desc = Ver.DescriptionList();
            map_ptrloc *LastDesc = &Ver->DescriptionList;
            bool duplicate = false;

            // don't add a new description if we have one for the given
            // md5 && language
            for ( ; Desc.end() == false; Desc++)
               if (MD5SumValue(Desc.md5()) == CurMd5 &&
                   Desc.LanguageCode() == List.DescriptionLanguage())
                  duplicate = true;
            if (duplicate == true)
               continue;

            for (Desc = Ver.DescriptionList();
                 Desc.end() == false;
                 LastDesc = &Desc->NextDesc, Desc++)
            {
               if (MD5SumValue(Desc.md5()) == CurMd5)
               {
                  // Add new description
                  *LastDesc = NewDescription(Desc, List.DescriptionLanguage(), CurMd5, *LastDesc);
                  Desc->ParentPkg = Pkg.Index();

                  if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
                     return _error->Error(_("Error occurred while processing %s (NewFileDesc1)"),PackageName.c_str());
                  break;
               }
            }
         }

         continue;
      }

      pkgCache::VerIterator Ver = Pkg.VersionList();
      map_ptrloc *LastVer = &Pkg->VersionList;
      int Res = 1;
      unsigned long const Hash = List.VersionHash();
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
         Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
         // Version is higher than the current version - insert here
         if (Res > 0)
            break;
         // Version strings are equal - is the hash also equal?
         if (Res == 0 && Ver->Hash == Hash)
            break;
         // proceed with the next till we have either the right version
         // or we found another version (which will be lower)
      }

      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
         if (List.UsePackage(Pkg,Ver) == false)
            return _error->Error(_("Error occurred while processing %s (UsePackage2)"),
                                 PackageName.c_str());

         if (NewFileVer(Ver,List) == false)
            return _error->Error(_("Error occurred while processing %s (NewFileVer1)"),
                                 PackageName.c_str());

         // Read only a single record and return
         if (OutVer != 0)
         {
            *OutVer = Ver;
            FoundFileDeps |= List.HasFileDeps();
            return true;
         }

         continue;
      }

      // Add a new version
      *LastVer = NewVersion(Ver,Version,*LastVer);
      Ver->ParentPkg = Pkg.Index();
      Ver->Hash = Hash;

      if ((*LastVer == 0 && _error->PendingError()) || List.NewVersion(Ver) == false)
         return _error->Error(_("Error occurred while processing %s (NewVersion1)"),
                              PackageName.c_str());

      if (List.UsePackage(Pkg,Ver) == false)
         return _error->Error(_("Error occurred while processing %s (UsePackage3)"),
                              PackageName.c_str());

      if (NewFileVer(Ver,List) == false)
         return _error->Error(_("Error occurred while processing %s (NewVersion2)"),
                              PackageName.c_str());

      // Read only a single record and return
      if (OutVer != 0)
      {
         *OutVer = Ver;
         FoundFileDeps |= List.HasFileDeps();
         return true;
      }

      /* Record the Description data. Description data always exist in
         Packages and Translation-* files. */
      pkgCache::DescIterator Desc = Ver.DescriptionList();
      map_ptrloc *LastDesc = &Ver->DescriptionList;

      // Skip to the end of description set
      for (; Desc.end() == false; LastDesc = &Desc->NextDesc, Desc++);

      // Add new description
      *LastDesc = NewDescription(Desc, List.DescriptionLanguage(), List.Description_md5(), *LastDesc);
      Desc->ParentPkg = Pkg.Index();

      if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
         return _error->Error(_("Error occurred while processing %s (NewFileDesc2)"),PackageName.c_str());
      }
   }

   FoundFileDeps |= List.HasFileDeps();

   if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
      return _error->Error(_("Wow, you exceeded the number of package "
                             "names this APT is capable of."));
   if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of versions "
                             "this APT is capable of."));
   if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
      return _error->Error(_("Wow, you exceeded the number of descriptions "
                             "this APT is capable of."));
   if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
      return _error->Error(_("Wow, you exceeded the number of dependencies "
                             "this APT is capable of."));

   return true;
}
// CacheGenerator::MergeFileProvides - Merge file provides		/*{{{*/
// ---------------------------------------------------------------------
/* If we found any file depends while parsing the main list we need to
   resolve them. Since it is undesired to load the entire list of files
   into the cache as virtual packages we do a two stage effort. MergeList
   identifies the file depends and this creates Provides for them by
   re-parsing all the indexes. */
bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
{
   unsigned int Counter = 0;
   while (List.Step() == true)
   {
      string PackageName = List.Package();
      if (PackageName.empty() == true)
         continue;
      string Version = List.Version();
      if (Version.empty() == true)
         continue;

      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
      if (Pkg.end() == true)
         return _error->Error(_("Error occurred while processing %s (FindPkg)"),
                              PackageName.c_str());
      Counter++;
      if (Counter % 100 == 0 && Progress != 0)
         Progress->Progress(List.Offset());

      unsigned long Hash = List.VersionHash();
      pkgCache::VerIterator Ver = Pkg.VersionList();
      for (; Ver.end() == false; Ver++)
      {
         if (Ver->Hash == Hash && Version.c_str() == Ver.VerStr())
         {
            if (List.CollectFileProvides(Cache,Ver) == false)
               return _error->Error(_("Error occurred while processing %s (CollectFileProvides)"),PackageName.c_str());
            break;
         }
      }

      if (Ver.end() == true)
         _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
   }

   return true;
}
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
{
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)
      return true;

   unsigned long const Group = Map.Allocate(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))
      return false;

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   Grp->Name = Map.WriteString(Name);
   if (unlikely(Grp->Name == 0))
      return false;

   // Insert it into the hash table
   unsigned long const Hash = Cache.Hash(Name);
   Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
   Cache.HeaderP->GrpHashTable[Hash] = Group;

   Cache.HeaderP->GroupCount++;

   return true;
}
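/* Note on NewPackage() below: a Group collects all packages that share one
   name across architectures. The first package of a group is also entered
   into the package hash table; later ones are only chained behind the group's
   LastPackage, so name lookups start at the group and walk its members. */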
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
                                   const string &Arch) {
   pkgCache::GrpIterator Grp;
   if (unlikely(NewGroup(Grp, Name) == false))
      return false;

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)
      return true;

   unsigned long const Package = Map.Allocate(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      return false;
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   Pkg->Arch = WriteUniqString(Arch.c_str());
   if (unlikely(Pkg->Arch == 0))
      return false;
   Pkg->ID = Cache.HeaderP->PackageCount++;

   return true;
}
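/* Note on NewFileVer() below: a VerFile ties a version to the index file
   currently being parsed (CurrentFile) and records the stanza's offset and
   size, so the record can be re-read from the index later instead of storing
   its full text in the cache. */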
// CacheGenerator::NewFileVer - Create a new File<->Version association	/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
                                   ListParser &List)
{
   if (CurrentFile == 0)
      return true;

   unsigned long VerFile = Map.Allocate(sizeof(pkgCache::VerFile));
   if (VerFile == 0)
      return 0;

   pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
   VF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Ver->FileList;
   for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; V++)
      Last = &V->NextFile;
   VF->NextFile = *Last;
   *Last = VF.Index();

   VF->Offset = List.Offset();
   VF->Size = List.Size();
   if (Cache.HeaderP->MaxVerFileSize < VF->Size)
      Cache.HeaderP->MaxVerFileSize = VF->Size;
   Cache.HeaderP->VerFileCount++;

   return true;
}
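/* Note on NewVersion() below: it only allocates and fills the version record
   and returns its map offset; the caller (MergeList) links it into the
   package's sorted version list through the *LastVer pointer it maintains. */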
// CacheGenerator::NewVersion - Create a new Version			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
                                            const string &VerStr,
                                            unsigned long Next)
{
   unsigned long Version = Map.Allocate(sizeof(pkgCache::Version));
   if (Version == 0)
      return 0;

   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   Ver->NextVer = Next;
   Ver->ID = Cache.HeaderP->VersionCount++;
   Ver->VerStr = Map.WriteString(VerStr);
   if (Ver->VerStr == 0)
      return 0;

   return Version;
}
// CacheGenerator::NewFileDesc - Create a new File<->Desc association	/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
                                    ListParser &List)
{
   if (CurrentFile == 0)
      return true;

   unsigned long DescFile = Map.Allocate(sizeof(pkgCache::DescFile));
   if (DescFile == 0)
      return false;

   pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
   DF->File = CurrentFile - Cache.PkgFileP;

   // Link it to the end of the list
   map_ptrloc *Last = &Desc->FileList;
   for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; D++)
      Last = &D->NextFile;

   DF->NextFile = *Last;
   *Last = DF.Index();

   DF->Offset = List.Offset();
   DF->Size = List.Size();
   if (Cache.HeaderP->MaxDescFileSize < DF->Size)
      Cache.HeaderP->MaxDescFileSize = DF->Size;
   Cache.HeaderP->DescFileCount++;

   return true;
}
// CacheGenerator::NewDescription - Create a new Description		/*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
                                             const string &Lang,
                                             const MD5SumValue &md5sum,
                                             map_ptrloc Next)
{
   map_ptrloc Description = Map.Allocate(sizeof(pkgCache::Description));
   if (Description == 0)
      return 0;

   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->NextDesc = Next;
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   Desc->language_code = Map.WriteString(Lang);
   Desc->md5sum = Map.WriteString(md5sum.Value());
   if (Desc->language_code == 0 || Desc->md5sum == 0)
      return 0;

   return Description;
}
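/* Note on FinishCache() below: with more than one configured architecture it
   materialises the implicit MultiArch relations inside each group. Versions
   marked "same" or "all" get Replaces/Breaks against their siblings of other
   architectures, everything else gets a plain Conflicts, so at most one
   architecture of a package can be installed at a time. */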
// CacheGenerator::FinishCache - do various finish operations		/*{{{*/
// ---------------------------------------------------------------------
/* This prepares the Cache for delivery */
bool pkgCacheGenerator::FinishCache(OpProgress &Progress)
{
   // FIXME: add progress reporting for this operation
   // Do we have different architectures in our groups ?
   vector<string> archs = APT::Configuration::getArchitectures();
   if (archs.size() > 1)
   {
      // Create Conflicts in between the group
      for (pkgCache::GrpIterator G = GetCache().GrpBegin(); G.end() != true; G++)
      {
         string const PkgName = G.Name();
         for (pkgCache::PkgIterator P = G.PackageList(); P.end() != true; P = G.NextPkg(P))
         {
            if (strcmp(P.Arch(),"all") == 0)
               continue;

            pkgCache::PkgIterator allPkg;
            for (pkgCache::VerIterator V = P.VersionList(); V.end() != true; V++)
            {
               string const Arch = V.Arch(true);
               map_ptrloc *OldDepLast = NULL;
               /* MultiArch handling introduces a lot of implicit Dependencies:
                  - MultiArch: same → Co-Installable if they have the same version
                  - Architecture: all → Need to be Co-Installable for internal reasons
                  - All others conflict with all other group members */
               bool const coInstall = (V->MultiArch == pkgCache::Version::All ||
                                       V->MultiArch == pkgCache::Version::Same);
               if (V->MultiArch == pkgCache::Version::All && allPkg.end() == true)
                  allPkg = G.FindPkg("all");
               for (vector<string>::const_iterator A = archs.begin(); A != archs.end(); ++A)
               {
                  if (*A == Arch)
                     continue;
                  /* We allow only one installed arch at the time
                     per group, therefore each group member conflicts
                     with all other group members */
                  pkgCache::PkgIterator D = G.FindPkg(*A);
                  if (D.end() == true)
                     continue;
                  if (coInstall == true)
                  {
                     // Replaces: ${self}:other ( << ${binary:Version})
                     NewDepends(D, V, V.VerStr(),
                                pkgCache::Dep::Less, pkgCache::Dep::Replaces,
                                OldDepLast);
                     // Breaks: ${self}:other (!= ${binary:Version})
                     NewDepends(D, V, V.VerStr(),
                                pkgCache::Dep::Less, pkgCache::Dep::DpkgBreaks,
                                OldDepLast);
                     NewDepends(D, V, V.VerStr(),
                                pkgCache::Dep::Greater, pkgCache::Dep::DpkgBreaks,
                                OldDepLast);
                     if (V->MultiArch == pkgCache::Version::All)
                     {
                        // Depend on ${self}:all which does depend on nothing
                        NewDepends(allPkg, V, V.VerStr(),
                                   pkgCache::Dep::Equals, pkgCache::Dep::Depends,
                                   OldDepLast);
                     }
                  } else {
                     // Conflicts: ${self}:other
                     NewDepends(D, V, "",
                                pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
                                OldDepLast);
                  }
               }
            }
         }
      }
   }
   return true;
}
// CacheGenerator::NewDepends - Create a dependency element		/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
                                   pkgCache::VerIterator &Ver,
                                   string const &Version,
                                   unsigned int const &Op,
                                   unsigned int const &Type,
                                   map_ptrloc *OldDepLast)
{
   unsigned long const Dependency = Map.Allocate(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      return false;

   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dep->ParentVer = Ver.Index();
   Dep->Type = Type;
   Dep->CompareOp = Op;
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
   {
/*    for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
         if (I->Version != 0 && I.TargetVer() == Version)
            Dep->Version = I->Version;*/
      if (Dep->Version == 0)
         if (unlikely((Dep->Version = Map.WriteString(Version)) == 0))
            return false;
   }

   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; D++)
         OldDepLast = &D->NextDepends;
   }

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;

   return true;
}
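/* Note on the OldDepLast caching used below: pkgCacheGenerator::NewDepends()
   appends at *OldDepLast and then advances it to the new tail, so as long as
   the parser keeps feeding dependencies of the same version the dependency
   list never has to be re-walked from the start. */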
// ListParser::NewDepends - Create the environment for a new dependency	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and also handles the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator Ver,
                                               const string &PackageName,
                                               const string &Arch,
                                               const string &Version,
                                               unsigned int Op,
                                               unsigned int Type)
{
   pkgCache::GrpIterator Grp;
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
         return false;
   }

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially */
   if (OldDepVer != Ver) {
      OldDepLast = NULL;
      OldDepVer = Ver;
   }

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
}
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator Ver,
                                                const string &PkgName,
                                                const string &PkgArch,
                                                const string &Version)
{
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && PkgArch == Ver.Arch(true))
      return true;

   unsigned long const Provides = Owner->Map.Allocate(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
      return false;
   Cache.HeaderP->ProvidesCount++;

   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
      return false;

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();

   return true;
}
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
                                   const pkgIndexFile &Index,
                                   unsigned long Flags)
{
   // Get some space for the structure
   CurrentFile = Cache.PkgFileP + Map.Allocate(sizeof(*CurrentFile));
   if (CurrentFile == Cache.PkgFileP)
      return false;

   CurrentFile->FileName = Map.WriteString(File);
   CurrentFile->Site = WriteUniqString(Site);
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   CurrentFile->IndexType = WriteUniqString(Index.GetType()->Label);
   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

   if (CurrentFile->FileName == 0)
      return false;

   if (Progress != 0)
      Progress->SubProgress(Index.Size());
   return true;
}
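/* Note on WriteUniqString() below: the UniqHash buckets are only a transient
   accelerator keyed on the first two characters of the string; the
   authoritative store is the sorted StringItem list in the cache header,
   which is searched (and extended) whenever the bucket misses. */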
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
                                                 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
        I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
         break;
   }

   if (Res == 0)
   {
      Bucket = I;
      return I->String;
   }

   unsigned long Item = Map.Allocate(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   *Last = Item;
   ItemP->String = Map.WriteString(S,Size);
   if (ItemP->String == 0)
      return 0;

   Bucket = ItemP;
   return ItemP->String;
}
// CheckValidity - Check that a cache is up-to-date			/*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
   has matching attributes with the cache and the cache does not have
   any extra files. */
static bool CheckValidity(const string &CacheFile, FileIterator Start,
                          FileIterator End,MMap **OutMap = 0)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   // No file, certainly invalid
   if (CacheFile.empty() == true || FileExists(CacheFile) == false)
   {
      if (Debug == true)
         std::clog << "CacheFile doesn't exist" << std::endl;
      return false;
   }

   // Map it
   FileFd CacheF(CacheFile,FileFd::ReadOnly);
   SPtr<MMap> Map = new MMap(CacheF,0);
   pkgCache Cache(Map);
   if (_error->PendingError() == true || Map->Size() == 0)
   {
      if (Debug == true)
         std::clog << "Errors are pending or Map is empty()" << std::endl;
      return false;
   }

   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
   for (; Start != End; Start++)
   {
      if (Debug == true)
         std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
      if ((*Start)->HasPackages() == false)
      {
         if (Debug == true)
            std::clog << "Has NO packages" << std::endl;
         continue;
      }

      if ((*Start)->Exists() == false)
      {
#if 0 // mvo: we no longer give a message here (Default Sources spec)
         _error->WarningE("stat",_("Couldn't stat source package list %s"),
                          (*Start)->Describe().c_str());
#endif
         if (Debug == true)
            std::clog << "file doesn't exist" << std::endl;
         continue;
      }

      // FindInCache is also expected to do an IMS check.
      pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
      if (File.end() == true)
      {
         if (Debug == true)
            std::clog << "FindInCache returned end-Pointer" << std::endl;
         return false;
      }

      Visited[File->ID] = true;
      if (Debug == true)
         std::clog << "with ID " << File->ID << " is valid" << std::endl;
   }

   for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
      if (Visited[I] == false)
      {
         if (Debug == true)
            std::clog << "File with ID " << I << " wasn't visited" << std::endl;
         return false;
      }

   if (_error->PendingError() == true)
   {
      if (Debug == true)
      {
         std::clog << "Validity failed because of pending errors:" << std::endl;
         _error->DumpErrors();
      }
      return false;
   }

   if (OutMap != 0)
      *OutMap = Map.UnGuard();
   return true;
}
// ComputeSize - Compute the total size of a bunch of files		/*{{{*/
// ---------------------------------------------------------------------
/* Size is kind of an abstract notion that is only used for the progress
   meter */
static unsigned long ComputeSize(FileIterator Start,FileIterator End)
{
   unsigned long TotalSize = 0;
   for (; Start != End; Start++)
   {
      if ((*Start)->HasPackages() == false)
         continue;
      TotalSize += (*Start)->Size();
   }
   return TotalSize;
}
// BuildCache - Merge the list of index files into the cache		/*{{{*/
// ---------------------------------------------------------------------
static bool BuildCache(pkgCacheGenerator &Gen,
                       OpProgress &Progress,
                       unsigned long &CurrentSize,unsigned long TotalSize,
                       FileIterator Start, FileIterator End)
{
   FileIterator I;
   for (I = Start; I != End; I++)
   {
      if ((*I)->HasPackages() == false)
         continue;

      if ((*I)->Exists() == false)
         continue;

      if ((*I)->FindInCache(Gen.GetCache()).end() == false)
      {
         _error->Warning("Duplicate sources.list entry %s",
                         (*I)->Describe().c_str());
         continue;
      }

      unsigned long Size = (*I)->Size();
      Progress.OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
      CurrentSize += Size;

      if ((*I)->Merge(Gen,Progress) == false)
         return false;
   }

   if (Gen.HasFileDeps() == true)
   {
      TotalSize = ComputeSize(Start, End);
      CurrentSize = 0;
      for (I = Start; I != End; I++)
      {
         unsigned long Size = (*I)->Size();
         Progress.OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
         CurrentSize += Size;
         if ((*I)->MergeFileProvides(Gen,Progress) == false)
            return false;
      }
   }

   return true;
}
// MakeStatusCache - Construct the status cache				/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmaped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
                        MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
   unsigned long const MapSize = _config->FindI("APT::Cache-Limit",24*1024*1024);

   vector<pkgIndexFile *> Files;
   for (vector<metaIndex *>::const_iterator i = List.begin();
        i != List.end();
        i++)
   {
      vector<pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
           j != Indexes->end();
           j++)
         Files.push_back (*j);
   }

   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
         dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
         CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
         CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
         Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   Progress.OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile,Files.begin(),Files.end(),OutMap) == true)
   {
      Progress.OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
         std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteEmpty);
      fchmod(CacheF->Fd(),0644);
      Map = new DynamicMMap(*CacheF,MMap::Public,MapSize);
      if (_error->PendingError() == true)
         return false;
      if (Debug == true)
         std::clog << "Open filebased MMap" << std::endl;
   }
   else
   {
      // Just build it in memory..
      Map = new DynamicMMap(0,MapSize);
      if (Debug == true)
         std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Let's try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile,Files.begin(),
                     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
         std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
                || SCacheF.Read((unsigned char *)Map->Data() + alloc,
                                SCacheF.Size()) == false)
         return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache
      pkgCacheGenerator Gen(Map.Get(),&Progress);
      if (_error->PendingError() == true)
         return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin()+EndOfSource,Files.end()) == false)
         return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   else
   {
      if (Debug == true)
         std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),&Progress);
      if (_error->PendingError() == true)
         return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin(),Files.begin()+EndOfSource) == false)
         return false;

      if (Writeable == true && SrcCacheFile.empty() == false)
      {
         FileFd SCacheF(SrcCacheFile,FileFd::WriteEmpty);
         if (_error->PendingError() == true)
            return false;

         fchmod(SCacheF.Fd(),0644);

         // Write out the main data
         if (SCacheF.Write(Map->Data(),Map->Size()) == false)
            return _error->Error(_("IO Error saving source cache"));

         // Write out the proper header
         Gen.GetCache().HeaderP->Dirty = false;
         if (SCacheF.Seek(0) == false ||
             SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
            return _error->Error(_("IO Error saving source cache"));
         Gen.GetCache().HeaderP->Dirty = true;
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                     Files.begin()+EndOfSource,Files.end()) == false)
         return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
         delete Map.UnGuard();
         *OutMap = new MMap(*CacheF,0);
      }
      else
      {
         *OutMap = Map.UnGuard();
      }
   }

   return true;
}
// MakeOnlyStatusCache - Build a cache with just the status files	/*{{{*/
// ---------------------------------------------------------------------
bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
{
   unsigned long MapSize = _config->FindI("APT::Cache-Limit",20*1024*1024);
   vector<pkgIndexFile *> Files;
   unsigned long EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   SPtr<DynamicMMap> Map = new DynamicMMap(0,MapSize);
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;

   TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

   // Build the status cache
   Progress.OverallProgress(0,1,1,_("Reading package lists"));
   pkgCacheGenerator Gen(Map.Get(),&Progress);
   if (_error->PendingError() == true)
      return false;
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
                  Files.begin()+EndOfSource,Files.end()) == false)
      return false;

   // FIXME: move me to a better place
   Gen.FinishCache(Progress);

   if (_error->PendingError() == true)
      return false;
   *OutMap = Map.UnGuard();

   return true;
}