1 | // -*- mode: cpp; mode: fold -*- | |
2 | // Description /*{{{*/ | |
3 | // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $ | |
4 | /* ###################################################################### | |
5 | ||
6 | Package Cache Generator - Generator for the cache structure. | |
7 | ||
8 | This builds the cache structure from the abstract package list parser. | |
9 | ||
10 | ##################################################################### */ | |
11 | /*}}}*/ | |
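// Usage sketch /*{{{*/
// ---------------------------------------------------------------------
/* Illustrative only (not part of the original file): the generator is
   normally driven through MakeStatusCache() below, which merges all index
   and status files and hands back an MMap that the caller wraps in a
   pkgCache. Error handling and configuration setup are elided.

      pkgSourceList List;
      if (List.ReadMainList() == false)
         return false;
      OpProgress Prog;
      MMap *OutMap = NULL;
      if (pkgCacheGenerator::MakeStatusCache(List, &Prog, &OutMap, true) == false)
         return false;
      pkgCache Cache(OutMap);
      // iterate over Cache.PkgBegin() … as usual
*/
/*}}}*/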
12 | // Include Files /*{{{*/ | |
13 | #include <config.h> | |
14 | ||
15 | #include <apt-pkg/pkgcachegen.h> | |
16 | #include <apt-pkg/error.h> | |
17 | #include <apt-pkg/version.h> | |
18 | #include <apt-pkg/progress.h> | |
19 | #include <apt-pkg/sourcelist.h> | |
20 | #include <apt-pkg/configuration.h> | |
21 | #include <apt-pkg/aptconfiguration.h> | |
22 | #include <apt-pkg/strutl.h> | |
23 | #include <apt-pkg/sptr.h> | |
24 | #include <apt-pkg/pkgsystem.h> | |
25 | #include <apt-pkg/macros.h> | |
26 | #include <apt-pkg/tagfile.h> | |
27 | #include <apt-pkg/metaindex.h> | |
28 | #include <apt-pkg/fileutl.h> | |
29 | ||
30 | #include <vector> | |
31 | #include <sys/stat.h> | |
32 | #include <unistd.h> | |
33 | #include <errno.h> | |
34 | #include <stdio.h> | |
35 | ||
36 | #include <apti18n.h> | |
37 | /*}}}*/ | |
38 | typedef std::vector<pkgIndexFile *>::iterator FileIterator; | |
39 | template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap; | |
40 | ||
41 | static bool IsDuplicateDescription(pkgCache::DescIterator Desc, | |
42 | MD5SumValue const &CurMd5, std::string const &CurLang); | |
43 | ||
44 | using std::string; | |
45 | ||
46 | // CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/ | |
47 | // --------------------------------------------------------------------- | |
48 | /* We set the dirty flag and make sure that it is written to the disk */ | |
49 | pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) : | |
50 | Map(*pMap), Cache(pMap,false), Progress(Prog), | |
51 | FoundFileDeps(0) | |
52 | { | |
53 | CurrentFile = 0; | |
54 | memset(UniqHash,0,sizeof(UniqHash)); | |
55 | ||
56 | if (_error->PendingError() == true) | |
57 | return; | |
58 | ||
59 | if (Map.Size() == 0) | |
60 | { | |
61 | // Setup the map interface.. | |
62 | Cache.HeaderP = (pkgCache::Header *)Map.Data(); | |
63 | if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true) | |
64 | return; | |
65 | ||
66 | Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0])); | |
67 | ||
68 | // Starting header | |
69 | *Cache.HeaderP = pkgCache::Header(); | |
70 | map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label); | |
71 | Cache.HeaderP->VerSysName = idxVerSysName; | |
72 | // this pointer is set in ReMap, but we need it now for WriteUniqString | |
73 | Cache.StringItemP = (pkgCache::StringItem *)Map.Data(); | |
74 | map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture")); | |
75 | Cache.HeaderP->Architecture = idxArchitecture; | |
76 | if (unlikely(idxVerSysName == 0 || idxArchitecture == 0)) | |
77 | return; | |
78 | Cache.ReMap(); | |
79 | } | |
80 | else | |
81 | { | |
82 | // Map directly from the existing file | |
83 | Cache.ReMap(); | |
84 | Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0])); | |
85 | if (Cache.VS != _system->VS) | |
86 | { | |
87 | _error->Error(_("Cache has an incompatible versioning system")); | |
88 | return; | |
89 | } | |
90 | } | |
91 | ||
92 | Cache.HeaderP->Dirty = true; | |
93 | Map.Sync(0,sizeof(pkgCache::Header)); | |
94 | } | |
95 | /*}}}*/ | |
96 | // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/ | |
97 | // --------------------------------------------------------------------- | |
98 | /* We sync the data then unset the dirty flag in two steps so as to | |
99 | avoid a problem during a crash */ | |
100 | pkgCacheGenerator::~pkgCacheGenerator() | |
101 | { | |
102 | if (_error->PendingError() == true) | |
103 | return; | |
104 | if (Map.Sync() == false) | |
105 | return; | |
106 | ||
107 | Cache.HeaderP->Dirty = false; | |
108 | Cache.HeaderP->CacheFileSize = Map.Size(); | |
109 | Map.Sync(0,sizeof(pkgCache::Header)); | |
110 | } | |
111 | /*}}}*/ | |
112 | void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/ | |
113 | if (oldMap == newMap) | |
114 | return; | |
115 | ||
116 | if (_config->FindB("Debug::pkgCacheGen", false)) | |
117 | std::clog << "Remapping from " << oldMap << " to " << newMap << std::endl; | |
118 | ||
119 | Cache.ReMap(false); | |
120 | ||
121 | CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap; | |
122 | ||
123 | for (size_t i = 0; i < _count(UniqHash); ++i) | |
124 | if (UniqHash[i] != 0) | |
125 | UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap; | |
126 | ||
127 | for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin(); | |
128 | i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i) | |
129 | (*i)->ReMap(oldMap, newMap); | |
130 | for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin(); | |
131 | i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i) | |
132 | (*i)->ReMap(oldMap, newMap); | |
133 | for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin(); | |
134 | i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i) | |
135 | (*i)->ReMap(oldMap, newMap); | |
136 | for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin(); | |
137 | i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i) | |
138 | (*i)->ReMap(oldMap, newMap); | |
139 | for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin(); | |
140 | i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i) | |
141 | (*i)->ReMap(oldMap, newMap); | |
142 | for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin(); | |
143 | i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i) | |
144 | (*i)->ReMap(oldMap, newMap); | |
145 | for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin(); | |
146 | i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i) | |
147 | (*i)->ReMap(oldMap, newMap); | |
148 | } /*}}}*/ | |
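// Remap guard pattern /*{{{*/
/* Illustrative sketch of how the Dynamic<> guards interact with ReMap()
   above: every iterator that has to survive a possible cache growth is
   registered in Dynamic<T>::toReMap for the duration of its scope, so the
   loops above can re-base it when the mmap is relocated. The arguments
   "apt"/"amd64" are just example values.

      pkgCache::PkgIterator Pkg;
      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);   // registers &Pkg
      if (NewPackage(Pkg, "apt", "amd64") == false) // may grow and move the map
         return false;
      // Pkg is still valid here even if the map was moved
*/
/*}}}*/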
149 | // CacheGenerator::WriteStringInMap /*{{{*/ | |
150 | map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String, | |
151 | const unsigned long &Len) { | |
152 | void const * const oldMap = Map.Data(); | |
153 | map_ptrloc const index = Map.WriteString(String, Len); | |
154 | if (index != 0) | |
155 | ReMap(oldMap, Map.Data()); | |
156 | return index; | |
157 | } | |
158 | /*}}}*/ | |
159 | // CacheGenerator::WriteStringInMap /*{{{*/ | |
160 | map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) { | |
161 | void const * const oldMap = Map.Data(); | |
162 | map_ptrloc const index = Map.WriteString(String); | |
163 | if (index != 0) | |
164 | ReMap(oldMap, Map.Data()); | |
165 | return index; | |
166 | } | |
167 | /*}}}*/ | |
168 | map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/ | |
169 | void const * const oldMap = Map.Data(); | |
170 | map_ptrloc const index = Map.Allocate(size); | |
171 | if (index != 0) | |
172 | ReMap(oldMap, Map.Data()); | |
173 | return index; | |
174 | } | |
175 | /*}}}*/ | |
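// Pointer re-basing after allocation /*{{{*/
/* Illustrative note: the three helpers above can move the underlying mmap,
   so callers that hold a raw map_ptrloc* across an allocation re-base it
   manually, as done in several places below (Last being a map_ptrloc*
   already held by the caller):

      void const * const oldMap = Map.Data();
      map_ptrloc const idx = AllocateInMap(sizeof(pkgCache::Version));
      if (oldMap != Map.Data())
         Last += (map_ptrloc *) Map.Data() - (map_ptrloc *) oldMap;
      *Last = idx;
*/
/*}}}*/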
176 | // CacheGenerator::MergeList - Merge the package list /*{{{*/ | |
177 | // --------------------------------------------------------------------- | |
178 | /* This provides the generation of the entries in the cache. Each loop iteration | |
179 | goes through a single package record from the underlying parse engine. */ | |
180 | bool pkgCacheGenerator::MergeList(ListParser &List, | |
181 | pkgCache::VerIterator *OutVer) | |
182 | { | |
183 | List.Owner = this; | |
184 | ||
185 | unsigned int Counter = 0; | |
186 | while (List.Step() == true) | |
187 | { | |
188 | string const PackageName = List.Package(); | |
189 | if (PackageName.empty() == true) | |
190 | return false; | |
191 | ||
192 | Counter++; | |
193 | if (Counter % 100 == 0 && Progress != 0) | |
194 | Progress->Progress(List.Offset()); | |
195 | ||
196 | string Arch = List.Architecture(); | |
197 | string const Version = List.Version(); | |
198 | if (Version.empty() == true && Arch.empty() == true) | |
199 | { | |
200 | // package descriptions | |
201 | if (MergeListGroup(List, PackageName) == false) | |
202 | return false; | |
203 | continue; | |
204 | } | |
205 | ||
206 | if (Arch.empty() == true) | |
207 | { | |
208 | // use the pseudo arch 'none' for arch-less packages | |
209 | Arch = "none"; | |
210 | /* We might build a SingleArchCache here, which we don't want to blow up | |
211 | to a proper MultiArchCache just for these :none packages, so just ensure | |
212 | that we always have a native package structure first for SingleArch */ | |
213 | pkgCache::PkgIterator NP; | |
214 | Dynamic<pkgCache::PkgIterator> DynPkg(NP); | |
215 | if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false) | |
216 | // TRANSLATOR: The first placeholder is a package name, | |
217 | // the other two should be copied verbatim as they include debug info | |
218 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
219 | PackageName.c_str(), "NewPackage", 0); | |
220 | } | |
221 | ||
222 | // Get a pointer to the package structure | |
223 | pkgCache::PkgIterator Pkg; | |
224 | Dynamic<pkgCache::PkgIterator> DynPkg(Pkg); | |
225 | if (NewPackage(Pkg, PackageName, Arch) == false) | |
226 | // TRANSLATOR: The first placeholder is a package name, | |
227 | // the other two should be copied verbatim as they include debug info | |
228 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
229 | PackageName.c_str(), "NewPackage", 1); | |
230 | ||
231 | ||
232 | if (Version.empty() == true) | |
233 | { | |
234 | if (MergeListPackage(List, Pkg) == false) | |
235 | return false; | |
236 | } | |
237 | else | |
238 | { | |
239 | if (MergeListVersion(List, Pkg, Version, OutVer) == false) | |
240 | return false; | |
241 | } | |
242 | ||
243 | if (OutVer != 0) | |
244 | { | |
245 | FoundFileDeps |= List.HasFileDeps(); | |
246 | return true; | |
247 | } | |
248 | } | |
249 | ||
250 | if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1) | |
251 | return _error->Error(_("Wow, you exceeded the number of package " | |
252 | "names this APT is capable of.")); | |
253 | if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1) | |
254 | return _error->Error(_("Wow, you exceeded the number of versions " | |
255 | "this APT is capable of.")); | |
256 | if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1) | |
257 | return _error->Error(_("Wow, you exceeded the number of descriptions " | |
258 | "this APT is capable of.")); | |
259 | if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL) | |
260 | return _error->Error(_("Wow, you exceeded the number of dependencies " | |
261 | "this APT is capable of.")); | |
262 | ||
263 | FoundFileDeps |= List.HasFileDeps(); | |
264 | return true; | |
265 | } | |
266 | // CacheGenerator::MergeListGroup /*{{{*/ | |
267 | bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName) | |
268 | { | |
269 | pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName); | |
270 | // a group has no data of its own; only packages have data, but stanzas | |
271 | // like this come from Translation- files to add descriptions, | |
272 | // and without a version we don't need a description for it… | |
273 | if (Grp.end() == true) | |
274 | return true; | |
275 | Dynamic<pkgCache::GrpIterator> DynGrp(Grp); | |
276 | ||
277 | pkgCache::PkgIterator Pkg; | |
278 | Dynamic<pkgCache::PkgIterator> DynPkg(Pkg); | |
279 | for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg)) | |
280 | if (MergeListPackage(List, Pkg) == false) | |
281 | return false; | |
282 | ||
283 | return true; | |
284 | } | |
285 | /*}}}*/ | |
286 | // CacheGenerator::MergeListPackage /*{{{*/ | |
287 | bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg) | |
288 | { | |
289 | // we first process the package, then the descriptions | |
290 | // (for deb this package processing is in fact a no-op) | |
291 | pkgCache::VerIterator Ver(Cache); | |
292 | Dynamic<pkgCache::VerIterator> DynVer(Ver); | |
293 | if (List.UsePackage(Pkg, Ver) == false) | |
294 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
295 | Pkg.Name(), "UsePackage", 1); | |
296 | ||
297 | // Find the right version to write the description | |
298 | MD5SumValue CurMd5 = List.Description_md5(); | |
299 | std::string CurLang = List.DescriptionLanguage(); | |
300 | ||
301 | for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver) | |
302 | { | |
303 | pkgCache::DescIterator Desc = Ver.DescriptionList(); | |
304 | ||
305 | // a version can only have one md5 describing it | |
306 | if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5) | |
307 | continue; | |
308 | ||
309 | // don't add a new description if we have one for the given | |
310 | // md5 && language | |
311 | if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true) | |
312 | continue; | |
313 | ||
314 | Dynamic<pkgCache::DescIterator> DynDesc(Desc); | |
315 | // we add at the end, so that the start is constant as we need | |
316 | // that to be able to efficiently share these lists | |
317 | map_ptrloc *LastDesc = &Ver->DescriptionList; | |
318 | for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc); | |
319 | if (Desc.end() == false) | |
320 | LastDesc = &Desc->NextDesc; | |
321 | ||
322 | void const * const oldMap = Map.Data(); | |
323 | map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc); | |
324 | if (unlikely(descindex == 0 && _error->PendingError())) | |
325 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
326 | Pkg.Name(), "NewDescription", 1); | |
327 | if (oldMap != Map.Data()) | |
328 | LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap; | |
329 | *LastDesc = descindex; | |
330 | Desc->ParentPkg = Pkg.Index(); | |
331 | ||
332 | if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false) | |
333 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
334 | Pkg.Name(), "NewFileDesc", 1); | |
335 | ||
336 | // we can stop here as all "same" versions will share the description | |
337 | break; | |
338 | } | |
339 | ||
340 | return true; | |
341 | } | |
342 | /*}}}*/ | |
343 | // CacheGenerator::MergeListVersion /*{{{*/ | |
344 | bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg, | |
345 | std::string const &Version, pkgCache::VerIterator* &OutVer) | |
346 | { | |
347 | pkgCache::VerIterator Ver = Pkg.VersionList(); | |
348 | Dynamic<pkgCache::VerIterator> DynVer(Ver); | |
349 | map_ptrloc *LastVer = &Pkg->VersionList; | |
350 | void const * oldMap = Map.Data(); | |
351 | ||
352 | unsigned long const Hash = List.VersionHash(); | |
353 | if (Ver.end() == false) | |
354 | { | |
355 | /* We know the list is sorted so we use that fact in the search. | |
356 | Insertion of new versions is done with correct sorting */ | |
357 | int Res = 1; | |
358 | for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++) | |
359 | { | |
360 | Res = Cache.VS->CmpVersion(Version,Ver.VerStr()); | |
361 | // Version is higher than the current version - insert here | |
362 | if (Res > 0) | |
363 | break; | |
364 | // Version strings are equal - is the hash also equal? | |
365 | if (Res == 0 && Ver->Hash == Hash) | |
366 | break; | |
367 | // proceed with the next one till we either found the right | |
368 | // version or a lower one (the list is sorted) | |
369 | } | |
370 | ||
371 | /* We already have a version for this item, record that we saw it */ | |
372 | if (Res == 0 && Ver.end() == false && Ver->Hash == Hash) | |
373 | { | |
374 | if (List.UsePackage(Pkg,Ver) == false) | |
375 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
376 | Pkg.Name(), "UsePackage", 2); | |
377 | ||
378 | if (NewFileVer(Ver,List) == false) | |
379 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
380 | Pkg.Name(), "NewFileVer", 1); | |
381 | ||
382 | // Read only a single record and return | |
383 | if (OutVer != 0) | |
384 | { | |
385 | *OutVer = Ver; | |
386 | return true; | |
387 | } | |
388 | ||
389 | return true; | |
390 | } | |
391 | } | |
392 | ||
393 | // Add a new version | |
394 | map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer); | |
395 | if (verindex == 0 && _error->PendingError()) | |
396 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
397 | Pkg.Name(), "NewVersion", 1); | |
398 | ||
399 | if (oldMap != Map.Data()) | |
400 | LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap; | |
401 | *LastVer = verindex; | |
402 | Ver->ParentPkg = Pkg.Index(); | |
403 | Ver->Hash = Hash; | |
404 | ||
405 | if (unlikely(List.NewVersion(Ver) == false)) | |
406 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
407 | Pkg.Name(), "NewVersion", 2); | |
408 | ||
409 | if (unlikely(List.UsePackage(Pkg,Ver) == false)) | |
410 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
411 | Pkg.Name(), "UsePackage", 3); | |
412 | ||
413 | if (unlikely(NewFileVer(Ver,List) == false)) | |
414 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
415 | Pkg.Name(), "NewFileVer", 2); | |
416 | ||
417 | pkgCache::GrpIterator Grp = Pkg.Group(); | |
418 | Dynamic<pkgCache::GrpIterator> DynGrp(Grp); | |
419 | ||
420 | /* If it is the first version of this package we need to add implicit | |
421 | Multi-Arch dependencies to all other package versions in the group now - | |
422 | otherwise we just add them for this new version */ | |
423 | if (Pkg.VersionList()->NextVer == 0) | |
424 | { | |
425 | pkgCache::PkgIterator P = Grp.PackageList(); | |
426 | Dynamic<pkgCache::PkgIterator> DynP(P); | |
427 | for (; P.end() != true; P = Grp.NextPkg(P)) | |
428 | { | |
429 | if (P->ID == Pkg->ID) | |
430 | continue; | |
431 | pkgCache::VerIterator V = P.VersionList(); | |
432 | Dynamic<pkgCache::VerIterator> DynV(V); | |
433 | for (; V.end() != true; ++V) | |
434 | if (unlikely(AddImplicitDepends(V, Pkg) == false)) | |
435 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
436 | Pkg.Name(), "AddImplicitDepends", 1); | |
437 | } | |
438 | /* :none packages are packages without an architecture. They are forbidden by | |
439 | debian-policy, so usually they will only be in (old) dpkg status files - | |
440 | and dpkg will complain about them - and are pretty rare. We therefore | |
441 | usually do not create conflicts while the parent is created, but only if a :none | |
442 | package (= the target) appears. This creates incorrect dependencies on :none | |
443 | for architecture-specific dependencies on the package we copy from, but we | |
444 | will ignore this bug as architecture-specific dependencies are only allowed | |
445 | in jessie and until then the :none packages should be extinct (hopefully). | |
446 | In other words: This should work long enough to allow graceful removal of | |
447 | these packages, it is not supposed to allow users to keep using them … */ | |
448 | if (strcmp(Pkg.Arch(), "none") == 0) | |
449 | { | |
450 | pkgCache::PkgIterator M = Grp.FindPreferredPkg(); | |
451 | if (M.end() == false && Pkg != M) | |
452 | { | |
453 | pkgCache::DepIterator D = M.RevDependsList(); | |
454 | Dynamic<pkgCache::DepIterator> DynD(D); | |
455 | for (; D.end() == false; ++D) | |
456 | { | |
457 | if ((D->Type != pkgCache::Dep::Conflicts && | |
458 | D->Type != pkgCache::Dep::DpkgBreaks && | |
459 | D->Type != pkgCache::Dep::Replaces) || | |
460 | D.ParentPkg().Group() == Grp) | |
461 | continue; | |
462 | ||
463 | map_ptrloc *OldDepLast = NULL; | |
464 | pkgCache::VerIterator ConVersion = D.ParentVer(); | |
465 | Dynamic<pkgCache::VerIterator> DynV(ConVersion); | |
466 | // duplicate the Conflicts/Breaks/Replaces for :none arch | |
467 | if (D->Version == 0) | |
468 | NewDepends(Pkg, ConVersion, "", 0, D->Type, OldDepLast); | |
469 | else | |
470 | NewDepends(Pkg, ConVersion, D.TargetVer(), | |
471 | D->CompareOp, D->Type, OldDepLast); | |
472 | } | |
473 | } | |
474 | } | |
475 | } | |
476 | if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false)) | |
477 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
478 | Pkg.Name(), "AddImplicitDepends", 2); | |
479 | ||
480 | // Read only a single record and return | |
481 | if (OutVer != 0) | |
482 | { | |
483 | *OutVer = Ver; | |
484 | return true; | |
485 | } | |
486 | ||
487 | /* Record the Description (it is not translated) */ | |
488 | MD5SumValue CurMd5 = List.Description_md5(); | |
489 | if (CurMd5.Value().empty() == true) | |
490 | return true; | |
491 | std::string CurLang = List.DescriptionLanguage(); | |
492 | ||
493 | /* Before we add a new description we first search in the group for | |
494 | a version with a description of the same MD5 - if we find one we reuse | |
495 | its description list instead of creating our own for this version */ | |
496 | for (pkgCache::PkgIterator P = Grp.PackageList(); | |
497 | P.end() == false; P = Grp.NextPkg(P)) | |
498 | { | |
499 | for (pkgCache::VerIterator V = P.VersionList(); | |
500 | V.end() == false; ++V) | |
501 | { | |
502 | if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false) | |
503 | continue; | |
504 | Ver->DescriptionList = V->DescriptionList; | |
505 | return true; | |
506 | } | |
507 | } | |
508 | ||
509 | // We haven't found reusable descriptions, so add the first description | |
510 | pkgCache::DescIterator Desc = Ver.DescriptionList(); | |
511 | Dynamic<pkgCache::DescIterator> DynDesc(Desc); | |
512 | map_ptrloc *LastDesc = &Ver->DescriptionList; | |
513 | ||
514 | oldMap = Map.Data(); | |
515 | map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc); | |
516 | if (unlikely(descindex == 0 && _error->PendingError())) | |
517 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
518 | Pkg.Name(), "NewDescription", 2); | |
519 | if (oldMap != Map.Data()) | |
520 | LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap; | |
521 | *LastDesc = descindex; | |
522 | Desc->ParentPkg = Pkg.Index(); | |
523 | ||
524 | if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false) | |
525 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
526 | Pkg.Name(), "NewFileDesc", 2); | |
527 | ||
528 | return true; | |
529 | } | |
530 | /*}}}*/ | |
531 | /*}}}*/ | |
532 | // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/ | |
533 | // --------------------------------------------------------------------- | |
534 | /* If we found any file depends while parsing the main list we need to | |
535 | resolve them. Since it is undesired to load the entire list of files | |
536 | into the cache as virtual packages we do a two-stage effort. MergeList | |
537 | identifies the file depends and this pass creates Provides for them by | |
538 | re-parsing all the indexes. */ | |
539 | bool pkgCacheGenerator::MergeFileProvides(ListParser &List) | |
540 | { | |
541 | List.Owner = this; | |
542 | ||
543 | unsigned int Counter = 0; | |
544 | while (List.Step() == true) | |
545 | { | |
546 | string PackageName = List.Package(); | |
547 | if (PackageName.empty() == true) | |
548 | return false; | |
549 | string Version = List.Version(); | |
550 | if (Version.empty() == true) | |
551 | continue; | |
552 | ||
553 | pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName); | |
554 | Dynamic<pkgCache::PkgIterator> DynPkg(Pkg); | |
555 | if (Pkg.end() == true) | |
556 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
557 | PackageName.c_str(), "FindPkg", 1); | |
558 | Counter++; | |
559 | if (Counter % 100 == 0 && Progress != 0) | |
560 | Progress->Progress(List.Offset()); | |
561 | ||
562 | unsigned long Hash = List.VersionHash(); | |
563 | pkgCache::VerIterator Ver = Pkg.VersionList(); | |
564 | Dynamic<pkgCache::VerIterator> DynVer(Ver); | |
565 | for (; Ver.end() == false; ++Ver) | |
566 | { | |
567 | if (Ver->Hash == Hash && Version == Ver.VerStr()) | |
568 | { | |
569 | if (List.CollectFileProvides(Cache,Ver) == false) | |
570 | return _error->Error(_("Error occurred while processing %s (%s%d)"), | |
571 | PackageName.c_str(), "CollectFileProvides", 1); | |
572 | break; | |
573 | } | |
574 | } | |
575 | ||
576 | if (Ver.end() == true) | |
577 | _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str()); | |
578 | } | |
579 | ||
580 | return true; | |
581 | } | |
582 | /*}}}*/ | |
583 | // CacheGenerator::NewGroup - Add a new group /*{{{*/ | |
584 | // --------------------------------------------------------------------- | |
585 | /* This creates a new group structure and adds it to the hash table */ | |
586 | bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name) | |
587 | { | |
588 | Grp = Cache.FindGrp(Name); | |
589 | if (Grp.end() == false) | |
590 | return true; | |
591 | ||
592 | // Get a structure | |
593 | map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group)); | |
594 | if (unlikely(Group == 0)) | |
595 | return false; | |
596 | ||
597 | Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group); | |
598 | map_ptrloc const idxName = WriteStringInMap(Name); | |
599 | if (unlikely(idxName == 0)) | |
600 | return false; | |
601 | Grp->Name = idxName; | |
602 | ||
603 | // Insert it into the hash table | |
604 | unsigned long const Hash = Cache.Hash(Name); | |
605 | Grp->Next = Cache.HeaderP->GrpHashTable[Hash]; | |
606 | Cache.HeaderP->GrpHashTable[Hash] = Group; | |
607 | ||
608 | Grp->ID = Cache.HeaderP->GroupCount++; | |
609 | return true; | |
610 | } | |
611 | /*}}}*/ | |
612 | // CacheGenerator::NewPackage - Add a new package /*{{{*/ | |
613 | // --------------------------------------------------------------------- | |
614 | /* This creates a new package structure and adds it to the hash table */ | |
615 | bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name, | |
616 | const string &Arch) { | |
617 | pkgCache::GrpIterator Grp; | |
618 | Dynamic<pkgCache::GrpIterator> DynGrp(Grp); | |
619 | if (unlikely(NewGroup(Grp, Name) == false)) | |
620 | return false; | |
621 | ||
622 | Pkg = Grp.FindPkg(Arch); | |
623 | if (Pkg.end() == false) | |
624 | return true; | |
625 | ||
626 | // Get a structure | |
627 | map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package)); | |
628 | if (unlikely(Package == 0)) | |
629 | return false; | |
630 | Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package); | |
631 | ||
632 | // Insert the package into our package list | |
633 | if (Grp->FirstPackage == 0) // the group is new | |
634 | { | |
635 | // Insert it into the hash table | |
636 | unsigned long const Hash = Cache.Hash(Name); | |
637 | Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash]; | |
638 | Cache.HeaderP->PkgHashTable[Hash] = Package; | |
639 | Grp->FirstPackage = Package; | |
640 | } | |
641 | else // Group the Packages together | |
642 | { | |
643 | // this package is the new last package | |
644 | pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage); | |
645 | Pkg->NextPackage = LastPkg->NextPackage; | |
646 | LastPkg->NextPackage = Package; | |
647 | } | |
648 | Grp->LastPackage = Package; | |
649 | ||
650 | // Set the name, arch and the ID | |
651 | Pkg->Name = Grp->Name; | |
652 | Pkg->Group = Grp.Index(); | |
653 | // arch "all" is mapped to the native architecture | |
654 | map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str()); | |
655 | if (unlikely(idxArch == 0)) | |
656 | return false; | |
657 | Pkg->Arch = idxArch; | |
658 | Pkg->ID = Cache.HeaderP->PackageCount++; | |
659 | ||
660 | return true; | |
661 | } | |
662 | /*}}}*/ | |
663 | // CacheGenerator::AddImplicitDepends /*{{{*/ | |
664 | bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G, | |
665 | pkgCache::PkgIterator &P, | |
666 | pkgCache::VerIterator &V) | |
667 | { | |
668 | // copy P.Arch() into a string here as a cache remap | |
669 | // in NewDepends() later may alter the pointer location | |
670 | string Arch = P.Arch() == NULL ? "" : P.Arch(); | |
671 | map_ptrloc *OldDepLast = NULL; | |
672 | /* MultiArch handling introduces a lot of implicit Dependencies: | |
673 | - MultiArch: same → Co-Installable if they have the same version | |
674 | - All others conflict with all other group members */ | |
675 | bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same); | |
676 | pkgCache::PkgIterator D = G.PackageList(); | |
677 | Dynamic<pkgCache::PkgIterator> DynD(D); | |
678 | for (; D.end() != true; D = G.NextPkg(D)) | |
679 | { | |
680 | if (Arch == D.Arch() || D->VersionList == 0) | |
681 | continue; | |
682 | /* We allow only one installed arch at a time | |
683 | per group, therefore each group member conflicts | |
684 | with all other group members */ | |
685 | if (coInstall == true) | |
686 | { | |
687 | // Replaces: ${self}:other ( << ${binary:Version}) | |
688 | NewDepends(D, V, V.VerStr(), | |
689 | pkgCache::Dep::Less, pkgCache::Dep::Replaces, | |
690 | OldDepLast); | |
691 | // Breaks: ${self}:other (!= ${binary:Version}) | |
692 | NewDepends(D, V, V.VerStr(), | |
693 | pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks, | |
694 | OldDepLast); | |
695 | } else { | |
696 | // Conflicts: ${self}:other | |
697 | NewDepends(D, V, "", | |
698 | pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts, | |
699 | OldDepLast); | |
700 | } | |
701 | } | |
702 | return true; | |
703 | } | |
704 | bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V, | |
705 | pkgCache::PkgIterator &D) | |
706 | { | |
707 | /* MultiArch handling introduces a lot of implicit Dependencies: | |
708 | - MultiArch: same → Co-Installable if they have the same version | |
709 | - All others conflict with all other group members */ | |
710 | map_ptrloc *OldDepLast = NULL; | |
711 | bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same); | |
712 | if (coInstall == true) | |
713 | { | |
714 | // Replaces: ${self}:other ( << ${binary:Version}) | |
715 | NewDepends(D, V, V.VerStr(), | |
716 | pkgCache::Dep::Less, pkgCache::Dep::Replaces, | |
717 | OldDepLast); | |
718 | // Breaks: ${self}:other (!= ${binary:Version}) | |
719 | NewDepends(D, V, V.VerStr(), | |
720 | pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks, | |
721 | OldDepLast); | |
722 | } else { | |
723 | // Conflicts: ${self}:other | |
724 | NewDepends(D, V, "", | |
725 | pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts, | |
726 | OldDepLast); | |
727 | } | |
728 | return true; | |
729 | } | |
730 | ||
731 | /*}}}*/ | |
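// Implicit Multi-Arch dependency example /*{{{*/
/* Illustrative example of what the two helpers above generate. Assume the
   group "libfoo" contains libfoo:amd64 and libfoo:i386, both at version 1.0
   (package name and version are made up for illustration):
   - with "Multi-Arch: same" each sibling gets
        Replaces: libfoo:<other> (<< 1.0)
        Breaks:   libfoo:<other> (!= 1.0)
     so both can be co-installed only at exactly the same version;
   - without it a plain
        Conflicts: libfoo:<other>
     is added, allowing only one architecture of the group to be installed
     at a time. */
/*}}}*/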
732 | // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/ | |
733 | // --------------------------------------------------------------------- | |
734 | /* */ | |
735 | bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver, | |
736 | ListParser &List) | |
737 | { | |
738 | if (CurrentFile == 0) | |
739 | return true; | |
740 | ||
741 | // Get a structure | |
742 | map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile)); | |
743 | if (VerFile == 0) | |
744 | return false; | |
745 | ||
746 | pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile); | |
747 | VF->File = CurrentFile - Cache.PkgFileP; | |
748 | ||
749 | // Link it to the end of the list | |
750 | map_ptrloc *Last = &Ver->FileList; | |
751 | for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V) | |
752 | Last = &V->NextFile; | |
753 | VF->NextFile = *Last; | |
754 | *Last = VF.Index(); | |
755 | ||
756 | VF->Offset = List.Offset(); | |
757 | VF->Size = List.Size(); | |
758 | if (Cache.HeaderP->MaxVerFileSize < VF->Size) | |
759 | Cache.HeaderP->MaxVerFileSize = VF->Size; | |
760 | Cache.HeaderP->VerFileCount++; | |
761 | ||
762 | return true; | |
763 | } | |
764 | /*}}}*/ | |
765 | // CacheGenerator::NewVersion - Create a new Version /*{{{*/ | |
766 | // --------------------------------------------------------------------- | |
767 | /* This puts a version structure in the linked list */ | |
768 | unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver, | |
769 | const string &VerStr, | |
770 | unsigned long Next) | |
771 | { | |
772 | // Get a structure | |
773 | map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version)); | |
774 | if (Version == 0) | |
775 | return 0; | |
776 | ||
777 | // Fill it in | |
778 | Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version); | |
779 | //Dynamic<pkgCache::VerIterator> DynV(Ver); // caller MergeListVersion already takes care of it | |
780 | Ver->NextVer = Next; | |
781 | Ver->ID = Cache.HeaderP->VersionCount++; | |
782 | map_ptrloc const idxVerStr = WriteStringInMap(VerStr); | |
783 | if (unlikely(idxVerStr == 0)) | |
784 | return 0; | |
785 | Ver->VerStr = idxVerStr; | |
786 | ||
787 | return Version; | |
788 | } | |
789 | /*}}}*/ | |
790 | // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/ | |
791 | // --------------------------------------------------------------------- | |
792 | /* */ | |
793 | bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc, | |
794 | ListParser &List) | |
795 | { | |
796 | if (CurrentFile == 0) | |
797 | return true; | |
798 | ||
799 | // Get a structure | |
800 | map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile)); | |
801 | if (DescFile == 0) | |
802 | return false; | |
803 | ||
804 | pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile); | |
805 | DF->File = CurrentFile - Cache.PkgFileP; | |
806 | ||
807 | // Link it to the end of the list | |
808 | map_ptrloc *Last = &Desc->FileList; | |
809 | for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D) | |
810 | Last = &D->NextFile; | |
811 | ||
812 | DF->NextFile = *Last; | |
813 | *Last = DF.Index(); | |
814 | ||
815 | DF->Offset = List.Offset(); | |
816 | DF->Size = List.Size(); | |
817 | if (Cache.HeaderP->MaxDescFileSize < DF->Size) | |
818 | Cache.HeaderP->MaxDescFileSize = DF->Size; | |
819 | Cache.HeaderP->DescFileCount++; | |
820 | ||
821 | return true; | |
822 | } | |
823 | /*}}}*/ | |
824 | // CacheGenerator::NewDescription - Create a new Description /*{{{*/ | |
825 | // --------------------------------------------------------------------- | |
826 | /* This puts a description structure in the linked list */ | |
827 | map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc, | |
828 | const string &Lang, | |
829 | const MD5SumValue &md5sum, | |
830 | map_ptrloc Next) | |
831 | { | |
832 | // Get a structure | |
833 | map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description)); | |
834 | if (Description == 0) | |
835 | return 0; | |
836 | ||
837 | // Fill it in | |
838 | Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description); | |
839 | Desc->NextDesc = Next; | |
840 | Desc->ID = Cache.HeaderP->DescriptionCount++; | |
841 | map_ptrloc const idxlanguage_code = WriteStringInMap(Lang); | |
842 | map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value()); | |
843 | if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0)) | |
844 | return 0; | |
845 | Desc->language_code = idxlanguage_code; | |
846 | Desc->md5sum = idxmd5sum; | |
847 | ||
848 | return Description; | |
849 | } | |
850 | /*}}}*/ | |
851 | // CacheGenerator::NewDepends - Create a dependency element /*{{{*/ | |
852 | // --------------------------------------------------------------------- | |
853 | /* This creates a dependency element in the tree. It is linked to the | |
854 | version and to the package that it is pointing to. */ | |
855 | bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg, | |
856 | pkgCache::VerIterator &Ver, | |
857 | string const &Version, | |
858 | unsigned int const &Op, | |
859 | unsigned int const &Type, | |
860 | map_ptrloc* &OldDepLast) | |
861 | { | |
862 | void const * const oldMap = Map.Data(); | |
863 | // Get a structure | |
864 | map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency)); | |
865 | if (unlikely(Dependency == 0)) | |
866 | return false; | |
867 | ||
868 | // Fill it in | |
869 | pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency); | |
870 | Dynamic<pkgCache::DepIterator> DynDep(Dep); | |
871 | Dep->ParentVer = Ver.Index(); | |
872 | Dep->Type = Type; | |
873 | Dep->CompareOp = Op; | |
874 | Dep->ID = Cache.HeaderP->DependsCount++; | |
875 | ||
876 | // Probe the reverse dependency list for a version string that matches | |
877 | if (Version.empty() == false) | |
878 | { | |
879 | /* for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++) | |
880 | if (I->Version != 0 && I.TargetVer() == Version) | |
881 | Dep->Version = I->Version;*/ | |
882 | if (Dep->Version == 0) { | |
883 | map_ptrloc const index = WriteStringInMap(Version); | |
884 | if (unlikely(index == 0)) | |
885 | return false; | |
886 | Dep->Version = index; | |
887 | } | |
888 | } | |
889 | ||
890 | // Link it to the package | |
891 | Dep->Package = Pkg.Index(); | |
892 | Dep->NextRevDepends = Pkg->RevDepends; | |
893 | Pkg->RevDepends = Dep.Index(); | |
894 | ||
895 | // Do we know where to link the Dependency to? | |
896 | if (OldDepLast == NULL) | |
897 | { | |
898 | OldDepLast = &Ver->DependsList; | |
899 | for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D) | |
900 | OldDepLast = &D->NextDepends; | |
901 | } else if (oldMap != Map.Data()) | |
902 | OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap; | |
903 | ||
904 | Dep->NextDepends = *OldDepLast; | |
905 | *OldDepLast = Dep.Index(); | |
906 | OldDepLast = &Dep->NextDepends; | |
907 | ||
908 | return true; | |
909 | } | |
910 | /*}}}*/ | |
911 | // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/ | |
912 | // --------------------------------------------------------------------- | |
913 | /* This creates a Group and the Package to link this dependency to if | |
914 | needed and also handles the caching of the old endpoint */ | |
915 | bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver, | |
916 | const string &PackageName, | |
917 | const string &Arch, | |
918 | const string &Version, | |
919 | unsigned int Op, | |
920 | unsigned int Type) | |
921 | { | |
922 | pkgCache::GrpIterator Grp; | |
923 | Dynamic<pkgCache::GrpIterator> DynGrp(Grp); | |
924 | if (unlikely(Owner->NewGroup(Grp, PackageName) == false)) | |
925 | return false; | |
926 | ||
927 | // Locate the target package | |
928 | pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch); | |
929 | // we don't create 'none' packages and their dependencies if we can avoid it … | |
930 | if (Pkg.end() == true && Arch == "none" && strcmp(Ver.ParentPkg().Arch(), "none") != 0) | |
931 | return true; | |
932 | Dynamic<pkgCache::PkgIterator> DynPkg(Pkg); | |
933 | if (Pkg.end() == true) { | |
934 | if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false)) | |
935 | return false; | |
936 | } | |
937 | ||
938 | // Is it a file dependency? | |
939 | if (unlikely(PackageName[0] == '/')) | |
940 | FoundFileDeps = true; | |
941 | ||
942 | /* Caching the old end point speeds up generation substantially */ | |
943 | if (OldDepVer != Ver) { | |
944 | OldDepLast = NULL; | |
945 | OldDepVer = Ver; | |
946 | } | |
947 | ||
948 | return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast); | |
949 | } | |
950 | /*}}}*/ | |
951 | // ListParser::NewProvides - Create a Provides element /*{{{*/ | |
952 | // --------------------------------------------------------------------- | |
953 | /* */ | |
954 | bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver, | |
955 | const string &PkgName, | |
956 | const string &PkgArch, | |
957 | const string &Version) | |
958 | { | |
959 | pkgCache &Cache = Owner->Cache; | |
960 | ||
961 | // We do not add self referencing provides | |
962 | if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() || | |
963 | (PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0))) | |
964 | return true; | |
965 | ||
966 | // Get a structure | |
967 | map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides)); | |
968 | if (unlikely(Provides == 0)) | |
969 | return false; | |
970 | Cache.HeaderP->ProvidesCount++; | |
971 | ||
972 | // Fill it in | |
973 | pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP); | |
974 | Dynamic<pkgCache::PrvIterator> DynPrv(Prv); | |
975 | Prv->Version = Ver.Index(); | |
976 | Prv->NextPkgProv = Ver->ProvidesList; | |
977 | Ver->ProvidesList = Prv.Index(); | |
978 | if (Version.empty() == false) { | |
979 | map_ptrloc const idxProvideVersion = WriteString(Version); | |
980 | Prv->ProvideVersion = idxProvideVersion; | |
981 | if (unlikely(idxProvideVersion == 0)) | |
982 | return false; | |
983 | } | |
984 | ||
985 | // Locate the target package | |
986 | pkgCache::PkgIterator Pkg; | |
987 | Dynamic<pkgCache::PkgIterator> DynPkg(Pkg); | |
988 | if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false)) | |
989 | return false; | |
990 | ||
991 | // Link it to the package | |
992 | Prv->ParentPkg = Pkg.Index(); | |
993 | Prv->NextProvides = Pkg->ProvidesList; | |
994 | Pkg->ProvidesList = Prv.Index(); | |
995 | ||
996 | return true; | |
997 | } | |
998 | /*}}}*/ | |
999 | // CacheGenerator::SelectFile - Select the current file being parsed /*{{{*/ | |
1000 | // --------------------------------------------------------------------- | |
1001 | /* This is used to select which file is to be associated with all newly | |
1002 | added versions. The caller is responsible for setting the IMS fields. */ | |
1003 | bool pkgCacheGenerator::SelectFile(const string &File,const string &Site, | |
1004 | const pkgIndexFile &Index, | |
1005 | unsigned long Flags) | |
1006 | { | |
1007 | // Get some space for the structure | |
1008 | map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile)); | |
1009 | if (unlikely(idxFile == 0)) | |
1010 | return false; | |
1011 | CurrentFile = Cache.PkgFileP + idxFile; | |
1012 | ||
1013 | // Fill it in | |
1014 | map_ptrloc const idxFileName = WriteStringInMap(File); | |
1015 | map_ptrloc const idxSite = WriteUniqString(Site); | |
1016 | if (unlikely(idxFileName == 0 || idxSite == 0)) | |
1017 | return false; | |
1018 | CurrentFile->FileName = idxFileName; | |
1019 | CurrentFile->Site = idxSite; | |
1020 | CurrentFile->NextFile = Cache.HeaderP->FileList; | |
1021 | CurrentFile->Flags = Flags; | |
1022 | CurrentFile->ID = Cache.HeaderP->PackageFileCount; | |
1023 | map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label); | |
1024 | if (unlikely(idxIndexType == 0)) | |
1025 | return false; | |
1026 | CurrentFile->IndexType = idxIndexType; | |
1027 | PkgFileName = File; | |
1028 | Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP; | |
1029 | Cache.HeaderP->PackageFileCount++; | |
1030 | ||
1031 | if (Progress != 0) | |
1032 | Progress->SubProgress(Index.Size()); | |
1033 | return true; | |
1034 | } | |
1035 | /*}}}*/ | |
1036 | // CacheGenerator::WriteUniqueString - Insert a unique string /*{{{*/ | |
1037 | // --------------------------------------------------------------------- | |
1038 | /* This is used to create handles to strings. Given the same text it | |
1039 | always returns the same number */ | |
1040 | unsigned long pkgCacheGenerator::WriteUniqString(const char *S, | |
1041 | unsigned int Size) | |
1042 | { | |
1043 | /* We use a very small transient hash table here; this speeds up generation | |
1044 | by a fair amount on slower machines */ | |
1045 | pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)]; | |
1046 | if (Bucket != 0 && | |
1047 | stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0) | |
1048 | return Bucket->String; | |
1049 | ||
1050 | // Search for an insertion point | |
1051 | pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList; | |
1052 | int Res = 1; | |
1053 | map_ptrloc *Last = &Cache.HeaderP->StringList; | |
1054 | for (; I != Cache.StringItemP; Last = &I->NextItem, | |
1055 | I = Cache.StringItemP + I->NextItem) | |
1056 | { | |
1057 | Res = stringcmp(S,S+Size,Cache.StrP + I->String); | |
1058 | if (Res >= 0) | |
1059 | break; | |
1060 | } | |
1061 | ||
1062 | // Match | |
1063 | if (Res == 0) | |
1064 | { | |
1065 | Bucket = I; | |
1066 | return I->String; | |
1067 | } | |
1068 | ||
1069 | // Get a structure | |
1070 | void const * const oldMap = Map.Data(); | |
1071 | map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem)); | |
1072 | if (Item == 0) | |
1073 | return 0; | |
1074 | ||
1075 | map_ptrloc const idxString = WriteStringInMap(S,Size); | |
1076 | if (unlikely(idxString == 0)) | |
1077 | return 0; | |
1078 | if (oldMap != Map.Data()) { | |
1079 | Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap; | |
1080 | I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap; | |
1081 | } | |
1082 | *Last = Item; | |
1083 | ||
1084 | // Fill in the structure | |
1085 | pkgCache::StringItem *ItemP = Cache.StringItemP + Item; | |
1086 | ItemP->NextItem = I - Cache.StringItemP; | |
1087 | ItemP->String = idxString; | |
1088 | ||
1089 | Bucket = ItemP; | |
1090 | return ItemP->String; | |
1091 | } | |
1092 | /*}}}*/ | |
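// WriteUniqString usage /*{{{*/
/* Illustrative only: identical strings share one handle, so e.g.

      unsigned long const a = WriteUniqString("amd64", 5);
      unsigned long const b = WriteUniqString("amd64", 5);
      // a == b, and Cache.StrP + a points at "amd64" inside the cache

   which is why architecture, site and index-type names are stored this way. */
/*}}}*/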
1093 | // CheckValidity - Check that a cache is up-to-date /*{{{*/ | |
1094 | // --------------------------------------------------------------------- | |
1095 | /* This just verifies that each file in the list of index files exists, | |
1096 | has attributes matching the cache, and that the cache does not have | |
1097 | any extra files. */ | |
1098 | static bool CheckValidity(const string &CacheFile, | |
1099 | pkgSourceList &List, | |
1100 | FileIterator Start, | |
1101 | FileIterator End, | |
1102 | MMap **OutMap = 0) | |
1103 | { | |
1104 | bool const Debug = _config->FindB("Debug::pkgCacheGen", false); | |
1105 | // No file, certainly invalid | |
1106 | if (CacheFile.empty() == true || FileExists(CacheFile) == false) | |
1107 | { | |
1108 | if (Debug == true) | |
1109 | std::clog << "CacheFile doesn't exist" << std::endl; | |
1110 | return false; | |
1111 | } | |
1112 | ||
1113 | if (List.GetLastModifiedTime() > GetModificationTime(CacheFile)) | |
1114 | { | |
1115 | if (Debug == true) | |
1116 | std::clog << "sources.list is newer than the cache" << std::endl; | |
1117 | return false; | |
1118 | } | |
1119 | ||
1120 | // Map it | |
1121 | FileFd CacheF(CacheFile,FileFd::ReadOnly); | |
1122 | SPtr<MMap> Map = new MMap(CacheF,0); | |
1123 | pkgCache Cache(Map); | |
1124 | if (_error->PendingError() == true || Map->Size() == 0) | |
1125 | { | |
1126 | if (Debug == true) | |
1127 | std::clog << "Errors are pending or Map is empty()" << std::endl; | |
1128 | _error->Discard(); | |
1129 | return false; | |
1130 | } | |
1131 | ||
1132 | /* Now we check every index file, see if it is in the cache, | |
1133 | verify the IMS data and check that it is on the disk too.. */ | |
1134 | SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount]; | |
1135 | memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount); | |
1136 | for (; Start != End; ++Start) | |
1137 | { | |
1138 | if (Debug == true) | |
1139 | std::clog << "Checking PkgFile " << (*Start)->Describe() << ": "; | |
1140 | if ((*Start)->HasPackages() == false) | |
1141 | { | |
1142 | if (Debug == true) | |
1143 | std::clog << "Has NO packages" << std::endl; | |
1144 | continue; | |
1145 | } | |
1146 | ||
1147 | if ((*Start)->Exists() == false) | |
1148 | { | |
1149 | #if 0 // mvo: we no longer give a message here (Default Sources spec) | |
1150 | _error->WarningE("stat",_("Couldn't stat source package list %s"), | |
1151 | (*Start)->Describe().c_str()); | |
1152 | #endif | |
1153 | if (Debug == true) | |
1154 | std::clog << "file doesn't exist" << std::endl; | |
1155 | continue; | |
1156 | } | |
1157 | ||
1158 | // FindInCache is also expected to do an IMS check. | |
1159 | pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache); | |
1160 | if (File.end() == true) | |
1161 | { | |
1162 | if (Debug == true) | |
1163 | std::clog << "FindInCache returned end-Pointer" << std::endl; | |
1164 | return false; | |
1165 | } | |
1166 | ||
1167 | Visited[File->ID] = true; | |
1168 | if (Debug == true) | |
1169 | std::clog << "with ID " << File->ID << " is valid" << std::endl; | |
1170 | } | |
1171 | ||
1172 | for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++) | |
1173 | if (Visited[I] == false) | |
1174 | { | |
1175 | if (Debug == true) | |
1176 | std::clog << "File with ID " << I << " wasn't visited" << std::endl; | |
1177 | return false; | |
1178 | } | |
1179 | ||
1180 | if (_error->PendingError() == true) | |
1181 | { | |
1182 | if (Debug == true) | |
1183 | { | |
1184 | std::clog << "Validity failed because of pending errors:" << std::endl; | |
1185 | _error->DumpErrors(); | |
1186 | } | |
1187 | _error->Discard(); | |
1188 | return false; | |
1189 | } | |
1190 | ||
1191 | if (OutMap != 0) | |
1192 | *OutMap = Map.UnGuard(); | |
1193 | return true; | |
1194 | } | |
1195 | /*}}}*/ | |
1196 | // ComputeSize - Compute the total size of a bunch of files /*{{{*/ | |
1197 | // --------------------------------------------------------------------- | |
1198 | /* Size is kind of an abstract notion that is only used for the progress | |
1199 | meter */ | |
1200 | static unsigned long ComputeSize(FileIterator Start,FileIterator End) | |
1201 | { | |
1202 | unsigned long TotalSize = 0; | |
1203 | for (; Start != End; ++Start) | |
1204 | { | |
1205 | if ((*Start)->HasPackages() == false) | |
1206 | continue; | |
1207 | TotalSize += (*Start)->Size(); | |
1208 | } | |
1209 | return TotalSize; | |
1210 | } | |
1211 | /*}}}*/ | |
1212 | // BuildCache - Merge the list of index files into the cache /*{{{*/ | |
1213 | // --------------------------------------------------------------------- | |
1214 | /* */ | |
1215 | static bool BuildCache(pkgCacheGenerator &Gen, | |
1216 | OpProgress *Progress, | |
1217 | unsigned long &CurrentSize,unsigned long TotalSize, | |
1218 | FileIterator Start, FileIterator End) | |
1219 | { | |
1220 | FileIterator I; | |
1221 | for (I = Start; I != End; ++I) | |
1222 | { | |
1223 | if ((*I)->HasPackages() == false) | |
1224 | continue; | |
1225 | ||
1226 | if ((*I)->Exists() == false) | |
1227 | continue; | |
1228 | ||
1229 | if ((*I)->FindInCache(Gen.GetCache()).end() == false) | |
1230 | { | |
1231 | _error->Warning("Duplicate sources.list entry %s", | |
1232 | (*I)->Describe().c_str()); | |
1233 | continue; | |
1234 | } | |
1235 | ||
1236 | unsigned long Size = (*I)->Size(); | |
1237 | if (Progress != NULL) | |
1238 | Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists")); | |
1239 | CurrentSize += Size; | |
1240 | ||
1241 | if ((*I)->Merge(Gen,Progress) == false) | |
1242 | return false; | |
1243 | } | |
1244 | ||
1245 | if (Gen.HasFileDeps() == true) | |
1246 | { | |
1247 | if (Progress != NULL) | |
1248 | Progress->Done(); | |
1249 | TotalSize = ComputeSize(Start, End); | |
1250 | CurrentSize = 0; | |
1251 | for (I = Start; I != End; ++I) | |
1252 | { | |
1253 | unsigned long Size = (*I)->Size(); | |
1254 | if (Progress != NULL) | |
1255 | Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides")); | |
1256 | CurrentSize += Size; | |
1257 | if ((*I)->MergeFileProvides(Gen,Progress) == false) | |
1258 | return false; | |
1259 | } | |
1260 | } | |
1261 | ||
1262 | return true; | |
1263 | } | |
1264 | /*}}}*/ | |
1265 | // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/ | |
1266 | DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) { | |
1267 | unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024); | |
1268 | unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024); | |
1269 | unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0); | |
1270 | Flags |= MMap::Moveable; | |
1271 | if (_config->FindB("APT::Cache-Fallback", false) == true) | |
1272 | Flags |= MMap::Fallback; | |
1273 | if (CacheF != NULL) | |
1274 | return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit); | |
1275 | else | |
1276 | return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit); | |
1277 | } | |
1278 | /*}}}*/ | |
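// Cache mmap tuning /*{{{*/
/* Illustrative apt.conf snippet matching the options read above (the values
   shown are examples, not defaults recommended by this file):

      APT::Cache-Start "50331648";    // initial map size in bytes
      APT::Cache-Grow "2097152";      // grow step when the map runs full
      APT::Cache-Limit "0";           // 0 = no upper bound on growth
      APT::Cache-Fallback "false";    // true enables the MMap::Fallback flag
*/
/*}}}*/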
1279 | // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/ | |
1280 | // --------------------------------------------------------------------- | |
1281 | /* This makes sure that the status cache (the cache that has all | |
1282 | index files from the sources list and all local ones) is ready | |
1283 | to be mmapped. If OutMap is not zero then an MMap object representing | |
1284 | the cache will be stored there. This is pretty much mandatory if you | |
1285 | are using AllowMem. AllowMem lets the function be run as non-root | |
1286 | where it builds the cache 'fast' into a memory buffer. */ | |
1287 | __deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress, | |
1288 | MMap **OutMap, bool AllowMem) | |
1289 | { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); } | |
1290 | bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress, | |
1291 | MMap **OutMap,bool AllowMem) | |
1292 | { | |
1293 | bool const Debug = _config->FindB("Debug::pkgCacheGen", false); | |
1294 | ||
1295 | std::vector<pkgIndexFile *> Files; | |
1296 | for (std::vector<metaIndex *>::const_iterator i = List.begin(); | |
1297 | i != List.end(); | |
1298 | ++i) | |
1299 | { | |
1300 | std::vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles(); | |
1301 | for (std::vector<pkgIndexFile *>::const_iterator j = Indexes->begin(); | |
1302 | j != Indexes->end(); | |
1303 | ++j) | |
1304 | Files.push_back (*j); | |
1305 | } | |
1306 | ||
1307 | unsigned long const EndOfSource = Files.size(); | |
1308 | if (_system->AddStatusFiles(Files) == false) | |
1309 | return false; | |
1310 | ||
1311 | // Decide if we can write to the files.. | |
1312 | string const CacheFile = _config->FindFile("Dir::Cache::pkgcache"); | |
1313 | string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache"); | |
1314 | ||
1315 | // ensure the cache directory exists | |
1316 | if (CacheFile.empty() == false || SrcCacheFile.empty() == false) | |
1317 | { | |
1318 | string dir = _config->FindDir("Dir::Cache"); | |
1319 | size_t const len = dir.size(); | |
1320 | if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5) | |
1321 | dir = dir.substr(0, len - 5); | |
1322 | if (CacheFile.empty() == false) | |
1323 | CreateDirectory(dir, flNotFile(CacheFile)); | |
1324 | if (SrcCacheFile.empty() == false) | |
1325 | CreateDirectory(dir, flNotFile(SrcCacheFile)); | |
1326 | } | |
1327 | ||
1328 | // Decide if we can write to the cache | |
1329 | bool Writeable = false; | |
1330 | if (CacheFile.empty() == false) | |
1331 | Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0; | |
1332 | else | |
1333 | if (SrcCacheFile.empty() == false) | |
1334 | Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0; | |
1335 | if (Debug == true) | |
1336 | std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl; | |
1337 | ||
1338 | if (Writeable == false && AllowMem == false && CacheFile.empty() == false) | |
1339 | return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str()); | |
1340 | ||
1341 | if (Progress != NULL) | |
1342 | Progress->OverallProgress(0,1,1,_("Reading package lists")); | |
1343 | ||
1344 | // Cache is OK, Fin. | |
1345 | if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true) | |
1346 | { | |
1347 | if (Progress != NULL) | |
1348 | Progress->OverallProgress(1,1,1,_("Reading package lists")); | |
1349 | if (Debug == true) | |
1350 | std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl; | |
1351 | return true; | |
1352 | } | |
1353 | else if (Debug == true) | |
1354 | std::clog << "pkgcache.bin is NOT valid" << std::endl; | |
1355 | ||
1356 | /* At this point we know we need to reconstruct the package cache, | |
1357 | begin. */ | |
1358 | SPtr<FileFd> CacheF; | |
1359 | SPtr<DynamicMMap> Map; | |
1360 | if (Writeable == true && CacheFile.empty() == false) | |
1361 | { | |
1362 | _error->PushToStack(); | |
1363 | unlink(CacheFile.c_str()); | |
1364 | CacheF = new FileFd(CacheFile,FileFd::WriteAtomic); | |
1365 | fchmod(CacheF->Fd(),0644); | |
1366 | Map = CreateDynamicMMap(CacheF, MMap::Public); | |
1367 | if (_error->PendingError() == true) | |
1368 | { | |
1369 | delete CacheF.UnGuard(); | |
1370 | delete Map.UnGuard(); | |
1371 | if (Debug == true) | |
1372 | std::clog << "Open filebased MMap FAILED" << std::endl; | |
1373 | Writeable = false; | |
1374 | if (AllowMem == false) | |
1375 | { | |
1376 | _error->MergeWithStack(); | |
1377 | return false; | |
1378 | } | |
1379 | _error->RevertToStack(); | |
1380 | } | |
1381 | else | |
1382 | { | |
1383 | _error->MergeWithStack(); | |
1384 | if (Debug == true) | |
1385 | std::clog << "Open filebased MMap" << std::endl; | |
1386 | } | |
1387 | } | |
1388 | if (Writeable == false || CacheFile.empty() == true) | |
1389 | { | |
1390 | // Just build it in memory.. | |
1391 | Map = CreateDynamicMMap(NULL); | |
1392 | if (Debug == true) | |
1393 | std::clog << "Open memory Map (not filebased)" << std::endl; | |
1394 | } | |
1395 | ||
1396 | // Let's try the source cache. | |
1397 | unsigned long CurrentSize = 0; | |
1398 | unsigned long TotalSize = 0; | |
1399 | if (CheckValidity(SrcCacheFile, List, Files.begin(), | |
1400 | Files.begin()+EndOfSource) == true) | |
1401 | { | |
1402 | if (Debug == true) | |
1403 | std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl; | |
1404 | // Preload the map with the source cache | |
1405 | FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly); | |
1406 | unsigned long const alloc = Map->RawAllocate(SCacheF.Size()); | |
1407 | if ((alloc == 0 && _error->PendingError()) | |
1408 | || SCacheF.Read((unsigned char *)Map->Data() + alloc, | |
1409 | SCacheF.Size()) == false) | |
1410 | return false; | |
1411 | ||
1412 | TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end()); | |
1413 | ||
1414 | // Build the status cache | |
1415 | pkgCacheGenerator Gen(Map.Get(),Progress); | |
1416 | if (_error->PendingError() == true) | |
1417 | return false; | |
1418 | if (BuildCache(Gen,Progress,CurrentSize,TotalSize, | |
1419 | Files.begin()+EndOfSource,Files.end()) == false) | |
1420 | return false; | |
1421 | } | |
1422 | else | |
1423 | { | |
1424 | if (Debug == true) | |
1425 | std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl; | |
1426 | TotalSize = ComputeSize(Files.begin(),Files.end()); | |
1427 | ||
1428 | // Build the source cache | |
1429 | pkgCacheGenerator Gen(Map.Get(),Progress); | |
1430 | if (_error->PendingError() == true) | |
1431 | return false; | |
1432 | if (BuildCache(Gen,Progress,CurrentSize,TotalSize, | |
1433 | Files.begin(),Files.begin()+EndOfSource) == false) | |
1434 | return false; | |
1435 | ||
1436 | // Write it back | |
1437 | if (Writeable == true && SrcCacheFile.empty() == false) | |
1438 | { | |
1439 | FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic); | |
1440 | if (_error->PendingError() == true) | |
1441 | return false; | |
1442 | ||
1443 | fchmod(SCacheF.Fd(),0644); | |
1444 | ||
1445 | // Write out the main data | |
1446 | if (SCacheF.Write(Map->Data(),Map->Size()) == false) | |
1447 | return _error->Error(_("IO Error saving source cache")); | |
1448 | SCacheF.Sync(); | |
1449 | ||
1450 | // Write out the proper header | |
1451 | Gen.GetCache().HeaderP->Dirty = false; | |
1452 | if (SCacheF.Seek(0) == false || | |
1453 | SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false) | |
1454 | return _error->Error(_("IO Error saving source cache")); | |
1455 | Gen.GetCache().HeaderP->Dirty = true; | |
1456 | SCacheF.Sync(); | |
1457 | } | |
1458 | ||
1459 | // Build the status cache | |
1460 | if (BuildCache(Gen,Progress,CurrentSize,TotalSize, | |
1461 | Files.begin()+EndOfSource,Files.end()) == false) | |
1462 | return false; | |
1463 | } | |
1464 | if (Debug == true) | |
1465 | std::clog << "Caches are ready for shipping" << std::endl; | |
1466 | ||
1467 | if (_error->PendingError() == true) | |
1468 | return false; | |
1469 | if (OutMap != 0) | |
1470 | { | |
1471 | if (CacheF != 0) | |
1472 | { | |
1473 | delete Map.UnGuard(); | |
1474 | *OutMap = new MMap(*CacheF,0); | |
1475 | } | |
1476 | else | |
1477 | { | |
1478 | *OutMap = Map.UnGuard(); | |
1479 | } | |
1480 | } | |
1481 | ||
1482 | return true; | |
1483 | } | |
1484 | /*}}}*/ | |
1485 | // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/ | |
1486 | // --------------------------------------------------------------------- | |
1487 | /* */ | |
1488 | __deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap) | |
1489 | { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); } | |
1490 | bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap) | |
1491 | { | |
1492 | std::vector<pkgIndexFile *> Files; | |
1493 | unsigned long EndOfSource = Files.size(); | |
1494 | if (_system->AddStatusFiles(Files) == false) | |
1495 | return false; | |
1496 | ||
1497 | SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL); | |
1498 | unsigned long CurrentSize = 0; | |
1499 | unsigned long TotalSize = 0; | |
1500 | ||
1501 | TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end()); | |
1502 | ||
1503 | // Build the status cache | |
1504 | if (Progress != NULL) | |
1505 | Progress->OverallProgress(0,1,1,_("Reading package lists")); | |
1506 | pkgCacheGenerator Gen(Map.Get(),Progress); | |
1507 | if (_error->PendingError() == true) | |
1508 | return false; | |
1509 | if (BuildCache(Gen,Progress,CurrentSize,TotalSize, | |
1510 | Files.begin()+EndOfSource,Files.end()) == false) | |
1511 | return false; | |
1512 | ||
1513 | if (_error->PendingError() == true) | |
1514 | return false; | |
1515 | *OutMap = Map.UnGuard(); | |
1516 | ||
1517 | return true; | |
1518 | } | |
1519 | /*}}}*/ | |
1520 | // IsDuplicateDescription /*{{{*/ | |
1521 | static bool IsDuplicateDescription(pkgCache::DescIterator Desc, | |
1522 | MD5SumValue const &CurMd5, std::string const &CurLang) | |
1523 | { | |
1524 | // Descriptions in the same linked list all have the same md5 | |
1525 | if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5) | |
1526 | return false; | |
1527 | for (; Desc.end() == false; ++Desc) | |
1528 | if (Desc.LanguageCode() == CurLang) | |
1529 | return true; | |
1530 | return false; | |
1531 | } | |
1532 | /*}}}*/ | |
1533 | // CacheGenerator::FinishCache /*{{{*/ | |
1534 | bool pkgCacheGenerator::FinishCache(OpProgress *Progress) | |
1535 | { | |
1536 | return true; | |
1537 | } | |
1538 | /*}}}*/ |