]> git.saurik.com Git - apt.git/blob - apt-pkg/pkgcachegen.cc
use one string to construct the error message instead of using multiple
[apt.git] / apt-pkg / pkgcachegen.cc
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
// Shorthand for iterating over the list of index files merged into the cache.
typedef vector<pkgIndexFile *>::iterator FileIterator;
// Per-iterator-type registry of live iterators that must be patched whenever
// the underlying DynamicMMap moves (see pkgCacheGenerator::ReMap below).
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

// Forward declaration: true if Desc already carries a description with the
// given MD5 sum and language (used by the merge routines below).
bool IsDuplicateDescription(pkgCache::DescIterator Desc,
			    MD5SumValue const &CurMd5, std::string const &CurLang);
43
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that is written to the disk.
   An empty map means we create a brand new cache; a non-empty map is an
   existing cache file that we extend after sanity-checking it. */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
		    Map(*pMap), Cache(pMap,false), Progress(Prog),
		    FoundFileDeps(0)
{
   CurrentFile = 0;
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      return;

   if (Map.Size() == 0)
   {
      // Empty map: build a fresh cache from scratch.
      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
	 return;

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      // Starting header
      *Cache.HeaderP = pkgCache::Header();
      // The string writes below can remap the cache (WriteStringInMap
      // calls ReMap), hence the indexes are captured in temporaries and
      // assigned through the (re-fetched) header pointer.
      map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->VerSysName = idxVerSysName;
      map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
      Cache.HeaderP->Architecture = idxArchitecture;
      if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
	 return;
      Cache.ReMap();
   }
   else
   {
      // Map directly from the existing file
      Cache.ReMap();
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      // Refuse to extend a cache built by a different versioning system
      if (Cache.VS != _system->VS)
      {
	 _error->Error(_("Cache has an incompatible versioning system"));
	 return;
      }
   }

   // Mark the cache dirty; the destructor clears the flag only after a
   // successful sync, so a crash leaves a detectably-incomplete cache.
   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
/*}}}*/
92 // CacheGenerator::~pkgCacheGenerator - Destructor /*{{{*/
93 // ---------------------------------------------------------------------
94 /* We sync the data then unset the dirty flag in two steps so as to
95 advoid a problem during a crash */
96 pkgCacheGenerator::~pkgCacheGenerator()
97 {
98 if (_error->PendingError() == true)
99 return;
100 if (Map.Sync() == false)
101 return;
102
103 Cache.HeaderP->Dirty = false;
104 Cache.HeaderP->CacheFileSize = Map.Size();
105 Map.Sync(0,sizeof(pkgCache::Header));
106 }
107 /*}}}*/
// CacheGenerator::ReMap - Patch pointers after the map has moved
// ---------------------------------------------------------------------
/* The DynamicMMap may relocate the whole cache in memory when it grows;
   every raw pointer into the map — and every iterator registered through
   Dynamic<> — has to be shifted by the distance between the old and the
   new base address. */
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {/*{{{*/
   if (oldMap == newMap)
      return;

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;

   // Refresh the pkgCache's own section pointers (HeaderP, PkgP, ...)
   Cache.ReMap(false);

   // Shift the generator's raw pointers into the map
   CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
	 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;

   // Shift every live iterator that registered itself via Dynamic<>,
   // one iterator type at a time
   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
	i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
	i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
}									/*}}}*/
145 // CacheGenerator::WriteStringInMap /*{{{*/
146 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
147 const unsigned long &Len) {
148 void const * const oldMap = Map.Data();
149 map_ptrloc const index = Map.WriteString(String, Len);
150 if (index != 0)
151 ReMap(oldMap, Map.Data());
152 return index;
153 }
154 /*}}}*/
155 // CacheGenerator::WriteStringInMap /*{{{*/
156 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
157 void const * const oldMap = Map.Data();
158 map_ptrloc const index = Map.WriteString(String);
159 if (index != 0)
160 ReMap(oldMap, Map.Data());
161 return index;
162 }
163 /*}}}*/
164 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
165 void const * const oldMap = Map.Data();
166 map_ptrloc const index = Map.Allocate(size);
167 if (index != 0)
168 ReMap(oldMap, Map.Data());
169 return index;
170 }
171 /*}}}*/
172 // CacheGenerator::MergeList - Merge the package list /*{{{*/
173 // ---------------------------------------------------------------------
174 /* This provides the generation of the entries in the cache. Each loop
175 goes through a single package record from the underlying parse engine. */
176 bool pkgCacheGenerator::MergeList(ListParser &List,
177 pkgCache::VerIterator *OutVer)
178 {
179 List.Owner = this;
180
181 unsigned int Counter = 0;
182 while (List.Step() == true)
183 {
184 string const PackageName = List.Package();
185 if (PackageName.empty() == true)
186 return false;
187
188 Counter++;
189 if (Counter % 100 == 0 && Progress != 0)
190 Progress->Progress(List.Offset());
191
192 string Arch = List.Architecture();
193 string const Version = List.Version();
194 if (Version.empty() == true && Arch.empty() == true)
195 {
196 if (MergeListGroup(List, PackageName) == false)
197 return false;
198 }
199
200 if (Arch.empty() == true)
201 Arch = _config->Find("APT::Architecture");
202
203 // Get a pointer to the package structure
204 pkgCache::PkgIterator Pkg;
205 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
206 if (NewPackage(Pkg, PackageName, Arch) == false)
207 // TRANSLATOR: The first placeholder is a package name,
208 // the other two should be copied verbatim as they include debug info
209 return _error->Error(_("Error occurred while processing %s (%s%d)"),
210 PackageName.c_str(), "NewPackage", 1);
211
212
213 if (Version.empty() == true)
214 {
215 if (MergeListPackage(List, Pkg) == false)
216 return false;
217 }
218 else
219 {
220 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
221 return false;
222 }
223
224 if (OutVer != 0)
225 {
226 FoundFileDeps |= List.HasFileDeps();
227 return true;
228 }
229 }
230
231 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
232 return _error->Error(_("Wow, you exceeded the number of package "
233 "names this APT is capable of."));
234 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
235 return _error->Error(_("Wow, you exceeded the number of versions "
236 "this APT is capable of."));
237 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
238 return _error->Error(_("Wow, you exceeded the number of descriptions "
239 "this APT is capable of."));
240 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
241 return _error->Error(_("Wow, you exceeded the number of dependencies "
242 "this APT is capable of."));
243
244 FoundFileDeps |= List.HasFileDeps();
245 return true;
246 }
247 // CacheGenerator::MergeListGroup /*{{{*/
248 bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
249 {
250 pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
251 // a group has no data on it's own, only packages have it but these
252 // stanzas like this come from Translation- files to add descriptions,
253 // but without a version we don't need a description for it…
254 if (Grp.end() == true)
255 return true;
256 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
257
258 pkgCache::PkgIterator Pkg;
259 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
260 for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
261 if (MergeListPackage(List, Pkg) == false)
262 return false;
263
264 return true;
265 }
266 /*}}}*/
// CacheGenerator::MergeListPackage					/*{{{*/
// ---------------------------------------------------------------------
/* Merge a stanza that has no version into an existing package: the only
   payload such a stanza can carry is a (translated) description, so find
   the version(s) whose description md5 matches and attach it there. */
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
{
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 1);

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
   {
      pkgCache::DescIterator Desc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (MD5SumValue(Desc.md5()) != CurMd5)
	 continue;

      // don't add a new description if we have one for the given
      // md5 && language
      if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)
	 continue;

      Dynamic<pkgCache::DescIterator> DynDesc(Desc);
      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      map_ptrloc *LastDesc = &Ver->DescriptionList;
      for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc);
      if (Desc.end() == false)
	 LastDesc = &Desc->NextDesc;

      // NewDescription can grow (and thereby move) the map; LastDesc is
      // a raw pointer into it, so shift it by the base-address delta
      void const * const oldMap = Map.Data();
      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
      if (oldMap != Map.Data())
	 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      *LastDesc = descindex;
      Desc->ParentPkg = Pkg.Index();

      if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
	 return _error->Error(_("Error occurred while processing %s (%s%d)"),
			      Pkg.Name(), "NewFileDesc", 1);

      // we can stop here as all "same" versions will share the description
      break;
   }

   return true;
}
/*}}}*/
// CacheGenerator::MergeListVersion					/*{{{*/
/* Merge a stanza that carries a version: find or create the matching
   version record, associate it with the current index file and attach
   the (untranslated) description. OutVer, if given, receives the version
   and causes an early return after a single record. */
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
					 std::string const &Version, pkgCache::VerIterator* &OutVer)
{
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   // tail pointer of the (sorted) version list — a raw pointer into the
   // map, so it must be shifted manually after a remap
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
   {
      /* We know the list is sorted so we use that fact in the search.
         Insertion of new versions is done with correct sorting */
      int Res = 1;
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
	 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
	 // Version is higher as current version - insert here
	 if (Res > 0)
	    break;
	 // Versionstrings are equal - is hash also equal?
	 if (Res == 0 && Ver->Hash == Hash)
	    break;
	 // proceed with the next till we have either the right
	 // or we found another version (which will be lower)
      }

      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
	 if (List.UsePackage(Pkg,Ver) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "UsePackage", 2);

	 if (NewFileVer(Ver,List) == false)
	    return _error->Error(_("Error occurred while processing %s (%s%d)"),
				 Pkg.Name(), "NewFileVer", 1);

	 // Read only a single record and return
	 if (OutVer != 0)
	 {
	    *OutVer = Ver;
	    return true;
	 }

	 return true;
      }
   }

   // Add a new version
   map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 1);

   // NewVersion may have remapped the cache; shift the tail pointer
   if (oldMap != Map.Data())
	 LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastVer = verindex;
   Ver->ParentPkg = Pkg.Index();
   Ver->Hash = Hash;

   if (unlikely(List.NewVersion(Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewVersion", 2);

   if (unlikely(List.UsePackage(Pkg,Ver) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "UsePackage", 3);

   if (unlikely(NewFileVer(Ver,List) == false))
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileVer", 2);


   // Read only a single record and return
   if (OutVer != 0)
   {
      *OutVer = Ver;
      return true;
   }

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   pkgCache::GrpIterator Grp = Pkg.Group();
   for (pkgCache::PkgIterator P = Grp.PackageList();
	P.end() == false; P = Grp.NextPkg(P))
   {
      for (pkgCache::VerIterator V = P.VersionList();
	   V.end() == false; ++V)
      {
	 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
	    continue;
	 Ver->DescriptionList = V->DescriptionList;
	 return true;
      }
   }

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);
   map_ptrloc *LastDesc = &Ver->DescriptionList;

   // NewDescription may remap the cache; shift LastDesc afterwards
   oldMap = Map.Data();
   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
   if (oldMap != Map.Data())
      LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastDesc = descindex;
   Desc->ParentPkg = Pkg.Index();

   if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (%s%d)"),
			   Pkg.Name(), "NewFileDesc", 2);

   return true;
}
/*}}}*/
445 /*}}}*/
446 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
447 // ---------------------------------------------------------------------
448 /* If we found any file depends while parsing the main list we need to
449 resolve them. Since it is undesired to load the entire list of files
450 into the cache as virtual packages we do a two stage effort. MergeList
451 identifies the file depends and this creates Provdies for them by
452 re-parsing all the indexs. */
453 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
454 {
455 List.Owner = this;
456
457 unsigned int Counter = 0;
458 while (List.Step() == true)
459 {
460 string PackageName = List.Package();
461 if (PackageName.empty() == true)
462 return false;
463 string Version = List.Version();
464 if (Version.empty() == true)
465 continue;
466
467 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
468 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
469 if (Pkg.end() == true)
470 return _error->Error(_("Error occurred while processing %s (%s%d)"),
471 PackageName.c_str(), "FindPkg", 1);
472 Counter++;
473 if (Counter % 100 == 0 && Progress != 0)
474 Progress->Progress(List.Offset());
475
476 unsigned long Hash = List.VersionHash();
477 pkgCache::VerIterator Ver = Pkg.VersionList();
478 Dynamic<pkgCache::VerIterator> DynVer(Ver);
479 for (; Ver.end() == false; ++Ver)
480 {
481 if (Ver->Hash == Hash && Version.c_str() == Ver.VerStr())
482 {
483 if (List.CollectFileProvides(Cache,Ver) == false)
484 return _error->Error(_("Error occurred while processing %s (%s%d)"),
485 PackageName.c_str(), "CollectFileProvides", 1);
486 break;
487 }
488 }
489
490 if (Ver.end() == true)
491 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
492 }
493
494 return true;
495 }
496 /*}}}*/
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table.
   Returns an existing group unchanged if the name is already known. */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
{
   // Reuse an existing group of this name if we have one
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)
      return true;

   // Get a structure
   map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))
      return false;

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   // writing the name may remap the cache; callers are expected to have
   // registered Grp via Dynamic<> so it stays valid (see NewPackage)
   map_ptrloc const idxName = WriteStringInMap(Name);
   if (unlikely(idxName == 0))
      return false;
   Grp->Name = idxName;

   // Insert it into the hash table
   unsigned long const Hash = Cache.Hash(Name);
   Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
   Cache.HeaderP->GrpHashTable[Hash] = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
   return true;
}
/*}}}*/
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table.
   A group for the name is created first if needed; an already existing
   package of the requested architecture is returned unchanged. */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
				   const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))
      return false;

   // Reuse the package if this architecture already exists in the group
   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)
      return true;

   // Get a structure
   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      return false;
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // all is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))
      return false;
   Pkg->Arch = idxArch;
   Pkg->ID = Cache.HeaderP->PackageCount++;

   return true;
}
/*}}}*/
577 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
578 // ---------------------------------------------------------------------
579 /* */
580 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
581 ListParser &List)
582 {
583 if (CurrentFile == 0)
584 return true;
585
586 // Get a structure
587 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
588 if (VerFile == 0)
589 return 0;
590
591 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
592 VF->File = CurrentFile - Cache.PkgFileP;
593
594 // Link it to the end of the list
595 map_ptrloc *Last = &Ver->FileList;
596 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
597 Last = &V->NextFile;
598 VF->NextFile = *Last;
599 *Last = VF.Index();
600
601 VF->Offset = List.Offset();
602 VF->Size = List.Size();
603 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
604 Cache.HeaderP->MaxVerFileSize = VF->Size;
605 Cache.HeaderP->VerFileCount++;
606
607 return true;
608 }
609 /*}}}*/
// CacheGenerator::NewVersion - Create a new Version 			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list. Next is the offset
   the new version's NextVer should point at. Returns the offset of the
   new record, or 0 on failure. */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
					    const string &VerStr,
					    unsigned long Next)
{
   // Get a structure
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
   if (Version == 0)
      return 0;

   // Fill it in
   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   Ver->NextVer = Next;
   Ver->ID = Cache.HeaderP->VersionCount++;
   // writing the version string may remap the cache, so capture the
   // index first and assign through the (remap-corrected) iterator
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))
      return 0;
   Ver->VerStr = idxVerStr;

   return Version;
}
/*}}}*/
634 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
635 // ---------------------------------------------------------------------
636 /* */
637 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
638 ListParser &List)
639 {
640 if (CurrentFile == 0)
641 return true;
642
643 // Get a structure
644 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
645 if (DescFile == 0)
646 return false;
647
648 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
649 DF->File = CurrentFile - Cache.PkgFileP;
650
651 // Link it to the end of the list
652 map_ptrloc *Last = &Desc->FileList;
653 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
654 Last = &D->NextFile;
655
656 DF->NextFile = *Last;
657 *Last = DF.Index();
658
659 DF->Offset = List.Offset();
660 DF->Size = List.Size();
661 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
662 Cache.HeaderP->MaxDescFileSize = DF->Size;
663 Cache.HeaderP->DescFileCount++;
664
665 return true;
666 }
667 /*}}}*/
668 // CacheGenerator::NewDescription - Create a new Description /*{{{*/
669 // ---------------------------------------------------------------------
670 /* This puts a description structure in the linked list */
671 map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
672 const string &Lang,
673 const MD5SumValue &md5sum,
674 map_ptrloc Next)
675 {
676 // Get a structure
677 map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
678 if (Description == 0)
679 return 0;
680
681 // Fill it in
682 Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
683 Desc->NextDesc = Next;
684 Desc->ID = Cache.HeaderP->DescriptionCount++;
685 map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
686 map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
687 if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
688 return 0;
689 Desc->language_code = idxlanguage_code;
690 Desc->md5sum = idxmd5sum;
691
692 return Description;
693 }
694 /*}}}*/
// CacheGenerator::FinishCache - do various finish operations		/*{{{*/
// ---------------------------------------------------------------------
/* This prepares the Cache for delivery: when multiple architectures are
   configured, implicit inter-architecture dependencies are created for
   every version of every group member so that at most one architecture
   of a package can be installed at a time (unless co-installable). */
bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
{
   // FIXME: add progress reporting for this operation
   // Do we have different architectures in your groups ?
   vector<string> archs = APT::Configuration::getArchitectures();
   if (archs.size() > 1)
   {
      // Create Conflicts in between the group
      pkgCache::GrpIterator G = GetCache().GrpBegin();
      Dynamic<pkgCache::GrpIterator> DynG(G);
      for (; G.end() != true; ++G)
      {
	 string const PkgName = G.Name();
	 pkgCache::PkgIterator P = G.PackageList();
	 Dynamic<pkgCache::PkgIterator> DynP(P);
	 for (; P.end() != true; P = G.NextPkg(P))
	 {
	    pkgCache::PkgIterator allPkg;
	    Dynamic<pkgCache::PkgIterator> DynallPkg(allPkg);
	    pkgCache::VerIterator V = P.VersionList();
	    Dynamic<pkgCache::VerIterator> DynV(V);
	    for (; V.end() != true; ++V)
	    {
	       // copy P.Arch() into a string here as a cache remap
	       // in NewDepends() later may alter the pointer location
	       string Arch = P.Arch() == NULL ? "" : P.Arch();
	       // per-version dependency-list tail cache for NewDepends
	       map_ptrloc *OldDepLast = NULL;
	       /* MultiArch handling introduces a lot of implicit Dependencies:
		  - MultiArch: same → Co-Installable if they have the same version
		  - Architecture: all → Need to be Co-Installable for internal reasons
		  - All others conflict with all other group members */
	       bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
	       for (vector<string>::const_iterator A = archs.begin(); A != archs.end(); ++A)
	       {
		  // never create a dependency on ourselves
		  if (*A == Arch)
		     continue;
		  /* We allow only one installed arch at the time
		     per group, therefore each group member conflicts
		     with all other group members */
		  pkgCache::PkgIterator D = G.FindPkg(*A);
		  Dynamic<pkgCache::PkgIterator> DynD(D);
		  if (D.end() == true)
		     continue;
		  if (coInstall == true)
		  {
		     // Replaces: ${self}:other ( << ${binary:Version})
		     NewDepends(D, V, V.VerStr(),
				pkgCache::Dep::Less, pkgCache::Dep::Replaces,
				OldDepLast);
		     // Breaks: ${self}:other (!= ${binary:Version})
		     NewDepends(D, V, V.VerStr(),
				pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
				OldDepLast);
		  } else {
		     // Conflicts: ${self}:other
		     NewDepends(D, V, "",
				pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
				OldDepLast);
		  }
	       }
	    }
	 }
      }
   }
   return true;
}
/*}}}*/
// CacheGenerator::NewDepends - Create a dependency element	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to. OldDepLast caches
   the tail of the version's dependency list across calls. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   string const &Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
{
   // remember the base address so cached raw pointers can be shifted
   // if anything below remaps the cache
   void const * const oldMap = Map.Data();
   // Get a structure
   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      return false;

   // Fill it in
   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->Type = Type;
   Dep->CompareOp = Op;
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
   {
/*      for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
	 if (I->Version != 0 && I.TargetVer() == Version)
	    Dep->Version = I->Version;*/
      // NOTE(review): the sharing probe above is disabled, so this check
      // presumably relies on freshly allocated memory being zeroed —
      // confirm against the DynamicMMap allocator before relying on it.
      if (Dep->Version == 0) {
	 map_ptrloc const index = WriteStringInMap(Version);
	 if (unlikely(index == 0))
	    return false;
	 Dep->Version = index;
      }
   }

   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      // no cached tail: walk to the end of the version's depends list
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
	 OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      // the cached tail points into the old mapping; shift it over
      OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;

   return true;
}
/*}}}*/
825 // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
826 // ---------------------------------------------------------------------
827 /* This creates a Group and the Package to link this dependency to if
828 needed and handles also the caching of the old endpoint */
829 bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
830 const string &PackageName,
831 const string &Arch,
832 const string &Version,
833 unsigned int Op,
834 unsigned int Type)
835 {
836 pkgCache::GrpIterator Grp;
837 Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
838 if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
839 return false;
840
841 // Locate the target package
842 pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
843 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
844 if (Pkg.end() == true) {
845 if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
846 return false;
847 }
848
849 // Is it a file dependency?
850 if (unlikely(PackageName[0] == '/'))
851 FoundFileDeps = true;
852
853 /* Caching the old end point speeds up generation substantially */
854 if (OldDepVer != Ver) {
855 OldDepLast = NULL;
856 OldDepVer = Ver;
857 }
858
859 return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
860 }
861 /*}}}*/
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
/* Create a provides entry linking Ver to the package named PkgName of
   architecture PkgArch, creating that package if necessary. */
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
						const string &PkgName,
						const string &PkgArch,
						const string &Version)
{
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   // (arch "all" counts as the native architecture for this check)
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
      return true;

   // Get a structure
   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
      return false;
   Cache.HeaderP->ProvidesCount++;

   // Fill it in and hook it into the version's provides list
   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
      return false;

   // Link it to the package
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();

   return true;
}
/*}}}*/
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields.
   Allocates a new PkgFile structure, fills it from File/Site/Index and
   prepends it to the global file list; returns false on allocation or
   string-write failure. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
				   const pkgIndexFile &Index,
				   unsigned long Flags)
{
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))
      return false;
   // NOTE(review): the WriteString* calls below may remap the mmap;
   // presumably the generator's remap handling fixes CurrentFile up —
   // confirm before relying on this raw pointer elsewhere.
   CurrentFile = Cache.PkgFileP + idxFile;

   // Fill it in
   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))
      return false;
   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))
      return false;
   CurrentFile->IndexType = idxIndexType;
   PkgFileName = File;
   // prepend the new file to the header's file list
   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

   if (Progress != 0)
      Progress->SubProgress(Index.Size());
   return true;
}
									/*}}}*/
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number. Returns 0 on allocation/write
   failure. */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point in the linked list; Last tracks the
   // location that must be patched to splice in a new item
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
        I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
	 break;
   }

   // Match
   if (Res == 0)
   {
      // remember the hit for the transient hash table
      Bucket = I;
      return I->String;
   }

   // Get a structure
   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
      return 0;
   // the allocations above may have moved the mmap: rebase the raw
   // pointers Last and I into the relocated memory block
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
   }
   // splice the new item in front of I
   *Last = Item;

   // Fill in the structure
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   Bucket = ItemP;
   return ItemP->String;
}
									/*}}}*/
1000 // CheckValidity - Check that a cache is up-to-date /*{{{*/
1001 // ---------------------------------------------------------------------
1002 /* This just verifies that each file in the list of index files exists,
1003 has matching attributes with the cache and the cache does not have
1004 any extra files. */
1005 static bool CheckValidity(const string &CacheFile,
1006 pkgSourceList &List,
1007 FileIterator Start,
1008 FileIterator End,
1009 MMap **OutMap = 0)
1010 {
1011 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1012 // No file, certainly invalid
1013 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1014 {
1015 if (Debug == true)
1016 std::clog << "CacheFile doesn't exist" << std::endl;
1017 return false;
1018 }
1019
1020 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1021 {
1022 if (Debug == true)
1023 std::clog << "sources.list is newer than the cache" << std::endl;
1024 return false;
1025 }
1026
1027 // Map it
1028 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1029 SPtr<MMap> Map = new MMap(CacheF,0);
1030 pkgCache Cache(Map);
1031 if (_error->PendingError() == true || Map->Size() == 0)
1032 {
1033 if (Debug == true)
1034 std::clog << "Errors are pending or Map is empty()" << std::endl;
1035 _error->Discard();
1036 return false;
1037 }
1038
1039 /* Now we check every index file, see if it is in the cache,
1040 verify the IMS data and check that it is on the disk too.. */
1041 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1042 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1043 for (; Start != End; ++Start)
1044 {
1045 if (Debug == true)
1046 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1047 if ((*Start)->HasPackages() == false)
1048 {
1049 if (Debug == true)
1050 std::clog << "Has NO packages" << std::endl;
1051 continue;
1052 }
1053
1054 if ((*Start)->Exists() == false)
1055 {
1056 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1057 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1058 (*Start)->Describe().c_str());
1059 #endif
1060 if (Debug == true)
1061 std::clog << "file doesn't exist" << std::endl;
1062 continue;
1063 }
1064
1065 // FindInCache is also expected to do an IMS check.
1066 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1067 if (File.end() == true)
1068 {
1069 if (Debug == true)
1070 std::clog << "FindInCache returned end-Pointer" << std::endl;
1071 return false;
1072 }
1073
1074 Visited[File->ID] = true;
1075 if (Debug == true)
1076 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1077 }
1078
1079 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1080 if (Visited[I] == false)
1081 {
1082 if (Debug == true)
1083 std::clog << "File with ID" << I << " wasn't visited" << std::endl;
1084 return false;
1085 }
1086
1087 if (_error->PendingError() == true)
1088 {
1089 if (Debug == true)
1090 {
1091 std::clog << "Validity failed because of pending errors:" << std::endl;
1092 _error->DumpErrors();
1093 }
1094 _error->Discard();
1095 return false;
1096 }
1097
1098 if (OutMap != 0)
1099 *OutMap = Map.UnGuard();
1100 return true;
1101 }
1102 /*}}}*/
1103 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1104 // ---------------------------------------------------------------------
1105 /* Size is kind of an abstract notion that is only used for the progress
1106 meter */
1107 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1108 {
1109 unsigned long TotalSize = 0;
1110 for (; Start != End; ++Start)
1111 {
1112 if ((*Start)->HasPackages() == false)
1113 continue;
1114 TotalSize += (*Start)->Size();
1115 }
1116 return TotalSize;
1117 }
1118 /*}}}*/
1119 // BuildCache - Merge the list of index files into the cache /*{{{*/
1120 // ---------------------------------------------------------------------
1121 /* */
1122 static bool BuildCache(pkgCacheGenerator &Gen,
1123 OpProgress *Progress,
1124 unsigned long &CurrentSize,unsigned long TotalSize,
1125 FileIterator Start, FileIterator End)
1126 {
1127 FileIterator I;
1128 for (I = Start; I != End; ++I)
1129 {
1130 if ((*I)->HasPackages() == false)
1131 continue;
1132
1133 if ((*I)->Exists() == false)
1134 continue;
1135
1136 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1137 {
1138 _error->Warning("Duplicate sources.list entry %s",
1139 (*I)->Describe().c_str());
1140 continue;
1141 }
1142
1143 unsigned long Size = (*I)->Size();
1144 if (Progress != NULL)
1145 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1146 CurrentSize += Size;
1147
1148 if ((*I)->Merge(Gen,Progress) == false)
1149 return false;
1150 }
1151
1152 if (Gen.HasFileDeps() == true)
1153 {
1154 if (Progress != NULL)
1155 Progress->Done();
1156 TotalSize = ComputeSize(Start, End);
1157 CurrentSize = 0;
1158 for (I = Start; I != End; ++I)
1159 {
1160 unsigned long Size = (*I)->Size();
1161 if (Progress != NULL)
1162 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1163 CurrentSize += Size;
1164 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1165 return false;
1166 }
1167 }
1168
1169 return true;
1170 }
1171 /*}}}*/
1172 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1173 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1174 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1175 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1176 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1177 Flags |= MMap::Moveable;
1178 if (_config->FindB("APT::Cache-Fallback", false) == true)
1179 Flags |= MMap::Fallback;
1180 if (CacheF != NULL)
1181 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1182 else
1183 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1184 }
1185 /*}}}*/
1186 // CacheGenerator::MakeStatusCache - Construct the status cache /*{{{*/
1187 // ---------------------------------------------------------------------
1188 /* This makes sure that the status cache (the cache that has all
1189 index files from the sources list and all local ones) is ready
1190 to be mmaped. If OutMap is not zero then a MMap object representing
1191 the cache will be stored there. This is pretty much mandetory if you
1192 are using AllowMem. AllowMem lets the function be run as non-root
1193 where it builds the cache 'fast' into a memory buffer. */
// Deprecated compatibility shim: forwards to the static member method.
__deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
			MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   // gather the index files of every configured source
   vector<pkgIndexFile *> Files;
   for (vector<metaIndex *>::const_iterator i = List.begin();
        i != List.end();
        ++i)
   {
      vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end();
	   ++j)
         Files.push_back (*j);
   }

   // everything appended after this point is a status file
   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      // strip a trailing "apt/" component so the parent is created too
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
	 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      // try a file-backed map first; errors are parked on a side stack
      // so we can silently fall back to a memory map if allowed
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
      {
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	 if (Debug == true)
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 Writeable = false;
	 if (AllowMem == false)
	 {
	    _error->MergeWithStack();
	    return false;
	 }
	 // drop the parked errors, we retry with a memory map below
	 _error->RevertToStack();
      }
      else if (Debug == true)
      {
	 _error->MergeWithStack();
	 std::clog << "Open filebased MMap" << std::endl;
      }
   }
   if (Writeable == false || CacheFile.empty() == true)
   {
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      if (Debug == true)
	 std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Lets try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
	  || SCacheF.Read((unsigned char *)Map->Data() + alloc,
			  SCacheF.Size()) == false)
	 return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache on top of the preloaded source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   else
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)
	 return false;

      // Write it back
      if (Writeable == true && SrcCacheFile.empty() == false)
      {
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	    return false;

	 fchmod(SCacheF.Fd(),0644);

	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 SCacheF.Sync();

	 // Write out the proper header (Dirty cleared only for the copy
	 // on disk; the in-memory cache stays marked dirty)
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;
	 SCacheF.Sync();
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
	 // hand back a plain read map of the file we just wrote
	 delete Map.UnGuard();
	 *OutMap = new MMap(*CacheF,0);
      }
      else
      {
	 *OutMap = Map.UnGuard();
      }
   }

   return true;
}
									/*}}}*/
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
/* Deprecated compatibility shim: forwards to the static member method. */
__deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1402 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1403 {
1404 vector<pkgIndexFile *> Files;
1405 unsigned long EndOfSource = Files.size();
1406 if (_system->AddStatusFiles(Files) == false)
1407 return false;
1408
1409 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1410 unsigned long CurrentSize = 0;
1411 unsigned long TotalSize = 0;
1412
1413 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1414
1415 // Build the status cache
1416 if (Progress != NULL)
1417 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1418 pkgCacheGenerator Gen(Map.Get(),Progress);
1419 if (_error->PendingError() == true)
1420 return false;
1421 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1422 Files.begin()+EndOfSource,Files.end()) == false)
1423 return false;
1424
1425 // FIXME: move me to a better place
1426 Gen.FinishCache(Progress);
1427
1428 if (_error->PendingError() == true)
1429 return false;
1430 *OutMap = Map.UnGuard();
1431
1432 return true;
1433 }
1434 /*}}}*/
1435 // IsDuplicateDescription /*{{{*/
1436 bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1437 MD5SumValue const &CurMd5, std::string const &CurLang)
1438 {
1439 // Descriptions in the same link-list have all the same md5
1440 if (MD5SumValue(Desc.md5()) != CurMd5)
1441 return false;
1442 for (; Desc.end() == false; ++Desc)
1443 if (Desc.LanguageCode() == CurLang)
1444 return true;
1445 return false;
1446 }
1447 /*}}}*/
1448