// gitweb scrape residue (git.saurik.com Git - apt.git/blob - apt-pkg/pkgcachegen.cc)
// stray fragment: "a version can have only a single md5 for descriptions, so we can optimize"
// [apt.git] / apt-pkg / pkgcachegen.cc
1 // -*- mode: cpp; mode: fold -*-
2 // Description /*{{{*/
3 // $Id: pkgcachegen.cc,v 1.53.2.1 2003/12/24 23:09:17 mdz Exp $
4 /* ######################################################################
5
6 Package Cache Generator - Generator for the cache structure.
7
8 This builds the cache structure from the abstract package list parser.
9
10 ##################################################################### */
11 /*}}}*/
12 // Include Files /*{{{*/
13 #include <config.h>
14
15 #include <apt-pkg/pkgcachegen.h>
16 #include <apt-pkg/error.h>
17 #include <apt-pkg/version.h>
18 #include <apt-pkg/progress.h>
19 #include <apt-pkg/sourcelist.h>
20 #include <apt-pkg/configuration.h>
21 #include <apt-pkg/aptconfiguration.h>
22 #include <apt-pkg/strutl.h>
23 #include <apt-pkg/sptr.h>
24 #include <apt-pkg/pkgsystem.h>
25 #include <apt-pkg/macros.h>
26 #include <apt-pkg/tagfile.h>
27 #include <apt-pkg/metaindex.h>
28 #include <apt-pkg/fileutl.h>
29
30 #include <vector>
31 #include <sys/stat.h>
32 #include <unistd.h>
33 #include <errno.h>
34 #include <stdio.h>
35
36 #include <apti18n.h>
37 /*}}}*/
typedef vector<pkgIndexFile *>::iterator FileIterator;
// per-iterator-type registry of live iterators that must be fixed up
// whenever the cache file is remapped (used by pkgCacheGenerator::ReMap)
template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;

// forward declaration: true if the description list already contains an
// entry with this md5 and language (used by the MergeList* helpers)
bool IsDuplicateDescription(pkgCache::DescIterator Desc,
			    MD5SumValue const &CurMd5, std::string const &CurLang);
43
// CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
// ---------------------------------------------------------------------
/* We set the dirty flag and make sure that is written to the disk.
   An empty map gets a fresh cache: header, version-system label and the
   native architecture are written.  A non-empty map is taken over as-is
   and only checked for a compatible versioning system. */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
		    Map(*pMap), Cache(pMap,false), Progress(Prog),
		    FoundFileDeps(0)
{
   CurrentFile = 0;
   // reset the transient string-deduplication table used by WriteUniqString
   memset(UniqHash,0,sizeof(UniqHash));

   if (_error->PendingError() == true)
      return;

   if (Map.Size() == 0)
   {
      // Setup the map interface..
      Cache.HeaderP = (pkgCache::Header *)Map.Data();
      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
	 return;

      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));

      // Starting header
      *Cache.HeaderP = pkgCache::Header();
      // store only the returned offsets in the header: WriteStringInMap
      // can grow and therefore move the map (ReMap fixes HeaderP for us)
      map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label);
      Cache.HeaderP->VerSysName = idxVerSysName;
      map_ptrloc const idxArchitecture = WriteStringInMap(_config->Find("APT::Architecture"));
      Cache.HeaderP->Architecture = idxArchitecture;
      if (unlikely(idxVerSysName == 0 || idxArchitecture == 0))
	 return;
      Cache.ReMap();
   }
   else
   {
      // Map directly from the existing file
      Cache.ReMap();
      Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
      if (Cache.VS != _system->VS)
      {
	 _error->Error(_("Cache has an incompatible versioning system"));
	 return;
      }
   }

   // mark the cache as under construction; the destructor clears the
   // flag again once everything has been synced to disk successfully
   Cache.HeaderP->Dirty = true;
   Map.Sync(0,sizeof(pkgCache::Header));
}
									/*}}}*/
// CacheGenerator::~pkgCacheGenerator - Destructor 			/*{{{*/
// ---------------------------------------------------------------------
/* We sync the data then unset the dirty flag in two steps so as to
   advoid a problem during a crash */
pkgCacheGenerator::~pkgCacheGenerator()
{
   // never mark a cache clean that was built while errors were pending
   if (_error->PendingError() == true)
      return;
   if (Map.Sync() == false)
      return;

   // only after the full map has hit the disk is it safe to clear the
   // dirty flag and record the final size; a crash in between leaves
   // the cache flagged dirty and it will be rebuilt
   Cache.HeaderP->Dirty = false;
   Cache.HeaderP->CacheFileSize = Map.Size();
   Map.Sync(0,sizeof(pkgCache::Header));
}
									/*}}}*/
// CacheGenerator::ReMap - Fix up raw pointers after a map move		/*{{{*/
// ---------------------------------------------------------------------
/* The DynamicMMap can move in memory when it grows; every raw pointer
   into it must then be shifted by the distance between the old and the
   new base address.  Registered Dynamic<> iterators fix themselves via
   their own ReMap method. */
void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newMap) {
   if (oldMap == newMap)
      return;

   if (_config->FindB("Debug::pkgCacheGen", false))
      std::clog << "Remaping from " << oldMap << " to " << newMap << std::endl;

   // refresh the pkgCache structure pointers (HeaderP, PkgP, ...)
   Cache.ReMap(false);

   // shift our own raw pointers by the base-address difference
   CurrentFile += (pkgCache::PackageFile*) newMap - (pkgCache::PackageFile*) oldMap;

   for (size_t i = 0; i < _count(UniqHash); ++i)
      if (UniqHash[i] != 0)
	 UniqHash[i] += (pkgCache::StringItem*) newMap - (pkgCache::StringItem*) oldMap;

   // let every registered live iterator rebase itself
   for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin();
	i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgIterator*>::const_iterator i = Dynamic<pkgCache::PkgIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::VerIterator*>::const_iterator i = Dynamic<pkgCache::VerIterator>::toReMap.begin();
	i != Dynamic<pkgCache::VerIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DepIterator*>::const_iterator i = Dynamic<pkgCache::DepIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DepIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::DescIterator*>::const_iterator i = Dynamic<pkgCache::DescIterator>::toReMap.begin();
	i != Dynamic<pkgCache::DescIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PrvIterator*>::const_iterator i = Dynamic<pkgCache::PrvIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PrvIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
   for (std::vector<pkgCache::PkgFileIterator*>::const_iterator i = Dynamic<pkgCache::PkgFileIterator>::toReMap.begin();
	i != Dynamic<pkgCache::PkgFileIterator>::toReMap.end(); ++i)
      (*i)->ReMap(oldMap, newMap);
}									/*}}}*/
145 // CacheGenerator::WriteStringInMap /*{{{*/
146 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String,
147 const unsigned long &Len) {
148 void const * const oldMap = Map.Data();
149 map_ptrloc const index = Map.WriteString(String, Len);
150 if (index != 0)
151 ReMap(oldMap, Map.Data());
152 return index;
153 }
154 /*}}}*/
155 // CacheGenerator::WriteStringInMap /*{{{*/
156 map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) {
157 void const * const oldMap = Map.Data();
158 map_ptrloc const index = Map.WriteString(String);
159 if (index != 0)
160 ReMap(oldMap, Map.Data());
161 return index;
162 }
163 /*}}}*/
164 map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/
165 void const * const oldMap = Map.Data();
166 map_ptrloc const index = Map.Allocate(size);
167 if (index != 0)
168 ReMap(oldMap, Map.Data());
169 return index;
170 }
171 /*}}}*/
172 // CacheGenerator::MergeList - Merge the package list /*{{{*/
173 // ---------------------------------------------------------------------
174 /* This provides the generation of the entries in the cache. Each loop
175 goes through a single package record from the underlying parse engine. */
176 bool pkgCacheGenerator::MergeList(ListParser &List,
177 pkgCache::VerIterator *OutVer)
178 {
179 List.Owner = this;
180
181 unsigned int Counter = 0;
182 while (List.Step() == true)
183 {
184 string const PackageName = List.Package();
185 if (PackageName.empty() == true)
186 return false;
187
188 Counter++;
189 if (Counter % 100 == 0 && Progress != 0)
190 Progress->Progress(List.Offset());
191
192 string Arch = List.Architecture();
193 string const Version = List.Version();
194 if (Version.empty() == true && Arch.empty() == true)
195 {
196 if (MergeListGroup(List, PackageName) == false)
197 return false;
198 }
199
200 if (Arch.empty() == true)
201 Arch = _config->Find("APT::Architecture");
202
203 // Get a pointer to the package structure
204 pkgCache::PkgIterator Pkg;
205 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
206 if (NewPackage(Pkg, PackageName, Arch) == false)
207 return _error->Error(_("Error occurred while processing %s (NewPackage)"),PackageName.c_str());
208
209
210 if (Version.empty() == true)
211 {
212 if (MergeListPackage(List, Pkg) == false)
213 return false;
214 }
215 else
216 {
217 if (MergeListVersion(List, Pkg, Version, OutVer) == false)
218 return false;
219 }
220
221 if (OutVer != 0)
222 {
223 FoundFileDeps |= List.HasFileDeps();
224 return true;
225 }
226 }
227
228 if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
229 return _error->Error(_("Wow, you exceeded the number of package "
230 "names this APT is capable of."));
231 if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
232 return _error->Error(_("Wow, you exceeded the number of versions "
233 "this APT is capable of."));
234 if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
235 return _error->Error(_("Wow, you exceeded the number of descriptions "
236 "this APT is capable of."));
237 if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
238 return _error->Error(_("Wow, you exceeded the number of dependencies "
239 "this APT is capable of."));
240
241 FoundFileDeps |= List.HasFileDeps();
242 return true;
243 }
// CacheGenerator::MergeListGroup					/*{{{*/
// Merge a stanza that carries neither version nor architecture (i.e. a
// Translation- description stanza) into every package of the group.
bool pkgCacheGenerator::MergeListGroup(ListParser &List, std::string const &GrpName)
{
   pkgCache::GrpIterator Grp = Cache.FindGrp(GrpName);
   // a group has no data on it's own, only packages have it but these
   // stanzas like this come from Translation- files to add descriptions,
   // but without a version we don't need a description for it…
   if (Grp.end() == true)
      return true;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);

   // attach the description to every package in the group
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   for (Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg))
      if (MergeListPackage(List, Pkg) == false)
	 return false;

   return true;
}
									/*}}}*/
// CacheGenerator::MergeListPackage					/*{{{*/
// Merge a version-less stanza into an existing package: update the
// package data and attach the stanza's description to the matching
// version (found via the description md5).
bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator &Pkg)
{
   // we first process the package, then the descriptions
   // (for deb this package processing is in fact a no-op)
   pkgCache::VerIterator Ver(Cache);
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   if (List.UsePackage(Pkg, Ver) == false)
      return _error->Error(_("Error occurred while processing %s (UsePackage1)"),
			   Pkg.Name());

   // Find the right version to write the description
   MD5SumValue CurMd5 = List.Description_md5();
   std::string CurLang = List.DescriptionLanguage();

   for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
   {
      pkgCache::DescIterator Desc = Ver.DescriptionList();

      // a version can only have one md5 describing it
      if (MD5SumValue(Desc.md5()) != CurMd5)
	 continue;

      // don't add a new description if we have one for the given
      // md5 && language
      if (IsDuplicateDescription(Desc, CurMd5, CurLang) == true)
	 continue;

      Dynamic<pkgCache::DescIterator> DynDesc(Desc);
      // we add at the end, so that the start is constant as we need
      // that to be able to efficiently share these lists
      map_ptrloc *LastDesc = &Ver->DescriptionList;
      for (;Desc.end() == false && Desc->NextDesc != 0; ++Desc);
      if (Desc.end() == false)
	 LastDesc = &Desc->NextDesc;

      // NewDescription may grow and move the map; rebase the raw
      // LastDesc pointer by hand before writing through it
      void const * const oldMap = Map.Data();
      map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
      if (oldMap != Map.Data())
	 LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      *LastDesc = descindex;
      Desc->ParentPkg = Pkg.Index();

      if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
	 return _error->Error(_("Error occurred while processing %s (NewFileDesc1)"), Pkg.Name());

      // we can stop here as all "same" versions will share the description
      break;
   }

   return true;
}
									/*}}}*/
// CacheGenerator::MergeListVersion					/*{{{*/
// Merge a stanza that carries a version: find or create the version
// entry (keeping the list sorted), link it to the current index file
// and record its description, sharing description lists where possible.
bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator &Pkg,
					 std::string const &Version, pkgCache::VerIterator* &OutVer)
{
   pkgCache::VerIterator Ver = Pkg.VersionList();
   Dynamic<pkgCache::VerIterator> DynVer(Ver);
   map_ptrloc *LastVer = &Pkg->VersionList;
   void const * oldMap = Map.Data();

   unsigned long const Hash = List.VersionHash();
   if (Ver.end() == false)
   {
      /* We know the list is sorted so we use that fact in the search.
         Insertion of new versions is done with correct sorting */
      int Res = 1;
      for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
      {
	 Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
	 // Version is higher as current version - insert here
	 if (Res > 0)
	    break;
	 // Versionstrings are equal - is hash also equal?
	 if (Res == 0 && Ver->Hash == Hash)
	    break;
	 // proceed with the next till we have either the right
	 // or we found another version (which will be lower)
      }

      /* We already have a version for this item, record that we saw it */
      if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
      {
	 if (List.UsePackage(Pkg,Ver) == false)
	    return _error->Error(_("Error occurred while processing %s (UsePackage2)"),
				 Pkg.Name());

	 if (NewFileVer(Ver,List) == false)
	    return _error->Error(_("Error occurred while processing %s (NewFileVer1)"),
				 Pkg.Name());

	 // Read only a single record and return
	 if (OutVer != 0)
	 {
	    *OutVer = Ver;
	    return true;
	 }

	 return true;
      }
   }

   // Add a new version
   map_ptrloc const verindex = NewVersion(Ver,Version,*LastVer);
   if (verindex == 0 && _error->PendingError())
      return _error->Error(_("Error occurred while processing %s (NewVersion%d)"),
			   Pkg.Name(), 1);

   // NewVersion may have moved the map: rebase the raw insert pointer
   if (oldMap != Map.Data())
      LastVer += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastVer = verindex;
   Ver->ParentPkg = Pkg.Index();
   Ver->Hash = Hash;

   if (List.NewVersion(Ver) == false)
      return _error->Error(_("Error occurred while processing %s (NewVersion%d)"),
			   Pkg.Name(), 2);

   if (List.UsePackage(Pkg,Ver) == false)
      return _error->Error(_("Error occurred while processing %s (UsePackage3)"),
			   Pkg.Name());

   if (NewFileVer(Ver,List) == false)
      return _error->Error(_("Error occurred while processing %s (NewVersion%d)"),
			   Pkg.Name(), 3);

   // Read only a single record and return
   if (OutVer != 0)
   {
      *OutVer = Ver;
      return true;
   }

   /* Record the Description (it is not translated) */
   MD5SumValue CurMd5 = List.Description_md5();
   if (CurMd5.Value().empty() == true)
      return true;
   std::string CurLang = List.DescriptionLanguage();

   /* Before we add a new description we first search in the group for
      a version with a description of the same MD5 - if so we reuse this
      description group instead of creating our own for this version */
   pkgCache::GrpIterator Grp = Pkg.Group();
   for (pkgCache::PkgIterator P = Grp.PackageList();
	P.end() == false; P = Grp.NextPkg(P))
   {
      for (pkgCache::VerIterator V = P.VersionList();
	   V.end() == false; ++V)
      {
	 if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false)
	    continue;
	 Ver->DescriptionList = V->DescriptionList;
	 return true;
      }
   }

   // We haven't found reusable descriptions, so add the first description
   pkgCache::DescIterator Desc = Ver.DescriptionList();
   Dynamic<pkgCache::DescIterator> DynDesc(Desc);
   map_ptrloc *LastDesc = &Ver->DescriptionList;

   // same remap dance as above: NewDescription can move the map
   oldMap = Map.Data();
   map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc);
   if (oldMap != Map.Data())
      LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
   *LastDesc = descindex;
   Desc->ParentPkg = Pkg.Index();

   if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
      return _error->Error(_("Error occurred while processing %s (NewFileDesc2)"),Pkg.Name());

   return true;
}
									/*}}}*/
440 // CacheGenerator::MergeFileProvides - Merge file provides /*{{{*/
441 // ---------------------------------------------------------------------
442 /* If we found any file depends while parsing the main list we need to
443 resolve them. Since it is undesired to load the entire list of files
444 into the cache as virtual packages we do a two stage effort. MergeList
445 identifies the file depends and this creates Provdies for them by
446 re-parsing all the indexs. */
447 bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
448 {
449 List.Owner = this;
450
451 unsigned int Counter = 0;
452 while (List.Step() == true)
453 {
454 string PackageName = List.Package();
455 if (PackageName.empty() == true)
456 return false;
457 string Version = List.Version();
458 if (Version.empty() == true)
459 continue;
460
461 pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
462 Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
463 if (Pkg.end() == true)
464 return _error->Error(_("Error occurred while processing %s (FindPkg)"),
465 PackageName.c_str());
466 Counter++;
467 if (Counter % 100 == 0 && Progress != 0)
468 Progress->Progress(List.Offset());
469
470 unsigned long Hash = List.VersionHash();
471 pkgCache::VerIterator Ver = Pkg.VersionList();
472 Dynamic<pkgCache::VerIterator> DynVer(Ver);
473 for (; Ver.end() == false; ++Ver)
474 {
475 if (Ver->Hash == Hash && Version.c_str() == Ver.VerStr())
476 {
477 if (List.CollectFileProvides(Cache,Ver) == false)
478 return _error->Error(_("Error occurred while processing %s (CollectFileProvides)"),PackageName.c_str());
479 break;
480 }
481 }
482
483 if (Ver.end() == true)
484 _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
485 }
486
487 return true;
488 }
489 /*}}}*/
// CacheGenerator::NewGroup - Add a new group				/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new group structure and adds it to the hash table.
   If a group of this name exists already it is returned instead. */
bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
{
   Grp = Cache.FindGrp(Name);
   if (Grp.end() == false)
      return true;

   // Get a structure
   map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group));
   if (unlikely(Group == 0))
      return false;

   Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
   map_ptrloc const idxName = WriteStringInMap(Name);
   if (unlikely(idxName == 0))
      return false;
   Grp->Name = idxName;

   // Insert it into the hash table (new head of its hash chain)
   unsigned long const Hash = Cache.Hash(Name);
   Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
   Cache.HeaderP->GrpHashTable[Hash] = Group;

   Grp->ID = Cache.HeaderP->GroupCount++;
   return true;
}
									/*}}}*/
// CacheGenerator::NewPackage - Add a new package			/*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table.
   The package is appended to the package list of its group (creating
   the group first if needed); an existing package of the same name and
   architecture is returned instead. */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
					const string &Arch) {
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(NewGroup(Grp, Name) == false))
      return false;

   Pkg = Grp.FindPkg(Arch);
   if (Pkg.end() == false)
      return true;

   // Get a structure
   map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package));
   if (unlikely(Package == 0))
      return false;
   Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);

   // Insert the package into our package list
   if (Grp->FirstPackage == 0) // the group is new
   {
      // Insert it into the hash table
      unsigned long const Hash = Cache.Hash(Name);
      Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
      Cache.HeaderP->PkgHashTable[Hash] = Package;
      Grp->FirstPackage = Package;
   }
   else // Group the Packages together
   {
      // this package is the new last package
      pkgCache::PkgIterator LastPkg(Cache, Cache.PkgP + Grp->LastPackage);
      Pkg->NextPackage = LastPkg->NextPackage;
      LastPkg->NextPackage = Package;
   }
   Grp->LastPackage = Package;

   // Set the name, arch and the ID
   Pkg->Name = Grp->Name;
   Pkg->Group = Grp.Index();
   // all is mapped to the native architecture
   map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str());
   if (unlikely(idxArch == 0))
      return false;
   Pkg->Arch = idxArch;
   Pkg->ID = Cache.HeaderP->PackageCount++;

   return true;
}
									/*}}}*/
570 // CacheGenerator::NewFileVer - Create a new File<->Version association /*{{{*/
571 // ---------------------------------------------------------------------
572 /* */
573 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
574 ListParser &List)
575 {
576 if (CurrentFile == 0)
577 return true;
578
579 // Get a structure
580 map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile));
581 if (VerFile == 0)
582 return 0;
583
584 pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
585 VF->File = CurrentFile - Cache.PkgFileP;
586
587 // Link it to the end of the list
588 map_ptrloc *Last = &Ver->FileList;
589 for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
590 Last = &V->NextFile;
591 VF->NextFile = *Last;
592 *Last = VF.Index();
593
594 VF->Offset = List.Offset();
595 VF->Size = List.Size();
596 if (Cache.HeaderP->MaxVerFileSize < VF->Size)
597 Cache.HeaderP->MaxVerFileSize = VF->Size;
598 Cache.HeaderP->VerFileCount++;
599
600 return true;
601 }
602 /*}}}*/
// CacheGenerator::NewVersion - Create a new Version 			/*{{{*/
// ---------------------------------------------------------------------
/* This puts a version structure in the linked list.  Next is the offset
   of the element that should follow the new one; the returned offset is
   0 on allocation failure. */
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
					    const string &VerStr,
					    unsigned long Next)
{
   // Get a structure
   map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version));
   if (Version == 0)
      return 0;

   // Fill it in
   Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
   Ver->NextVer = Next;
   Ver->ID = Cache.HeaderP->VersionCount++;
   // write the string after the struct: the map may move and Ver is
   // rebased via the caller's Dynamic<> registration
   map_ptrloc const idxVerStr = WriteStringInMap(VerStr);
   if (unlikely(idxVerStr == 0))
      return 0;
   Ver->VerStr = idxVerStr;

   return Version;
}
									/*}}}*/
627 // CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
628 // ---------------------------------------------------------------------
629 /* */
630 bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
631 ListParser &List)
632 {
633 if (CurrentFile == 0)
634 return true;
635
636 // Get a structure
637 map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile));
638 if (DescFile == 0)
639 return false;
640
641 pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
642 DF->File = CurrentFile - Cache.PkgFileP;
643
644 // Link it to the end of the list
645 map_ptrloc *Last = &Desc->FileList;
646 for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
647 Last = &D->NextFile;
648
649 DF->NextFile = *Last;
650 *Last = DF.Index();
651
652 DF->Offset = List.Offset();
653 DF->Size = List.Size();
654 if (Cache.HeaderP->MaxDescFileSize < DF->Size)
655 Cache.HeaderP->MaxDescFileSize = DF->Size;
656 Cache.HeaderP->DescFileCount++;
657
658 return true;
659 }
660 /*}}}*/
// CacheGenerator::NewDescription - Create a new Description		/*{{{*/
// ---------------------------------------------------------------------
/* This puts a description structure in the linked list.  Next is the
   offset of the element that should follow the new one; the returned
   offset is 0 on failure. */
map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
					    const string &Lang,
					    const MD5SumValue &md5sum,
					    map_ptrloc Next)
{
   // Get a structure
   map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description));
   if (Description == 0)
      return 0;

   // Fill it in
   Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
   Desc->NextDesc = Next;
   Desc->ID = Cache.HeaderP->DescriptionCount++;
   // the string writes may move the map; Desc is rebased through the
   // caller's Dynamic<> registration before the fields are assigned
   map_ptrloc const idxlanguage_code = WriteStringInMap(Lang);
   map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value());
   if (unlikely(idxlanguage_code == 0 || idxmd5sum == 0))
      return 0;
   Desc->language_code = idxlanguage_code;
   Desc->md5sum = idxmd5sum;

   return Description;
}
									/*}}}*/
// CacheGenerator::FinishCache - do various finish operations		/*{{{*/
// ---------------------------------------------------------------------
/* This prepares the Cache for delivery.  With more than one configured
   architecture implicit MultiArch dependencies are generated between
   the per-architecture siblings of every group. */
bool pkgCacheGenerator::FinishCache(OpProgress *Progress)
{
   // FIXME: add progress reporting for this operation
   // Do we have different architectures in your groups ?
   vector<string> archs = APT::Configuration::getArchitectures();
   if (archs.size() > 1)
   {
      // Create Conflicts in between the group
      pkgCache::GrpIterator G = GetCache().GrpBegin();
      Dynamic<pkgCache::GrpIterator> DynG(G);
      for (; G.end() != true; ++G)
      {
	 string const PkgName = G.Name();
	 pkgCache::PkgIterator P = G.PackageList();
	 Dynamic<pkgCache::PkgIterator> DynP(P);
	 for (; P.end() != true; P = G.NextPkg(P))
	 {
	    pkgCache::PkgIterator allPkg;
	    Dynamic<pkgCache::PkgIterator> DynallPkg(allPkg);
	    pkgCache::VerIterator V = P.VersionList();
	    Dynamic<pkgCache::VerIterator> DynV(V);
	    for (; V.end() != true; ++V)
	    {
	       // copy P.Arch() into a string here as a cache remap
	       // in NewDepends() later may alter the pointer location
	       string Arch = P.Arch() == NULL ? "" : P.Arch();
	       map_ptrloc *OldDepLast = NULL;
	       /* MultiArch handling introduces a lot of implicit Dependencies:
		  - MultiArch: same → Co-Installable if they have the same version
		  - Architecture: all → Need to be Co-Installable for internal reasons
		  - All others conflict with all other group members */
	       bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same);
	       for (vector<string>::const_iterator A = archs.begin(); A != archs.end(); ++A)
	       {
		  // no self-relations
		  if (*A == Arch)
		     continue;
		  /* We allow only one installed arch at the time
		     per group, therefore each group member conflicts
		     with all other group members */
		  pkgCache::PkgIterator D = G.FindPkg(*A);
		  Dynamic<pkgCache::PkgIterator> DynD(D);
		  if (D.end() == true)
		     continue;
		  if (coInstall == true)
		  {
		     // Replaces: ${self}:other ( << ${binary:Version})
		     NewDepends(D, V, V.VerStr(),
				pkgCache::Dep::Less, pkgCache::Dep::Replaces,
				OldDepLast);
		     // Breaks: ${self}:other (!= ${binary:Version})
		     NewDepends(D, V, V.VerStr(),
				pkgCache::Dep::NotEquals, pkgCache::Dep::DpkgBreaks,
				OldDepLast);
		  } else {
		     // Conflicts: ${self}:other
		     NewDepends(D, V, "",
				pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
				OldDepLast);
		  }
	       }
	    }
	 }
      }
   }
   return true;
}
									/*}}}*/
// CacheGenerator::NewDepends - Create a dependency element	 	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
   version and to the package that it is pointing to.  OldDepLast caches
   the tail of the version's dependency list across calls; NULL forces a
   fresh walk to the end of the list. */
bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
				   pkgCache::VerIterator &Ver,
				   string const &Version,
				   unsigned int const &Op,
				   unsigned int const &Type,
				   map_ptrloc* &OldDepLast)
{
   void const * const oldMap = Map.Data();
   // Get a structure
   map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency));
   if (unlikely(Dependency == 0))
      return false;

   // Fill it in
   pkgCache::DepIterator Dep(Cache,Cache.DepP + Dependency);
   Dynamic<pkgCache::DepIterator> DynDep(Dep);
   Dep->ParentVer = Ver.Index();
   Dep->Type = Type;
   Dep->CompareOp = Op;
   Dep->ID = Cache.HeaderP->DependsCount++;

   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
   {
/*      for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
	 if (I->Version != 0 && I.TargetVer() == Version)
	    Dep->Version = I->Version;*/
      // NOTE(review): with the probe above disabled nothing assigns
      // Dep->Version before this check — it relies on fresh cache
      // memory being zero-initialized; verify against DynamicMMap
      if (Dep->Version == 0) {
	 map_ptrloc const index = WriteStringInMap(Version);
	 if (unlikely(index == 0))
	    return false;
	 Dep->Version = index;
      }
   }

   // Link it to the package (head of the reverse-depends list)
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();

   // Do we know where to link the Dependency to?
   if (OldDepLast == NULL)
   {
      OldDepLast = &Ver->DependsList;
      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
	 OldDepLast = &D->NextDepends;
   } else if (oldMap != Map.Data())
      // the cached raw pointer must be rebased if the map moved
      OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;

   Dep->NextDepends = *OldDepLast;
   *OldDepLast = Dep.Index();
   OldDepLast = &Dep->NextDepends;

   return true;
}
									/*}}}*/
// ListParser::NewDepends - Create the environment for a new dependency	/*{{{*/
// ---------------------------------------------------------------------
/* This creates a Group and the Package to link this dependency to if
   needed and handles also the caching of the old endpoint */
bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
					       const string &PackageName,
					       const string &Arch,
					       const string &Version,
					       unsigned int Op,
					       unsigned int Type)
{
   pkgCache::GrpIterator Grp;
   Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
   if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
      return false;

   // Locate the target package; create it if the dependency points at
   // a package we haven't seen yet
   pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (Pkg.end() == true) {
      if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
	 return false;
   }

   // Is it a file dependency?
   if (unlikely(PackageName[0] == '/'))
      FoundFileDeps = true;

   /* Caching the old end point speeds up generation substantially */
   if (OldDepVer != Ver) {
      OldDepLast = NULL;
      OldDepVer = Ver;
   }

   return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
}
									/*}}}*/
// ListParser::NewProvides - Create a Provides element			/*{{{*/
// ---------------------------------------------------------------------
/* Create a provides entry linking the given version to the (possibly
   newly created) providing package.  Self-referencing provides are
   silently skipped. */
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
						const string &PkgName,
						const string &PkgArch,
						const string &Version)
{
   pkgCache &Cache = Owner->Cache;

   // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
      return true;

   // Get a structure
   map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides));
   if (unlikely(Provides == 0))
      return false;
   Cache.HeaderP->ProvidesCount++;

   // Fill it in
   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
   Dynamic<pkgCache::PrvIterator> DynPrv(Prv);
   Prv->Version = Ver.Index();
   // head-insert into the version's provides list
   Prv->NextPkgProv = Ver->ProvidesList;
   Ver->ProvidesList = Prv.Index();
   if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
      return false;

   // Locate the target package
   pkgCache::PkgIterator Pkg;
   Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
   if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
      return false;

   // Link it to the package (head of its provides list)
   Prv->ParentPkg = Pkg.Index();
   Prv->NextProvides = Pkg->ProvidesList;
   Pkg->ProvidesList = Prv.Index();

   return true;
}
									/*}}}*/
// CacheGenerator::SelectFile - Select the current file being parsed	/*{{{*/
// ---------------------------------------------------------------------
/* This is used to select which file is to be associated with all newly
   added versions. The caller is responsible for setting the IMS fields. */
bool pkgCacheGenerator::SelectFile(const string &File,const string &Site,
				   const pkgIndexFile &Index,
				   unsigned long Flags)
{
   // Get some space for the structure
   map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile));
   if (unlikely(idxFile == 0))
      return false;
   CurrentFile = Cache.PkgFileP + idxFile;

   // Fill it in; write the strings first as each write may remap and
   // CurrentFile is rebased by ReMap before the fields are assigned
   map_ptrloc const idxFileName = WriteStringInMap(File);
   map_ptrloc const idxSite = WriteUniqString(Site);
   if (unlikely(idxFileName == 0 || idxSite == 0))
      return false;
   CurrentFile->FileName = idxFileName;
   CurrentFile->Site = idxSite;
   // head-insert into the global list of package files
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label);
   if (unlikely(idxIndexType == 0))
      return false;
   CurrentFile->IndexType = idxIndexType;
   PkgFileName = File;
   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
   Cache.HeaderP->PackageFileCount++;

   if (Progress != 0)
      Progress->SubProgress(Index.Size());
   return true;
}
									/*}}}*/
// CacheGenerator::WriteUniqueString - Insert a unique string		/*{{{*/
// ---------------------------------------------------------------------
/* This is used to create handles to strings. Given the same text it
   always returns the same number. Returns 0 on allocation failure. */
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
						 unsigned int Size)
{
   /* We use a very small transient hash table here, this speeds up generation
      by a fair amount on slower machines */
   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
   if (Bucket != 0 &&
       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
      return Bucket->String;

   // Search for an insertion point; the loop stops at the first entry
   // that does not compare greater than S (Res >= 0), so Last ends up
   // pointing at the link to patch for an ordered insert
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem,
        I = Cache.StringItemP + I->NextItem)
   {
      Res = stringcmp(S,S+Size,Cache.StrP + I->String);
      if (Res >= 0)
	 break;
   }

   // Match
   if (Res == 0)
   {
      Bucket = I;
      return I->String;
   }

   // Get a structure; remember the old base so we can detect a remap
   void const * const oldMap = Map.Data();
   map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem));
   if (Item == 0)
      return 0;

   map_ptrloc const idxString = WriteStringInMap(S,Size);
   if (unlikely(idxString == 0))
      return 0;
   // the allocations above may have grown and moved the mmap; re-base
   // the raw pointers Last and I by the distance the map travelled
   if (oldMap != Map.Data()) {
      Last += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
      I += (pkgCache::StringItem*) Map.Data() - (pkgCache::StringItem*) oldMap;
   }
   *Last = Item;

   // Fill in the structure and splice it into the list before I
   pkgCache::StringItem *ItemP = Cache.StringItemP + Item;
   ItemP->NextItem = I - Cache.StringItemP;
   ItemP->String = idxString;

   // remember the new entry in the transient hash table
   Bucket = ItemP;
   return ItemP->String;
}
992 /*}}}*/
993 // CheckValidity - Check that a cache is up-to-date /*{{{*/
994 // ---------------------------------------------------------------------
995 /* This just verifies that each file in the list of index files exists,
996 has matching attributes with the cache and the cache does not have
997 any extra files. */
998 static bool CheckValidity(const string &CacheFile,
999 pkgSourceList &List,
1000 FileIterator Start,
1001 FileIterator End,
1002 MMap **OutMap = 0)
1003 {
1004 bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
1005 // No file, certainly invalid
1006 if (CacheFile.empty() == true || FileExists(CacheFile) == false)
1007 {
1008 if (Debug == true)
1009 std::clog << "CacheFile doesn't exist" << std::endl;
1010 return false;
1011 }
1012
1013 if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
1014 {
1015 if (Debug == true)
1016 std::clog << "sources.list is newer than the cache" << std::endl;
1017 return false;
1018 }
1019
1020 // Map it
1021 FileFd CacheF(CacheFile,FileFd::ReadOnly);
1022 SPtr<MMap> Map = new MMap(CacheF,0);
1023 pkgCache Cache(Map);
1024 if (_error->PendingError() == true || Map->Size() == 0)
1025 {
1026 if (Debug == true)
1027 std::clog << "Errors are pending or Map is empty()" << std::endl;
1028 _error->Discard();
1029 return false;
1030 }
1031
1032 /* Now we check every index file, see if it is in the cache,
1033 verify the IMS data and check that it is on the disk too.. */
1034 SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
1035 memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
1036 for (; Start != End; ++Start)
1037 {
1038 if (Debug == true)
1039 std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
1040 if ((*Start)->HasPackages() == false)
1041 {
1042 if (Debug == true)
1043 std::clog << "Has NO packages" << std::endl;
1044 continue;
1045 }
1046
1047 if ((*Start)->Exists() == false)
1048 {
1049 #if 0 // mvo: we no longer give a message here (Default Sources spec)
1050 _error->WarningE("stat",_("Couldn't stat source package list %s"),
1051 (*Start)->Describe().c_str());
1052 #endif
1053 if (Debug == true)
1054 std::clog << "file doesn't exist" << std::endl;
1055 continue;
1056 }
1057
1058 // FindInCache is also expected to do an IMS check.
1059 pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
1060 if (File.end() == true)
1061 {
1062 if (Debug == true)
1063 std::clog << "FindInCache returned end-Pointer" << std::endl;
1064 return false;
1065 }
1066
1067 Visited[File->ID] = true;
1068 if (Debug == true)
1069 std::clog << "with ID " << File->ID << " is valid" << std::endl;
1070 }
1071
1072 for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
1073 if (Visited[I] == false)
1074 {
1075 if (Debug == true)
1076 std::clog << "File with ID" << I << " wasn't visited" << std::endl;
1077 return false;
1078 }
1079
1080 if (_error->PendingError() == true)
1081 {
1082 if (Debug == true)
1083 {
1084 std::clog << "Validity failed because of pending errors:" << std::endl;
1085 _error->DumpErrors();
1086 }
1087 _error->Discard();
1088 return false;
1089 }
1090
1091 if (OutMap != 0)
1092 *OutMap = Map.UnGuard();
1093 return true;
1094 }
1095 /*}}}*/
1096 // ComputeSize - Compute the total size of a bunch of files /*{{{*/
1097 // ---------------------------------------------------------------------
1098 /* Size is kind of an abstract notion that is only used for the progress
1099 meter */
1100 static unsigned long ComputeSize(FileIterator Start,FileIterator End)
1101 {
1102 unsigned long TotalSize = 0;
1103 for (; Start != End; ++Start)
1104 {
1105 if ((*Start)->HasPackages() == false)
1106 continue;
1107 TotalSize += (*Start)->Size();
1108 }
1109 return TotalSize;
1110 }
1111 /*}}}*/
1112 // BuildCache - Merge the list of index files into the cache /*{{{*/
1113 // ---------------------------------------------------------------------
1114 /* */
1115 static bool BuildCache(pkgCacheGenerator &Gen,
1116 OpProgress *Progress,
1117 unsigned long &CurrentSize,unsigned long TotalSize,
1118 FileIterator Start, FileIterator End)
1119 {
1120 FileIterator I;
1121 for (I = Start; I != End; ++I)
1122 {
1123 if ((*I)->HasPackages() == false)
1124 continue;
1125
1126 if ((*I)->Exists() == false)
1127 continue;
1128
1129 if ((*I)->FindInCache(Gen.GetCache()).end() == false)
1130 {
1131 _error->Warning("Duplicate sources.list entry %s",
1132 (*I)->Describe().c_str());
1133 continue;
1134 }
1135
1136 unsigned long Size = (*I)->Size();
1137 if (Progress != NULL)
1138 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
1139 CurrentSize += Size;
1140
1141 if ((*I)->Merge(Gen,Progress) == false)
1142 return false;
1143 }
1144
1145 if (Gen.HasFileDeps() == true)
1146 {
1147 if (Progress != NULL)
1148 Progress->Done();
1149 TotalSize = ComputeSize(Start, End);
1150 CurrentSize = 0;
1151 for (I = Start; I != End; ++I)
1152 {
1153 unsigned long Size = (*I)->Size();
1154 if (Progress != NULL)
1155 Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
1156 CurrentSize += Size;
1157 if ((*I)->MergeFileProvides(Gen,Progress) == false)
1158 return false;
1159 }
1160 }
1161
1162 return true;
1163 }
1164 /*}}}*/
1165 // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/
1166 DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) {
1167 unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024);
1168 unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024);
1169 unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0);
1170 Flags |= MMap::Moveable;
1171 if (_config->FindB("APT::Cache-Fallback", false) == true)
1172 Flags |= MMap::Fallback;
1173 if (CacheF != NULL)
1174 return new DynamicMMap(*CacheF, Flags, MapStart, MapGrow, MapLimit);
1175 else
1176 return new DynamicMMap(Flags, MapStart, MapGrow, MapLimit);
1177 }
1178 /*}}}*/
// CacheGenerator::MakeStatusCache - Construct the status cache		/*{{{*/
// ---------------------------------------------------------------------
/* This makes sure that the status cache (the cache that has all
   index files from the sources list and all local ones) is ready
   to be mmaped. If OutMap is not zero then a MMap object representing
   the cache will be stored there. This is pretty much mandatory if you
   are using AllowMem. AllowMem lets the function be run as non-root
   where it builds the cache 'fast' into a memory buffer. */
// Deprecated free-function shim kept for source compatibility; it just
// forwards to the static class method below.
__deprecated bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
			MMap **OutMap, bool AllowMem)
   { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
			MMap **OutMap,bool AllowMem)
{
   bool const Debug = _config->FindB("Debug::pkgCacheGen", false);

   // Collect every index file of every sources.list entry
   vector<pkgIndexFile *> Files;
   for (vector<metaIndex *>::const_iterator i = List.begin();
        i != List.end();
        ++i)
   {
      vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
      for (vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
	   j != Indexes->end();
	   ++j)
         Files.push_back (*j);
   }

   // Files[0,EndOfSource) are the source indexes, the rest status files
   unsigned long const EndOfSource = Files.size();
   if (_system->AddStatusFiles(Files) == false)
      return false;

   // Decide if we can write to the files..
   string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
   string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");

   // ensure the cache directory exists
   if (CacheFile.empty() == false || SrcCacheFile.empty() == false)
   {
      string dir = _config->FindDir("Dir::Cache");
      size_t const len = dir.size();
      // strip a trailing "apt/" component so the parent directory is
      // used as the base for CreateDirectory
      if (len > 5 && dir.find("/apt/", len - 6, 5) == len - 5)
	 dir = dir.substr(0, len - 5);
      if (CacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(CacheFile));
      if (SrcCacheFile.empty() == false)
	 CreateDirectory(dir, flNotFile(SrcCacheFile));
   }

   // Decide if we can write to the cache
   bool Writeable = false;
   if (CacheFile.empty() == false)
      Writeable = access(flNotFile(CacheFile).c_str(),W_OK) == 0;
   else
      if (SrcCacheFile.empty() == false)
	 Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
   if (Debug == true)
      std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;

   if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
      return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());

   if (Progress != NULL)
      Progress->OverallProgress(0,1,1,_("Reading package lists"));

   // Cache is OK, Fin.
   if (CheckValidity(CacheFile, List, Files.begin(),Files.end(),OutMap) == true)
   {
      if (Progress != NULL)
	 Progress->OverallProgress(1,1,1,_("Reading package lists"));
      if (Debug == true)
	 std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
      return true;
   }
   else if (Debug == true)
      std::clog << "pkgcache.bin is NOT valid" << std::endl;

   /* At this point we know we need to reconstruct the package cache,
      begin. */
   SPtr<FileFd> CacheF;
   SPtr<DynamicMMap> Map;
   if (Writeable == true && CacheFile.empty() == false)
   {
      // Try a file backed mmap first; errors are kept on a side stack
      // so a fall back to a memory map can discard them
      _error->PushToStack();
      unlink(CacheFile.c_str());
      CacheF = new FileFd(CacheFile,FileFd::WriteAtomic);
      fchmod(CacheF->Fd(),0644);
      Map = CreateDynamicMMap(CacheF, MMap::Public);
      if (_error->PendingError() == true)
      {
	 delete CacheF.UnGuard();
	 delete Map.UnGuard();
	 if (Debug == true)
	    std::clog << "Open filebased MMap FAILED" << std::endl;
	 Writeable = false;
	 if (AllowMem == false)
	 {
	    _error->MergeWithStack();
	    return false;
	 }
	 // discard the file-map errors, we will retry in memory below
	 _error->RevertToStack();
      }
      else if (Debug == true)
      {
	 _error->MergeWithStack();
	 std::clog << "Open filebased MMap" << std::endl;
      }
   }
   if (Writeable == false || CacheFile.empty() == true)
   {
      // Just build it in memory..
      Map = CreateDynamicMMap(NULL);
      if (Debug == true)
	 std::clog << "Open memory Map (not filebased)" << std::endl;
   }

   // Lets try the source cache.
   unsigned long CurrentSize = 0;
   unsigned long TotalSize = 0;
   if (CheckValidity(SrcCacheFile, List, Files.begin(),
		     Files.begin()+EndOfSource) == true)
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
      // Preload the map with the source cache
      FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
      unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
      if ((alloc == 0 && _error->PendingError())
	  || SCacheF.Read((unsigned char *)Map->Data() + alloc,
			  SCacheF.Size()) == false)
	 return false;

      TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());

      // Build the status cache on top of the preloaded source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   else
   {
      if (Debug == true)
	 std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
      TotalSize = ComputeSize(Files.begin(),Files.end());

      // Build the source cache
      pkgCacheGenerator Gen(Map.Get(),Progress);
      if (_error->PendingError() == true)
	 return false;
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin(),Files.begin()+EndOfSource) == false)
	 return false;

      // Write it back
      if (Writeable == true && SrcCacheFile.empty() == false)
      {
	 FileFd SCacheF(SrcCacheFile,FileFd::WriteAtomic);
	 if (_error->PendingError() == true)
	    return false;

	 fchmod(SCacheF.Fd(),0644);

	 // Write out the main data
	 if (SCacheF.Write(Map->Data(),Map->Size()) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 SCacheF.Sync();

	 // Write out the proper header: the on-disk copy must not carry
	 // the Dirty flag, the in-memory one keeps it until finished
	 Gen.GetCache().HeaderP->Dirty = false;
	 if (SCacheF.Seek(0) == false ||
	     SCacheF.Write(Map->Data(),sizeof(*Gen.GetCache().HeaderP)) == false)
	    return _error->Error(_("IO Error saving source cache"));
	 Gen.GetCache().HeaderP->Dirty = true;
	 SCacheF.Sync();
      }

      // Build the status cache
      if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
		     Files.begin()+EndOfSource,Files.end()) == false)
	 return false;

      // FIXME: move me to a better place
      Gen.FinishCache(Progress);
   }
   if (Debug == true)
      std::clog << "Caches are ready for shipping" << std::endl;

   if (_error->PendingError() == true)
      return false;
   if (OutMap != 0)
   {
      if (CacheF != 0)
      {
	 // file backed: hand the caller a fresh read-only view of it
	 delete Map.UnGuard();
	 *OutMap = new MMap(*CacheF,0);
      }
      else
      {
	 *OutMap = Map.UnGuard();
      }
   }

   return true;
}
1389 /*}}}*/
// CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
// ---------------------------------------------------------------------
/* Deprecated free-function shim kept for source compatibility; it just
   forwards to the static class method below. */
__deprecated bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
   { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
1395 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
1396 {
1397 vector<pkgIndexFile *> Files;
1398 unsigned long EndOfSource = Files.size();
1399 if (_system->AddStatusFiles(Files) == false)
1400 return false;
1401
1402 SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL);
1403 unsigned long CurrentSize = 0;
1404 unsigned long TotalSize = 0;
1405
1406 TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
1407
1408 // Build the status cache
1409 if (Progress != NULL)
1410 Progress->OverallProgress(0,1,1,_("Reading package lists"));
1411 pkgCacheGenerator Gen(Map.Get(),Progress);
1412 if (_error->PendingError() == true)
1413 return false;
1414 if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
1415 Files.begin()+EndOfSource,Files.end()) == false)
1416 return false;
1417
1418 // FIXME: move me to a better place
1419 Gen.FinishCache(Progress);
1420
1421 if (_error->PendingError() == true)
1422 return false;
1423 *OutMap = Map.UnGuard();
1424
1425 return true;
1426 }
1427 /*}}}*/
1428 // IsDuplicateDescription /*{{{*/
1429 bool IsDuplicateDescription(pkgCache::DescIterator Desc,
1430 MD5SumValue const &CurMd5, std::string const &CurLang)
1431 {
1432 // Descriptions in the same link-list have all the same md5
1433 if (MD5SumValue(Desc.md5()) != CurMd5)
1434 return false;
1435 for (; Desc.end() == false; ++Desc)
1436 if (Desc.LanguageCode() == CurLang)
1437 return true;
1438 return false;
1439 }
1440 /*}}}*/
1441