X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/a52f938bcf7f31d348c6ce1d462a8ce14a38b8cc..e0c4f06387ade5761e644b5b96e41284c12da23c:/apt-pkg/pkgcachegen.cc

diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index 053d6396d..1ba791b45 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -114,7 +114,7 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
       
       pkgCache::PkgIterator Pkg;
       if (NewPackage(Pkg,PackageName) == false)
-         return _error->Error(_("Error occured while processing %s (NewPackage)"),PackageName.c_str());
+         return _error->Error(_("Error occurred while processing %s (NewPackage)"),PackageName.c_str());
       Counter++;
       if (Counter % 100 == 0 && Progress != 0)
          Progress->Progress(List.Offset());
@@ -125,31 +125,39 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
       string Version = List.Version();
       if (Version.empty() == true)
       {
+         // we first process the package, then the descriptions
+         // (this has the bonus that we get MMap error when we run out
+         //  of MMap space)
+         if (List.UsePackage(Pkg,pkgCache::VerIterator(Cache)) == false)
+            return _error->Error(_("Error occurred while processing %s (UsePackage1)"),
+                                 PackageName.c_str());
+
          // Find the right version to write the description
          MD5SumValue CurMd5 = List.Description_md5();
          pkgCache::VerIterator Ver = Pkg.VersionList();
          map_ptrloc *LastVer = &Pkg->VersionList;
-         
+
          for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
          {
            pkgCache::DescIterator Desc = Ver.DescriptionList();
            map_ptrloc *LastDesc = &Ver->DescriptionList;
-           
+
            for (; Desc.end() == false; LastDesc = &Desc->NextDesc, Desc++)
-              if (MD5SumValue(Desc.md5()) == CurMd5) {
+           {
+
+              if (MD5SumValue(Desc.md5()) == CurMd5)
+              {
                  // Add new description
                  *LastDesc = NewDescription(Desc, List.DescriptionLanguage(), CurMd5, *LastDesc);
                  Desc->ParentPkg = Pkg.Index();
-                 
+
                  if (NewFileDesc(Desc,List) == false)
                     return _error->Error(_("Error occured while processing %s (NewFileDesc1)"),PackageName.c_str());
                  break;
              }
+           }
         }
-
-         if (List.UsePackage(Pkg,pkgCache::VerIterator(Cache)) == false)
-            return _error->Error(_("Error occured while processing %s (UsePackage1)"), 
-                                 PackageName.c_str());
+
         continue;
      }
 
@@ -169,11 +177,11 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
       if (Res == 0 && Ver->Hash == Hash)
       {
         if (List.UsePackage(Pkg,Ver) == false)
-           return _error->Error(_("Error occured while processing %s (UsePackage2)"),
+           return _error->Error(_("Error occurred while processing %s (UsePackage2)"),
                                 PackageName.c_str());
 
         if (NewFileVer(Ver,List) == false)
-           return _error->Error(_("Error occured while processing %s (NewFileVer1)"),
+           return _error->Error(_("Error occurred while processing %s (NewFileVer1)"),
                                 PackageName.c_str());
 
         // Read only a single record and return
@@ -204,15 +212,15 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
       Ver->Hash = Hash;
 
       if (List.NewVersion(Ver) == false)
-        return _error->Error(_("Error occured while processing %s (NewVersion1)"),
+        return _error->Error(_("Error occurred while processing %s (NewVersion1)"),
                              PackageName.c_str());
 
       if (List.UsePackage(Pkg,Ver) == false)
-        return _error->Error(_("Error occured while processing %s (UsePackage3)"),
+        return _error->Error(_("Error occurred while processing %s (UsePackage3)"),
                              PackageName.c_str());
 
       if (NewFileVer(Ver,List) == false)
-        return _error->Error(_("Error occured while processing %s (NewVersion2)"),
+        return _error->Error(_("Error occurred while processing %s (NewVersion2)"),
                              PackageName.c_str());
 
       // Read only a single record and return
@@ -279,7 +287,7 @@ bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
 
       pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
       if (Pkg.end() == true)
-        return _error->Error(_("Error occured while processing %s (FindPkg)"),
+        return _error->Error(_("Error occurred while processing %s (FindPkg)"),
                              PackageName.c_str());
       Counter++;
       if (Counter % 100 == 0 && Progress != 0)
@@ -292,7 +300,7 @@ bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
         if (Ver->Hash == Hash && Version.c_str() == Ver.VerStr())
         {
            if (List.CollectFileProvides(Cache,Ver) == false)
-              return _error->Error(_("Error occured while processing %s (CollectFileProvides)"),PackageName.c_str());
+              return _error->Error(_("Error occurred while processing %s (CollectFileProvides)"),PackageName.c_str());
            break;
         }
      }
@@ -403,14 +411,15 @@ bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
    unsigned long DescFile = Map.Allocate(sizeof(pkgCache::DescFile));
    if (DescFile == 0)
       return 0;
-   
+
    pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
    DF->File = CurrentFile - Cache.PkgFileP;
-   
+
    // Link it to the end of the list
    map_ptrloc *Last = &Desc->FileList;
    for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; D++)
       Last = &D->NextFile;
+
    DF->NextFile = *Last;
    *Last = DF.Index();
 
@@ -736,7 +745,7 @@ static bool BuildCache(pkgCacheGenerator &Gen,
       }
      
       unsigned long Size = (*I)->Size();
-      Progress.OverallProgress(CurrentSize,TotalSize,Size,_("Reading Package Lists"));
+      Progress.OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists"));
       CurrentSize += Size;
      
       if ((*I)->Merge(Gen,Progress) == false)
@@ -805,12 +814,12 @@ bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
    if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
       return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
    
-   Progress.OverallProgress(0,1,1,_("Reading Package Lists"));
+   Progress.OverallProgress(0,1,1,_("Reading package lists"));
    
    // Cache is OK, Fin.
    if (CheckValidity(CacheFile,Files.begin(),Files.end(),OutMap) == true)
    {
-      Progress.OverallProgress(1,1,1,_("Reading Package Lists"));
+      Progress.OverallProgress(1,1,1,_("Reading package lists"));
       return true;
    }
    
@@ -933,7 +942,7 @@ bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
    TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
    
    // Build the status cache
-   Progress.OverallProgress(0,1,1,_("Reading Package Lists"));
+   Progress.OverallProgress(0,1,1,_("Reading package lists"));
    pkgCacheGenerator Gen(Map.Get(),&Progress);
    if (_error->PendingError() == true)
       return false;
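
(Illustrative note, not part of the diff above.) Both the MergeList and NewFileDesc hunks rely on the same append-through-a-link-slot pattern: a map_ptrloc *Last pointer is walked to the address of the tail link, the new record's NextFile/NextDesc is set from *Last, and then *Last is pointed at the new record. A minimal C++ sketch of that pattern, using plain pointers and hypothetical names (Node, append) instead of the cache's mmap offsets:

#include <cstdio>

struct Node { int value; Node *next; };

// Append by tracking the address of the link that will point at the new
// node; an empty list then needs no special case. This mirrors
// "map_ptrloc *Last = &Desc->FileList; ... Last = &D->NextFile;" above.
static void append(Node **head, Node *n)
{
   Node **last = head;
   for (Node *d = *head; d != nullptr; d = d->next)
      last = &d->next;
   n->next = *last;   // like "DF->NextFile = *Last;"
   *last = n;         // like "*Last = DF.Index();"
}

int main()
{
   Node *head = nullptr;
   Node a{1, nullptr}, b{2, nullptr};
   append(&head, &a);
   append(&head, &b);
   for (Node *d = head; d != nullptr; d = d->next)
      std::printf("%d\n", d->value);
   return 0;
}

Keeping a pointer to the link slot rather than to the last node is what lets the cache code insert into an empty list and into the middle of a sorted list with the same two assignments.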