X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/5465192b9aeb1ccea778950ccf2d1b7b32f2cd91..516582f486e967c8b9ca8635b524757ba12131ba:/apt-pkg/pkgcachegen.cc?ds=sidebyside

diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index 9529f42dc..68175a24a 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -42,7 +42,8 @@
 #include
 #include
-									/*}}}*/
+
+template <typename Iter> using Dynamic = pkgCacheGenerator::Dynamic<Iter>;	/*}}}*/
 typedef std::vector<pkgIndexFile *>::iterator FileIterator;
 template <typename Iter> std::vector<Iter*> pkgCacheGenerator::Dynamic<Iter>::toReMap;
 
@@ -56,7 +57,7 @@ using std::string;
 /* We set the dirty flag and make sure that is written to the disk */
 pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
                     Map(*pMap), Cache(pMap,false), Progress(Prog),
-                    CurrentRlsFile(NULL), CurrentFile(NULL), FoundFileDeps(0), d(NULL)
+                    CurrentRlsFile(NULL), CurrentFile(NULL), d(NULL)
 {
    if (_error->PendingError() == true)
       return;
@@ -229,22 +230,6 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
          continue;
       }
 
-      if (Arch.empty() == true)
-      {
-         // use the pseudo arch 'none' for arch-less packages
-         Arch = "none";
-         /* We might built a SingleArchCache here, which we don't want to blow up
-            just for these :none packages to a proper MultiArchCache, so just ensure
-            that we have always a native package structure first for SingleArch */
-         pkgCache::PkgIterator NP;
-         Dynamic<pkgCache::PkgIterator> DynPkg(NP);
-         if (NewPackage(NP, PackageName, _config->Find("APT::Architecture")) == false)
-            // TRANSLATOR: The first placeholder is a package name,
-            // the other two should be copied verbatim as they include debug info
-            return _error->Error(_("Error occurred while processing %s (%s%d)"),
-                                 PackageName.c_str(), "NewPackage", 0);
-      }
-
       // Get a pointer to the package structure
       pkgCache::PkgIterator Pkg;
       Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
@@ -267,10 +252,7 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
       }
 
       if (OutVer != 0)
-      {
-         FoundFileDeps |= List.HasFileDeps();
          return true;
-      }
    }
 
    if (Cache.HeaderP->PackageCount >= std::numeric_limits<map_id_t>::max())
@@ -286,7 +268,6 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
       return _error->Error(_("Wow, you exceeded the number of dependencies "
                              "this APT is capable of."));
 
-   FoundFileDeps |= List.HasFileDeps();
    return true;
 }
 // CacheGenerator::MergeListGroup					/*{{{*/
@@ -444,40 +425,6 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
             return _error->Error(_("Error occurred while processing %s (%s%d)"),
                                  Pkg.Name(), "AddImplicitDepends", 1);
       }
-      /* :none packages are packages without an architecture. They are forbidden by
-         debian-policy, so usually they will only be in (old) dpkg status files -
-         and dpkg will complain about them - and are pretty rare. We therefore do
-         usually not create conflicts while the parent is created, but only if a :none
-         package (= the target) appears. This creates incorrect dependencies on :none
-         for architecture-specific dependencies on the package we copy from, but we
-         will ignore this bug as architecture-specific dependencies are only allowed
-         in jessie and until then the :none packages should be extinct (hopefully).
-         In other words: This should work long enough to allow graceful removal of
-         these packages, it is not supposed to allow users to keep using them … */
-      if (strcmp(Pkg.Arch(), "none") == 0)
-      {
-         pkgCache::PkgIterator M = Grp.FindPreferredPkg();
-         if (M.end() == false && Pkg != M)
-         {
-            pkgCache::DepIterator D = M.RevDependsList();
-            Dynamic<pkgCache::DepIterator> DynD(D);
-            for (; D.end() == false; ++D)
-            {
-               if ((D->Type != pkgCache::Dep::Conflicts &&
-                    D->Type != pkgCache::Dep::DpkgBreaks &&
-                    D->Type != pkgCache::Dep::Replaces) ||
-                   D.ParentPkg().Group() == Grp)
-                  continue;
-
-               map_pointer_t *OldDepLast = NULL;
-               pkgCache::VerIterator ConVersion = D.ParentVer();
-               Dynamic<pkgCache::VerIterator> DynV(ConVersion);
-               // duplicate the Conflicts/Breaks/Replaces for :none arch
-               NewDepends(Pkg, ConVersion, D->Version,
-                          D->CompareOp, D->Type, OldDepLast);
-            }
-         }
-      }
    }
    if (unlikely(AddImplicitDepends(Grp, Pkg, Ver) == false))
       return _error->Error(_("Error occurred while processing %s (%s%d)"),
@@ -547,57 +494,6 @@ bool pkgCacheGenerator::AddNewDescription(ListParser &List, pkgCache::VerIterato
 }
 									/*}}}*/
 									/*}}}*/
-// CacheGenerator::MergeFileProvides - Merge file provides		/*{{{*/
-// ---------------------------------------------------------------------
-/* If we found any file depends while parsing the main list we need to
-   resolve them. Since it is undesired to load the entire list of files
-   into the cache as virtual packages we do a two stage effort. MergeList
-   identifies the file depends and this creates Provdies for them by
-   re-parsing all the indexs. */
-bool pkgCacheGenerator::MergeFileProvides(ListParser &List)
-{
-   List.Owner = this;
-
-   unsigned int Counter = 0;
-   while (List.Step() == true)
-   {
-      string PackageName = List.Package();
-      if (PackageName.empty() == true)
-         return false;
-      string Version = List.Version();
-      if (Version.empty() == true)
-         continue;
-
-      pkgCache::PkgIterator Pkg = Cache.FindPkg(PackageName);
-      Dynamic<pkgCache::PkgIterator> DynPkg(Pkg);
-      if (Pkg.end() == true)
-         return _error->Error(_("Error occurred while processing %s (%s%d)"),
-                              PackageName.c_str(), "FindPkg", 1);
-      Counter++;
-      if (Counter % 100 == 0 && Progress != 0)
-         Progress->Progress(List.Offset());
-
-      unsigned short Hash = List.VersionHash();
-      pkgCache::VerIterator Ver = Pkg.VersionList();
-      Dynamic<pkgCache::VerIterator> DynVer(Ver);
-      for (; Ver.end() == false; ++Ver)
-      {
-         if (List.SameVersion(Hash, Ver) == true && Version == Ver.VerStr())
-         {
-            if (List.CollectFileProvides(Cache,Ver) == false)
-               return _error->Error(_("Error occurred while processing %s (%s%d)"),
-                                    PackageName.c_str(), "CollectFileProvides", 1);
-            break;
-         }
-      }
-
-      if (Ver.end() == true)
-         _error->Warning(_("Package %s %s was not found while processing file dependencies"),PackageName.c_str(),Version.c_str());
-   }
-
-   return true;
-}
-									/*}}}*/
 // CacheGenerator::NewGroup - Add a new group				/*{{{*/
 // ---------------------------------------------------------------------
 /* This creates a new group structure and adds it to the hash table */
@@ -1051,7 +947,7 @@ bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
 // ---------------------------------------------------------------------
 /* This creates a Group and the Package to link this dependency to if
    needed and handles also the caching of the old endpoint */
-bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
+bool pkgCacheListParser::NewDepends(pkgCache::VerIterator &Ver,
                                                const string &PackageName,
                                                const string &Arch,
                                                const string &Version,
@@ -1063,10 +959,6 @@ bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
    if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
       return false;
 
-   // Is it a file dependency?
-   if (unlikely(PackageName[0] == '/'))
-      FoundFileDeps = true;
-
    map_stringitem_t idxVersion = 0;
    if (Version.empty() == false)
    {
@@ -1077,7 +969,7 @@ bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
 
       if (idxVersion == 0)
       {
-         idxVersion = StoreString(VERSIONNUMBER, Version);
+         idxVersion = StoreString(pkgCacheGenerator::VERSIONNUMBER, Version);
          if (unlikely(idxVersion == 0))
             return false;
       }
@@ -1124,7 +1016,7 @@ bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator &Ver,
 }
 									/*}}}*/
 // ListParser::NewProvides - Create a Provides element			/*{{{*/
-bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
+bool pkgCacheListParser::NewProvides(pkgCache::VerIterator &Ver,
                                                 const string &PkgName,
                                                 const string &PkgArch,
                                                 const string &Version,
@@ -1145,7 +1037,7 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver,
 
    map_stringitem_t idxProvideVersion = 0;
    if (Version.empty() == false) {
-      idxProvideVersion = StoreString(VERSIONNUMBER, Version);
+      idxProvideVersion = StoreString(pkgCacheGenerator::VERSIONNUMBER, Version);
       if (unlikely(idxProvideVersion == 0))
          return false;
    }
@@ -1178,7 +1070,7 @@ bool pkgCacheGenerator::NewProvides(pkgCache::VerIterator &Ver,
 }
 									/*}}}*/
 // ListParser::NewProvidesAllArch - add provides for all architectures	/*{{{*/
-bool pkgCacheGenerator::ListParser::NewProvidesAllArch(pkgCache::VerIterator &Ver, string const &Package,
+bool pkgCacheListParser::NewProvidesAllArch(pkgCache::VerIterator &Ver, string const &Package,
                                string const &Version, uint8_t const Flags) {
    pkgCache &Cache = Owner->Cache;
    pkgCache::GrpIterator const Grp = Cache.FindGrp(Package);
@@ -1188,7 +1080,7 @@ bool pkgCacheGenerator::ListParser::NewProvidesAllArch(pkgCache::VerIterator &Ve
 {
    map_stringitem_t idxProvideVersion = 0;
    if (Version.empty() == false) {
-      idxProvideVersion = StoreString(VERSIONNUMBER, Version);
+      idxProvideVersion = StoreString(pkgCacheGenerator::VERSIONNUMBER, Version);
      if (unlikely(idxProvideVersion == 0))
         return false;
   }
@@ -1209,7 +1101,7 @@ bool pkgCacheGenerator::ListParser::NewProvidesAllArch(pkgCache::VerIterator &Ve
   return true;
 }
 									/*}}}*/
-bool pkgCacheGenerator::ListParser::SameVersion(unsigned short const Hash,/*{{{*/
+bool pkgCacheListParser::SameVersion(unsigned short const Hash,		/*{{{*/
               pkgCache::VerIterator const &Ver)
 {
   return Hash == Ver->Hash;
@@ -1358,8 +1250,8 @@ static bool CheckValidity(const string &CacheFile,
 
   // Map it
   FileFd CacheF(CacheFile,FileFd::ReadOnly);
-   SPtr<MMap> Map = new MMap(CacheF,0);
-   pkgCache Cache(Map);
+   std::unique_ptr<MMap> Map(new MMap(CacheF,0));
+   pkgCache Cache(Map.get());
   if (_error->PendingError() == true || Map->Size() == 0)
   {
      if (Debug == true)
@@ -1368,8 +1260,8 @@ static bool CheckValidity(const string &CacheFile,
      return false;
   }
 
-   SPtrArray<bool> RlsVisited = new bool[Cache.HeaderP->ReleaseFileCount];
-   memset(RlsVisited,0,sizeof(*RlsVisited)*Cache.HeaderP->ReleaseFileCount);
+   std::unique_ptr<bool[]> RlsVisited(new bool[Cache.HeaderP->ReleaseFileCount]);
+   memset(RlsVisited.get(),0,sizeof(RlsVisited[0])*Cache.HeaderP->ReleaseFileCount);
   std::vector<pkgIndexFile *> Files;
   for (pkgSourceList::const_iterator i = List.begin(); i != List.end(); ++i)
   {
@@ -1403,8 +1295,8 @@ static bool CheckValidity(const string &CacheFile,
 
   /* Now we check every index file, see if it is in the cache,
      verify the IMS data and check that it is on the disk too.. */
-   SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
-   memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
+   std::unique_ptr<bool[]> Visited(new bool[Cache.HeaderP->PackageFileCount]);
+   memset(Visited.get(),0,sizeof(Visited[0])*Cache.HeaderP->PackageFileCount);
   for (std::vector<pkgIndexFile *>::const_reverse_iterator PkgFile = Files.rbegin(); PkgFile != Files.rend(); ++PkgFile)
   {
      if (Debug == true)
@@ -1450,7 +1342,7 @@ static bool CheckValidity(const string &CacheFile,
   }
 
   if (OutMap != 0)
-      *OutMap = Map.UnGuard();
+      *OutMap = Map.release();
   return true;
 }
 									/*}}}*/
@@ -1489,13 +1381,9 @@ static bool BuildCache(pkgCacheGenerator &Gen,
                       FileIterator const Start, FileIterator const End)
 {
   std::vector<pkgIndexFile *> Files;
-   bool const HasFileDeps = Gen.HasFileDeps();
   bool mergeFailure = false;
 
   auto const indexFileMerge = [&](pkgIndexFile * const I) {
-      if (HasFileDeps)
-         Files.push_back(I);
-
      if (I->HasPackages() == false || mergeFailure)
         return;
@@ -1547,24 +1435,6 @@ static bool BuildCache(pkgCacheGenerator &Gen,
        if (mergeFailure)
           return false;
   }
-
-   if (HasFileDeps == true)
-   {
-      if (Progress != NULL)
-         Progress->Done();
-      TotalSize = ComputeSize(List, Start, End);
-      CurrentSize = 0;
-      for (std::vector<pkgIndexFile *>::const_iterator I = Files.begin(); I != Files.end(); ++I)
-      {
-         map_filesize_t Size = (*I)->Size();
-         if (Progress != NULL)
-            Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides"));
-         CurrentSize += Size;
-         if ((*I)->MergeFileProvides(Gen,Progress) == false)
-            return false;
-      }
-   }
-
   return true;
 }
 									/*}}}*/
@@ -1613,16 +1483,16 @@ static bool writeBackMMapToFile(pkgCacheGenerator * const Gen, DynamicMMap * con
   return true;
 }
 static bool loadBackMMapFromFile(std::unique_ptr<pkgCacheGenerator> &Gen,
-      SPtr<DynamicMMap> &Map, OpProgress * const Progress, std::string const &FileName)
+      std::unique_ptr<DynamicMMap> &Map, OpProgress * const Progress, std::string const &FileName)
 {
-   Map = CreateDynamicMMap(NULL, 0);
+   Map.reset(CreateDynamicMMap(NULL, 0));
   FileFd CacheF(FileName, FileFd::ReadOnly);
   map_pointer_t const alloc = Map->RawAllocate(CacheF.Size());
   if ((alloc == 0 && _error->PendingError())
        || CacheF.Read((unsigned char *)Map->Data() + alloc, CacheF.Size()) == false)
      return false;
-   Gen.reset(new pkgCacheGenerator(Map.Get(),Progress));
+   Gen.reset(new pkgCacheGenerator(Map.get(),Progress));
   return true;
 }
 APT_DEPRECATED bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
@@ -1708,7 +1578,7 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
   }
 
   // At this point we know we need to construct something, so get storage ready
-   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL, 0);
+   std::unique_ptr<DynamicMMap> Map(CreateDynamicMMap(NULL, 0));
   if (Debug == true)
      std::clog << "Open memory Map (not filebased)" << std::endl;
@@ -1729,7 +1599,7 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
   {
      if (Debug == true)
        std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
-      Gen.reset(new pkgCacheGenerator(Map.Get(),Progress));
+      Gen.reset(new pkgCacheGenerator(Map.get(),Progress));
 
      TotalSize += ComputeSize(&List, Files.begin(),Files.end());
     if (BuildCache(*Gen, Progress, CurrentSize, TotalSize, &List,
@@ -1737,7 +1607,7 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
        return false;
 
     if (Writeable == true && SrcCacheFile.empty() == false)
-        if (writeBackMMapToFile(Gen.get(), Map.Get(), SrcCacheFile) == false)
+        if (writeBackMMapToFile(Gen.get(), Map.get(), SrcCacheFile) == false)
           return false;
   }
 
@@ -1750,7 +1620,7 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
        return false;
 
     if (Writeable == true && CacheFile.empty() == false)
-        if (writeBackMMapToFile(Gen.get(), Map.Get(), CacheFile) == false)
+        if (writeBackMMapToFile(Gen.get(), Map.get(), CacheFile) == false)
           return false;
   }
 
@@ -1774,7 +1644,7 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
   }
 
   if (OutMap != nullptr)
-      *OutMap = Map.UnGuard();
+      *OutMap = Map.release();
 
   if (Debug == true)
     std::clog << "Everything is ready for shipping" << std::endl;
@@ -1792,7 +1662,7 @@ bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **O
   if (_system->AddStatusFiles(Files) == false)
     return false;
 
-   SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL, 0);
+   std::unique_ptr<DynamicMMap> Map(CreateDynamicMMap(NULL, 0));
   map_filesize_t CurrentSize = 0;
   map_filesize_t TotalSize = 0;
 
@@ -1801,7 +1671,7 @@ bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **O
   // Build the status cache
   if (Progress != NULL)
     Progress->OverallProgress(0,1,1,_("Reading package lists"));
-   pkgCacheGenerator Gen(Map.Get(),Progress);
+   pkgCacheGenerator Gen(Map.get(),Progress);
   if (_error->PendingError() == true)
     return false;
   if (BuildCache(Gen,Progress,CurrentSize,TotalSize, NULL,
@@ -1810,7 +1680,7 @@ bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **O
   if (_error->PendingError() == true)
     return false;
-   *OutMap = Map.UnGuard();
+   *OutMap = Map.release();
 
   return true;
 }
@@ -1828,12 +1698,6 @@ static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
   return false;
 }
 									/*}}}*/
-// CacheGenerator::FinishCache						/*{{{*/
-bool pkgCacheGenerator::FinishCache(OpProgress * /*Progress*/)
-{
-   return true;
-}
-									/*}}}*/
-pkgCacheGenerator::ListParser::ListParser() : Owner(NULL), OldDepLast(NULL), FoundFileDeps(false), d(NULL) {}
-pkgCacheGenerator::ListParser::~ListParser() {}
+pkgCacheListParser::pkgCacheListParser() : Owner(NULL), OldDepLast(NULL), d(NULL) {}
+pkgCacheListParser::~pkgCacheListParser() {}