X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/a6deb7a0ea70e945dfd750d1bdc1b783b187873e..fa47f406cf434e175885c5920175c0cedcd62746:/apt-pkg/pkgcachegen.cc?ds=inline

diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index 76c2f0074..a26237296 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -49,6 +49,7 @@ static bool IsDuplicateDescription(pkgCache::DescIterator Desc,
 			    MD5SumValue const &CurMd5, std::string const &CurLang);
 
 using std::string;
+using APT::StringView;
 
 // CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
 // ---------------------------------------------------------------------
@@ -57,14 +58,15 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
 		    Map(*pMap), Cache(pMap,false), Progress(Prog),
 		    CurrentRlsFile(NULL), CurrentFile(NULL), d(NULL)
 {
-   if (_error->PendingError() == true)
-      return;
-
    if (Map.Size() == 0)
    {
       // Setup the map interface..
       Cache.HeaderP = (pkgCache::Header *)Map.Data();
-      if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
+      _error->PushToStack();
+      Map.RawAllocate(sizeof(pkgCache::Header));
+      bool const newError = _error->PendingError();
+      _error->MergeWithStack();
+      if (newError)
 	 return;
 
       Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
@@ -100,6 +102,8 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
       else
 	 Cache.HeaderP->SetArchitectures(idxArchitecture);
 
+      // Calculate the hash for the empty map, so ReMap does not fail
+      Cache.HeaderP->CacheFileSize = Cache.CacheHash();
       Cache.ReMap();
    }
    else
@@ -124,13 +128,16 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
    advoid a problem during a crash */
 pkgCacheGenerator::~pkgCacheGenerator()
 {
-   if (_error->PendingError() == true)
+   if (_error->PendingError() == true || Map.validData() == false)
       return;
    if (Map.Sync() == false)
       return;
 
    Cache.HeaderP->Dirty = false;
-   Cache.HeaderP->CacheFileSize = Map.Size();
+   Cache.HeaderP->CacheFileSize = Cache.CacheHash();
+
+   if (_config->FindB("Debug::pkgCacheGen", false))
+      std::clog << "Produced cache with hash " << Cache.HeaderP->CacheFileSize << std::endl;
    Map.Sync(0,sizeof(pkgCache::Header));
 }
 									/*}}}*/
@@ -301,8 +308,6 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator
 
    // Find the right version to write the description
    MD5SumValue CurMd5 = List.Description_md5();
-   if (CurMd5.Value().empty() == true && List.Description("").empty() == true)
-      return true;
    std::vector<std::string> availDesc = List.AvailableDescriptionLanguages();
    for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
    {
@@ -382,7 +387,7 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
 
    // Add a new version
    map_pointer_t const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer);
-   if (verindex == 0 && _error->PendingError())
+   if (unlikely(verindex == 0))
       return _error->Error(_("Error occurred while processing %s (%s%d)"),
 			   Pkg.Name(), "NewVersion", 1);
 
@@ -437,8 +442,6 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator
 
    /* Record the Description(s) based on their master md5sum */
    MD5SumValue CurMd5 = List.Description_md5();
-   if (CurMd5.Value().empty() == true && List.Description("").empty() == true)
-      return true;
 
    /* Before we add a new description we first search in the group for
       a version with a description of the same MD5 - if so we reuse this
@@ -470,7 +473,7 @@ bool pkgCacheGenerator::AddNewDescription(ListParser &List, pkgCache::VerIterato
    Dynamic<pkgCache::DescIterator> DynDesc(Desc);
 
    map_pointer_t const descindex = NewDescription(Desc, lang, CurMd5, md5idx);
-   if (unlikely(descindex == 0 && _error->PendingError()))
+   if (unlikely(descindex == 0))
       return _error->Error(_("Error occurred while processing %s (%s%d)"),
 			    Ver.ParentPkg().Name(), "NewDescription", 1);
 
@@ -495,7 +498,7 @@ bool pkgCacheGenerator::AddNewDescription(ListParser &List, pkgCache::VerIterato
 // CacheGenerator::NewGroup - Add a new group				/*{{{*/
 // ---------------------------------------------------------------------
 /* This creates a new group structure and adds it to the hash table */
-bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
+bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, StringView Name)
 {
    Grp = Cache.FindGrp(Name);
    if (Grp.end() == false)
@@ -515,7 +518,8 @@ bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
    // Insert it into the hash table
    unsigned long const Hash = Cache.Hash(Name);
    map_pointer_t *insertAt = &Cache.HeaderP->GrpHashTableP()[Hash];
-   while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + *insertAt)->Name) > 0)
+
+   while (*insertAt != 0 && Name.compare(Cache.ViewString((Cache.GrpP + *insertAt)->Name)) > 0)
       insertAt = &(Cache.GrpP + *insertAt)->Next;
    Grp->Next = *insertAt;
    *insertAt = Group;
@@ -527,8 +531,8 @@ bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name)
 // CacheGenerator::NewPackage - Add a new package			/*{{{*/
 // ---------------------------------------------------------------------
 /* This creates a new package structure and adds it to the hash table */
-bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
-					const string &Arch) {
+bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg, StringView Name,
+					StringView Arch) {
    pkgCache::GrpIterator Grp;
    Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
    if (unlikely(NewGroup(Grp, Name) == false))
@@ -561,7 +565,7 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name
       // Insert it into the hash table
       map_id_t const Hash = Cache.Hash(Name);
       map_pointer_t *insertAt = &Cache.HeaderP->PkgHashTableP()[Hash];
-      while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + (Cache.PkgP + *insertAt)->Group)->Name) > 0)
+      while (*insertAt != 0 && Name.compare(Cache.StrP + (Cache.GrpP + (Cache.PkgP + *insertAt)->Group)->Name) > 0)
 	 insertAt = &(Cache.PkgP + *insertAt)->NextPackage;
       Pkg->NextPackage = *insertAt;
       *insertAt = Package;
@@ -629,6 +633,37 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name
       LastPkg->NextPackage = Package;
    }
    Grp->LastPackage = Package;
+
+   // lazy-create foo (of amd64) provides foo:amd64 at the time we first need it
+   if (Arch == "any")
+   {
+      size_t const found = Name.find(':');
+      StringView const NameA = Name.substr(0, found);
+      StringView const ArchA = Name.substr(found + 1);
+      pkgCache::PkgIterator PkgA = Cache.FindPkg(NameA, ArchA);
+      if (PkgA.end() == false)
+      {
+	 Dynamic<pkgCache::PkgIterator> DynPkgA(PkgA);
+	 pkgCache::PrvIterator Prv = PkgA.ProvidesList();
+	 for (; Prv.end() == false; ++Prv)
+	 {
+	    if (Prv.IsMultiArchImplicit())
+	       continue;
+	    pkgCache::VerIterator V = Prv.OwnerVer();
+	    if (ArchA != V.ParentPkg().Arch())
+	       continue;
+	    if (NewProvides(V, Pkg, V->VerStr, pkgCache::Flag::MultiArchImplicit | pkgCache::Flag::ArchSpecific) == false)
+	       return false;
+	 }
+	 pkgCache::VerIterator V = PkgA.VersionList();
+	 Dynamic<pkgCache::VerIterator> DynV(V);
+	 for (; V.end() == false; ++V)
+	 {
+	    if (NewProvides(V, Pkg, V->VerStr, pkgCache::Flag::MultiArchImplicit | pkgCache::Flag::ArchSpecific) == false)
+	       return false;
+	 }
+      }
+   }
    return true;
 }
 									/*}}}*/
@@ -960,9 +995,9 @@ bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
 /* This creates a Group and the Package to link this dependency to if
    needed and handles also the caching of the old endpoint */
 bool pkgCacheListParser::NewDepends(pkgCache::VerIterator &Ver,
-					       const string &PackageName,
-					       const string &Arch,
-					       const string &Version,
+					       StringView PackageName,
+					       StringView Arch,
+					       StringView Version,
 					       uint8_t const Op,
 					       uint8_t const Type)
 {
@@ -976,7 +1011,7 @@ bool pkgCacheListParser::NewDepends(pkgCache::VerIterator &Ver,
    {
       int const CmpOp = Op & 0x0F;
       // =-deps are used (79:1) for lockstep on same-source packages (e.g. data-packages)
-      if (CmpOp == pkgCache::Dep::Equals && strcmp(Version.c_str(), Ver.VerStr()) == 0)
+      if (CmpOp == pkgCache::Dep::Equals && Version == Ver.VerStr())
 	 idxVersion = Ver->VerStr;
 
       if (idxVersion == 0)
@@ -1029,16 +1064,17 @@ bool pkgCacheListParser::NewDepends(pkgCache::VerIterator &Ver,
 									/*}}}*/
 // ListParser::NewProvides - Create a Provides element			/*{{{*/
 bool pkgCacheListParser::NewProvides(pkgCache::VerIterator &Ver,
-				    const string &PkgName,
-				    const string &PkgArch,
-				    const string &Version,
+				    StringView PkgName,
+				    StringView PkgArch,
+				    StringView Version,
 				    uint8_t const Flags)
 {
    pkgCache const &Cache = Owner->Cache;
 
    // We do not add self referencing provides
   if (Ver.ParentPkg().Name() == PkgName && (PkgArch == Ver.ParentPkg().Arch() ||
-	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)))
+	(PkgArch == "all" && strcmp((Cache.StrP + Cache.HeaderP->Architecture), Ver.ParentPkg().Arch()) == 0)) &&
+	(Version.empty() || Version == Ver.VerStr()))
       return true;
 
    // Locate the target package
@@ -1082,8 +1118,8 @@ bool pkgCacheGenerator::NewProvides(pkgCache::VerIterator &Ver,
 }
 									/*}}}*/
 // ListParser::NewProvidesAllArch - add provides for all architectures	/*{{{*/
-bool pkgCacheListParser::NewProvidesAllArch(pkgCache::VerIterator &Ver, string const &Package,
-				string const &Version, uint8_t const Flags) {
+bool pkgCacheListParser::NewProvidesAllArch(pkgCache::VerIterator &Ver, StringView Package,
+				StringView Version, uint8_t const Flags) {
 	pkgCache &Cache = Owner->Cache;
 	pkgCache::GrpIterator Grp = Cache.FindGrp(Package);
 	Dynamic<pkgCache::GrpIterator> DynGrp(Grp);
@@ -1218,23 +1254,21 @@ bool pkgCacheGenerator::SelectFile(std::string const &File,
 map_stringitem_t pkgCacheGenerator::StoreString(enum StringType const type, const char *S,
 						unsigned int Size)
 {
-   std::string const key(S, Size);
-
-   std::map<std::string,map_stringitem_t> * strings;
+   auto strings = &strMixed;
    switch(type) {
      case MIXED: strings = &strMixed; break;
      case PKGNAME: strings = &strPkgNames; break;
      case VERSIONNUMBER: strings = &strVersions; break;
     case SECTION: strings = &strSections; break;
-      default: _error->Fatal("Unknown enum type used for string storage of '%s'", key.c_str()); return 0;
+      default: _error->Fatal("Unknown enum type used for string storage of '%.*s'", Size, S); return 0;
   }
 
-   std::map<std::string,map_stringitem_t>::const_iterator const item = strings->find(key);
+   auto const item = strings->find({S, Size, nullptr, 0});
    if (item != strings->end())
-      return item->second;
+      return item->item;
 
    map_stringitem_t const idxString = WriteStringInMap(S,Size);
-   strings->insert(std::make_pair(key, idxString));
+   strings->insert({nullptr, Size, this, idxString});
    return idxString;
 }
 									/*}}}*/
@@ -1243,12 +1277,19 @@ map_stringitem_t pkgCacheGenerator::StoreString(enum StringType const type, cons
 /* This just verifies that each file in the list of index files exists,
    has matching attributes with the cache and the cache does not have
    any extra files. */
+class APT_HIDDEN ScopedErrorRevert {
+public:
+   ScopedErrorRevert() { _error->PushToStack(); }
+   ~ScopedErrorRevert() { _error->RevertToStack(); }
+};
 static bool CheckValidity(const string &CacheFile,
 			   pkgSourceList &List,
 			   FileIterator const Start,
 			   FileIterator const End,
-			   MMap **OutMap = 0)
+			   MMap **OutMap = 0,
+			   pkgCache **OutCache = 0)
 {
+   ScopedErrorRevert ser;
    bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
    // No file, certainly invalid
    if (CacheFile.empty() == true || FileExists(CacheFile) == false)
@@ -1268,12 +1309,14 @@ static bool CheckValidity(const string &CacheFile,
    // Map it
    FileFd CacheF(CacheFile,FileFd::ReadOnly);
    std::unique_ptr<MMap> Map(new MMap(CacheF,0));
-   pkgCache Cache(Map.get());
-   if (_error->PendingError() == true || Map->Size() == 0)
+   if (unlikely(Map->validData()) == false)
+      return false;
+   std::unique_ptr<pkgCache> CacheP(new pkgCache(Map.get()));
+   pkgCache &Cache = *CacheP.get();
+   if (_error->PendingError() || Map->Size() == 0)
    {
      if (Debug == true)
 	 std::clog << "Errors are pending or Map is empty() for " << CacheFile << std::endl;
-      _error->Discard();
       return false;
    }
 
@@ -1352,14 +1395,15 @@ static bool CheckValidity(const string &CacheFile,
       if (Debug == true)
      {
 	 std::clog << "Validity failed because of pending errors:" << std::endl;
-	 _error->DumpErrors();
+	 _error->DumpErrors(std::clog, GlobalError::DEBUG, false);
      }
-      _error->Discard();
      return false;
    }
-   
+
    if (OutMap != 0)
      *OutMap = Map.release();
+   if (OutCache != 0)
+      *OutCache = CacheP.release();
    return true;
 }
 									/*}}}*/
@@ -1397,7 +1441,6 @@ static bool BuildCache(pkgCacheGenerator &Gen,
 		       pkgSourceList const * const List,
 		       FileIterator const Start, FileIterator const End)
 {
-   std::vector<pkgIndexFile *> Files;
    bool mergeFailure = false;
 
    auto const indexFileMerge = [&](pkgIndexFile * const I) {
@@ -1480,7 +1523,7 @@ static bool writeBackMMapToFile(pkgCacheGenerator * const Gen, DynamicMMap * con
 				   std::string const &FileName)
 {
    FileFd SCacheF(FileName, FileFd::WriteAtomic);
-   if (_error->PendingError() == true)
+   if (SCacheF.IsOpen() == false || SCacheF.Failed())
      return false;
 
    fchmod(SCacheF.Fd(),0644);
@@ -1488,36 +1531,48 @@ static bool writeBackMMapToFile(pkgCacheGenerator * const Gen, DynamicMMap * con
    // Write out the main data
    if (SCacheF.Write(Map->Data(),Map->Size()) == false)
      return _error->Error(_("IO Error saving source cache"));
-   SCacheF.Sync();
 
    // Write out the proper header
    Gen->GetCache().HeaderP->Dirty = false;
+   Gen->GetCache().HeaderP->CacheFileSize = Gen->GetCache().CacheHash();
    if (SCacheF.Seek(0) == false ||
       SCacheF.Write(Map->Data(),sizeof(*Gen->GetCache().HeaderP)) == false)
      return _error->Error(_("IO Error saving source cache"));
    Gen->GetCache().HeaderP->Dirty = true;
-   SCacheF.Sync();
    return true;
 }
 static bool loadBackMMapFromFile(std::unique_ptr<pkgCacheGenerator> &Gen,
      std::unique_ptr<DynamicMMap> &Map, OpProgress * const Progress, std::string const &FileName)
 {
    Map.reset(CreateDynamicMMap(NULL, 0));
+   if (unlikely(Map->validData()) == false)
+      return false;
    FileFd CacheF(FileName, FileFd::ReadOnly);
+   if (CacheF.IsOpen() == false || CacheF.Failed())
+      return false;
+   _error->PushToStack();
    map_pointer_t const alloc = Map->RawAllocate(CacheF.Size());
-   if ((alloc == 0 && _error->PendingError())
-	|| CacheF.Read((unsigned char *)Map->Data() + alloc,
-		       CacheF.Size()) == false)
+   bool const newError = _error->PendingError();
+   _error->MergeWithStack();
+   if (alloc == 0 && newError)
+      return false;
+   if (CacheF.Read((unsigned char *)Map->Data() + alloc, CacheF.Size()) == false)
      return false;
    Gen.reset(new pkgCacheGenerator(Map.get(),Progress));
    return true;
 }
 
-APT_DEPRECATED bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
+bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
 			MMap **OutMap, bool AllowMem)
    { return pkgCacheGenerator::MakeStatusCache(List, &Progress, OutMap, AllowMem); }
 bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
-			MMap **OutMap,bool AllowMem)
+			MMap **OutMap,bool)
+{
+   return pkgCacheGenerator::MakeStatusCache(List, Progress, OutMap, nullptr, true);
+}
+bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress,
+			MMap **OutMap,pkgCache **OutCache, bool)
 {
+   // FIXME: deprecate the ignored AllowMem parameter
    bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
    std::vector<pkgIndexFile *> Files;
@@ -1548,7 +1603,8 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
    bool srcpkgcache_fine = false;
    bool volatile_fine = List.GetVolatileFiles().empty();
 
-   if (CheckValidity(CacheFile, List, Files.begin(), Files.end(), volatile_fine ? OutMap : NULL) == true)
+   if (CheckValidity(CacheFile, List, Files.begin(), Files.end(), volatile_fine ? OutMap : NULL,
+		     volatile_fine ? OutCache : NULL) == true)
    {
      if (Debug == true)
 	 std::clog << "pkgcache.bin is valid - no need to build any cache" << std::endl;
@@ -1582,20 +1638,12 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
 
       if (Debug == true)
 	 std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
-
-      if (Writeable == false && AllowMem == false)
-      {
-	 if (CacheFile.empty() == false)
-	    return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
-	 else if (SrcCacheFile.empty() == false)
-	    return _error->Error(_("Unable to write to %s"),flNotFile(SrcCacheFile).c_str());
-	 else
-	    return _error->Error("Unable to create caches as file usage is disabled, but memory not allowed either!");
-      }
    }
 
    // At this point we know we need to construct something, so get storage ready
   std::unique_ptr<DynamicMMap> Map(CreateDynamicMMap(NULL, 0));
+   if (unlikely(Map->validData()) == false)
+      return false;
    if (Debug == true)
      std::clog << "Open memory Map (not filebased)" << std::endl;
 
@@ -1669,9 +1717,12 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress
 }
 									/*}}}*/
 // CacheGenerator::MakeOnlyStatusCache - Build only a status files cache/*{{{*/
-// ---------------------------------------------------------------------
-/* */
-APT_DEPRECATED bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
+class APT_HIDDEN ScopedErrorMerge {
+public:
+   ScopedErrorMerge() { _error->PushToStack(); }
+   ~ScopedErrorMerge() { _error->MergeWithStack(); }
+};
+bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
    { return pkgCacheGenerator::MakeOnlyStatusCache(&Progress, OutMap); }
 bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap)
 {
@@ -1679,12 +1730,14 @@ bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **O
    if (_system->AddStatusFiles(Files) == false)
      return false;
 
+   ScopedErrorMerge sem;
   std::unique_ptr<DynamicMMap> Map(CreateDynamicMMap(NULL, 0));
+   if (unlikely(Map->validData()) == false)
+      return false;
    map_filesize_t CurrentSize = 0;
    map_filesize_t TotalSize = 0;
-   TotalSize = ComputeSize(NULL, Files.begin(), Files.end());
-
+
    // Build the status cache
    if (Progress != NULL)
       Progress->OverallProgress(0,1,1,_("Reading package lists"));
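
The ScopedErrorRevert and ScopedErrorMerge helpers introduced above wrap _error->PushToStack() with RevertToStack()/MergeWithStack() in RAII guards, so every early return in CheckValidity and MakeOnlyStatusCache restores the global error state. The following is a minimal standalone sketch of that pattern; ErrorStack, ScopedRevert and ProbeCache are illustrative stand-ins, not APT's real GlobalError API.

#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Hypothetical stand-in for APT's GlobalError: messages are collected in
// stackable frames that can later be merged into or dropped from the caller's frame.
class ErrorStack {
   std::vector<std::vector<std::string>> frames;
public:
   ErrorStack() { frames.emplace_back(); }             // base frame
   void Error(std::string msg) { frames.back().push_back(std::move(msg)); }
   void PushToStack() { frames.emplace_back(); }
   void RevertToStack() { frames.pop_back(); }         // discard this frame
   void MergeWithStack() {                             // keep this frame's messages
      std::vector<std::string> inner = std::move(frames.back());
      frames.pop_back();
      for (auto &m : inner)
         frames.back().push_back(std::move(m));
   }
   bool PendingError() const { return !frames.back().empty(); }
};

// RAII guard mirroring ScopedErrorRevert from the diff: whichever return path
// the enclosing function takes, the pushed frame is dropped on scope exit.
class ScopedRevert {
   ErrorStack &err;
public:
   explicit ScopedRevert(ErrorStack &e) : err(e) { err.PushToStack(); }
   ~ScopedRevert() { err.RevertToStack(); }
};

// Usage: probing errors stay local; the caller only sees the boolean result.
bool ProbeCache(ErrorStack &err) {
   ScopedRevert guard(err);
   err.Error("cache file is stale");   // would otherwise leak to the caller
   return !err.PendingError();
}

int main() {
   ErrorStack err;
   std::cout << (ProbeCache(err) ? "valid" : "invalid")
             << ", pending after call: " << err.PendingError() << '\n';   // prints 0
}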
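
Several hunks also repurpose Header->CacheFileSize: instead of the raw map size it now stores Cache.CacheHash(), written both when the cache is finalized (destructor, writeBackMMapToFile) and when the empty map is set up, so a stale or truncated file can be rejected by recomputing the value. The sketch below shows the general idea with a toy header and an FNV-1a hash over the payload; it is an assumption-laden illustration, not APT's actual CacheHash.

#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

// Toy cache layout: a fixed header followed by payload bytes.
struct Header {
   uint32_t Version;
   uint64_t ContentHash;   // plays the role of Header->CacheFileSize in the diff
};

// FNV-1a over everything *after* the header, so the stored hash
// does not feed into its own computation.
static uint64_t CacheHash(const std::vector<uint8_t> &file) {
   uint64_t h = 1469598103934665603ull;
   for (size_t i = sizeof(Header); i < file.size(); ++i) {
      h ^= file[i];
      h *= 1099511628211ull;
   }
   return h;
}

// Seal the file by recording the payload hash in the header.
static void Seal(std::vector<uint8_t> &file) {
   Header h{};
   std::memcpy(&h, file.data(), sizeof(h));
   h.ContentHash = CacheHash(file);
   std::memcpy(file.data(), &h, sizeof(h));
}

// Validity check: recompute and compare, as CheckValidity/ReMap do with CacheFileSize.
static bool CheckValidity(const std::vector<uint8_t> &file) {
   if (file.size() < sizeof(Header))
      return false;                              // truncated before the header ends
   Header h{};
   std::memcpy(&h, file.data(), sizeof(h));
   return h.ContentHash == CacheHash(file);      // stale or truncated payload fails here
}

int main() {
   std::vector<uint8_t> cache(sizeof(Header) + 16, 0x2a);
   Seal(cache);
   std::cout << CheckValidity(cache) << '\n';    // 1
   cache.pop_back();                             // simulate truncation
   std::cout << CheckValidity(cache) << '\n';    // 0
}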
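
The NewGroup and NewPackage hunks keep each hash bucket sorted by walking a pointer to the link field (while (*insertAt != 0 && Name.compare(...) > 0)) before splicing in the new entry. Below is a stripped-down illustration of that insertion idiom, using a vector-backed node pool and index links in place of the cache's map_pointer_t offsets; the names are invented for the example.

#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

// Nodes live in a flat pool and chain via indices; index 0 is reserved as the
// null link, mirroring how a map_pointer_t of 0 terminates a chain in the cache.
struct Node {
   std::string Name;
   uint32_t Next;
};

struct Bucket {
   std::vector<Node> Pool{Node{"", 0}};   // slot 0 = reserved null node
   uint32_t Head = 0;

   void Insert(const std::string &Name) {
      Pool.push_back(Node{Name, 0});
      uint32_t const New = static_cast<uint32_t>(Pool.size() - 1);

      // Walk pointers-to-links until the next entry sorts >= Name -- the same
      // idiom as the insertAt loop in NewGroup/NewPackage above.
      uint32_t *insertAt = &Head;
      while (*insertAt != 0 && Name.compare(Pool[*insertAt].Name) > 0)
         insertAt = &Pool[*insertAt].Next;
      Pool[New].Next = *insertAt;
      *insertAt = New;
   }
};

int main() {
   Bucket b;
   for (auto const *n : {"zlib1g", "apt", "gcc-12", "bash"})
      b.Insert(n);
   for (uint32_t i = b.Head; i != 0; i = b.Pool[i].Next)
      std::cout << b.Pool[i].Name << '\n';   // apt, bash, gcc-12, zlib1g
}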