#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
+#include <apt-pkg/macros.h>
#include <apt-pkg/tagfile.h>
#include <unistd.h>
#include <errno.h>
#include <stdio.h>
-#include <system.h>
+
+#include <apt-pkg/deblistparser.h>
/*}}}*/
typedef vector<pkgIndexFile *>::iterator FileIterator;
-uint32_t hashlittle( const void *key, size_t length, uint32_t initval);
// CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
// ---------------------------------------------------------------------
{
// Setup the map interface..
Cache.HeaderP = (pkgCache::Header *)Map.Data();
- Map.RawAllocate(sizeof(pkgCache::Header));
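+ // 0 is the expected offset for the header itself, so only treat a zero return as failure when an error is pending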
+ if (Map.RawAllocate(sizeof(pkgCache::Header)) == 0 && _error->PendingError() == true)
+ return;
+
Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
-
+
// Starting header
*Cache.HeaderP = pkgCache::Header();
Cache.HeaderP->VerSysName = Map.WriteString(_system->VS->Label);
Cache.HeaderP->Architecture = Map.WriteString(_config->Find("APT::Architecture"));
- Cache.ReMap();
+ Cache.ReMap();
}
else
{
pkgCache::VerIterator *OutVer)
{
List.Owner = this;
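+ // When the parser is the Debian list parser, read fields as srkString slices (start/size views into the record) instead of allocating std::string copies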
+ debListParser *debian(dynamic_cast<debListParser *>(&List));
unsigned int Counter = 0;
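+ // On a per-package error the code below warns and jumps back here, so List.Step() advances past the bad record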
+ step:
while (List.Step() == true)
{
// Get a pointer to the package structure
- string PackageName = List.Package();
+ srkString PackageName;
+ if (debian != NULL)
+ PackageName = debian->Find("Package");
+ else
+ PackageName = List.Package();
if (PackageName.empty() == true)
return false;
pkgCache::PkgIterator Pkg;
- if (NewPackage(Pkg,PackageName) == false)
- return _error->Error(_("Error occurred while processing %s (NewPackage)"),PackageName.c_str());
+ if (NewPackage(Pkg,PackageName) == false) {
+ _error->Warning(_("Error occurred while processing %s (NewPackage)"),std::string(PackageName).c_str());
+ goto step;
+ }
+
Counter++;
if (Counter % 100 == 0 && Progress != 0)
Progress->Progress(List.Offset());
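+ // Look up the description language once per record; it is reused in the duplicate checks below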
+ string language(List.DescriptionLanguage());
+
/* Get a pointer to the version structure. We know the list is sorted
so we use that fact in the search. Insertion of new versions is
done with correct sorting */
- string Version = List.Version();
+ srkString Version;
+ if (debian != NULL)
+ Version = debian->Find("Version");
+ else
+ Version = List.Version();
if (Version.empty() == true)
{
// we first process the package, then the descriptions
// (this has the bonus that we get MMap error when we run out
// of MMap space)
- if (List.UsePackage(Pkg,pkgCache::VerIterator(Cache)) == false)
- return _error->Error(_("Error occurred while processing %s (UsePackage1)"),
- PackageName.c_str());
+ if (List.UsePackage(Pkg,pkgCache::VerIterator(Cache)) == false) {
+ _error->Warning(_("Error occurred while processing %s (UsePackage1)"),
+ std::string(PackageName).c_str());
+ goto step;
+ }
// Find the right version to write the description
MD5SumValue CurMd5 = List.Description_md5();
pkgCache::VerIterator Ver = Pkg.VersionList();
map_ptrloc *LastVer = &Pkg->VersionList;
- for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
+ for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
{
pkgCache::DescIterator Desc = Ver.DescriptionList();
map_ptrloc *LastDesc = &Ver->DescriptionList;
// don't add a new description if we have one for the given
// md5 && language
- for ( ; Desc.end() == false; Desc++)
+ for ( ; Desc.end() == false; Desc++)
if (MD5SumValue(Desc.md5()) == CurMd5 &&
- Desc.LanguageCode() == List.DescriptionLanguage())
+ Desc.LanguageCode() == language)
duplicate=true;
if(duplicate)
continue;
for (Desc = Ver.DescriptionList();
- Desc.end() == false;
+ Desc.end() == false;
LastDesc = &Desc->NextDesc, Desc++)
{
if (MD5SumValue(Desc.md5()) == CurMd5)
{
// Add new description
- *LastDesc = NewDescription(Desc, List.DescriptionLanguage(), CurMd5, *LastDesc);
+ *LastDesc = NewDescription(Desc, language, CurMd5, *LastDesc);
Desc->ParentPkg = Pkg.Index();
- if (NewFileDesc(Desc,List) == false)
- return _error->Error(_("Error occurred while processing %s (NewFileDesc1)"),PackageName.c_str());
+ if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false) {
+ _error->Warning(_("Error occurred while processing %s (NewFileDesc1)"),std::string(PackageName).c_str());
+ goto step;
+ }
break;
}
}
unsigned long Hash = List.VersionHash();
if (Res == 0 && Ver->Hash == Hash)
{
- if (List.UsePackage(Pkg,Ver) == false)
- return _error->Error(_("Error occurred while processing %s (UsePackage2)"),
- PackageName.c_str());
+ if (List.UsePackage(Pkg,Ver) == false) {
+ _error->Warning(_("Error occurred while processing %s (UsePackage2)"),
+ std::string(PackageName).c_str());
+ goto step;
+ }
- if (NewFileVer(Ver,List) == false)
- return _error->Error(_("Error occurred while processing %s (NewFileVer1)"),
- PackageName.c_str());
+ if (NewFileVer(Ver,List) == false) {
+ _error->Warning(_("Error occurred while processing %s (NewFileVer1)"),
+ std::string(PackageName).c_str());
+ goto step;
+ }
// Read only a single record and return
if (OutVer != 0)
Ver->ParentPkg = Pkg.Index();
Ver->Hash = Hash;
- if (List.NewVersion(Ver) == false)
- return _error->Error(_("Error occurred while processing %s (NewVersion1)"),
- PackageName.c_str());
+ if ((*LastVer == 0 && _error->PendingError()) || List.NewVersion(Ver) == false) {
+ _error->Warning(_("Error occurred while processing %s (NewVersion1)"),
+ std::string(PackageName).c_str());
+ goto step;
+ }
- if (List.UsePackage(Pkg,Ver) == false)
- return _error->Error(_("Error occurred while processing %s (UsePackage3)"),
- PackageName.c_str());
+ if (List.UsePackage(Pkg,Ver) == false) {
+ _error->Warning(_("Error occurred while processing %s (UsePackage3)"),
+ std::string(PackageName).c_str());
+ goto step;
+ }
- if (NewFileVer(Ver,List) == false)
- return _error->Error(_("Error occurred while processing %s (NewVersion2)"),
- PackageName.c_str());
+ if (NewFileVer(Ver,List) == false) {
+ _error->Warning(_("Error occurred while processing %s (NewVersion2)"),
+ std::string(PackageName).c_str());
+ goto step;
+ }
// Read only a single record and return
if (OutVer != 0)
for (; Desc.end() == false; LastDesc = &Desc->NextDesc, Desc++);
// Add new description
- *LastDesc = NewDescription(Desc, List.DescriptionLanguage(), List.Description_md5(), *LastDesc);
+ *LastDesc = NewDescription(Desc, language, List.Description_md5(), *LastDesc);
Desc->ParentPkg = Pkg.Index();
- if (NewFileDesc(Desc,List) == false)
- return _error->Error(_("Error occurred while processing %s (NewFileDesc2)"),PackageName.c_str());
+ if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false) {
+ _error->Warning(_("Error occurred while processing %s (NewFileDesc2)"),std::string(PackageName).c_str());
+ goto step;
+ }
}
FoundFileDeps |= List.HasFileDeps();
if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1)
return _error->Error(_("Wow, you exceeded the number of package "
"names this APT is capable of."));
- if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
+ if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID1)*8+sizeof(Cache.VerP->ID2)*8))-1)
return _error->Error(_("Wow, you exceeded the number of versions "
"this APT is capable of."));
if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name)
+{
+ return NewPackage(Pkg, srkString(Name));
+}
+
+bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const srkString &Name)
{
Pkg = Cache.FindPkg(Name);
if (Pkg.end() == false)
Cache.HeaderP->HashTable[Hash] = Package;
// Set the name and the ID
- Pkg->Name = Map.WriteString(Name);
+ Pkg->Name = Map.WriteString(Name.Start,Name.Size);
if (Pkg->Name == 0)
return false;
Pkg->ID = Cache.HeaderP->PackageCount++;
unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
const string &VerStr,
unsigned long Next)
+{
+ return NewVersion(Ver, srkString(VerStr), Next);
+}
+
+unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
+ const srkString &VerStr,
+ unsigned long Next)
{
// Get a structure
unsigned long Version = Map.Allocate(sizeof(pkgCache::Version));
// Fill it in
Ver = pkgCache::VerIterator(Cache,Cache.VerP + Version);
Ver->NextVer = Next;
- Ver->ID = Cache.HeaderP->VersionCount++;
- Ver->VerStr = Map.WriteString(VerStr);
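+ // Store the 32-bit version ID split across the two 16-bit fields ID1/ID2; the VersionCount limit check in MergeList uses their combined width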
+ unsigned int ID = Cache.HeaderP->VersionCount++;
+ Ver->ID1 = ID & 0xffff;
+ Ver->ID2 = ID >> 16;
+ Ver->VerStr = Map.WriteString(VerStr.Start, VerStr.Size);
if (Ver->VerStr == 0)
return 0;
// Get a structure
unsigned long DescFile = Map.Allocate(sizeof(pkgCache::DescFile));
if (DescFile == 0)
- return 0;
+ return false;
pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
DF->File = CurrentFile - Cache.PkgFileP;
Desc->ID = Cache.HeaderP->DescriptionCount++;
Desc->language_code = Map.WriteString(Lang);
Desc->md5sum = Map.WriteString(md5sum.Value());
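+ // WriteString returns 0 if the string could not be written into the map; treat that as a failed description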
+ if (Desc->language_code == 0 || Desc->md5sum == 0)
+ return 0;
return Description;
}
const string &Version,
unsigned int Op,
unsigned int Type)
+{
+ return NewDepends(Ver, srkString(PackageName), srkString(Version), Op, Type);
+}
+
+bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator Ver,
+ const srkString &PackageName,
+ const srkString &Version,
+ unsigned int Op,
+ unsigned int Type)
{
pkgCache &Cache = Owner->Cache;
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator Ver,
const string &PackageName,
const string &Version)
+{
+ return NewProvides(Ver, srkString(PackageName), srkString(Version));
+}
+
+bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator Ver,
+ const srkString &PackageName,
+ const srkString &Version)
{
pkgCache &Cache = Owner->Cache;
unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
unsigned int Size)
{
- uint32_t hash = hashlittle(S, Size, 0xdeadbeef);
-
- /* We use a VERY LARGE INTRANSIENT hash table here, this speeds up generation
- by AN INSANE amount on ALL machines */
- pkgCache::StringItem **Bucket2;
- while (true) {
- Bucket2 = &UniqHash[hash % _count(UniqHash)];
- if (*Bucket2 == NULL)
- break;
- if (stringcmp(S,S+Size,Cache.StrP + (*Bucket2)->String) == 0)
- return (*Bucket2)->String;
- hash += 7;
- }
+ /* We use a very small transient hash table here, this speeds up generation
+ by a fair amount on slower machines */
+ pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
+ if (Bucket != 0 &&
+ stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
+ return Bucket->String;
- pkgCache::StringItem *&Bucket = *Bucket2;
+ // Search for an insertion point
pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
+ int Res = 1;
map_ptrloc *Last = &Cache.HeaderP->StringList;
+ for (; I != Cache.StringItemP; Last = &I->NextItem,
+ I = Cache.StringItemP + I->NextItem)
+ {
+ Res = stringcmp(S,S+Size,Cache.StrP + I->String);
+ if (Res >= 0)
+ break;
+ }
+
+ // Match
+ if (Res == 0)
+ {
+ Bucket = I;
+ return I->String;
+ }
// Get a structure
unsigned long Item = Map.Allocate(sizeof(pkgCache::StringItem));
return ItemP->String;
}
/*}}}*/
-
// CheckValidity - Check that a cache is up-to-date /*{{{*/
// ---------------------------------------------------------------------
/* This just verifies that each file in the list of index files exists,
static bool CheckValidity(const string &CacheFile, FileIterator Start,
FileIterator End,MMap **OutMap = 0)
{
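+ // With Debug::pkgCacheGen set, trace why a cache is accepted or rejected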
+ bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
// No file, certainly invalid
if (CacheFile.empty() == true || FileExists(CacheFile) == false)
+ {
+ if (Debug == true)
+ std::clog << "CacheFile doesn't exist" << std::endl;
return false;
-
+ }
+
// Map it
FileFd CacheF(CacheFile,FileFd::ReadOnly);
- SPtr<MMap> Map = new MMap(CacheF,MMap::Public | MMap::ReadOnly);
+ SPtr<MMap> Map = new MMap(CacheF,0);
pkgCache Cache(Map);
if (_error->PendingError() == true || Map->Size() == 0)
{
+ if (Debug == true)
+ std::clog << "Errors are pending or Map is empty()" << std::endl;
_error->Discard();
return false;
}
SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
for (; Start != End; Start++)
- {
+ {
+ if (Debug == true)
+ std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
if ((*Start)->HasPackages() == false)
+ {
+ if (Debug == true)
+ std::clog << "Has NO packages" << std::endl;
continue;
+ }
if ((*Start)->Exists() == false)
{
_error->WarningE("stat",_("Couldn't stat source package list %s"),
(*Start)->Describe().c_str());
#endif
+ if (Debug == true)
+ std::clog << "file doesn't exist" << std::endl;
continue;
}
// FindInCache is also expected to do an IMS check.
pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
if (File.end() == true)
+ {
+ if (Debug == true)
+ std::clog << "FindInCache returned end-Pointer" << std::endl;
return false;
+ }
Visited[File->ID] = true;
+ if (Debug == true)
+ std::clog << "with ID " << File->ID << " is valid" << std::endl;
}
for (unsigned I = 0; I != Cache.HeaderP->PackageFileCount; I++)
if (Visited[I] == false)
+ {
+ if (Debug == true)
+ std::clog << "File with ID" << I << " wasn't visited" << std::endl;
return false;
+ }
if (_error->PendingError() == true)
{
+ if (Debug == true)
+ {
+ std::clog << "Validity failed because of pending errors:" << std::endl;
+ _error->DumpErrors();
+ }
_error->Discard();
return false;
}
bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress,
MMap **OutMap,bool AllowMem)
{
- unsigned long MapSize = _config->FindI("APT::Cache-Limit",24*1024*1024);
+ bool const Debug = _config->FindB("Debug::pkgCacheGen", false);
+ unsigned long const MapSize = _config->FindI("APT::Cache-Limit",128*1024*1024);
vector<pkgIndexFile *> Files;
for (vector<metaIndex *>::const_iterator i = List.begin();
Files.push_back (*j);
}
- unsigned long EndOfSource = Files.size();
+ unsigned long const EndOfSource = Files.size();
if (_system->AddStatusFiles(Files) == false)
return false;
-
+
// Decide if we can write to the files..
- string CacheFile = _config->FindFile("Dir::Cache::pkgcache");
- string SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
+ string const CacheFile = _config->FindFile("Dir::Cache::pkgcache");
+ string const SrcCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
// Decide if we can write to the cache
bool Writeable = false;
else
if (SrcCacheFile.empty() == false)
Writeable = access(flNotFile(SrcCacheFile).c_str(),W_OK) == 0;
-
+ if (Debug == true)
+ std::clog << "Do we have write-access to the cache files? " << (Writeable ? "YES" : "NO") << std::endl;
+
if (Writeable == false && AllowMem == false && CacheFile.empty() == false)
return _error->Error(_("Unable to write to %s"),flNotFile(CacheFile).c_str());
if (CheckValidity(CacheFile,Files.begin(),Files.end(),OutMap) == true)
{
Progress.OverallProgress(1,1,1,_("Reading package lists"));
+ if (Debug == true)
+ std::clog << "pkgcache.bin is valid - no need to build anything" << std::endl;
return true;
}
+ else if (Debug == true)
+ std::clog << "pkgcache.bin is NOT valid" << std::endl;
/* At this point we know we need to reconstruct the package cache,
begin. */
Map = new DynamicMMap(*CacheF,MMap::Public,MapSize);
if (_error->PendingError() == true)
return false;
+ if (Debug == true)
+ std::clog << "Open filebased MMap" << std::endl;
}
else
{
// Just build it in memory..
- Map = new DynamicMMap(MMap::Public,MapSize);
+ Map = new DynamicMMap(0,MapSize);
+ if (Debug == true)
+ std::clog << "Open memory Map (not filebased)" << std::endl;
}
// Lets try the source cache.
if (CheckValidity(SrcCacheFile,Files.begin(),
Files.begin()+EndOfSource) == true)
{
+ if (Debug == true)
+ std::clog << "srcpkgcache.bin is valid - populate MMap with it." << std::endl;
// Preload the map with the source cache
FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly);
- if (SCacheF.Read((unsigned char *)Map->Data() + Map->RawAllocate(SCacheF.Size()),
- SCacheF.Size()) == false)
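+ // Allocate the target block first, so a failed RawAllocate (0 with an error pending) is never used as a write offset into the map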
+ unsigned long const alloc = Map->RawAllocate(SCacheF.Size());
+ if ((alloc == 0 && _error->PendingError())
+ || SCacheF.Read((unsigned char *)Map->Data() + alloc,
+ SCacheF.Size()) == false)
return false;
TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end());
-
+
// Build the status cache
pkgCacheGenerator Gen(Map.Get(),&Progress);
if (_error->PendingError() == true)
}
else
{
+ if (Debug == true)
+ std::clog << "srcpkgcache.bin is NOT valid - rebuild" << std::endl;
TotalSize = ComputeSize(Files.begin(),Files.end());
// Build the source cache
Files.begin()+EndOfSource,Files.end()) == false)
return false;
}
+ if (Debug == true)
+ std::clog << "Caches are ready for shipping" << std::endl;
if (_error->PendingError() == true)
return false;
if (CacheF != 0)
{
delete Map.UnGuard();
- *OutMap = new MMap(*CacheF,MMap::Public | MMap::ReadOnly);
+ *OutMap = new MMap(*CacheF,0);
}
else
{
/* */
bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **OutMap)
{
- unsigned long MapSize = _config->FindI("APT::Cache-Limit",20*1024*1024);
+ unsigned long MapSize = _config->FindI("APT::Cache-Limit",128*1024*1024);
vector<pkgIndexFile *> Files;
unsigned long EndOfSource = Files.size();
if (_system->AddStatusFiles(Files) == false)
return false;
- SPtr<DynamicMMap> Map;
- Map = new DynamicMMap(MMap::Public,MapSize);
+ SPtr<DynamicMMap> Map = new DynamicMMap(0,MapSize);
unsigned long CurrentSize = 0;
unsigned long TotalSize = 0;