X-Git-Url: https://git.saurik.com/apt.git/blobdiff_plain/ad00ae81eb9e1f5384f8fe32879d483c72bbdace..ddc1d8d08eaff6c71c6062654ddd9d8981799ae9:/apt-pkg/pkgcachegen.cc

diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index f5bf5c589..0a645ca7e 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -1,6 +1,6 @@
 // -*- mode: cpp; mode: fold -*-
 // Description								/*{{{*/
-// $Id: pkgcachegen.cc,v 1.11 1998/07/19 04:22:02 jgg Exp $
+// $Id: pkgcachegen.cc,v 1.41 1999/07/26 17:46:07 jgg Exp $
 /* ######################################################################
    
    Package Cache Generator - Generator for the cache structure.
@@ -17,17 +17,26 @@
 #include 
 #include 
 #include 
-#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
 #include 
 #include 
+#include 
 									/*}}}*/
 
 // CacheGenerator::pkgCacheGenerator - Constructor			/*{{{*/
 // ---------------------------------------------------------------------
 /* We set the diry flag and make sure that is written to the disk */
-pkgCacheGenerator::pkgCacheGenerator(DynamicMMap &Map) : Map(Map), Cache(Map)
+pkgCacheGenerator::pkgCacheGenerator(DynamicMMap &Map,OpProgress &Prog) :
+                    Map(Map), Cache(Map), Progress(&Prog)
 {
+   CurrentFile = 0;
+   
    if (_error->PendingError() == true)
       return;
@@ -39,6 +48,7 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap &Map) : Map(Map), Cache(Map)
    Cache.HeaderP->Dirty = true;
    Map.Sync(0,sizeof(pkgCache::Header));
    Map.UsePools(*Cache.HeaderP->Pools,sizeof(Cache.HeaderP->Pools)/sizeof(Cache.HeaderP->Pools[0]));
+   memset(UniqHash,0,sizeof(UniqHash));
 }
 									/*}}}*/
 // CacheGenerator::~pkgCacheGenerator - Destructor			/*{{{*/
@@ -60,18 +70,26 @@ pkgCacheGenerator::~pkgCacheGenerator()
 // ---------------------------------------------------------------------
 /* This provides the generation of the entries in the cache. Each loop
    goes through a single package record from the underlying parse engine. */
-bool pkgCacheGenerator::MergeList(ListParser &List)
+bool pkgCacheGenerator::MergeList(ListParser &List,
+                                  pkgCache::VerIterator *OutVer)
 {
    List.Owner = this;
   
+   unsigned int Counter = 0;
    while (List.Step() == true)
    {
       // Get a pointer to the package structure
      string PackageName = List.Package();
-      pkgCache::PkgIterator Pkg;
-      if (NewPackage(Pkg,PackageName) == false)
+      if (PackageName.empty() == true)
         return false;
+      pkgCache::PkgIterator Pkg;
+      if (NewPackage(Pkg,PackageName) == false)
+         return _error->Error("Error occured while processing %s (NewPackage)",PackageName.c_str());
+      Counter++;
+      if (Counter % 100 == 0 && Progress != 0)
+         Progress->Progress(List.Offset());
+
      /* Get a pointer to the version structure. We know the list is sorted
         so we use that fact in the search. Insertion of new versions is
        done with correct sorting */
@@ -79,12 +97,12 @@ bool pkgCacheGenerator::MergeList(ListParser &List)
      if (Version.empty() == true)
      {
         if (List.UsePackage(Pkg,pkgCache::VerIterator(Cache)) == false)
-           return false;
+           return _error->Error("Error occured while processing %s (UsePackage1)",PackageName.c_str());
         continue;
      }
      
      pkgCache::VerIterator Ver = Pkg.VersionList();
-      unsigned long *Last = &Pkg->VersionList;
+      map_ptrloc *Last = &Pkg->VersionList;
      int Res = 1;
      for (; Ver.end() == false; Last = &Ver->NextVer, Ver++)
      {
@@ -96,28 +114,56 @@ bool pkgCacheGenerator::MergeList(ListParser &List)
      
      /* We already have a version for this item, record that we saw
         it */
-      if (Res == 0)
+      unsigned long Hash = List.VersionHash();
+      if (Res == 0 && Ver->Hash == Hash)
      {
         if (List.UsePackage(Pkg,Ver) == false)
-           return false;
-        
+           return _error->Error("Error occured while processing %s (UsePackage2)",PackageName.c_str());
+        
         if (NewFileVer(Ver,List) == false)
-           return false;
+           return _error->Error("Error occured while processing %s (NewFileVer1)",PackageName.c_str());
+        
+        // Read only a single record and return
+        if (OutVer != 0)
+        {
+           *OutVer = Ver;
+           return true;
+        }
         continue;
      }
      
+      // Skip to the end of the same version set.
+      if (Res == 0)
+      {
+         for (; Ver.end() == false; Last = &Ver->NextVer, Ver++)
+         {
+            Res = pkgVersionCompare(Version.begin(),Version.end(),Ver.VerStr(),
+                                    Ver.VerStr() + strlen(Ver.VerStr()));
+            if (Res != 0)
+               break;
+         }
+      }
+      
      // Add a new version
      *Last = NewVersion(Ver,Version,*Last);
      Ver->ParentPkg = Pkg.Index();
+      Ver->Hash = Hash;
      if (List.NewVersion(Ver) == false)
-         return false;
+         return _error->Error("Error occured while processing %s (NewVersion1)",PackageName.c_str());
      
      if (List.UsePackage(Pkg,Ver) == false)
-         return false;
+         return _error->Error("Error occured while processing %s (UsePackage3)",PackageName.c_str());
      
      if (NewFileVer(Ver,List) == false)
-         return false;
+         return _error->Error("Error occured while processing %s (NewVersion2)",PackageName.c_str());
+      
+      // Read only a single record and return
+      if (OutVer != 0)
+      {
+         *OutVer = Ver;
+         return true;
+      }
   }
   
   return true;
@@ -159,6 +205,9 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,string Name)
 bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
                                    ListParser &List)
 {
+   if (CurrentFile == 0)
+      return true;
+   
    // Get a structure
    unsigned long VerFile = Map.Allocate(sizeof(pkgCache::VerFile));
    if (VerFile == 0)
@@ -166,12 +215,20 @@ bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver,
    pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile);
    VF->File = CurrentFile - Cache.PkgFileP;
-   VF->NextFile = Ver->FileList;
-   Ver->FileList = VF.Index();
+   
+   // Link it to the end of the list
+   map_ptrloc *Last = &Ver->FileList;
+   for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; V++)
+      Last = &V->NextFile;
+   VF->NextFile = *Last;
+   *Last = VF.Index();
+   
    VF->Offset = List.Offset();
    VF->Size = List.Size();
    if (Cache.HeaderP->MaxVerFileSize < VF->Size)
      Cache.HeaderP->MaxVerFileSize = VF->Size;
+   Cache.HeaderP->VerFileCount++;
+   
    return true;
 }
 									/*}}}*/
@@ -230,26 +287,33 @@ bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator Ver,
   // Probe the reverse dependency list for a version string that matches
   if (Version.empty() == false)
   {
-      for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++)
+/*      for (pkgCache::DepIterator I = Pkg.RevDependsList(); I.end() == false; I++, Hit++)
         if (I->Version != 0 && I.TargetVer() == Version)
-           Dep->Version = I->Version;
+           Dep->Version = I->Version;*/
      
      if (Dep->Version == 0)
         if ((Dep->Version = WriteString(Version)) == 0)
            return false;
   }
-   
+
   // Link it to the package
   Dep->Package = Pkg.Index();
   Dep->NextRevDepends = Pkg->RevDepends;
   Pkg->RevDepends = Dep.Index();
-   // Link it to the version (at the end of the list)
-   unsigned long *Last = &Ver->DependsList;
-   for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; D++)
-      Last = &D->NextDepends;
-   Dep->NextDepends = *Last;
-   *Last = Dep.Index();
+   /* Link it to the version (at the end of the list)
+      Caching the old end point speeds up generation substantially */
+   if (OldDepVer != Ver)
+   {
+      OldDepLast = &Ver->DependsList;
+      for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; D++)
+         OldDepLast = &D->NextDepends;
+      OldDepVer = Ver;
+   }
+   Dep->NextDepends = *OldDepLast;
+   *OldDepLast = Dep.Index();
+   OldDepLast = &Dep->NextDepends;
+   
   return true;
 }
 									/*}}}*/
@@ -270,6 +334,7 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator Ver,
   unsigned long Provides = Owner->Map.Allocate(sizeof(pkgCache::Provides));
   if (Provides == 0)
      return false;
+   Cache.HeaderP->ProvidesCount++;
   
   // Fill it in
   pkgCache::PrvIterator Prv(Cache,Cache.ProvideP + Provides,Cache.PkgP);
@@ -313,11 +378,16 @@ bool pkgCacheGenerator::SelectFile(string File,unsigned long Flags)
   CurrentFile->mtime = Buf.st_mtime;
   CurrentFile->NextFile = Cache.HeaderP->FileList;
   CurrentFile->Flags = Flags;
+   CurrentFile->ID = Cache.HeaderP->PackageFileCount;
   PkgFileName = File;
   Cache.HeaderP->FileList = CurrentFile - Cache.PkgFileP;
-   
+   Cache.HeaderP->PackageFileCount++;
+   
   if (CurrentFile->FileName == 0)
      return false;
+   
+   if (Progress != 0)
+      Progress->SubProgress(Buf.st_size);
   return true;
 }
 									/*}}}*/
@@ -328,10 +398,17 @@ bool pkgCacheGenerator::SelectFile(string File,unsigned long Flags)
 unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
                                                  unsigned int Size)
 {
+   /* We use a very small transient hash table here, this speeds up generation
+      by a fair amount on slower machines */
+   pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)];
+   if (Bucket != 0 && 
+       stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0)
+      return Bucket->String;
+   
   // Search for an insertion point
   pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList;
   int Res = 1;
-   unsigned long *Last = &Cache.HeaderP->StringList;
+   map_ptrloc *Last = &Cache.HeaderP->StringList;
   for (; I != Cache.StringItemP; Last = &I->NextItem, 
        I = Cache.StringItemP + I->NextItem)
   {
@@ -342,7 +419,10 @@ unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
      
      // Match
      if (Res == 0)
+      {
+         Bucket = I;
         return I->String;
+      }
      
      // Get a structure
      unsigned long Item = Map.Allocate(sizeof(pkgCache::StringItem));
@@ -357,6 +437,476 @@ unsigned long pkgCacheGenerator::WriteUniqString(const char *S,
   if (ItemP->String == 0)
      return 0;
   
+   Bucket = ItemP;
   return ItemP->String;
 }
 									/*}}}*/
+
+// SrcCacheCheck - Check if the source package cache is uptodate	/*{{{*/
+// ---------------------------------------------------------------------
+/* The source cache is checked against the source list and the files
+   on disk, any difference results in a false. */
+bool pkgSrcCacheCheck(pkgSourceList &List)
+{
+   if (_error->PendingError() == true)
+      return false;
+
+   string CacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
+   string ListDir = _config->FindDir("Dir::State::lists");
+   
+   // Count the number of missing files
+   int Missing = 0;
+   for (pkgSourceList::const_iterator I = List.begin(); I != List.end(); I++)
+   {
+      // Only cache deb source types.
+      if (I->Type != pkgSourceList::Item::Deb)
+      {
+         Missing++;
+         continue;
+      }
+      
+      string File = ListDir + URItoFileName(I->PackagesURI());
+      struct stat Buf;
+      if (stat(File.c_str(),&Buf) != 0)
+      {
+         _error->WarningE("stat","Couldn't stat source package list '%s' (%s)",
+                          I->PackagesInfo().c_str(),File.c_str());
+         Missing++;
+      }
+   }
+   
+   // Open the source package cache
+   if (FileExists(CacheFile) == false)
+      return false;
+   
+   FileFd CacheF(CacheFile,FileFd::ReadOnly);
+   if (_error->PendingError() == true)
+   {
+      _error->Discard();
+      return false;
+   }
+   
+   MMap Map(CacheF,MMap::Public | MMap::ReadOnly);
+   if (_error->PendingError() == true || Map.Size() == 0)
+   {
+      _error->Discard();
+      return false;
+   }
+   
+   pkgCache Cache(Map);
+   if (_error->PendingError() == true)
+   {
+      _error->Discard();
+      return false;
+   }
+
+   // They are certianly out of sync
+   if (Cache.Head().PackageFileCount != List.size() - Missing)
+      return false;
+   
+   for (pkgCache::PkgFileIterator F(Cache); F.end() == false; F++)
+   {
+      // Search for a match in the source list
+      bool Bad = true;
+      for (pkgSourceList::const_iterator I = List.begin(); 
+           I != List.end(); I++)
+      {
+         // Only cache deb source types.
+         if (I->Type != pkgSourceList::Item::Deb)
+            continue;
+         
+         string File = ListDir + URItoFileName(I->PackagesURI());
+         if (F.FileName() == File)
+         {
+            Bad = false;
+            break;
+         }
+      }
+      
+      // Check if the file matches what was cached
+      Bad |= !F.IsOk();
+      if (Bad == true)
+         return false;
+   }
+   
+   return true;
+}
+									/*}}}*/
+// PkgCacheCheck - Check if the package cache is uptodate		/*{{{*/
+// ---------------------------------------------------------------------
+/* This does a simple check of all files used to compose the cache */
+bool pkgPkgCacheCheck(string CacheFile)
+{
+   if (_error->PendingError() == true)
+      return false;
+   
+   // Open the source package cache
+   if (FileExists(CacheFile) == false)
+      return false;
+   
+   FileFd CacheF(CacheFile,FileFd::ReadOnly);
+   if (_error->PendingError() == true)
+   {
+      _error->Discard();
+      return false;
+   }
+   
+   MMap Map(CacheF,MMap::Public | MMap::ReadOnly);
+   if (_error->PendingError() == true || Map.Size() == 0)
+   {
+      _error->Discard();
+      return false;
+   }
+   
+   pkgCache Cache(Map);
+   if (_error->PendingError() == true)
+   {
+      _error->Discard();
+      return false;
+   }
+
+   // Status files that must be in the cache
+   string Status[3];
+   Status[0] = _config->FindFile("Dir::State::xstatus");
+   Status[1]= _config->FindFile("Dir::State::userstatus");
+   Status[2] = _config->FindFile("Dir::State::status");
+   
+   // Cheack each file
+   for (pkgCache::PkgFileIterator F(Cache); F.end() == false; F++)
+   {
+      if (F.IsOk() == false)
+         return false;
+      
+      // See if this is one of the status files
+      for (int I = 0; I != 3; I++)
+         if (F.FileName() == Status[I])
+            Status[I] = string();
+   }
+   
+   // Make sure all the status files are loaded.
+   for (int I = 0; I != 3; I++)
+   {
+      if (Status[I].empty() == false && FileExists(Status[I]) == true)
+         return false;
+   }
+   
+   return true;
+}
+									/*}}}*/
+// AddStatusSize - Add the size of the status files			/*{{{*/
+// ---------------------------------------------------------------------
+/* This adds the size of all the status files to the size counter */
+bool pkgAddStatusSize(unsigned long &TotalSize)
+{
+   // Grab the file names
+   string xstatus = _config->FindFile("Dir::State::xstatus");
+   string userstatus = _config->FindFile("Dir::State::userstatus");
+   string status = _config->FindFile("Dir::State::status");
+   
+   // Grab the sizes
+   struct stat Buf;
+   if (stat(xstatus.c_str(),&Buf) == 0)
+      TotalSize += Buf.st_size;
+   if (stat(userstatus.c_str(),&Buf) == 0)
+      TotalSize += Buf.st_size;
+   if (stat(status.c_str(),&Buf) != 0)
+      return _error->Errno("stat","Couldn't stat the status file %s",status.c_str());
+   TotalSize += Buf.st_size;
+   
+   return true;
+}
+									/*}}}*/
+// MergeStatus - Add the status files to the cache			/*{{{*/
+// ---------------------------------------------------------------------
+/* This adds the status files to the map */
+bool pkgMergeStatus(OpProgress &Progress,pkgCacheGenerator &Gen,
+                    unsigned long &CurrentSize,unsigned long TotalSize)
+{
+   // Grab the file names
+   string Status[3];
+   Status[0] = _config->FindFile("Dir::State::xstatus");
+   Status[1]= _config->FindFile("Dir::State::userstatus");
+   Status[2] = _config->FindFile("Dir::State::status");
+   
+   for (int I = 0; I != 3; I++)
+   {
+      // Check if the file exists and it is not the primary status file.
+      string File = Status[I];
+      if (I != 2 && FileExists(File) == false)
+         continue;
+         
+      FileFd Pkg(File,FileFd::ReadOnly);
+      debListParser Parser(Pkg);
+      Progress.OverallProgress(CurrentSize,TotalSize,Pkg.Size(),"Reading Package Lists");
+      if (_error->PendingError() == true)
+         return _error->Error("Problem opening %s",File.c_str());
+      CurrentSize += Pkg.Size();
+
+      Progress.SubProgress(0,"Local Package State - " + flNotDir(File));
+      if (Gen.SelectFile(File,pkgCache::Flag::NotSource) == false)
+         return _error->Error("Problem with SelectFile %s",File.c_str());
+      
+      if (Gen.MergeList(Parser) == false)
+         return _error->Error("Problem with MergeList %s",File.c_str());
+      Progress.Progress(Pkg.Size());
+   }
+   
+   return true;
+}
+									/*}}}*/
+// GenerateSrcCache - Write the source package lists to the map	/*{{{*/
+// ---------------------------------------------------------------------
+/* This puts the source package cache into the given generator. */
+bool pkgGenerateSrcCache(pkgSourceList &List,OpProgress &Progress,
+                         pkgCacheGenerator &Gen,
+                         unsigned long &CurrentSize,unsigned long &TotalSize)
+{
+   string ListDir = _config->FindDir("Dir::State::lists");
+   
+   // Prepare the progress indicator
+   TotalSize = 0;
+   struct stat Buf;
+   for (pkgSourceList::const_iterator I = List.begin(); I != List.end(); I++)
+   {
+      string File = ListDir + URItoFileName(I->PackagesURI());
+      if (stat(File.c_str(),&Buf) != 0)
+         continue;
+      TotalSize += Buf.st_size;
+   }
+   
+   if (pkgAddStatusSize(TotalSize) == false)
+      return false;
+   
+   // Generate the pkg source cache
+   CurrentSize = 0;
+   for (pkgSourceList::const_iterator I = List.begin(); I != List.end(); I++)
+   {
+      // Only cache deb source types.
+      if (I->Type != pkgSourceList::Item::Deb)
+         continue;
+      
+      string File = ListDir + URItoFileName(I->PackagesURI());
+      
+      if (FileExists(File) == false)
+         continue;
+      
+      FileFd Pkg(File,FileFd::ReadOnly);
+      debListParser Parser(Pkg);
+      Progress.OverallProgress(CurrentSize,TotalSize,Pkg.Size(),"Reading Package Lists");
+      if (_error->PendingError() == true)
+         return _error->Error("Problem opening %s",File.c_str());
+      CurrentSize += Pkg.Size();
+      
+      Progress.SubProgress(0,I->PackagesInfo());
+      if (Gen.SelectFile(File) == false)
+         return _error->Error("Problem with SelectFile %s",File.c_str());
+      
+      if (Gen.MergeList(Parser) == false)
+         return _error->Error("Problem with MergeList %s",File.c_str());
+      
+      // Check the release file
+      string RFile = ListDir + URItoFileName(I->ReleaseURI());
+      if (FileExists(RFile) == true)
+      {
+         FileFd Rel(RFile,FileFd::ReadOnly);
+         if (_error->PendingError() == true)
+            return false;
+         Parser.LoadReleaseInfo(Gen.GetCurFile(),Rel);
+      }
+   }
+   
+   return true;
+}
+									/*}}}*/
+// MakeStatusCache - Generates a cache that includes the status files	/*{{{*/
+// ---------------------------------------------------------------------
+/* This copies the package source cache and then merges the status and 
+   xstatus files into it. */
+bool pkgMakeStatusCache(pkgSourceList &List,OpProgress &Progress)
+{
+   Progress.OverallProgress(0,1,1,"Reading Package Lists");
+   
+   string CacheFile = _config->FindFile("Dir::Cache::pkgcache");
+   bool SrcOk = pkgSrcCacheCheck(List);
+   bool PkgOk = SrcOk && pkgPkgCacheCheck(CacheFile);
+
+   // Rebuild the source and package caches
+   if (SrcOk == false)
+   {
+      string SCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
+      FileFd SCacheF(SCacheFile,FileFd::WriteEmpty);
+      FileFd CacheF(CacheFile,FileFd::WriteEmpty);
+      DynamicMMap Map(CacheF,MMap::Public);
+      if (_error->PendingError() == true)
+         return false;
+      
+      pkgCacheGenerator Gen(Map,Progress);
+      unsigned long CurrentSize = 0;
+      unsigned long TotalSize = 0;
+      if (pkgGenerateSrcCache(List,Progress,Gen,CurrentSize,TotalSize) == false)
+         return false;
+      
+      // Write the src cache
+      Gen.GetCache().HeaderP->Dirty = false;
+      if (SCacheF.Write(Map.Data(),Map.Size()) == false)
+         return _error->Error("IO Error saving source cache");
+      Gen.GetCache().HeaderP->Dirty = true;
+      
+      // Merge in the source caches
+      return pkgMergeStatus(Progress,Gen,CurrentSize,TotalSize);
+   }
+
+   if (PkgOk == true)
+   {
+      Progress.OverallProgress(1,1,1,"Reading Package Lists");
+      return true;
+   }
+   
+   // We use the source cache to generate the package cache
+   string SCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
+   
+   FileFd SCacheF(SCacheFile,FileFd::ReadOnly);
+   FileFd CacheF(CacheFile,FileFd::WriteEmpty);
+   DynamicMMap Map(CacheF,MMap::Public);
+   if (_error->PendingError() == true)
+      return false;
+   
+   // Preload the map with the source cache
+   if (SCacheF.Read((unsigned char *)Map.Data() + Map.RawAllocate(SCacheF.Size()),
+                    SCacheF.Size()) == false)
+      return false;
+   
+   pkgCacheGenerator Gen(Map,Progress);
+   
+   // Compute the progress
+   unsigned long TotalSize = 0;
+   if (pkgAddStatusSize(TotalSize) == false)
+      return false;
+
+   unsigned long CurrentSize = 0;
+   return pkgMergeStatus(Progress,Gen,CurrentSize,TotalSize);
+}
+									/*}}}*/
+// MakeStatusCacheMem - Returns a map for the status cache		/*{{{*/
+// ---------------------------------------------------------------------
+/* This creates a map object for the status cache. If the process has write
+   access to the caches then it is the same as MakeStatusCache, otherwise it
+   creates a memory block and puts the cache in there. */
+MMap *pkgMakeStatusCacheMem(pkgSourceList &List,OpProgress &Progress)
+{
+   /* If the cache file is writeable this is just a wrapper for
+      MakeStatusCache */
+   string CacheFile = _config->FindFile("Dir::Cache::pkgcache");
+   bool Writeable = (access(CacheFile.c_str(),W_OK) == 0) ||
+                    (errno == ENOENT);
+   
+   if (Writeable == true)
+   {
+      if (pkgMakeStatusCache(List,Progress) == false)
+         return 0;
+      
+      // Open the cache file
+      FileFd File(_config->FindFile("Dir::Cache::pkgcache"),FileFd::ReadOnly);
+      if (_error->PendingError() == true)
+         return 0;
+      
+      MMap *Map = new MMap(File,MMap::Public | MMap::ReadOnly);
+      if (_error->PendingError() == true)
+      {
+         delete Map;
+         return 0;
+      }
+      return Map;
+   }
+   
+   // Mostly from MakeStatusCache..
+   Progress.OverallProgress(0,1,1,"Reading Package Lists");
+   
+   bool SrcOk = pkgSrcCacheCheck(List);
+   bool PkgOk = SrcOk && pkgPkgCacheCheck(CacheFile);
+
+   // Rebuild the source and package caches
+   if (SrcOk == false)
+   {
+      DynamicMMap *Map = new DynamicMMap(MMap::Public);
+      if (_error->PendingError() == true)
+      {
+         delete Map;
+         return 0;
+      }
+      
+      pkgCacheGenerator Gen(*Map,Progress);
+      unsigned long CurrentSize = 0;
+      unsigned long TotalSize = 0;
+      if (pkgGenerateSrcCache(List,Progress,Gen,CurrentSize,TotalSize) == false)
+      {
+         delete Map;
+         return 0;
+      }
+      
+      // Merge in the source caches
+      if (pkgMergeStatus(Progress,Gen,CurrentSize,TotalSize) == false)
+      {
+         delete Map;
+         return 0;
+      }
+      
+      return Map;
+   }
+
+   if (PkgOk == true)
+   {
+      Progress.OverallProgress(1,1,1,"Reading Package Lists");
+      
+      // Open the cache file
+      FileFd File(_config->FindFile("Dir::Cache::pkgcache"),FileFd::ReadOnly);
+      if (_error->PendingError() == true)
+         return 0;
+      
+      MMap *Map = new MMap(File,MMap::Public | MMap::ReadOnly);
+      if (_error->PendingError() == true)
+      {
+         delete Map;
+         return 0;
+      }
+      return Map;
+   }
+   
+   // We use the source cache to generate the package cache
+   string SCacheFile = _config->FindFile("Dir::Cache::srcpkgcache");
+   FileFd SCacheF(SCacheFile,FileFd::ReadOnly);
+   DynamicMMap *Map = new DynamicMMap(MMap::Public);
+   if (_error->PendingError() == true)
+   {
+      delete Map;
+      return 0;
+   }
+   
+   // Preload the map with the source cache
+   if (SCacheF.Read((unsigned char *)Map->Data() + Map->RawAllocate(SCacheF.Size()),
+                    SCacheF.Size()) == false)
+   {
+      delete Map;
+      return 0;
+   }
+   
+   pkgCacheGenerator Gen(*Map,Progress);
+   
+   // Compute the progress
+   unsigned long TotalSize = 0;
+   if (pkgAddStatusSize(TotalSize) == false)
+   {
+      delete Map;
+      return 0;
+   }
+
+   unsigned long CurrentSize = 0;
+   if (pkgMergeStatus(Progress,Gen,CurrentSize,TotalSize) == false)
+   {
+      delete Map;
+      return 0;
+   }
+   
+   return Map;
+}
+									/*}}}*/
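
The NewDepends change above keeps a cached end-of-list pointer (OldDepVer/OldDepLast) so repeated appends to a version's dependency list do not rescan the list each time. The following is a standalone sketch of that idea, not code from the patch; the type and variable names (Dep, Pool, Tail) are illustrative only, assuming a plain singly linked list.

// Appending N items by walking from the head every time is O(N^2); keeping a
// pointer to the last "next" slot makes each append O(1).
#include <cstdio>

struct Dep
{
   int Id;
   Dep *Next;
};

int main()
{
   Dep Pool[5];
   Dep *Head = 0;

   // Cached insertion point, analogous to OldDepLast in the diff: it always
   // refers to the pointer slot the next appended element must be stored into.
   Dep **Tail = &Head;

   for (int I = 0; I != 5; I++)
   {
      Pool[I].Id = I;
      Pool[I].Next = 0;
      *Tail = &Pool[I];       // link at the end without rescanning the list
      Tail = &Pool[I].Next;   // remember the new end for the next append
   }

   for (Dep *D = Head; D != 0; D = D->Next)
      printf("%d ",D->Id);
   printf("\n");
   return 0;
}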
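WriteUniqString gains a "very small transient hash table" that short-circuits the sorted string list walk when the same string is written repeatedly. Below is a standalone sketch of that caching pattern, not the apt implementation; the bucket array, the key function, and SlowLookup are assumptions standing in for the real string pool.

// A tiny direct-mapped cache in front of a slower lookup: on a hit the search
// is skipped entirely, on a miss the slow path runs and refreshes the bucket.
#include <cstring>
#include <cstdio>

static const char *Bucket[256];          // transient cache, zero-initialized

// Hypothetical slow path standing in for the sorted string list walk.
static const char *SlowLookup(const char *S)
{
   return S;
}

static const char *Lookup(const char *S)
{
   // Same flavour of key as (S[0]*5 + S[1]) % _count(UniqHash) in the diff.
   unsigned Idx = ((unsigned char)S[0]*5 + (unsigned char)S[1]) % 256;
   if (Bucket[Idx] != 0 && strcmp(Bucket[Idx],S) == 0)
      return Bucket[Idx];                // hit: skip the expensive search
   const char *Res = SlowLookup(S);      // miss: do the real work
   Bucket[Idx] = Res;                    // remember it for the next call
   return Res;
}

int main()
{
   printf("%s\n",Lookup("libc6"));
   printf("%s\n",Lookup("libc6"));       // second call is served from the bucket
   return 0;
}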