From: Michael Vogt Date: Tue, 10 Jul 2012 11:26:11 +0000 (+0200) Subject: merged the relevant bits from lp:~mvo/apt/lp346386 X-Git-Tag: 0.9.13.exp1ubuntu1~51^2~3 X-Git-Url: https://git.saurik.com/apt.git/commitdiff_plain/5a8cd0703bcc751ff5eb0ae5bf196e61bd428c91?hp=479a2beb0f5605652689d74a78dd224b69ba2e8f merged the relevant bits from lp:~mvo/apt/lp346386 --- diff --git a/.bzr-builddeb/default.conf b/.bzr-builddeb/default.conf index 9c55498ce..c33445b56 100644 --- a/.bzr-builddeb/default.conf +++ b/.bzr-builddeb/default.conf @@ -1,2 +1,6 @@ [BUILDDEB] native = true + +[HOOKS] +pre-export = ./prepare-release pre-export +post-build = ./prepare-release post-build diff --git a/.bzrignore b/.bzrignore index dab38cf71..6dcdc8eb5 100644 --- a/.bzrignore +++ b/.bzrignore @@ -26,4 +26,5 @@ abicheck/tests/ # apt man pages and other documentation ./doc/*/ !./doc/examples/ +./doc/examples/sources.list !./doc/po/ diff --git a/COMPILING b/COMPILING index 5ea5e52d1..bc934c846 100644 --- a/COMPILING +++ b/COMPILING @@ -36,7 +36,6 @@ functionality. Patches to make autoconf detect these cases and generate the required shims are OK. Current shims: - * C99 integer types 'inttypes.h' * sys/statvfs.h to convert from BSD/old-glibc statfs to SUS statvfs * rfc2553 hostname resolution (methods/rfc*), shims to normal gethostbyname. The more adventurous could steal the KAME IPv6 enabled resolvers for those diff --git a/Makefile b/Makefile index 9a47014c3..98b6d337a 100644 --- a/Makefile +++ b/Makefile @@ -9,8 +9,8 @@ endif .PHONY: default default: startup all -.PHONY: headers library clean veryclean all binary program doc test -all headers library clean veryclean binary program doc dirs test: +.PHONY: headers library clean veryclean all binary program doc test update-po +all headers library clean veryclean binary program doc manpages debiandoc test update-po startup dirs: $(MAKE) -C apt-pkg $@ $(MAKE) -C apt-inst $@ $(MAKE) -C methods $@ @@ -21,9 +21,14 @@ all headers library clean veryclean binary program doc dirs test: $(MAKE) -C po $@ $(MAKE) -C test $@ +all headers library clean veryclean binary program doc manpages debiandoc test update-po: startup dirs + +dirs: startup + # Some very common aliases -.PHONY: maintainer-clean dist-clean distclean pristine sanity +.PHONY: maintainer-clean dist-clean distclean pristine sanity maintainer-clean dist-clean distclean pristine sanity: veryclean +veryclean: clean # The startup target builds the necessary configure scripts. It should # be used after a CVS checkout. diff --git a/apt-inst/database.cc b/apt-inst/database.cc deleted file mode 100644 index da7613491..000000000 --- a/apt-inst/database.cc +++ /dev/null @@ -1,35 +0,0 @@ -// -*- mode: cpp; mode: fold -*- -// Description /*{{{*/ -// $Id: database.cc,v 1.2 2001/02/20 07:03:16 jgg Exp $ -/* ###################################################################### - - Data Base Abstraction - - ##################################################################### */ - /*}}}*/ -// Include Files /*{{{*/ -#include - -#include -#include -#include - /*}}}*/ - -// DataBase::GetMetaTmp - Get the temp dir /*{{{*/ -// --------------------------------------------------------------------- -/* This re-initializes the meta temporary directory if it hasn't yet - been inited for this cycle. 
The flag is the emptyness of MetaDir */ -bool pkgDataBase::GetMetaTmp(std::string &Dir) -{ - if (MetaDir.empty() == true) - if (InitMetaTmp(MetaDir) == false) - return false; - Dir = MetaDir; - return true; -} - /*}}}*/ -pkgDataBase::~pkgDataBase() -{ - delete Cache; - delete FList; -} diff --git a/apt-inst/database.h b/apt-inst/database.h deleted file mode 100644 index 64e149f98..000000000 --- a/apt-inst/database.h +++ /dev/null @@ -1,60 +0,0 @@ -// -*- mode: cpp; mode: fold -*- -// Description /*{{{*/ -// $Id: database.h,v 1.2 2001/02/20 07:03:16 jgg Exp $ -/* ###################################################################### - - Data Base Abstraction - - This class provides a simple interface to an abstract notion of a - database directory for storing state information about the system. - - The 'Meta' information for a package is the control information and - setup scripts stored inside the archive. GetMetaTmp returns the name of - a directory that is used to store named files containing the control - information. - - The File Listing is the database of installed files. It is loaded - into the memory/persistent cache structure by the ReadFileList method. - - ##################################################################### */ - /*}}}*/ -#ifndef PKGLIB_DATABASE_H -#define PKGLIB_DATABASE_H - -#include - -#include - -#ifndef APT_8_CLEANER_HEADERS -#include -#endif - -class pkgFLCache; -class OpProgress; - -class pkgDataBase -{ - protected: - - pkgCacheGenerator *Cache; - pkgFLCache *FList; - std::string MetaDir; - virtual bool InitMetaTmp(std::string &Dir) = 0; - - public: - - // Some manipulators for the cache and generator - inline pkgCache &GetCache() {return Cache->GetCache();}; - inline pkgFLCache &GetFLCache() {return *FList;}; - inline pkgCacheGenerator &GetGenerator() {return *Cache;}; - - bool GetMetaTmp(std::string &Dir); - virtual bool ReadyFileList(OpProgress &Progress) = 0; - virtual bool ReadyPkgCache(OpProgress &Progress) = 0; - virtual bool LoadChanges() = 0; - - pkgDataBase() : Cache(0), FList(0) {}; - virtual ~pkgDataBase(); -}; - -#endif diff --git a/apt-inst/deb/debfile.cc b/apt-inst/deb/debfile.cc index 4bd065cf8..ab4037915 100644 --- a/apt-inst/deb/debfile.cc +++ b/apt-inst/deb/debfile.cc @@ -18,7 +18,6 @@ // Include Files /*{{{*/ #include -#include #include #include #include @@ -90,42 +89,6 @@ const ARArchive::Member *debDebFile::GotoMember(const char *Name) return Member; } /*}}}*/ -// DebFile::ExtractControl - Extract Control information /*{{{*/ -// --------------------------------------------------------------------- -/* Extract the control information into the Database's temporary - directory. */ -bool debDebFile::ExtractControl(pkgDataBase &DB) -{ - // Get the archive member and positition the file - const ARArchive::Member *Member = GotoMember("control.tar.gz"); - if (Member == 0) - return false; - - // Prepare Tar - ControlExtract Extract; - ExtractTar Tar(File,Member->Size,"gzip"); - if (_error->PendingError() == true) - return false; - - // Get into the temporary directory - std::string Cwd = SafeGetCWD(); - std::string Tmp; - if (DB.GetMetaTmp(Tmp) == false) - return false; - if (chdir(Tmp.c_str()) != 0) - return _error->Errno("chdir",_("Couldn't change to %s"),Tmp.c_str()); - - // Do extraction - if (Tar.Go(Extract) == false) - return false; - - // Switch out of the tmp directory. 
- if (chdir(Cwd.c_str()) != 0) - chdir("/"); - - return true; -} - /*}}}*/ // DebFile::ExtractArchive - Extract the archive data itself /*{{{*/ // --------------------------------------------------------------------- /* Simple wrapper around tar.. */ @@ -167,32 +130,6 @@ bool debDebFile::ExtractArchive(pkgDirStream &Stream) return Tar.Go(Stream); } /*}}}*/ -// DebFile::MergeControl - Merge the control information /*{{{*/ -// --------------------------------------------------------------------- -/* This reads the extracted control file into the cache and returns the - version that was parsed. All this really does is select the correct - parser and correct file to parse. */ -pkgCache::VerIterator debDebFile::MergeControl(pkgDataBase &DB) -{ - // Open the control file - std::string Tmp; - if (DB.GetMetaTmp(Tmp) == false) - return pkgCache::VerIterator(DB.GetCache()); - FileFd Fd(Tmp + "control",FileFd::ReadOnly); - if (_error->PendingError() == true) - return pkgCache::VerIterator(DB.GetCache()); - - // Parse it - debListParser Parse(&Fd); - pkgCache::VerIterator Ver(DB.GetCache()); - if (DB.GetGenerator().MergeList(Parse,&Ver) == false) - return pkgCache::VerIterator(DB.GetCache()); - - if (Ver.end() == true) - _error->Error(_("Failed to locate a valid control file")); - return Ver; -} - /*}}}*/ // DebFile::ControlExtract::DoItem - Control Tar Extraction /*{{{*/ // --------------------------------------------------------------------- diff --git a/apt-inst/deb/debfile.h b/apt-inst/deb/debfile.h index 5e1ea1d2f..38211fb0f 100644 --- a/apt-inst/deb/debfile.h +++ b/apt-inst/deb/debfile.h @@ -30,11 +30,10 @@ #include #ifndef APT_8_CLEANER_HEADERS -#include +#include #endif class FileFd; -class pkgDataBase; class debDebFile { @@ -46,13 +45,10 @@ class debDebFile bool CheckMember(const char *Name); public: - class ControlExtract; class MemControlExtract; - - bool ExtractControl(pkgDataBase &DB); + bool ExtractArchive(pkgDirStream &Stream); - pkgCache::VerIterator MergeControl(pkgDataBase &DB); const ARArchive::Member *GotoMember(const char *Name); inline FileFd &GetFile() {return File;}; diff --git a/apt-inst/deb/dpkgdb.cc b/apt-inst/deb/dpkgdb.cc deleted file mode 100644 index 819c123f6..000000000 --- a/apt-inst/deb/dpkgdb.cc +++ /dev/null @@ -1,494 +0,0 @@ -// -*- mode: cpp; mode: fold -*- -// Description /*{{{*/ -// $Id: dpkgdb.cc,v 1.7.2.1 2004/01/16 18:58:50 mdz Exp $ -/* ###################################################################### - - DPKGv1 Database Implemenation - - This class provides parsers and other implementations for the DPKGv1 - database. It reads the diversion file, the list files and the status - file to build both the list of currently installed files and the - currently installed package list. - - ##################################################################### */ - /*}}}*/ -// Include Files /*{{{*/ -#include - -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include - /*}}}*/ -using namespace std; - -// EraseDir - Erase A Directory /*{{{*/ -// --------------------------------------------------------------------- -/* This is necessary to create a new empty sub directory. The caller should - invoke mkdir after this with the proper permissions and check for - error. Maybe stick this in fileutils */ -static bool EraseDir(const char *Dir) -{ - // First we try a simple RM - if (rmdir(Dir) == 0 || - errno == ENOENT) - return true; - - // A file? 
Easy enough.. - if (errno == ENOTDIR) - { - if (unlink(Dir) != 0) - return _error->Errno("unlink",_("Failed to remove %s"),Dir); - return true; - } - - // Should not happen - if (errno != ENOTEMPTY) - return _error->Errno("rmdir",_("Failed to remove %s"),Dir); - - // Purge it using rm - pid_t Pid = ExecFork(); - - // Spawn the subprocess - if (Pid == 0) - { - execlp(_config->Find("Dir::Bin::rm","/bin/rm").c_str(), - "rm","-rf","--",Dir,(char *)NULL); - _exit(100); - } - return ExecWait(Pid,_config->Find("dir::bin::rm","/bin/rm").c_str()); -} - /*}}}*/ -// DpkgDB::debDpkgDB - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* */ -debDpkgDB::debDpkgDB() : CacheMap(0), FileMap(0) -{ - AdminDir = flNotFile(_config->Find("Dir::State::status")); - DiverInode = 0; - DiverTime = 0; -} - /*}}}*/ -// DpkgDB::~debDpkgDB - Destructor /*{{{*/ -// --------------------------------------------------------------------- -/* */ -debDpkgDB::~debDpkgDB() -{ - delete Cache; - Cache = 0; - delete CacheMap; - CacheMap = 0; - - delete FList; - FList = 0; - delete FileMap; - FileMap = 0; -} - /*}}}*/ -// DpkgDB::InitMetaTmp - Get the temp dir for meta information /*{{{*/ -// --------------------------------------------------------------------- -/* This creats+empties the meta temporary directory /var/lib/dpkg/tmp.ci - Only one package at a time can be using the returned meta directory. */ -bool debDpkgDB::InitMetaTmp(string &Dir) -{ - string Tmp = AdminDir + "tmp.ci/"; - if (EraseDir(Tmp.c_str()) == false) - return _error->Error(_("Unable to create %s"),Tmp.c_str()); - if (mkdir(Tmp.c_str(),0755) != 0) - return _error->Errno("mkdir",_("Unable to create %s"),Tmp.c_str()); - - // Verify it is on the same filesystem as the main info directory - dev_t Dev; - struct stat St; - if (stat((AdminDir + "info").c_str(),&St) != 0) - return _error->Errno("stat",_("Failed to stat %sinfo"),AdminDir.c_str()); - Dev = St.st_dev; - if (stat(Tmp.c_str(),&St) != 0) - return _error->Errno("stat",_("Failed to stat %s"),Tmp.c_str()); - if (Dev != St.st_dev) - return _error->Error(_("The info and temp directories need to be on the same filesystem")); - - // Done - Dir = Tmp; - return true; -} - /*}}}*/ -// DpkgDB::ReadyPkgCache - Prepare the cache with the current status /*{{{*/ -// --------------------------------------------------------------------- -/* This reads in the status file into an empty cache. This really needs - to be somehow unified with the high level APT notion of the Database - directory, but there is no clear way on how to do that yet. */ -bool debDpkgDB::ReadyPkgCache(OpProgress &Progress) -{ - if (Cache != 0) - { - Progress.OverallProgress(1,1,1,_("Reading package lists")); - return true; - } - - if (CacheMap != 0) - { - delete CacheMap; - CacheMap = 0; - } - - if (pkgCacheGenerator::MakeOnlyStatusCache(&Progress,&CacheMap) == false) - return false; - Cache->DropProgress(); - - return true; -} - /*}}}*/ -// DpkgDB::ReadFList - Read the File Listings in /*{{{*/ -// --------------------------------------------------------------------- -/* This reads the file listing in from the state directory. This is a - performance critical routine, as it needs to parse about 50k lines of - text spread over a hundred or more files. For an initial cold start - most of the time is spent in reading file inodes and so on, not - actually parsing. 
*/ -bool debDpkgDB::ReadFList(OpProgress &Progress) -{ - // Count the number of packages we need to read information for - unsigned long Total = 0; - pkgCache &Cache = this->Cache->GetCache(); - for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++) - { - // Only not installed packages have no files. - if (I->CurrentState == pkgCache::State::NotInstalled) - continue; - Total++; - } - - /* Switch into the admin dir, this prevents useless lookups for the - path components */ - string Cwd = SafeGetCWD(); - if (chdir((AdminDir + "info/").c_str()) != 0) - return _error->Errno("chdir",_("Failed to change to the admin dir %sinfo"),AdminDir.c_str()); - - // Allocate a buffer. Anything larger than this buffer will be mmaped - unsigned long BufSize = 32*1024; - char *Buffer = new char[BufSize]; - - // Begin Loading them - unsigned long Count = 0; - char Name[300]; - for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++) - { - /* Only not installed packages have no files. ConfFile packages have - file lists but we don't want to read them in */ - if (I->CurrentState == pkgCache::State::NotInstalled || - I->CurrentState == pkgCache::State::ConfigFiles) - continue; - - // Fetch a package handle to associate with the file - pkgFLCache::PkgIterator FlPkg = FList->GetPkg(I.Name(),0,true); - if (FlPkg.end() == true) - { - _error->Error(_("Internal error getting a package name")); - break; - } - - Progress.OverallProgress(Count,Total,1,_("Reading file listing")); - - // Open the list file - snprintf(Name,sizeof(Name),"%s.list",I.Name()); - int Fd = open(Name,O_RDONLY); - - /* Okay this is very strange and bad.. Best thing is to bail and - instruct the user to look into it. */ - struct stat Stat; - if (Fd == -1 || fstat(Fd,&Stat) != 0) - { - _error->Errno("open",_("Failed to open the list file '%sinfo/%s'. 
If you " - "cannot restore this file then make it empty " - "and immediately re-install the same version of the package!"), - AdminDir.c_str(),Name); - break; - } - - // Set File to be a memory buffer containing the whole file - char *File; - if ((unsigned)Stat.st_size < BufSize) - { - if (read(Fd,Buffer,Stat.st_size) != Stat.st_size) - { - _error->Errno("read",_("Failed reading the list file %sinfo/%s"), - AdminDir.c_str(),Name); - close(Fd); - break; - } - File = Buffer; - } - else - { - // Use mmap - File = (char *)mmap(0,Stat.st_size,PROT_READ,MAP_PRIVATE,Fd,0); - if (File == (char *)(-1)) - { - _error->Errno("mmap",_("Failed reading the list file %sinfo/%s"), - AdminDir.c_str(),Name); - close(Fd); - break; - } - } - - // Parse it - const char *Start = File; - const char *End = File; - const char *Finish = File + Stat.st_size; - for (; End < Finish; End++) - { - // Not an end of line - if (*End != '\n' && End + 1 < Finish) - continue; - - // Skip blank lines - if (End - Start > 1) - { - pkgFLCache::NodeIterator Node = FList->GetNode(Start,End, - FlPkg.Offset(),true,false); - if (Node.end() == true) - { - _error->Error(_("Internal error getting a node")); - break; - } - } - - // Skip past the end of line - for (; *End == '\n' && End < Finish; End++); - Start = End; - } - - close(Fd); - if ((unsigned)Stat.st_size >= BufSize) - munmap((caddr_t)File,Stat.st_size); - - // Failed - if (End < Finish) - break; - - Count++; - } - - delete [] Buffer; - if (chdir(Cwd.c_str()) != 0) - chdir("/"); - - return !_error->PendingError(); -} - /*}}}*/ -// DpkgDB::ReadDiversions - Load the diversions file /*{{{*/ -// --------------------------------------------------------------------- -/* Read the diversion file in from disk. This is usually invoked by - LoadChanges before performing an operation that uses the FLCache. 
*/ -bool debDpkgDB::ReadDiversions() -{ - struct stat Stat; - if (stat((AdminDir + "diversions").c_str(),&Stat) != 0) - return true; - - if (_error->PendingError() == true) - return false; - - FILE *Fd = fopen((AdminDir + "diversions").c_str(),"r"); - if (Fd == 0) - return _error->Errno("fopen",_("Failed to open the diversions file %sdiversions"),AdminDir.c_str()); - - FList->BeginDiverLoad(); - while (1) - { - char From[300]; - char To[300]; - char Package[100]; - - // Read the three lines in - if (fgets(From,sizeof(From),Fd) == 0) - break; - if (fgets(To,sizeof(To),Fd) == 0 || - fgets(Package,sizeof(Package),Fd) == 0) - { - _error->Error(_("The diversion file is corrupted")); - break; - } - - // Strip the \ns - unsigned long Len = strlen(From); - if (Len < 2 || From[Len-1] != '\n') - _error->Error(_("Invalid line in the diversion file: %s"),From); - else - From[Len-1] = 0; - Len = strlen(To); - if (Len < 2 || To[Len-1] != '\n') - _error->Error(_("Invalid line in the diversion file: %s"),To); - else - To[Len-1] = 0; - Len = strlen(Package); - if (Len < 2 || Package[Len-1] != '\n') - _error->Error(_("Invalid line in the diversion file: %s"),Package); - else - Package[Len-1] = 0; - - // Make sure the lines were parsed OK - if (_error->PendingError() == true) - break; - - // Fetch a package - if (strcmp(Package,":") == 0) - Package[0] = 0; - pkgFLCache::PkgIterator FlPkg = FList->GetPkg(Package,0,true); - if (FlPkg.end() == true) - { - _error->Error(_("Internal error getting a package name")); - break; - } - - // Install the diversion - if (FList->AddDiversion(FlPkg,From,To) == false) - { - _error->Error(_("Internal error adding a diversion")); - break; - } - } - if (_error->PendingError() == false) - FList->FinishDiverLoad(); - - DiverInode = Stat.st_ino; - DiverTime = Stat.st_mtime; - - fclose(Fd); - return !_error->PendingError(); -} - /*}}}*/ -// DpkgDB::ReadFileList - Read the file listing /*{{{*/ -// --------------------------------------------------------------------- -/* Read in the file listing. The file listing is created from three - sources, *.list, Conffile sections and the Diversion table. */ -bool debDpkgDB::ReadyFileList(OpProgress &Progress) -{ - if (Cache == 0) - return _error->Error(_("The pkg cache must be initialized first")); - if (FList != 0) - { - Progress.OverallProgress(1,1,1,_("Reading file listing")); - return true; - } - - // Create the cache and read in the file listing - FileMap = new DynamicMMap(MMap::Public); - FList = new pkgFLCache(*FileMap); - if (_error->PendingError() == true || - ReadFList(Progress) == false || - ReadConfFiles() == false || - ReadDiversions() == false) - { - delete FList; - delete FileMap; - FileMap = 0; - FList = 0; - return false; - } - - cout << "Node: " << FList->HeaderP->NodeCount << ',' << FList->HeaderP->UniqNodes << endl; - cout << "Dir: " << FList->HeaderP->DirCount << endl; - cout << "Package: " << FList->HeaderP->PackageCount << endl; - cout << "HashSize: " << FList->HeaderP->HashSize << endl; - cout << "Size: " << FileMap->Size() << endl; - cout << endl; - - return true; -} - /*}}}*/ -// DpkgDB::ReadConfFiles - Read the conf file sections from the s-file /*{{{*/ -// --------------------------------------------------------------------- -/* Reading the conf files is done by reparsing the status file. This is - actually rather fast so it is no big deal. 
*/ -bool debDpkgDB::ReadConfFiles() -{ - FileFd File(_config->FindFile("Dir::State::status"),FileFd::ReadOnly); - pkgTagFile Tags(&File); - if (_error->PendingError() == true) - return false; - - pkgTagSection Section; - while (1) - { - // Skip to the next section - unsigned long Offset = Tags.Offset(); - if (Tags.Step(Section) == false) - break; - - // Parse the line - const char *Start; - const char *Stop; - if (Section.Find("Conffiles",Start,Stop) == false) - continue; - - const char *PkgStart; - const char *PkgEnd; - if (Section.Find("Package",PkgStart,PkgEnd) == false) - return _error->Error(_("Failed to find a Package: header, offset %lu"),Offset); - - // Snag a package record for it - pkgFLCache::PkgIterator FlPkg = FList->GetPkg(PkgStart,PkgEnd,true); - if (FlPkg.end() == true) - return _error->Error(_("Internal error getting a package name")); - - // Parse the conf file lines - while (1) - { - for (; isspace(*Start) != 0 && Start < Stop; Start++); - if (Start == Stop) - break; - - // Split it into words - const char *End = Start; - for (; isspace(*End) == 0 && End < Stop; End++); - const char *StartMd5 = End; - for (; isspace(*StartMd5) != 0 && StartMd5 < Stop; StartMd5++); - const char *EndMd5 = StartMd5; - for (; isspace(*EndMd5) == 0 && EndMd5 < Stop; EndMd5++); - if (StartMd5 == EndMd5 || Start == End) - return _error->Error(_("Bad ConfFile section in the status file. Offset %lu"),Offset); - - // Insert a new entry - unsigned char MD5[16]; - if (Hex2Num(string(StartMd5,EndMd5-StartMd5),MD5,16) == false) - return _error->Error(_("Error parsing MD5. Offset %lu"),Offset); - - if (FList->AddConfFile(Start,End,FlPkg,MD5) == false) - return false; - Start = EndMd5; - } - } - - return true; -} - /*}}}*/ -// DpkgDB::LoadChanges - Read in any changed state files /*{{{*/ -// --------------------------------------------------------------------- -/* The only file in the dpkg system that can change while packages are - unpacking is the diversions file. */ -bool debDpkgDB::LoadChanges() -{ - struct stat Stat; - if (stat((AdminDir + "diversions").c_str(),&Stat) != 0) - return true; - if (DiverInode == Stat.st_ino && DiverTime == Stat.st_mtime) - return true; - return ReadDiversions(); -} - /*}}}*/ diff --git a/apt-inst/deb/dpkgdb.h b/apt-inst/deb/dpkgdb.h deleted file mode 100644 index f28563a93..000000000 --- a/apt-inst/deb/dpkgdb.h +++ /dev/null @@ -1,55 +0,0 @@ -// -*- mode: cpp; mode: fold -*- -// Description /*{{{*/ -// $Id: dpkgdb.h,v 1.2 2001/02/20 07:03:17 jgg Exp $ -/* ###################################################################### - - DPKGv1 Data Base Implemenation - - The DPKGv1 database is typically stored in /var/lib/dpkg/. For - DPKGv1 the 'meta' information is the contents of the .deb control.tar.gz - member prepended by the package name. The meta information is unpacked - in its temporary directory and then migrated into the main list dir - at a checkpoint. - - Journaling is providing by syncronized file writes to the updates sub - directory. 
- - ##################################################################### */ - /*}}}*/ -#ifndef PKGLIB_DPKGDB_H -#define PKGLIB_DPKGDB_H - - -#include - -#include - -class DynamicMMap; -class OpProgress; - -class debDpkgDB : public pkgDataBase -{ - protected: - - std::string AdminDir; - DynamicMMap *CacheMap; - DynamicMMap *FileMap; - unsigned long DiverInode; - signed long DiverTime; - - virtual bool InitMetaTmp(std::string &Dir); - bool ReadFList(OpProgress &Progress); - bool ReadDiversions(); - bool ReadConfFiles(); - - public: - - virtual bool ReadyFileList(OpProgress &Progress); - virtual bool ReadyPkgCache(OpProgress &Progress); - virtual bool LoadChanges(); - - debDpkgDB(); - virtual ~debDpkgDB(); -}; - -#endif diff --git a/apt-inst/makefile b/apt-inst/makefile index 1b9cc2676..cfb22741a 100644 --- a/apt-inst/makefile +++ b/apt-inst/makefile @@ -14,7 +14,7 @@ include ../buildlib/libversion.mak # The library name LIBRARY=apt-inst -MAJOR=1.4 +MAJOR=1.5 MINOR=0 SLIBS=$(PTHREADLIB) -lapt-pkg APT_DOMAIN:=libapt-inst$(MAJOR) @@ -23,12 +23,11 @@ APT_DOMAIN:=libapt-inst$(MAJOR) SOURCE = contrib/extracttar.cc contrib/arfile.cc # Source code for the main library -SOURCE+= filelist.cc database.cc dirstream.cc extract.cc \ - deb/dpkgdb.cc deb/debfile.cc +SOURCE+= filelist.cc dirstream.cc extract.cc deb/debfile.cc # Public header files -HEADERS = extracttar.h arfile.h filelist.h database.h extract.h \ - dpkgdb.h dirstream.h debfile.h +HEADERS = extracttar.h arfile.h filelist.h extract.h \ + dirstream.h debfile.h HEADERS := $(addprefix apt-pkg/,$(HEADERS)) include $(LIBRARY_H) diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc index 2041fd9e9..5bc1c159a 100644 --- a/apt-pkg/acquire-method.cc +++ b/apt-pkg/acquire-method.cc @@ -95,12 +95,7 @@ void pkgAcqMethod::Fail(string Err,bool Transient) { std::cout << "400 URI Failure\nURI: " << Queue->Uri << "\n" << "Message: " << Err << " " << IP << "\n"; - // Dequeue - FetchItem *Tmp = Queue; - Queue = Queue->Next; - delete Tmp; - if (Tmp == QueueBack) - QueueBack = Queue; + Dequeue(); } else std::cout << "400 URI Failure\nURI: \nMessage: " << Err << "\n"; @@ -211,13 +206,7 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt) } std::cout << "\n" << std::flush; - - // Dequeue - FetchItem *Tmp = Queue; - Queue = Queue->Next; - delete Tmp; - if (Tmp == QueueBack) - QueueBack = Queue; + Dequeue(); } /*}}}*/ // AcqMethod::MediaFail - Syncronous request for new media /*{{{*/ @@ -423,26 +412,14 @@ void pkgAcqMethod::Status(const char *Format,...) /*}}}*/ // AcqMethod::Redirect - Send a redirect message /*{{{*/ // --------------------------------------------------------------------- -/* This method sends the redirect message and also manipulates the queue - to keep the pipeline synchronized. */ +/* This method sends the redirect message and dequeues the item as + * the worker will enqueue again later on to the right queue */ void pkgAcqMethod::Redirect(const string &NewURI) { std::cout << "103 Redirect\nURI: " << Queue->Uri << "\n" << "New-URI: " << NewURI << "\n" << "\n" << std::flush; - - // Change the URI for the request. - Queue->Uri = NewURI; - - /* To keep the pipeline synchronized, move the current request to - the end of the queue, past the end of the current pipeline. 
*/ - FetchItem *I; - for (I = Queue; I->Next != 0; I = I->Next) ; - I->Next = Queue; - Queue = Queue->Next; - I->Next->Next = 0; - if (QueueBack == 0) - QueueBack = I->Next; + Dequeue(); } /*}}}*/ // AcqMethod::FetchResult::FetchResult - Constructor /*{{{*/ @@ -465,3 +442,11 @@ void pkgAcqMethod::FetchResult::TakeHashes(Hashes &Hash) SHA512Sum = Hash.SHA512.Result(); } /*}}}*/ +void pkgAcqMethod::Dequeue() { /*{{{*/ + FetchItem const * const Tmp = Queue; + Queue = Queue->Next; + if (Tmp == QueueBack) + QueueBack = Queue; + delete Tmp; +} + /*}}}*/ diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h index 2dd9ad685..00f99e0a0 100644 --- a/apt-pkg/acquire-method.h +++ b/apt-pkg/acquire-method.h @@ -104,6 +104,9 @@ class pkgAcqMethod pkgAcqMethod(const char *Ver,unsigned long Flags = 0); virtual ~pkgAcqMethod() {}; + + private: + void Dequeue(); }; /** @} */ diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc index 3bb977e14..9d90b08bc 100644 --- a/apt-pkg/acquire-worker.cc +++ b/apt-pkg/acquire-worker.cc @@ -244,6 +244,21 @@ bool pkgAcquire::Worker::RunMessages() string NewURI = LookupTag(Message,"New-URI",URI.c_str()); Itm->URI = NewURI; + + ItemDone(); + + pkgAcquire::Item *Owner = Itm->Owner; + pkgAcquire::ItemDesc Desc = *Itm; + + // Change the status so that it can be dequeued + Owner->Status = pkgAcquire::Item::StatIdle; + // Mark the item as done (taking care of all queues) + // and then put it in the main queue again + OwnerQ->ItemDone(Itm); + OwnerQ->Owner->Enqueue(Desc); + + if (Log != 0) + Log->Done(Desc); break; } @@ -431,7 +446,9 @@ bool pkgAcquire::Worker::MediaChange(string Message) << Drive << ":" // drive << msg.str() // l10n message << endl; - write(status_fd, status.str().c_str(), status.str().size()); + + std::string const dlstatus = status.str(); + FileFd::Write(status_fd, dlstatus.c_str(), dlstatus.size()); } if (Log == 0 || Log->MediaChange(LookupTag(Message,"Media"), @@ -465,40 +482,19 @@ bool pkgAcquire::Worker::SendConfiguration() if (OutFd == -1) return false; - - string Message = "601 Configuration\n"; - Message.reserve(2000); - /* Write out all of the configuration directives by walking the + /* Write out all of the configuration directives by walking the configuration tree */ - const Configuration::Item *Top = _config->Tree(0); - for (; Top != 0;) - { - if (Top->Value.empty() == false) - { - string Line = "Config-Item: " + QuoteString(Top->FullTag(),"=\"\n") + "="; - Line += QuoteString(Top->Value,"\n") + '\n'; - Message += Line; - } - - if (Top->Child != 0) - { - Top = Top->Child; - continue; - } - - while (Top != 0 && Top->Next == 0) - Top = Top->Parent; - if (Top != 0) - Top = Top->Next; - } - Message += '\n'; + std::ostringstream Message; + Message << "601 Configuration\n"; + _config->Dump(Message, NULL, "Config-Item: %F=%V\n", false); + Message << '\n'; if (Debug == true) - clog << " -> " << Access << ':' << QuoteString(Message,"\n") << endl; - OutQueue += Message; - OutReady = true; - + clog << " -> " << Access << ':' << QuoteString(Message.str(),"\n") << endl; + OutQueue += Message.str(); + OutReady = true; + return true; } /*}}}*/ @@ -536,10 +532,10 @@ bool pkgAcquire::Worker::OutFdReady() Res = write(OutFd,OutQueue.c_str(),OutQueue.length()); } while (Res < 0 && errno == EINTR); - + if (Res <= 0) return MethodFailure(); - + OutQueue.erase(0,Res); if (OutQueue.empty() == true) OutReady = false; diff --git a/apt-pkg/acquire.cc b/apt-pkg/acquire.cc index 573a85c2f..a8a5abd34 100644 --- a/apt-pkg/acquire.cc +++ 
b/apt-pkg/acquire.cc @@ -244,11 +244,19 @@ void pkgAcquire::Dequeue(Item *Itm) { Queue *I = Queues; bool Res = false; - for (; I != 0; I = I->Next) - Res |= I->Dequeue(Itm); - if (Debug == true) clog << "Dequeuing " << Itm->DestFile << endl; + + for (; I != 0; I = I->Next) + { + if (I->Dequeue(Itm)) + { + Res = true; + if (Debug == true) + clog << "Dequeued from " << I->Name << endl; + } + } + if (Res == true) ToFetch--; } @@ -269,9 +277,30 @@ string pkgAcquire::QueueName(string Uri,MethodConfig const *&Config) /* Single-Instance methods get exactly one queue per URI. This is also used for the Access queue method */ if (Config->SingleInstance == true || QueueMode == QueueAccess) - return U.Access; + return U.Access; - return U.Access + ':' + U.Host; + string AccessSchema = U.Access + ':', + FullQueueName = AccessSchema + U.Host; + unsigned int Instances = 0, SchemaLength = AccessSchema.length(); + + Queue *I = Queues; + for (; I != 0; I = I->Next) { + // if the queue already exists, re-use it + if (I->Name == FullQueueName) + return FullQueueName; + + if (I->Name.compare(0, SchemaLength, AccessSchema) == 0) + Instances++; + } + + if (Debug) { + clog << "Found " << Instances << " instances of " << U.Access << endl; + } + + if (Instances >= (unsigned int)_config->FindI("Acquire::QueueHost::Limit",10)) + return U.Access; + + return FullQueueName; } /*}}}*/ // Acquire::GetConfig - Fetch the configuration information /*{{{*/ @@ -872,7 +901,9 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner) << ":" << (CurrentBytes/float(TotalBytes)*100.0) << ":" << msg << endl; - write(fd, status.str().c_str(), status.str().size()); + + std::string const dlstatus = status.str(); + FileFd::Write(fd, dlstatus.c_str(), dlstatus.size()); } return true; diff --git a/apt-pkg/algorithms.cc b/apt-pkg/algorithms.cc index 8beb2d51c..2f5fcc7ab 100644 --- a/apt-pkg/algorithms.cc +++ b/apt-pkg/algorithms.cc @@ -58,6 +58,12 @@ pkgSimulate::pkgSimulate(pkgDepCache *Cache) : pkgPackageManager(Cache), FileNames[I] = Jnk; } /*}}}*/ +// Simulate::~Simulate - Destructor /*{{{*/ +pkgSimulate::~pkgSimulate() +{ + delete[] Flags; +} + /*}}}*/ // Simulate::Describe - Describe a package /*{{{*/ // --------------------------------------------------------------------- /* Parameter Current == true displays the current package version, @@ -356,11 +362,36 @@ bool pkgDistUpgrade(pkgDepCache &Cache) if (I->CurrentVer != 0) Cache.MarkInstall(I, true, 0, false); - /* Now, auto upgrade all essential packages - this ensures that - the essential packages are present and working */ - for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I) - if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential) - Cache.MarkInstall(I, true, 0, false); + /* Now, install each essential package which is not installed + (and not provided by another package in the same name group) */ + std::string essential = _config->Find("pkgCacheGen::Essential", "all"); + if (essential == "all") + { + for (pkgCache::GrpIterator G = Cache.GrpBegin(); G.end() == false; ++G) + { + bool isEssential = false; + bool instEssential = false; + for (pkgCache::PkgIterator P = G.PackageList(); P.end() == false; P = G.NextPkg(P)) + { + if ((P->Flags & pkgCache::Flag::Essential) != pkgCache::Flag::Essential) + continue; + isEssential = true; + if (Cache[P].Install() == true) + { + instEssential = true; + break; + } + } + if (isEssential == false || instEssential == true) + continue; + pkgCache::PkgIterator P = G.FindPreferredPkg(); + Cache.MarkInstall(P, true, 0, 
false); + } + } + else if (essential != "none") + for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I) + if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential) + Cache.MarkInstall(I, true, 0, false); /* We do it again over all previously installed packages to force conflict resolution on them all. */ @@ -1453,7 +1484,7 @@ void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List) qsort(List,Count,sizeof(*List),PrioComp); } /*}}}*/ -// ListUpdate - update the cache files /*{{{*/ +// ListUpdate - construct Fetcher and update the cache files /*{{{*/ // --------------------------------------------------------------------- /* This is a simple wrapper to update the cache. it will fetch stuff * from the network (or any other sources defined in sources.list) @@ -1462,7 +1493,6 @@ bool ListUpdate(pkgAcquireStatus &Stat, pkgSourceList &List, int PulseInterval) { - pkgAcquire::RunResult res; pkgAcquire Fetcher; if (Fetcher.Setup(&Stat, _config->FindDir("Dir::State::Lists")) == false) return false; @@ -1471,11 +1501,24 @@ bool ListUpdate(pkgAcquireStatus &Stat, if (List.GetIndexes(&Fetcher) == false) return false; + return AcquireUpdate(Fetcher, PulseInterval, true); +} + /*}}}*/ +// AcquireUpdate - take Fetcher and update the cache files /*{{{*/ +// --------------------------------------------------------------------- +/* This is a simple wrapper to update the cache with a provided acquire + * If you only need control over Status and the used SourcesList use + * ListUpdate method instead. + */ +bool AcquireUpdate(pkgAcquire &Fetcher, int const PulseInterval, + bool const RunUpdateScripts, bool const ListCleanup) +{ // Run scripts - RunScripts("APT::Update::Pre-Invoke"); - - // check arguments - if(PulseInterval>0) + if (RunUpdateScripts == true) + RunScripts("APT::Update::Pre-Invoke"); + + pkgAcquire::RunResult res; + if(PulseInterval > 0) res = Fetcher.Run(PulseInterval); else res = Fetcher.Run(); @@ -1512,7 +1555,7 @@ bool ListUpdate(pkgAcquireStatus &Stat, // Clean out any old list files // Keep "APT::Get::List-Cleanup" name for compatibility, but // this is really a global option for the APT library now - if (!TransientNetworkFailure && !Failed && + if (!TransientNetworkFailure && !Failed && ListCleanup == true && (_config->FindB("APT::Get::List-Cleanup",true) == true && _config->FindB("APT::List-Cleanup",true) == true)) { @@ -1529,11 +1572,14 @@ bool ListUpdate(pkgAcquireStatus &Stat, // Run the success scripts if all was fine - if(!TransientNetworkFailure && !Failed) - RunScripts("APT::Update::Post-Invoke-Success"); + if (RunUpdateScripts == true) + { + if(!TransientNetworkFailure && !Failed) + RunScripts("APT::Update::Post-Invoke-Success"); - // Run the other scripts - RunScripts("APT::Update::Post-Invoke"); + // Run the other scripts + RunScripts("APT::Update::Post-Invoke"); + } return true; } /*}}}*/ diff --git a/apt-pkg/algorithms.h b/apt-pkg/algorithms.h index 37eacf1f8..aff8a68f2 100644 --- a/apt-pkg/algorithms.h +++ b/apt-pkg/algorithms.h @@ -78,6 +78,7 @@ private: public: pkgSimulate(pkgDepCache *Cache); + ~pkgSimulate(); }; /*}}}*/ class pkgProblemResolver /*{{{*/ @@ -147,5 +148,7 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache); void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List); bool ListUpdate(pkgAcquireStatus &progress, pkgSourceList &List, int PulseInterval=0); - +bool AcquireUpdate(pkgAcquire &Fetcher, int const PulseInterval = 0, + bool const RunUpdateScripts = true, bool const ListCleanup = true); + #endif diff --git 
a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc index 4324f0e63..d31ccb642 100644 --- a/apt-pkg/aptconfiguration.cc +++ b/apt-pkg/aptconfiguration.cc @@ -47,6 +47,7 @@ const Configuration::getCompressionTypes(bool const &Cached) { _config->CndSet("Acquire::CompressionTypes::gz","gzip"); setDefaultConfigurationForCompressors(); + std::vector const compressors = getCompressors(); // accept non-list order as override setting for config settings on commandline std::string const overrideOrder = _config->Find("Acquire::CompressionTypes::Order",""); @@ -60,15 +61,17 @@ const Configuration::getCompressionTypes(bool const &Cached) { if ((*o).empty() == true) continue; // ignore types we have no method ready to use - if (_config->Exists(std::string("Acquire::CompressionTypes::").append(*o)) == false) + std::string const method = std::string("Acquire::CompressionTypes::").append(*o); + if (_config->Exists(method) == false) continue; // ignore types we have no app ready to use - std::string const appsetting = std::string("Dir::Bin::").append(*o); - if (_config->Exists(appsetting) == true) { - std::string const app = _config->FindFile(appsetting.c_str(), ""); - if (app.empty() == false && FileExists(app) == false) - continue; - } + std::string const app = _config->Find(method); + std::vector::const_iterator c = compressors.begin(); + for (; c != compressors.end(); ++c) + if (c->Name == app) + break; + if (c == compressors.end()) + continue; types.push_back(*o); } @@ -84,12 +87,12 @@ const Configuration::getCompressionTypes(bool const &Cached) { if (std::find(types.begin(),types.end(),Types->Tag) != types.end()) continue; // ignore types we have no app ready to use - std::string const appsetting = std::string("Dir::Bin::").append(Types->Value); - if (appsetting.empty() == false && _config->Exists(appsetting) == true) { - std::string const app = _config->FindFile(appsetting.c_str(), ""); - if (app.empty() == false && FileExists(app) == false) - continue; - } + std::vector::const_iterator c = compressors.begin(); + for (; c != compressors.end(); ++c) + if (c->Name == Types->Value) + break; + if (c == compressors.end()) + continue; types.push_back(Types->Tag); } @@ -141,7 +144,7 @@ std::vector const Configuration::getLanguages(bool const &All, if (D != 0) { builtin.push_back("none"); for (struct dirent *Ent = readdir(D); Ent != 0; Ent = readdir(D)) { - string const name = Ent->d_name; + string const name = SubstVar(Ent->d_name, "%5f", "_"); size_t const foundDash = name.rfind("-"); size_t const foundUnderscore = name.rfind("_", foundDash); if (foundDash == string::npos || foundUnderscore == string::npos || @@ -231,17 +234,21 @@ std::vector const Configuration::getLanguages(bool const &All, // override the configuration settings vector of languages. 
string const forceLang = _config->Find("Acquire::Languages",""); if (forceLang.empty() == false) { - if (forceLang == "environment") { - codes = environment; - } else if (forceLang != "none") - codes.push_back(forceLang); - else //if (forceLang == "none") - builtin.clear(); - allCodes = codes; - for (std::vector::const_iterator b = builtin.begin(); - b != builtin.end(); ++b) - if (std::find(allCodes.begin(), allCodes.end(), *b) == allCodes.end()) - allCodes.push_back(*b); + if (forceLang == "none") { + codes.clear(); + allCodes.clear(); + allCodes.push_back("none"); + } else { + if (forceLang == "environment") + codes = environment; + else + codes.push_back(forceLang); + allCodes = codes; + for (std::vector::const_iterator b = builtin.begin(); + b != builtin.end(); ++b) + if (std::find(allCodes.begin(), allCodes.end(), *b) == allCodes.end()) + allCodes.push_back(*b); + } if (All == true) return allCodes; else @@ -432,9 +439,30 @@ bool const Configuration::checkArchitecture(std::string const &Arch) { // setDefaultConfigurationForCompressors /*{{{*/ void Configuration::setDefaultConfigurationForCompressors() { // Set default application paths to check for optional compression types - _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma"); - _config->CndSet("Dir::Bin::xz", "/usr/bin/xz"); _config->CndSet("Dir::Bin::bzip2", "/bin/bzip2"); + _config->CndSet("Dir::Bin::xz", "/usr/bin/xz"); + if (FileExists(_config->FindFile("Dir::Bin::xz")) == true) { + _config->Clear("Dir::Bin::lzma"); + _config->Set("APT::Compressor::lzma::Binary", "xz"); + if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) { + _config->Set("APT::Compressor::lzma::CompressArg::", "--format=lzma"); + _config->Set("APT::Compressor::lzma::CompressArg::", "-9"); + } + if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) { + _config->Set("APT::Compressor::lzma::UncompressArg::", "--format=lzma"); + _config->Set("APT::Compressor::lzma::UncompressArg::", "-d"); + } + } else { + _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma"); + if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) { + _config->Set("APT::Compressor::lzma::CompressArg::", "--suffix="); + _config->Set("APT::Compressor::lzma::CompressArg::", "-9"); + } + if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) { + _config->Set("APT::Compressor::lzma::UncompressArg::", "--suffix="); + _config->Set("APT::Compressor::lzma::UncompressArg::", "-d"); + } + } } /*}}}*/ // getCompressors - Return Vector of usbale compressors /*{{{*/ @@ -453,15 +481,23 @@ const Configuration::getCompressors(bool const Cached) { setDefaultConfigurationForCompressors(); - compressors.push_back(Compressor(".", "", "", "", "", 1)); + compressors.push_back(Compressor(".", "", "", NULL, NULL, 1)); if (_config->Exists("Dir::Bin::gzip") == false || FileExists(_config->FindFile("Dir::Bin::gzip")) == true) compressors.push_back(Compressor("gzip",".gz","gzip","-9n","-d",2)); +#ifdef HAVE_ZLIB + else + compressors.push_back(Compressor("gzip",".gz","false", NULL, NULL, 2)); +#endif if (_config->Exists("Dir::Bin::bzip2") == false || FileExists(_config->FindFile("Dir::Bin::bzip2")) == true) compressors.push_back(Compressor("bzip2",".bz2","bzip2","-9","-d",3)); - if (_config->Exists("Dir::Bin::lzma") == false || FileExists(_config->FindFile("Dir::Bin::lzma")) == true) - compressors.push_back(Compressor("lzma",".lzma","lzma","-9","-d",4)); +#ifdef HAVE_BZ2 + else + compressors.push_back(Compressor("bzip2",".bz2","false", NULL, NULL, 3)); +#endif if 
(_config->Exists("Dir::Bin::xz") == false || FileExists(_config->FindFile("Dir::Bin::xz")) == true) - compressors.push_back(Compressor("xz",".xz","xz","-6","-d",5)); + compressors.push_back(Compressor("xz",".xz","xz","-6","-d",4)); + if (_config->Exists("Dir::Bin::lzma") == false || FileExists(_config->FindFile("Dir::Bin::lzma")) == true) + compressors.push_back(Compressor("lzma",".lzma","lzma","-9","-d",5)); std::vector const comp = _config->FindVector("APT::Compressor"); for (std::vector::const_iterator c = comp.begin(); @@ -494,7 +530,7 @@ Configuration::Compressor::Compressor(char const *name, char const *extension, char const *binary, char const *compressArg, char const *uncompressArg, unsigned short const cost) { - std::string const config = std::string("APT:Compressor::").append(name).append("::"); + std::string const config = std::string("APT::Compressor::").append(name).append("::"); Name = _config->Find(std::string(config).append("Name"), name); Extension = _config->Find(std::string(config).append("Extension"), extension); Binary = _config->Find(std::string(config).append("Binary"), binary); diff --git a/apt-pkg/cachefilter.cc b/apt-pkg/cachefilter.cc index 9ec3fa699..35f95fe22 100644 --- a/apt-pkg/cachefilter.cc +++ b/apt-pkg/cachefilter.cc @@ -9,12 +9,14 @@ #include #include #include - -#include +#include #include #include +#include + +#include /*}}}*/ namespace APT { namespace CacheFilter { @@ -52,5 +54,54 @@ PackageNameMatchesRegEx::~PackageNameMatchesRegEx() { /*{{{*/ delete pattern; } /*}}}*/ + +// CompleteArch to - tuple /*{{{*/ +//---------------------------------------------------------------------- +/* The complete architecture, consisting of -. */ +static std::string CompleteArch(std::string const &arch) { + if (arch.find('-') != std::string::npos) { + // ensure that only -any- is replaced and not something like company- + std::string complete = std::string("-").append(arch).append("-"); + complete = SubstVar(complete, "-any-", "-*-"); + complete = complete.substr(1, complete.size()-2); + return complete; + } + else if (arch == "armel") return "linux-arm"; + else if (arch == "armhf") return "linux-arm"; + else if (arch == "lpia") return "linux-i386"; + else if (arch == "powerpcspe") return "linux-powerpc"; + else if (arch == "uclibc-linux-armel") return "linux-arm"; + else if (arch == "uclinux-armel") return "uclinux-arm"; + else if (arch == "any") return "*-*"; + else return "linux-" + arch; +} + /*}}}*/ +PackageArchitectureMatchesSpecification::PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern) :/*{{{*/ + literal(pattern), isPattern(isPattern), d(NULL) { + complete = CompleteArch(pattern); +} + /*}}}*/ +bool PackageArchitectureMatchesSpecification::operator() (char const * const &arch) {/*{{{*/ + if (strcmp(literal.c_str(), arch) == 0 || + strcmp(complete.c_str(), arch) == 0) + return true; + std::string const pkgarch = CompleteArch(arch); + if (isPattern == true) + return fnmatch(complete.c_str(), pkgarch.c_str(), 0) == 0; + return fnmatch(pkgarch.c_str(), complete.c_str(), 0) == 0; +} + /*}}}*/ +bool PackageArchitectureMatchesSpecification::operator() (pkgCache::PkgIterator const &Pkg) {/*{{{*/ + return (*this)(Pkg.Arch()); +} + /*}}}*/ +bool PackageArchitectureMatchesSpecification::operator() (pkgCache::VerIterator const &Ver) {/*{{{*/ + return (*this)(Ver.ParentPkg()); +} + /*}}}*/ +PackageArchitectureMatchesSpecification::~PackageArchitectureMatchesSpecification() { /*{{{*/ +} + /*}}}*/ + } } diff --git 
a/apt-pkg/cachefilter.h b/apt-pkg/cachefilter.h index 5d426008b..25cd43f47 100644 --- a/apt-pkg/cachefilter.h +++ b/apt-pkg/cachefilter.h @@ -26,6 +26,36 @@ public: ~PackageNameMatchesRegEx(); }; /*}}}*/ +// PackageArchitectureMatchesSpecification /*{{{*/ +/** \class PackageArchitectureMatchesSpecification + \brief matching against architecture specification strings + + The strings are of the format - where either component, + or the whole string, can be the wildcard "any" as defined in + debian-policy §11.1 "Architecture specification strings". + + Examples: i386, mipsel, linux-any, any-amd64, any */ +class PackageArchitectureMatchesSpecification { + std::string literal; + std::string complete; + bool isPattern; + /** \brief dpointer placeholder (for later in case we need it) */ + void *d; +public: + /** \brief matching against architecture specification strings + * + * @param pattern is the architecture specification string + * @param isPattern defines if the given \b pattern is a + * architecture specification pattern to match others against + * or if it is the fixed string and matched against patterns + */ + PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern = true); + bool operator() (char const * const &arch); + bool operator() (pkgCache::PkgIterator const &Pkg); + bool operator() (pkgCache::VerIterator const &Ver); + ~PackageArchitectureMatchesSpecification(); +}; + /*}}}*/ } } #endif diff --git a/apt-pkg/cacheiterators.h b/apt-pkg/cacheiterators.h index d5e018be9..dcd353119 100644 --- a/apt-pkg/cacheiterators.h +++ b/apt-pkg/cacheiterators.h @@ -285,6 +285,7 @@ class pkgCache::DepIterator : public Iterator { bool IsNegative() const; bool IsIgnorable(PrvIterator const &Prv) const; bool IsIgnorable(PkgIterator const &Pkg) const; + bool IsMultiArchImplicit() const; void GlobOr(DepIterator &Start,DepIterator &End); Version **AllTargets() const; bool SmartTargetPkg(PkgIterator &Result) const; @@ -329,8 +330,9 @@ class pkgCache::PrvIterator : public Iterator { inline VerIterator OwnerVer() const {return VerIterator(*Owner,Owner->VerP + S->Version);}; inline PkgIterator OwnerPkg() const {return PkgIterator(*Owner,Owner->PkgP + Owner->VerP[S->Version].ParentPkg);}; - inline PrvIterator() : Iterator(), Type(PrvVer) {}; + bool IsMultiArchImplicit() const; + inline PrvIterator() : Iterator(), Type(PrvVer) {}; inline PrvIterator(pkgCache &Owner, Provides *Trg, Version*) : Iterator(Owner, Trg), Type(PrvVer) { if (S == 0) diff --git a/apt-pkg/cacheset.cc b/apt-pkg/cacheset.cc index b892ab4bf..784d1f0bf 100644 --- a/apt-pkg/cacheset.cc +++ b/apt-pkg/cacheset.cc @@ -182,15 +182,61 @@ pkgCache::PkgIterator PackageContainerInterface::FromName(pkgCacheFile &Cache, return Pkg; } /*}}}*/ +// FromGroup - Returns the package defined by this string /*{{{*/ +bool PackageContainerInterface::FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache, + std::string pkg, CacheSetHelper &helper) { + if (unlikely(Cache.GetPkgCache() == 0)) + return false; + + size_t const archfound = pkg.find_last_of(':'); + std::string arch; + if (archfound != std::string::npos) { + arch = pkg.substr(archfound+1); + pkg.erase(archfound); + } + + pkgCache::GrpIterator Grp = Cache.GetPkgCache()->FindGrp(pkg); + if (Grp.end() == false) { + if (arch.empty() == true) { + pkgCache::PkgIterator Pkg = Grp.FindPreferredPkg(); + if (Pkg.end() == false) + { + pci->insert(Pkg); + return true; + } + } else { + bool found = false; + // for 'linux-any' return the first package matching, for 
'linux-*' return all matches + bool const isGlobal = arch.find('*') != std::string::npos; + APT::CacheFilter::PackageArchitectureMatchesSpecification pams(arch); + for (pkgCache::PkgIterator Pkg = Grp.PackageList(); Pkg.end() == false; Pkg = Grp.NextPkg(Pkg)) { + if (pams(Pkg) == false) + continue; + pci->insert(Pkg); + found = true; + if (isGlobal == false) + break; + } + if (found == true) + return true; + } + } + + pkgCache::PkgIterator Pkg = helper.canNotFindPkgName(Cache, pkg); + if (Pkg.end() == true) + return false; + + pci->insert(Pkg); + return true; +} + /*}}}*/ // FromString - Return all packages matching a specific string /*{{{*/ bool PackageContainerInterface::FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str, CacheSetHelper &helper) { bool found = true; _error->PushToStack(); - pkgCache::PkgIterator Pkg = FromName(Cache, str, helper); - if (Pkg.end() == false) - pci->insert(Pkg); - else if (FromTask(pci, Cache, str, helper) == false && + if (FromGroup(pci, Cache, str, helper) == false && + FromTask(pci, Cache, str, helper) == false && FromRegEx(pci, Cache, str, helper) == false) { helper.canNotFindPackage(pci, Cache, str); @@ -217,6 +263,7 @@ bool PackageContainerInterface::FromModifierCommandLine(unsigned short &modID, P pkgCacheFile &Cache, const char * cmdline, std::list const &mods, CacheSetHelper &helper) { std::string str = cmdline; + unsigned short fallback = modID; bool modifierPresent = false; for (std::list::const_iterator mod = mods.begin(); mod != mods.end(); ++mod) { @@ -243,6 +290,7 @@ bool PackageContainerInterface::FromModifierCommandLine(unsigned short &modID, P helper.showErrors(errors); if (Pkg.end() == false) { pci->insert(Pkg); + modID = fallback; return true; } } @@ -281,13 +329,14 @@ bool VersionContainerInterface::FromModifierCommandLine(unsigned short &modID, modifierPresent = true; break; } - if (modifierPresent == true) { bool const errors = helper.showErrors(false); bool const found = VersionContainerInterface::FromString(vci, Cache, cmdline, select, helper, true); helper.showErrors(errors); - if (found == true) + if (found == true) { + modID = fallback; return true; + } } return FromString(vci, Cache, str, select, helper); } diff --git a/apt-pkg/cacheset.h b/apt-pkg/cacheset.h index 6f0a0e358..2a45910ba 100644 --- a/apt-pkg/cacheset.h +++ b/apt-pkg/cacheset.h @@ -139,6 +139,7 @@ public: static bool FromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper); static bool FromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper); static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper); + static bool FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper); static bool FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper); static bool FromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper); @@ -186,7 +187,7 @@ public: /*{{{*/ pkgCache::PkgIterator getPkg(void) const { return *_iter; } inline pkgCache::PkgIterator operator*(void) const { return *_iter; }; operator typename Container::iterator(void) const { return _iter; } - operator typename PackageContainer::const_iterator() { return PackageContainer::const_iterator(_iter); } + operator typename 
PackageContainer::const_iterator() { return typename PackageContainer::const_iterator(_iter); } inline iterator& operator++() { ++_iter; return *this; } inline iterator operator++(int) { iterator tmp(*this); operator++(); return tmp; } inline bool operator!=(iterator const &i) const { return _iter != i._iter; }; @@ -506,7 +507,7 @@ public: /*{{{*/ pkgCache::VerIterator getVer(void) const { return *_iter; } inline pkgCache::VerIterator operator*(void) const { return *_iter; }; operator typename Container::iterator(void) const { return _iter; } - operator typename VersionContainer::const_iterator() { return VersionContainer::const_iterator(_iter); } + operator typename VersionContainer::const_iterator() { return typename VersionContainer::const_iterator(_iter); } inline iterator& operator++() { ++_iter; return *this; } inline iterator operator++(int) { iterator tmp(*this); operator++(); return tmp; } inline bool operator!=(iterator const &i) const { return _iter != i._iter; }; diff --git a/apt-pkg/cdrom.cc b/apt-pkg/cdrom.cc index 4462d4e24..8462e8286 100644 --- a/apt-pkg/cdrom.cc +++ b/apt-pkg/cdrom.cc @@ -409,28 +409,12 @@ bool pkgCdrom::WriteDatabase(Configuration &Cnf) /* Write out all of the configuration directives by walking the configuration tree */ - const Configuration::Item *Top = Cnf.Tree(0); - for (; Top != 0;) - { - // Print the config entry - if (Top->Value.empty() == false) - Out << Top->FullTag() + " \"" << Top->Value << "\";" << endl; - - if (Top->Child != 0) - { - Top = Top->Child; - continue; - } - - while (Top != 0 && Top->Next == 0) - Top = Top->Parent; - if (Top != 0) - Top = Top->Next; - } + Cnf.Dump(Out, NULL, "%f \"%v\";\n", false); Out.close(); - - link(DFile.c_str(),string(DFile + '~').c_str()); + + if (FileExists(DFile) == true && link(DFile.c_str(),string(DFile + '~').c_str()) != 0) + return _error->Errno("link", "Failed to link %s to %s~", DFile.c_str(), DFile.c_str()); if (rename(NewFile.c_str(),DFile.c_str()) != 0) return _error->Errno("rename","Failed to rename %s.new to %s", DFile.c_str(),DFile.c_str()); @@ -697,7 +681,8 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/ return false; } - chdir(StartDir.c_str()); + if (chdir(StartDir.c_str()) != 0) + return _error->Errno("chdir","Unable to change to %s", StartDir.c_str()); if (_config->FindB("Debug::aptcdrom",false) == true) { diff --git a/apt-pkg/clean.cc b/apt-pkg/clean.cc index ed8fa1aa9..9c167eaa5 100644 --- a/apt-pkg/clean.cc +++ b/apt-pkg/clean.cc @@ -54,9 +54,11 @@ bool pkgArchiveCleaner::Go(std::string Dir,pkgCache &Cache) struct stat St; if (stat(Dir->d_name,&St) != 0) { - chdir(StartDir.c_str()); + _error->Errno("stat",_("Unable to stat %s."),Dir->d_name); closedir(D); - return _error->Errno("stat",_("Unable to stat %s."),Dir->d_name); + if (chdir(StartDir.c_str()) != 0) + return _error->Errno("chdir", _("Unable to change to %s"), StartDir.c_str()); + return false; } // Grab the package name @@ -115,8 +117,9 @@ bool pkgArchiveCleaner::Go(std::string Dir,pkgCache &Cache) Erase(Dir->d_name,Pkg,Ver,St); }; - chdir(StartDir.c_str()); closedir(D); - return true; + if (chdir(StartDir.c_str()) != 0) + return _error->Errno("chdir", _("Unable to change to %s"), StartDir.c_str()); + return true; } /*}}}*/ diff --git a/apt-pkg/contrib/cmndline.cc b/apt-pkg/contrib/cmndline.cc index 159f330a1..75d02cad4 100644 --- a/apt-pkg/contrib/cmndline.cc +++ b/apt-pkg/contrib/cmndline.cc @@ -92,8 +92,9 @@ bool CommandLine::Parse(int argc,const char **argv) // Match up to a = against the list Args *A; const char *OptEnd 
= strchrnul(Opt, '='); - for (A = ArgList; A->end() == false && - stringcasecmp(Opt,OptEnd,A->LongOpt) != 0; A++); + for (A = ArgList; A->end() == false && + (A->LongOpt == 0 || stringcasecmp(Opt,OptEnd,A->LongOpt) != 0); + ++A); // Failed, look for a word after the first - (no-foo) bool PreceedMatch = false; @@ -105,7 +106,8 @@ bool CommandLine::Parse(int argc,const char **argv) Opt++; for (A = ArgList; A->end() == false && - stringcasecmp(Opt,OptEnd,A->LongOpt) != 0; A++); + (A->LongOpt == 0 || stringcasecmp(Opt,OptEnd,A->LongOpt) != 0); + ++A); // Failed again.. if (A->end() == true && OptEnd - Opt != 1) diff --git a/apt-pkg/contrib/configuration.cc b/apt-pkg/contrib/configuration.cc index 36866a35a..4de17e3e1 100644 --- a/apt-pkg/contrib/configuration.cc +++ b/apt-pkg/contrib/configuration.cc @@ -171,48 +171,60 @@ string Configuration::Find(const char *Name,const char *Default) const string Configuration::FindFile(const char *Name,const char *Default) const { const Item *RootItem = Lookup("RootDir"); - std::string rootDir = (RootItem == 0) ? "" : RootItem->Value; - if(rootDir.size() > 0 && rootDir[rootDir.size() - 1] != '/') - rootDir.push_back('/'); + std::string result = (RootItem == 0) ? "" : RootItem->Value; + if(result.empty() == false && result[result.size() - 1] != '/') + result.push_back('/'); const Item *Itm = Lookup(Name); if (Itm == 0 || Itm->Value.empty() == true) { - if (Default == 0) - return rootDir; - else - return rootDir + Default; + if (Default != 0) + result.append(Default); } - - string val = Itm->Value; - while (Itm->Parent != 0) + else { - if (Itm->Parent->Value.empty() == true) + string val = Itm->Value; + while (Itm->Parent != 0) { - Itm = Itm->Parent; - continue; - } + if (Itm->Parent->Value.empty() == true) + { + Itm = Itm->Parent; + continue; + } + + // Absolute + if (val.length() >= 1 && val[0] == '/') + { + if (val.compare(0, 9, "/dev/null") == 0) + val.erase(9); + break; + } - // Absolute - if (val.length() >= 1 && val[0] == '/') - break; + // ~/foo or ./foo + if (val.length() >= 2 && (val[0] == '~' || val[0] == '.') && val[1] == '/') + break; - // ~/foo or ./foo - if (val.length() >= 2 && (val[0] == '~' || val[0] == '.') && val[1] == '/') - break; - - // ../foo - if (val.length() >= 3 && val[0] == '.' && val[1] == '.' && val[2] == '/') - break; - - if (Itm->Parent->Value.end()[-1] != '/') - val.insert(0, "/"); + // ../foo + if (val.length() >= 3 && val[0] == '.' && val[1] == '.' 
&& val[2] == '/') + break; + + if (Itm->Parent->Value.end()[-1] != '/') + val.insert(0, "/"); - val.insert(0, Itm->Parent->Value); - Itm = Itm->Parent; + val.insert(0, Itm->Parent->Value); + Itm = Itm->Parent; + } + result.append(val); } - return rootDir + val; + // do some normalisation by removing // and /./ from the path + size_t found = string::npos; + while ((found = result.find("/./")) != string::npos) + result.replace(found, 3, "/"); + while ((found = result.find("//")) != string::npos) + result.replace(found, 2, "/"); + + return result; } /*}}}*/ // Configuration::FindDir - Find a directory name /*{{{*/ @@ -222,7 +234,12 @@ string Configuration::FindDir(const char *Name,const char *Default) const { string Res = FindFile(Name,Default); if (Res.end()[-1] != '/') + { + size_t const found = Res.rfind("/dev/null"); + if (found != string::npos && found == Res.size() - 9) + return Res; // /dev/null returning return Res + '/'; + } return Res; } /*}}}*/ @@ -482,24 +499,80 @@ bool Configuration::ExistsAny(const char *Name) const /* Dump the entire configuration space */ void Configuration::Dump(ostream& str) { - /* Write out all of the configuration directives by walking the + Dump(str, NULL, "%f \"%v\";\n", true); +} +void Configuration::Dump(ostream& str, char const * const root, + char const * const formatstr, bool const emptyValue) +{ + const Configuration::Item* Top = Tree(root); + if (Top == 0) + return; + const Configuration::Item* const Root = (root == NULL) ? NULL : Top; + std::vector const format = VectorizeString(formatstr, '%'); + + /* Write out all of the configuration directives by walking the configuration tree */ - const Configuration::Item *Top = Tree(0); - for (; Top != 0;) - { - str << Top->FullTag() << " \"" << Top->Value << "\";" << endl; - + do { + if (emptyValue == true || Top->Value.empty() == emptyValue) + { + std::vector::const_iterator f = format.begin(); + str << *f; + for (++f; f != format.end(); ++f) + { + if (f->empty() == true) + { + ++f; + str << '%' << *f; + continue; + } + char const type = (*f)[0]; + if (type == 'f') + str << Top->FullTag(); + else if (type == 't') + str << Top->Tag; + else if (type == 'v') + str << Top->Value; + else if (type == 'F') + str << QuoteString(Top->FullTag(), "=\"\n"); + else if (type == 'T') + str << QuoteString(Top->Tag, "=\"\n"); + else if (type == 'V') + str << QuoteString(Top->Value, "=\"\n"); + else if (type == 'n') + str << "\n"; + else if (type == 'N') + str << "\t"; + else + str << '%' << type; + str << f->c_str() + 1; + } + } + if (Top->Child != 0) { Top = Top->Child; continue; } - + while (Top != 0 && Top->Next == 0) Top = Top->Parent; if (Top != 0) Top = Top->Next; - } + + if (Root != NULL) + { + const Configuration::Item* I = Top; + while(I != 0) + { + if (I == Root) + break; + else + I = I->Parent; + } + if (I == 0) + break; + } + } while (Top != 0); } /*}}}*/ diff --git a/apt-pkg/contrib/configuration.h b/apt-pkg/contrib/configuration.h index 4c2e75041..ea94c2fe6 100644 --- a/apt-pkg/contrib/configuration.h +++ b/apt-pkg/contrib/configuration.h @@ -103,6 +103,8 @@ class Configuration inline void Dump() { Dump(std::clog); }; void Dump(std::ostream& str); + void Dump(std::ostream& str, char const * const root, + char const * const format, bool const emptyValue); Configuration(const Item *Root); Configuration(); diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc index 7af5f5f5e..20d2a02f5 100644 --- a/apt-pkg/contrib/fileutl.cc +++ b/apt-pkg/contrib/fileutl.cc @@ -44,14 +44,11 @@ #include #include 
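As a usage illustration (not part of the patch): the four-argument Configuration::Dump() overload declared above takes a root node, a format string and an empty-value switch; the hunk implements %f (full tag), %t (tag), %v (value), %F/%T/%V (quoted variants), %n (newline) and %N (tab). A minimal sketch, assuming the usual installed header path:

   #include <apt-pkg/configuration.h>
   #include <iostream>

   void DumpAptSubtree()
   {
      // Print every directive below "APT" as:  Full::Tag "value";
      // The final 'true' keeps entries whose value is empty, which is
      // what the old one-argument Dump(ostream&) now delegates to.
      _config->Dump(std::cout, "APT", "%f \"%v\";\n", true);
   }
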
-// FIXME: Compressor Fds have some speed disadvantages and are a bit buggy currently, -// so while the current implementation satisfies the testcases it is not a real option -// to disable it for now -#define APT_USE_ZLIB 1 -#if APT_USE_ZLIB -#include -#else -#pragma message "Usage of zlib is DISABLED!" +#ifdef HAVE_ZLIB + #include +#endif +#ifdef HAVE_BZ2 + #include #endif #ifdef WORDS_BIGENDIAN @@ -65,10 +62,15 @@ using namespace std; class FileFdPrivate { public: -#if APT_USE_ZLIB +#ifdef HAVE_ZLIB gzFile gz; #else void* gz; +#endif +#ifdef HAVE_BZ2 + BZFILE* bz2; +#else + void* bz2; #endif int compressed_fd; pid_t compressor_pid; @@ -76,8 +78,34 @@ class FileFdPrivate { APT::Configuration::Compressor compressor; unsigned int openmode; unsigned long long seekpos; - FileFdPrivate() : gz(NULL), compressed_fd(-1), compressor_pid(-1), pipe(false), + FileFdPrivate() : gz(NULL), bz2(NULL), + compressed_fd(-1), compressor_pid(-1), pipe(false), openmode(0), seekpos(0) {}; + bool CloseDown(std::string const &FileName) + { + bool Res = true; +#ifdef HAVE_ZLIB + if (gz != NULL) { + int const e = gzclose(gz); + gz = NULL; + // gzdclose() on empty files always fails with "buffer error" here, ignore that + if (e != 0 && e != Z_BUF_ERROR) + Res &= _error->Errno("close",_("Problem closing the gzip file %s"), FileName.c_str()); + } +#endif +#ifdef HAVE_BZ2 + if (bz2 != NULL) { + BZ2_bzclose(bz2); + bz2 = NULL; + } +#endif + if (compressor_pid > 0) + ExecWait(compressor_pid, "FileFdCompressor", true); + compressor_pid = -1; + + return Res; + } + ~FileFdPrivate() { CloseDown(""); } }; // RunScripts - Run a set of scripts from a configuration subtree /*{{{*/ @@ -855,7 +883,6 @@ bool FileFd::Open(string FileName,unsigned int const Mode,CompressMode Compress, if (Compress == Auto && (Mode & WriteOnly) == WriteOnly) return _error->Error("Autodetection on %s only works in ReadOnly openmode!", FileName.c_str()); - // FIXME: Denote inbuilt compressors somehow - as we don't need to have the binaries for them std::vector const compressors = APT::Configuration::getCompressors(); std::vector::const_iterator compressor = compressors.begin(); if (Compress == Auto) @@ -923,8 +950,6 @@ bool FileFd::Open(string FileName,unsigned int const Mode,CompressMode Compress, bool FileFd::Open(string FileName,unsigned int const Mode,APT::Configuration::Compressor const &compressor, unsigned long const Perms) { Close(); - d = new FileFdPrivate; - d->openmode = Mode; Flags = AutoClose; if ((Mode & WriteOnly) != WriteOnly && (Mode & (Atomic | Create | Empty | Exclusive)) != 0) @@ -1018,10 +1043,21 @@ bool FileFd::OpenDescriptor(int Fd, unsigned int const Mode, CompressMode Compre bool FileFd::OpenDescriptor(int Fd, unsigned int const Mode, APT::Configuration::Compressor const &compressor, bool AutoClose) { Close(); - d = new FileFdPrivate; - d->openmode = Mode; Flags = (AutoClose) ? 
FileFd::AutoClose : 0; - iFd = Fd; + if (AutoClose == false && ( +#ifdef HAVE_ZLIB + compressor.Name == "gzip" || +#endif +#ifdef HAVE_BZ2 + compressor.Name == "bzip2" || +#endif + false)) + { + // Need to duplicate fd here or gzclose for cleanup will close the fd as well + iFd = dup(Fd); + } + else + iFd = Fd; this->FileName = ""; if (OpenInternDescriptor(Mode, compressor) == false) { @@ -1033,32 +1069,71 @@ bool FileFd::OpenDescriptor(int Fd, unsigned int const Mode, APT::Configuration: } bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::Compressor const &compressor) { - d->compressor = compressor; if (compressor.Name == "." || compressor.Binary.empty() == true) return true; -#if APT_USE_ZLIB - else if (compressor.Name == "gzip") + + if (d == NULL) { + d = new FileFdPrivate(); + d->openmode = Mode; + d->compressor = compressor; + } + +#ifdef HAVE_ZLIB + if (compressor.Name == "gzip") + { + if (d->gz != NULL) + { + gzclose(d->gz); + d->gz = NULL; + } if ((Mode & ReadWrite) == ReadWrite) d->gz = gzdopen(iFd, "r+"); else if ((Mode & WriteOnly) == WriteOnly) d->gz = gzdopen(iFd, "w"); else - d->gz = gzdopen (iFd, "r"); + d->gz = gzdopen(iFd, "r"); if (d->gz == NULL) return false; Flags |= Compressed; return true; } #endif +#ifdef HAVE_BZ2 + if (compressor.Name == "bzip2") + { + if (d->bz2 != NULL) + { + BZ2_bzclose(d->bz2); + d->bz2 = NULL; + } + if ((Mode & ReadWrite) == ReadWrite) + d->bz2 = BZ2_bzdopen(iFd, "r+"); + else if ((Mode & WriteOnly) == WriteOnly) + d->bz2 = BZ2_bzdopen(iFd, "w"); + else + d->bz2 = BZ2_bzdopen(iFd, "r"); + if (d->bz2 == NULL) + return false; + Flags |= Compressed; + return true; + } +#endif + + // collect zombies here in case we reopen + if (d->compressor_pid > 0) + ExecWait(d->compressor_pid, "FileFdCompressor", true); if ((Mode & ReadWrite) == ReadWrite) + { + Flags |= Fail; return _error->Error("ReadWrite mode is not supported for file %s", FileName.c_str()); + } bool const Comp = (Mode & WriteOnly) == WriteOnly; - // Handle 'decompression' of empty files if (Comp == false) { + // Handle 'decompression' of empty files struct stat Buf; fstat(iFd, &Buf); if (Buf.st_size == 0 && S_ISFIFO(Buf.st_mode) == false) @@ -1067,13 +1142,19 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C // We don't need the file open - instead let the compressor open it // as he properly knows better how to efficiently read from 'his' file if (FileName.empty() == false) + { close(iFd); + iFd = -1; + } } // Create a data pipe int Pipe[2] = {-1,-1}; if (pipe(Pipe) != 0) + { + Flags |= Fail; return _error->Errno("pipe",_("Failed to create subprocess IPC")); + } for (int J = 0; J != 2; J++) SetCloseExec(Pipe[J],true); @@ -1100,6 +1181,12 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C dup2(d->compressed_fd,STDIN_FILENO); dup2(Pipe[1],STDOUT_FILENO); } + int const nullfd = open("/dev/null", O_WRONLY); + if (nullfd != -1) + { + dup2(nullfd,STDERR_FILENO); + close(nullfd); + } SetCloseExec(STDOUT_FILENO,false); SetCloseExec(STDIN_FILENO,false); @@ -1129,8 +1216,6 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C close(Pipe[0]); else close(Pipe[1]); - if (Comp == true || FileName.empty() == true) - close(d->compressed_fd); return true; } @@ -1142,6 +1227,12 @@ bool FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C FileFd::~FileFd() { Close(); + if (d != NULL) + { + d->CloseDown(FileName); + delete d; + d = NULL; + } } /*}}}*/ // 
FileFd::Read - Read a bit of the file /*{{{*/ @@ -1157,9 +1248,14 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual) *((char *)To) = '\0'; do { -#if APT_USE_ZLIB - if (d->gz != NULL) - Res = gzread(d->gz,To,Size); +#ifdef HAVE_ZLIB + if (d != NULL && d->gz != NULL) + Res = gzread(d->gz,To,Size); + else +#endif +#ifdef HAVE_BZ2 + if (d != NULL && d->bz2 != NULL) + Res = BZ2_bzread(d->bz2,To,Size); else #endif Res = read(iFd,To,Size); @@ -1169,21 +1265,31 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual) if (errno == EINTR) continue; Flags |= Fail; -#if APT_USE_ZLIB - if (d->gz != NULL) +#ifdef HAVE_ZLIB + if (d != NULL && d->gz != NULL) { int err; char const * const errmsg = gzerror(d->gz, &err); if (err != Z_ERRNO) return _error->Error("gzread: %s (%d: %s)", _("Read error"), err, errmsg); } +#endif +#ifdef HAVE_BZ2 + if (d != NULL && d->bz2 != NULL) + { + int err; + char const * const errmsg = BZ2_bzerror(d->bz2, &err); + if (err != BZ_IO_ERROR) + return _error->Error("BZ2_bzread: %s (%d: %s)", _("Read error"), err, errmsg); + } #endif return _error->Errno("read",_("Read error")); } To = (char *)To + Res; Size -= Res; - d->seekpos += Res; + if (d != NULL) + d->seekpos += Res; if (Actual != 0) *Actual += Res; } @@ -1210,8 +1316,8 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual) char* FileFd::ReadLine(char *To, unsigned long long const Size) { *To = '\0'; -#if APT_USE_ZLIB - if (d->gz != NULL) +#ifdef HAVE_ZLIB + if (d != NULL && d->gz != NULL) return gzgets(d->gz, To, Size); #endif @@ -1241,10 +1347,15 @@ bool FileFd::Write(const void *From,unsigned long long Size) errno = 0; do { -#if APT_USE_ZLIB - if (d->gz != NULL) +#ifdef HAVE_ZLIB + if (d != NULL && d->gz != NULL) Res = gzwrite(d->gz,From,Size); else +#endif +#ifdef HAVE_BZ2 + if (d != NULL && d->bz2 != NULL) + Res = BZ2_bzwrite(d->bz2,(void*)From,Size); + else #endif Res = write(iFd,From,Size); if (Res < 0 && errno == EINTR) @@ -1252,12 +1363,31 @@ bool FileFd::Write(const void *From,unsigned long long Size) if (Res < 0) { Flags |= Fail; +#ifdef HAVE_ZLIB + if (d != NULL && d->gz != NULL) + { + int err; + char const * const errmsg = gzerror(d->gz, &err); + if (err != Z_ERRNO) + return _error->Error("gzwrite: %s (%d: %s)", _("Write error"), err, errmsg); + } +#endif +#ifdef HAVE_BZ2 + if (d != NULL && d->bz2 != NULL) + { + int err; + char const * const errmsg = BZ2_bzerror(d->bz2, &err); + if (err != BZ_IO_ERROR) + return _error->Error("BZ2_bzwrite: %s (%d: %s)", _("Write error"), err, errmsg); + } +#endif return _error->Errno("write",_("Write error")); } From = (char *)From + Res; Size -= Res; - d->seekpos += Res; + if (d != NULL) + d->seekpos += Res; } while (Res > 0 && Size > 0); @@ -1266,6 +1396,28 @@ bool FileFd::Write(const void *From,unsigned long long Size) Flags |= Fail; return _error->Error(_("write, still have %llu to write but couldn't"), Size); +} +bool FileFd::Write(int Fd, const void *From, unsigned long long Size) +{ + int Res; + errno = 0; + do + { + Res = write(Fd,From,Size); + if (Res < 0 && errno == EINTR) + continue; + if (Res < 0) + return _error->Errno("write",_("Write error")); + + From = (char *)From + Res; + Size -= Res; + } + while (Res > 0 && Size > 0); + + if (Size == 0) + return true; + + return _error->Error(_("write, still have %llu to write but couldn't"), Size); } /*}}}*/ // FileFd::Seek - Seek in the file /*{{{*/ @@ -1273,7 +1425,11 @@ bool FileFd::Write(const void *From,unsigned long long Size) /* */ bool 
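The new static FileFd::Write(int Fd, const void *From, unsigned long long Size) defined above wraps a raw file descriptor in the usual retry-on-EINTR / short-write loop and reports failures through _error; later hunks use it to replace bare write() calls in dpkgpm.cc. A minimal sketch of that call pattern (the function and variable names here are illustrative only):

   #include <apt-pkg/fileutl.h>
   #include <string>

   bool SendStatus(int OutStatusFd, std::string const &status)
   {
      // Loops until the whole buffer is written or a real error occurs,
      // unlike an unchecked write(2).
      return FileFd::Write(OutStatusFd, status.c_str(), status.size());
   }
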
FileFd::Seek(unsigned long long To) { - if (d->pipe == true) + if (d != NULL && (d->pipe == true +#ifdef HAVE_BZ2 + || d->bz2 != NULL +#endif + )) { // Our poor man seeking in pipes is costly, so try to avoid it unsigned long long seekpos = Tell(); @@ -1283,9 +1439,17 @@ bool FileFd::Seek(unsigned long long To) return Skip(To - seekpos); if ((d->openmode & ReadOnly) != ReadOnly) + { + Flags |= Fail; return _error->Error("Reopen is only implemented for read-only files!"); - close(iFd); - iFd = 0; + } +#ifdef HAVE_BZ2 + if (d->bz2 != NULL) + BZ2_bzclose(d->bz2); +#endif + if (iFd != -1) + close(iFd); + iFd = -1; if (TemporaryFileName.empty() == false) iFd = open(TemporaryFileName.c_str(), O_RDONLY); else if (FileName.empty() == false) @@ -1295,12 +1459,18 @@ bool FileFd::Seek(unsigned long long To) if (d->compressed_fd > 0) if (lseek(d->compressed_fd, 0, SEEK_SET) != 0) iFd = d->compressed_fd; - if (iFd <= 0) + if (iFd < 0) + { + Flags |= Fail; return _error->Error("Reopen is not implemented for pipes opened with FileFd::OpenDescriptor()!"); + } } if (OpenInternDescriptor(d->openmode, d->compressor) == false) + { + Flags |= Fail; return _error->Error("Seek on file %s because it couldn't be reopened", FileName.c_str()); + } if (To != 0) return Skip(To); @@ -1309,8 +1479,8 @@ bool FileFd::Seek(unsigned long long To) return true; } int res; -#if APT_USE_ZLIB - if (d->gz) +#ifdef HAVE_ZLIB + if (d != NULL && d->gz) res = gzseek(d->gz,To,SEEK_SET); else #endif @@ -1321,7 +1491,8 @@ bool FileFd::Seek(unsigned long long To) return _error->Error("Unable to seek to %llu", To); } - d->seekpos = To; + if (d != NULL) + d->seekpos = To; return true; } /*}}}*/ @@ -1330,7 +1501,11 @@ bool FileFd::Seek(unsigned long long To) /* */ bool FileFd::Skip(unsigned long long Over) { - if (d->pipe == true) + if (d != NULL && (d->pipe == true +#ifdef HAVE_BZ2 + || d->bz2 != NULL +#endif + )) { d->seekpos += Over; char buffer[1024]; @@ -1338,15 +1513,18 @@ bool FileFd::Skip(unsigned long long Over) { unsigned long long toread = std::min((unsigned long long) sizeof(buffer), Over); if (Read(buffer, toread) == false) + { + Flags |= Fail; return _error->Error("Unable to seek ahead %llu",Over); + } Over -= toread; } return true; } int res; -#if APT_USE_ZLIB - if (d->gz != NULL) +#ifdef HAVE_ZLIB + if (d != NULL && d->gz != NULL) res = gzseek(d->gz,Over,SEEK_CUR); else #endif @@ -1356,7 +1534,8 @@ bool FileFd::Skip(unsigned long long Over) Flags |= Fail; return _error->Error("Unable to seek ahead %llu",Over); } - d->seekpos = res; + if (d != NULL) + d->seekpos = res; return true; } @@ -1366,11 +1545,13 @@ bool FileFd::Skip(unsigned long long Over) /* */ bool FileFd::Truncate(unsigned long long To) { - if (d->gz != NULL) +#if defined HAVE_ZLIB || defined HAVE_BZ2 + if (d != NULL && (d->gz != NULL || d->bz2 != NULL)) { Flags |= Fail; - return _error->Error("Truncating gzipped files is not implemented (%s)", FileName.c_str()); + return _error->Error("Truncating compressed files is not implemented (%s)", FileName.c_str()); } +#endif if (ftruncate(iFd,To) != 0) { Flags |= Fail; @@ -1389,19 +1570,27 @@ unsigned long long FileFd::Tell() // seeking around, but not all users of FileFd use always Seek() and co // so d->seekpos isn't always true and we can just use it as a hint if // we have nothing else, but not always as an authority… - if (d->pipe == true) + if (d != NULL && (d->pipe == true +#ifdef HAVE_BZ2 + || d->bz2 != NULL +#endif + )) return d->seekpos; off_t Res; -#if APT_USE_ZLIB - if (d->gz != NULL) +#ifdef HAVE_ZLIB + if 
(d != NULL && d->gz != NULL) Res = gztell(d->gz); else #endif Res = lseek(iFd,0,SEEK_CUR); if (Res == (off_t)-1) + { + Flags |= Fail; _error->Errno("lseek","Failed to determine the current file position"); - d->seekpos = Res; + } + if (d != NULL) + d->seekpos = Res; return Res; } /*}}}*/ @@ -1411,17 +1600,24 @@ unsigned long long FileFd::Tell() unsigned long long FileFd::FileSize() { struct stat Buf; - if (d->pipe == false && fstat(iFd,&Buf) != 0) + if ((d == NULL || d->pipe == false) && fstat(iFd,&Buf) != 0) + { + Flags |= Fail; return _error->Errno("fstat","Unable to determine the file size"); + } // for compressor pipes st_size is undefined and at 'best' zero - if (d->pipe == true || S_ISFIFO(Buf.st_mode)) + if ((d != NULL && d->pipe == true) || S_ISFIFO(Buf.st_mode)) { // we set it here, too, as we get the info here for free // in theory the Open-methods should take care of it already - d->pipe = true; + if (d != NULL) + d->pipe = true; if (stat(FileName.c_str(), &Buf) != 0) + { + Flags |= Fail; return _error->Errno("stat","Unable to determine the file size"); + } } return Buf.st_size; @@ -1436,7 +1632,11 @@ unsigned long long FileFd::Size() // for compressor pipes st_size is undefined and at 'best' zero, // so we 'read' the content and 'seek' back - see there - if (d->pipe == true) + if (d != NULL && (d->pipe == true +#ifdef HAVE_BZ2 + || (d->bz2 && size > 0) +#endif + )) { unsigned long long const oldSeek = Tell(); char ignore[1000]; @@ -1447,11 +1647,11 @@ unsigned long long FileFd::Size() size = Tell(); Seek(oldSeek); } -#if APT_USE_ZLIB +#ifdef HAVE_ZLIB // only check gzsize if we are actually a gzip file, just checking for // "gz" is not sufficient as uncompressed files could be opened with // gzopen in "direct" mode as well - else if (d->gz && !gzdirect(d->gz) && size > 0) + else if (d != NULL && d->gz && !gzdirect(d->gz) && size > 0) { off_t const oldPos = lseek(iFd,0,SEEK_CUR); /* unfortunately zlib.h doesn't provide a gzsize(), so we have to do @@ -1459,10 +1659,16 @@ unsigned long long FileFd::Size() * bits of the file */ // FIXME: Size for gz-files is limited by 32bit… no largefile support if (lseek(iFd, -4, SEEK_END) < 0) - return _error->Errno("lseek","Unable to seek to end of gzipped file"); + { + Flags |= Fail; + return _error->Errno("lseek","Unable to seek to end of gzipped file"); + } size = 0L; if (read(iFd, &size, 4) != 4) - return _error->Errno("read","Unable to read original size of gzipped file"); + { + Flags |= Fail; + return _error->Errno("read","Unable to read original size of gzipped file"); + } #ifdef WORDS_BIGENDIAN uint32_t tmp_size = size; @@ -1472,7 +1678,10 @@ unsigned long long FileFd::Size() #endif if (lseek(iFd, oldPos, SEEK_SET) < 0) - return _error->Errno("lseek","Unable to seek in gzipped file"); + { + Flags |= Fail; + return _error->Errno("lseek","Unable to seek in gzipped file"); + } return size; } @@ -1487,20 +1696,23 @@ unsigned long long FileFd::Size() time_t FileFd::ModificationTime() { struct stat Buf; - if (d->pipe == false && fstat(iFd,&Buf) != 0) + if ((d == NULL || d->pipe == false) && fstat(iFd,&Buf) != 0) { + Flags |= Fail; _error->Errno("fstat","Unable to determine the modification time of file %s", FileName.c_str()); return 0; } // for compressor pipes st_size is undefined and at 'best' zero - if (d->pipe == true || S_ISFIFO(Buf.st_mode)) + if ((d != NULL && d->pipe == true) || S_ISFIFO(Buf.st_mode)) { // we set it here, too, as we get the info here for free // in theory the Open-methods should take care of it already - d->pipe = 
true; + if (d != NULL) + d->pipe = true; if (stat(FileName.c_str(), &Buf) != 0) { + Flags |= Fail; _error->Errno("fstat","Unable to determine the modification time of file %s", FileName.c_str()); return 0; } @@ -1520,19 +1732,18 @@ bool FileFd::Close() bool Res = true; if ((Flags & AutoClose) == AutoClose) { -#if APT_USE_ZLIB - if (d != NULL && d->gz != NULL) { - int const e = gzclose(d->gz); - // gzdclose() on empty files always fails with "buffer error" here, ignore that - if (e != 0 && e != Z_BUF_ERROR) - Res &= _error->Errno("close",_("Problem closing the gzip file %s"), FileName.c_str()); - } else -#endif - if (iFd > 0 && close(iFd) != 0) - Res &= _error->Errno("close",_("Problem closing the file %s"), FileName.c_str()); + if ((Flags & Compressed) != Compressed && iFd > 0 && close(iFd) != 0) + Res &= _error->Errno("close",_("Problem closing the file %s"), FileName.c_str()); + + if (d != NULL) + { + Res &= d->CloseDown(FileName); + delete d; + d = NULL; + } } - if ((Flags & Replace) == Replace && iFd >= 0) { + if ((Flags & Replace) == Replace) { if (rename(TemporaryFileName.c_str(), FileName.c_str()) != 0) Res &= _error->Errno("rename",_("Problem renaming the file %s to %s"), TemporaryFileName.c_str(), FileName.c_str()); @@ -1547,14 +1758,8 @@ bool FileFd::Close() if (unlink(FileName.c_str()) != 0) Res &= _error->WarningE("unlnk",_("Problem unlinking the file %s"), FileName.c_str()); - if (d != NULL) - { - if (d->compressor_pid > 0) - ExecWait(d->compressor_pid, "FileFdCompressor", true); - delete d; - d = NULL; - } - + if (Res == false) + Flags |= Fail; return Res; } /*}}}*/ @@ -1565,7 +1770,10 @@ bool FileFd::Sync() { #ifdef _POSIX_SYNCHRONIZED_IO if (fsync(iFd) != 0) + { + Flags |= Fail; return _error->Errno("sync",_("Problem syncing the file")); + } #endif return true; } diff --git a/apt-pkg/contrib/fileutl.h b/apt-pkg/contrib/fileutl.h index a9cb45e9a..1e16540f7 100644 --- a/apt-pkg/contrib/fileutl.h +++ b/apt-pkg/contrib/fileutl.h @@ -78,6 +78,7 @@ class FileFd bool Read(void *To,unsigned long long Size,unsigned long long *Actual = 0); char* ReadLine(char *To, unsigned long long const Size); bool Write(const void *From,unsigned long long Size); + bool static Write(int Fd, const void *From, unsigned long long Size); bool Seek(unsigned long long To); bool Skip(unsigned long long To); bool Truncate(unsigned long long To); diff --git a/apt-pkg/contrib/mmap.cc b/apt-pkg/contrib/mmap.cc index 160718ea5..593bb063b 100644 --- a/apt-pkg/contrib/mmap.cc +++ b/apt-pkg/contrib/mmap.cc @@ -83,24 +83,25 @@ bool MMap::Map(FileFd &Fd) { if ((Flags & ReadOnly) != ReadOnly) return _error->Error("Compressed file %s can only be mapped readonly", Fd.Name().c_str()); - Base = new unsigned char[iSize]; + Base = malloc(iSize); + SyncToFd = new FileFd(); if (Fd.Seek(0L) == false || Fd.Read(Base, iSize) == false) return _error->Error("Compressed file %s can't be read into mmap", Fd.Name().c_str()); return true; } // Map it. - Base = mmap(0,iSize,Prot,Map,Fd.Fd(),0); + Base = (Flags & Fallback) ? MAP_FAILED : mmap(0,iSize,Prot,Map,Fd.Fd(),0); if (Base == (void *)-1) { - if (errno == ENODEV || errno == EINVAL) + if (errno == ENODEV || errno == EINVAL || (Flags & Fallback)) { // The filesystem doesn't support this particular kind of mmap. // So we allocate a buffer and read the whole file into it. 
if ((Flags & ReadOnly) == ReadOnly) { // for readonly, we don't need sync, so make it simple - Base = new unsigned char[iSize]; + Base = malloc(iSize); return Fd.Read(Base, iSize); } // FIXME: Writing to compressed fd's ? @@ -108,7 +109,7 @@ bool MMap::Map(FileFd &Fd) if (dupped_fd == -1) return _error->Errno("mmap", _("Couldn't duplicate file descriptor %i"), Fd.Fd()); - Base = new unsigned char[iSize]; + Base = calloc(iSize, 1); SyncToFd = new FileFd (dupped_fd); if (!SyncToFd->Seek(0L) || !SyncToFd->Read(Base, iSize)) return false; @@ -134,7 +135,7 @@ bool MMap::Close(bool DoSync) if (SyncToFd != NULL) { - delete[] (char *)Base; + free(Base); delete SyncToFd; SyncToFd = NULL; } @@ -282,8 +283,7 @@ DynamicMMap::DynamicMMap(unsigned long Flags,unsigned long const &WorkSpace, } #endif // fallback to a static allocated space - Base = new unsigned char[WorkSpace]; - memset(Base,0,WorkSpace); + Base = calloc(WorkSpace, 1); iSize = 0; } /*}}}*/ @@ -299,7 +299,7 @@ DynamicMMap::~DynamicMMap() #ifdef _POSIX_MAPPED_FILES munmap(Base, WorkSpace); #else - delete [] (unsigned char *)Base; + free(Base); #endif return; } @@ -463,6 +463,9 @@ bool DynamicMMap::Grow() { Base = realloc(Base, newSize); if (Base == NULL) return false; + else + /* Set new memory to 0 */ + memset((char*)Base + WorkSpace, 0, newSize - WorkSpace); } Pools =(Pool*) Base + poolOffset; diff --git a/apt-pkg/contrib/netrc.cc b/apt-pkg/contrib/netrc.cc index cb7d36088..56e59d84b 100644 --- a/apt-pkg/contrib/netrc.cc +++ b/apt-pkg/contrib/netrc.cc @@ -68,8 +68,7 @@ int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL) if (!home) return -1; - asprintf (&netrcfile, "%s%s%s", home, DIR_CHAR, NETRC); - if(!netrcfile) + if (asprintf (&netrcfile, "%s%s%s", home, DIR_CHAR, NETRC) == -1 || netrcfile == NULL) return -1; else netrc_alloc = true; diff --git a/apt-pkg/contrib/sha2_internal.cc b/apt-pkg/contrib/sha2_internal.cc index 6d27e8f2b..83b5a98d3 100644 --- a/apt-pkg/contrib/sha2_internal.cc +++ b/apt-pkg/contrib/sha2_internal.cc @@ -552,7 +552,9 @@ void SHA256_Update(SHA256_CTX* context, const sha2_byte *data, size_t len) { } while (len >= SHA256_BLOCK_LENGTH) { /* Process as many complete blocks as we can */ - SHA256_Transform(context, (sha2_word32*)data); + sha2_byte buffer[SHA256_BLOCK_LENGTH]; + MEMCPY_BCOPY(buffer, data, SHA256_BLOCK_LENGTH); + SHA256_Transform(context, (sha2_word32*)buffer); context->bitcount += SHA256_BLOCK_LENGTH << 3; len -= SHA256_BLOCK_LENGTH; data += SHA256_BLOCK_LENGTH; @@ -879,7 +881,9 @@ void SHA512_Update(SHA512_CTX* context, const sha2_byte *data, size_t len) { } while (len >= SHA512_BLOCK_LENGTH) { /* Process as many complete blocks as we can */ - SHA512_Transform(context, (sha2_word64*)data); + sha2_byte buffer[SHA512_BLOCK_LENGTH]; + MEMCPY_BCOPY(buffer, data, SHA512_BLOCK_LENGTH); + SHA512_Transform(context, (sha2_word64*)buffer); ADDINC128(context->bitcount, SHA512_BLOCK_LENGTH << 3); len -= SHA512_BLOCK_LENGTH; data += SHA512_BLOCK_LENGTH; diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc index 99efa8d98..df11a80ad 100644 --- a/apt-pkg/contrib/strutl.cc +++ b/apt-pkg/contrib/strutl.cc @@ -23,6 +23,7 @@ #include #include +#include #include #include #include @@ -751,7 +752,8 @@ bool ReadMessages(int Fd, vector &List) // Look for the end of the message for (char *I = Buffer; I + 1 < End; I++) { - if (I[0] != '\n' || I[1] != '\n') + if (I[1] != '\n' || + (strncmp(I, "\n\n", 2) != 0 && strncmp(I, "\r\n\r\n", 4) != 0)) continue; // Pull the message out @@ -759,7 
+761,7 @@ bool ReadMessages(int Fd, vector &List) PartialMessage += Message; // Fix up the buffer - for (; I < End && *I == '\n'; I++); + for (; I < End && (*I == '\r' || *I == '\n'); ++I); End -= I-Buffer; memmove(Buffer,I,End-Buffer); I = Buffer; @@ -1168,34 +1170,50 @@ unsigned long RegexChoice(RxChoiceList *Rxs,const char **ListBegin, return Hits; } /*}}}*/ -// ioprintf - C format string outputter to C++ iostreams /*{{{*/ +// {str,io}printf - C format string outputter to C++ strings/iostreams /*{{{*/ // --------------------------------------------------------------------- /* This is used to make the internationalization strings easier to translate and to allow reordering of parameters */ -void ioprintf(ostream &out,const char *format,...) +static bool iovprintf(ostream &out, const char *format, + va_list &args, ssize_t &size) { + char *S = (char*)malloc(size); + ssize_t const n = vsnprintf(S, size, format, args); + if (n > -1 && n < size) { + out << S; + free(S); + return true; + } else { + if (n > -1) + size = n + 1; + else + size *= 2; + } + free(S); + return false; +} +void ioprintf(ostream &out,const char *format,...) { va_list args; - va_start(args,format); - - // sprintf the description - char S[4096]; - vsnprintf(S,sizeof(S),format,args); - out << S; + ssize_t size = 400; + while (true) { + va_start(args,format); + if (iovprintf(out, format, args, size) == true) + return; + va_end(args); + } } - /*}}}*/ -// strprintf - C format string outputter to C++ strings /*{{{*/ -// --------------------------------------------------------------------- -/* This is used to make the internationalization strings easier to translate - and to allow reordering of parameters */ -void strprintf(string &out,const char *format,...) +void strprintf(string &out,const char *format,...) 
{ va_list args; - va_start(args,format); - - // sprintf the description - char S[4096]; - vsnprintf(S,sizeof(S),format,args); - out = string(S); + ssize_t size = 400; + std::ostringstream outstr; + while (true) { + va_start(args,format); + if (iovprintf(outstr, format, args, size) == true) + break; + va_end(args); + } + out = outstr.str(); } /*}}}*/ // safe_snprintf - Safer snprintf /*{{{*/ diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc index 5dc2a2ac2..de645bb6e 100644 --- a/apt-pkg/deb/debindexfile.cc +++ b/apt-pkg/deb/debindexfile.cc @@ -161,7 +161,7 @@ unsigned long debSourcesIndex::Size() const /* we need to ignore errors here; if the lists are absent, just return 0 */ _error->PushToStack(); - FileFd f = FileFd (IndexFile("Sources"), FileFd::ReadOnly, FileFd::Extension); + FileFd f(IndexFile("Sources"), FileFd::ReadOnly, FileFd::Extension); if (!f.Failed()) size = f.Size(); @@ -290,7 +290,7 @@ unsigned long debPackagesIndex::Size() const /* we need to ignore errors here; if the lists are absent, just return 0 */ _error->PushToStack(); - FileFd f = FileFd (IndexFile("Packages"), FileFd::ReadOnly, FileFd::Extension); + FileFd f(IndexFile("Packages"), FileFd::ReadOnly, FileFd::Extension); if (!f.Failed()) size = f.Size(); @@ -488,7 +488,7 @@ unsigned long debTranslationsIndex::Size() const /* we need to ignore errors here; if the lists are absent, just return 0 */ _error->PushToStack(); - FileFd f = FileFd (IndexFile(Language), FileFd::ReadOnly, FileFd::Extension); + FileFd f(IndexFile(Language), FileFd::ReadOnly, FileFd::Extension); if (!f.Failed()) size = f.Size(); @@ -602,7 +602,8 @@ bool debStatusIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const pkgCache::PkgFileIterator CFile = Gen.GetCurFile(); CFile->Size = Pkg.FileSize(); CFile->mtime = Pkg.ModificationTime(); - CFile->Archive = Gen.WriteUniqString("now"); + map_ptrloc const storage = Gen.WriteUniqString("now"); + CFile->Archive = storage; if (Gen.MergeList(Parser) == false) return _error->Error("Problem with MergeList %s",File.c_str()); diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc index 84e6c38c5..e93e51af3 100644 --- a/apt-pkg/deb/deblistparser.cc +++ b/apt-pkg/deb/deblistparser.cc @@ -15,6 +15,7 @@ #include #include #include +#include #include #include #include @@ -22,7 +23,6 @@ #include #include -#include #include /*}}}*/ @@ -215,15 +215,22 @@ string debListParser::DescriptionLanguage() */ MD5SumValue debListParser::Description_md5() { - string value = Section.FindS("Description-md5"); - - if (value.empty()) + string const value = Section.FindS("Description-md5"); + if (value.empty() == true) { MD5Summation md5; md5.Add((Description() + "\n").c_str()); return md5.Result(); - } else - return MD5SumValue(value); + } + else if (likely(value.size() == 32)) + { + if (likely(value.find_first_not_of("0123456789abcdefABCDEF") == string::npos)) + return MD5SumValue(value); + _error->Error("Malformed Description-md5 line; includes invalid character '%s'", value.c_str()); + return MD5SumValue(); + } + _error->Error("Malformed Description-md5 line; doesn't have the required length (32 != %d) '%s'", (int)value.size(), value.c_str()); + return MD5SumValue(); } /*}}}*/ // ListParser::UsePackage - Update a package structure /*{{{*/ @@ -236,21 +243,26 @@ bool debListParser::UsePackage(pkgCache::PkgIterator &Pkg, if (Pkg->Section == 0) Pkg->Section = UniqFindTagWrite("Section"); - // Packages which are not from the "native" arch doesn't get the essential flag - // in the default 
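The rewritten ioprintf()/strprintf() above no longer format into a fixed 4096-byte stack buffer: iovprintf() starts with a 400-byte allocation and grows it until vsnprintf() reports that the result fits. A self-contained sketch of the same strategy (the helper name is invented, not part of the patch):

   #include <cstdarg>
   #include <cstdio>
   #include <string>
   #include <vector>

   static std::string FormatToString(const char *format, ...)
   {
      int size = 400;                        // initial guess, as in iovprintf()
      while (true)
      {
         std::vector<char> buf(size);
         va_list args;
         va_start(args, format);
         int const n = vsnprintf(&buf[0], buf.size(), format, args);
         va_end(args);
         if (n > -1 && n < size)             // the formatted string fit
            return std::string(&buf[0], n);
         size = (n > -1) ? n + 1 : size * 2; // grow to the reported size, or double
      }
   }
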
"native" mode - it is also possible to mark "all" or "none". - // The "installed" mode is handled by ParseStatus(), See #544481 and friends. string const static myArch = _config->Find("APT::Architecture"); - string const static essential = _config->Find("pkgCacheGen::Essential", "native"); - if ((essential == "native" && Pkg->Arch != 0 && myArch == Pkg.Arch()) || - essential == "all") + // Possible values are: "all", "native", "installed" and "none" + // The "installed" mode is handled by ParseStatus(), See #544481 and friends. + string const static essential = _config->Find("pkgCacheGen::Essential", "all"); + if (essential == "all" || + (essential == "native" && Pkg->Arch != 0 && myArch == Pkg.Arch())) if (Section.FindFlag("Essential",Pkg->Flags,pkgCache::Flag::Essential) == false) return false; if (Section.FindFlag("Important",Pkg->Flags,pkgCache::Flag::Important) == false) return false; if (strcmp(Pkg.Name(),"apt") == 0) - Pkg->Flags |= pkgCache::Flag::Essential | pkgCache::Flag::Important; - + { + if ((essential == "native" && Pkg->Arch != 0 && myArch == Pkg.Arch()) || + essential == "all") + Pkg->Flags |= pkgCache::Flag::Essential | pkgCache::Flag::Important; + else + Pkg->Flags |= pkgCache::Flag::Important; + } + if (ParseStatus(Pkg,Ver) == false) return false; return true; @@ -452,22 +464,6 @@ const char *debListParser::ConvertRelation(const char *I,unsigned int &Op) } return I; } - -/* - * CompleteArch: - * - * The complete architecture, consisting of -. - */ -static string CompleteArch(std::string const &arch) { - if (arch == "armel") return "linux-arm"; - if (arch == "armhf") return "linux-arm"; - if (arch == "lpia") return "linux-i386"; - if (arch == "powerpcspe") return "linux-powerpc"; - if (arch == "uclibc-linux-armel") return "linux-arm"; - if (arch == "uclinux-armel") return "uclinux-arm"; - - return (arch.find("-") != string::npos) ? 
arch : "linux-" + arch; -} /*}}}*/ // ListParser::ParseDepends - Parse a dependency element /*{{{*/ // --------------------------------------------------------------------- @@ -544,58 +540,59 @@ const char *debListParser::ParseDepends(const char *Start,const char *Stop, if (ParseArchFlags == true) { - string completeArch = CompleteArch(arch); + APT::CacheFilter::PackageArchitectureMatchesSpecification matchesArch(arch, false); // Parse an architecture if (I != Stop && *I == '[') { + ++I; // malformed - I++; - if (I == Stop) - return 0; - - const char *End = I; - bool Found = false; - bool NegArch = false; - while (I != Stop) + if (unlikely(I == Stop)) + return 0; + + const char *End = I; + bool Found = false; + bool NegArch = false; + while (I != Stop) { - // look for whitespace or ending ']' - while (End != Stop && !isspace(*End) && *End != ']') - End++; - - if (End == Stop) + // look for whitespace or ending ']' + for (;End != Stop && !isspace(*End) && *End != ']'; ++End); + + if (unlikely(End == Stop)) return 0; if (*I == '!') - { + { NegArch = true; - I++; - } + ++I; + } - if (stringcmp(arch,I,End) == 0) { + std::string arch(I, End); + if (arch.empty() == false && matchesArch(arch.c_str()) == true) + { Found = true; - } else { - std::string wildcard = SubstVar(string(I, End), "any", "*"); - if (fnmatch(wildcard.c_str(), completeArch.c_str(), 0) == 0) - Found = true; + if (I[-1] != '!') + NegArch = false; + // we found a match, so fast-forward to the end of the wildcards + for (; End != Stop && *End != ']'; ++End); } - + if (*End++ == ']') { I = End; break; } - + I = End; for (;I != Stop && isspace(*I) != 0; I++); - } + } - if (NegArch) + if (NegArch == true) Found = !Found; - - if (Found == false) + + if (Found == false) Package = ""; /* not for this arch */ } - + // Skip whitespace for (;I != Stop && isspace(*I) != 0; I++); } @@ -625,16 +622,18 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver, if (Section.Find(Tag,Start,Stop) == false) return true; - string Package; string const pkgArch = Ver.Arch(); - string Version; - unsigned int Op; while (1) { + string Package; + string Version; + unsigned int Op; + Start = ParseDepends(Start,Stop,Package,Version,Op,false,!MultiArchEnabled); if (Start == 0) return _error->Error("Problem parsing dependency %s",Tag); + size_t const found = Package.rfind(':'); if (MultiArchEnabled == true && (Type == pkgCache::Dep::Conflicts || @@ -646,6 +645,18 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver, if (NewDepends(Ver,Package,*a,Version,Op,Type) == false) return false; } + else if (MultiArchEnabled == true && found != string::npos && + strcmp(Package.c_str() + found, ":any") != 0) + { + string Arch = Package.substr(found+1, string::npos); + Package = Package.substr(0, found); + // Such dependencies are not supposed to be accepted … + // … but this is probably the best thing to do. + if (Arch == "native") + Arch = _config->Find("APT::Architecture"); + if (NewDepends(Ver,Package,Arch,Version,Op,Type) == false) + return false; + } else if (NewDepends(Ver,Package,pkgArch,Version,Op,Type) == false) return false; if (Start == Stop) @@ -771,7 +782,8 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI, { // apt-secure does no longer download individual (per-section) Release // file. 
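ParseDepends() above now hands architecture restrictions such as [linux-any] or [!armel] to APT::CacheFilter::PackageArchitectureMatchesSpecification instead of the removed CompleteArch() helper. A hedged sketch of the call pattern used in the hunk (the include path and the example strings are assumptions):

   #include <apt-pkg/cachefilter.h>
   #include <iostream>

   void CheckRestriction()
   {
      // 'false' marks "armhf" as a concrete architecture rather than a
      // pattern; operator() then tests whether a given specification
      // (possibly a wildcard) covers it.
      APT::CacheFilter::PackageArchitectureMatchesSpecification matchesArch("armhf", false);
      std::cout << (matchesArch("linux-any") ? "applies" : "does not apply") << std::endl;
   }
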
to provide Component pinning we use the section name now - FileI->Component = WriteUniqString(component); + map_ptrloc const storage = WriteUniqString(component); + FileI->Component = storage; // FIXME: Code depends on the fact that Release files aren't compressed FILE* release = fdopen(dup(File.Fd()), "r"); @@ -858,13 +870,14 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI, break; *s = '\0'; } + map_ptrloc const storage = WriteUniqString(data); switch (writeTo) { - case Suite: FileI->Archive = WriteUniqString(data); break; - case Component: FileI->Component = WriteUniqString(data); break; - case Version: FileI->Version = WriteUniqString(data); break; - case Origin: FileI->Origin = WriteUniqString(data); break; - case Codename: FileI->Codename = WriteUniqString(data); break; - case Label: FileI->Label = WriteUniqString(data); break; + case Suite: FileI->Archive = storage; break; + case Component: FileI->Component = storage; break; + case Version: FileI->Version = storage; break; + case Origin: FileI->Origin = storage; break; + case Codename: FileI->Codename = storage; break; + case Label: FileI->Label = storage; break; case None: break; } } diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc index be11870fd..296426c80 100644 --- a/apt-pkg/deb/dpkgpm.cc +++ b/apt-pkg/deb/dpkgpm.cc @@ -425,7 +425,7 @@ void pkgDPkgPM::DoStdin(int master) unsigned char input_buf[256] = {0,}; ssize_t len = read(0, input_buf, sizeof(input_buf)); if (len) - write(master, input_buf, len); + FileFd::Write(master, input_buf, len); else d->stdin_is_dev_null = true; } @@ -451,7 +451,7 @@ void pkgDPkgPM::DoTerminalPty(int master) } if(len <= 0) return; - write(1, term_buf, len); + FileFd::Write(1, term_buf, len); if(d->term_out) fwrite(term_buf, len, sizeof(char), d->term_out); } @@ -526,7 +526,7 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line) << ":" << s << endl; if(OutStatusFd > 0) - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); if (Debug == true) std::clog << "send: '" << status.str() << "'" << endl; @@ -550,7 +550,7 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line) << ":" << list[3] << endl; if(OutStatusFd > 0) - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); if (Debug == true) std::clog << "send: '" << status.str() << "'" << endl; pkgFailures++; @@ -564,7 +564,7 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line) << ":" << list[3] << endl; if(OutStatusFd > 0) - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); if (Debug == true) std::clog << "send: '" << status.str() << "'" << endl; return; @@ -592,7 +592,7 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line) << ":" << s << endl; if(OutStatusFd > 0) - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); if (Debug == true) std::clog << "send: '" << status.str() << "'" << endl; } @@ -738,6 +738,7 @@ bool pkgDPkgPM::OpenLog() d->history_out = fopen(history_name.c_str(),"a"); if (d->history_out == NULL) return _error->WarningE("OpenLog", _("Could not open file '%s'"), history_name.c_str()); + SetCloseExec(fileno(d->history_out), true); chmod(history_name.c_str(), 0644); fprintf(d->history_out, "\nStart-Date: %s\n", 
timestr); string remove, purge, install, reinstall, upgrade, downgrade; @@ -1055,7 +1056,8 @@ bool pkgDPkgPM::Go(int OutStatusFd) } int fd[2]; - pipe(fd); + if (pipe(fd) != 0) + return _error->Errno("pipe","Failed to create IPC pipe to dpkg"); #define ADDARG(X) Args.push_back(X); Size += strlen(X) #define ADDARGC(X) Args.push_back(X); Size += sizeof(X) - 1 @@ -1236,7 +1238,7 @@ bool pkgDPkgPM::Go(int OutStatusFd) << (PackagesDone/float(PackagesTotal)*100.0) << ":" << _("Running dpkg") << endl; - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); } Child = ExecFork(); diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc index 1eea55560..2656e9b42 100644 --- a/apt-pkg/depcache.cc +++ b/apt-pkg/depcache.cc @@ -1152,9 +1152,8 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst, } /* This bit is for processing the possibilty of an install/upgrade - fixing the problem */ - if (Start->Type != Dep::DpkgBreaks && - (DepState[Start->ID] & DepCVer) == DepCVer) + fixing the problem for "positive" dependencies */ + if (Start.IsNegative() == false && (DepState[Start->ID] & DepCVer) == DepCVer) { APT::VersionList verlist; pkgCache::VerIterator Cand = PkgState[Start.TargetPkg()->ID].CandidateVerIter(*this); @@ -1165,7 +1164,7 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst, pkgCache::VerIterator V = Prv.OwnerVer(); pkgCache::VerIterator Cand = PkgState[Prv.OwnerPkg()->ID].CandidateVerIter(*this); if (Cand.end() == true || V != Cand || - VS().CheckDep(Cand.VerStr(), Start->CompareOp, Start.TargetVer()) == false) + VS().CheckDep(Prv.ProvideVersion(), Start->CompareOp, Start.TargetVer()) == false) continue; verlist.insert(Cand); } @@ -1198,13 +1197,13 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst, } continue; } - - /* For conflicts we just de-install the package and mark as auto, - Conflicts may not have or groups. For dpkg's Breaks we try to - upgrade the package. */ - if (Start.IsNegative() == true) + /* Negative dependencies have no or-group + If the dependency isn't versioned, we try if an upgrade might solve the problem. 
+ Otherwise we remove the offender if needed */ + else if (Start.IsNegative() == true && Start->Type != pkgCache::Dep::Obsoletes) { SPtrArray List = Start.AllTargets(); + pkgCache::PkgIterator TrgPkg = Start.TargetPkg(); for (Version **I = List; *I != 0; I++) { VerIterator Ver(*this,*I); @@ -1215,15 +1214,17 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst, if (PkgState[Pkg->ID].InstallVer == 0) continue; - if (PkgState[Pkg->ID].CandidateVer != *I && - Start->Type == Dep::DpkgBreaks && + if ((Start->Version != 0 || TrgPkg != Pkg) && + PkgState[Pkg->ID].CandidateVer != PkgState[Pkg->ID].InstallVer && + PkgState[Pkg->ID].CandidateVer != *I && MarkInstall(Pkg,true,Depth + 1, false, ForceImportantDeps) == true) continue; - else if (MarkDelete(Pkg,false,Depth + 1, false) == false) + else if ((Start->Type == pkgCache::Dep::Conflicts || Start->Type == pkgCache::Dep::DpkgBreaks) && + MarkDelete(Pkg,false,Depth + 1, false) == false) break; } continue; - } + } } return Dep.end() == true; diff --git a/apt-pkg/edsp.cc b/apt-pkg/edsp.cc index 791aac72f..adb8788b3 100644 --- a/apt-pkg/edsp.cc +++ b/apt-pkg/edsp.cc @@ -118,8 +118,7 @@ void EDSP::WriteScenarioDependency(pkgDepCache &Cache, FILE* output, pkgCache::P bool orGroup = false; for (pkgCache::DepIterator Dep = Ver.DependsList(); Dep.end() == false; ++Dep) { - // Ignore implicit dependencies for multiarch here - if (strcmp(Pkg.Arch(), Dep.TargetPkg().Arch()) != 0) + if (Dep.IsMultiArchImplicit() == true) continue; if (orGroup == false) dependencies[Dep->Type].append(", "); @@ -140,8 +139,7 @@ void EDSP::WriteScenarioDependency(pkgDepCache &Cache, FILE* output, pkgCache::P string provides; for (pkgCache::PrvIterator Prv = Ver.ProvidesList(); Prv.end() == false; ++Prv) { - // Ignore implicit provides for multiarch here - if (strcmp(Pkg.Arch(), Prv.ParentPkg().Arch()) != 0 || strcmp(Pkg.Name(),Prv.Name()) == 0) + if (Prv.IsMultiArchImplicit() == true) continue; provides.append(", ").append(Prv.Name()); } @@ -159,8 +157,7 @@ void EDSP::WriteScenarioLimitedDependency(pkgDepCache &Cache, FILE* output, bool orGroup = false; for (pkgCache::DepIterator Dep = Ver.DependsList(); Dep.end() == false; ++Dep) { - // Ignore implicit dependencies for multiarch here - if (strcmp(Pkg.Arch(), Dep.TargetPkg().Arch()) != 0) + if (Dep.IsMultiArchImplicit() == true) continue; if (orGroup == false) { @@ -193,8 +190,7 @@ void EDSP::WriteScenarioLimitedDependency(pkgDepCache &Cache, FILE* output, string provides; for (pkgCache::PrvIterator Prv = Ver.ProvidesList(); Prv.end() == false; ++Prv) { - // Ignore implicit provides for multiarch here - if (strcmp(Pkg.Arch(), Prv.ParentPkg().Arch()) != 0 || strcmp(Pkg.Name(),Prv.Name()) == 0) + if (Prv.IsMultiArchImplicit() == true) continue; if (pkgset.find(Prv.ParentPkg()) == pkgset.end()) continue; diff --git a/apt-pkg/edsp/edspindexfile.cc b/apt-pkg/edsp/edspindexfile.cc index 482581979..98ce4497a 100644 --- a/apt-pkg/edsp/edspindexfile.cc +++ b/apt-pkg/edsp/edspindexfile.cc @@ -51,7 +51,8 @@ bool edspIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const pkgCache::PkgFileIterator CFile = Gen.GetCurFile(); CFile->Size = Pkg.FileSize(); CFile->mtime = Pkg.ModificationTime(); - CFile->Archive = Gen.WriteUniqString("edsp::scenario"); + map_ptrloc const storage = Gen.WriteUniqString("edsp::scenario"); + CFile->Archive = storage; if (Gen.MergeList(Parser) == false) return _error->Error("Problem with MergeList %s",File.c_str()); diff --git a/apt-pkg/edsp/edspsystem.cc b/apt-pkg/edsp/edspsystem.cc index 
6b9207451..aae969d9d 100644 --- a/apt-pkg/edsp/edspsystem.cc +++ b/apt-pkg/edsp/edspsystem.cc @@ -91,7 +91,7 @@ signed edspSystem::Score(Configuration const &Cnf) { if (Cnf.Find("edsp::scenario", "") == "stdin") return 1000; - if (FileExists(Cnf.FindFile("edsp::scenario","")) == true) + if (RealFileExists(Cnf.FindFile("edsp::scenario","")) == true) return 1000; return -1000; } diff --git a/apt-pkg/init.cc b/apt-pkg/init.cc index a1c47c030..76278921f 100644 --- a/apt-pkg/init.cc +++ b/apt-pkg/init.cc @@ -24,7 +24,7 @@ #define Stringfy_(x) # x #define Stringfy(x) Stringfy_(x) -const char *pkgVersion = VERSION; +const char *pkgVersion = PACKAGE_VERSION; const char *pkgLibVersion = Stringfy(APT_PKG_MAJOR) "." Stringfy(APT_PKG_MINOR) "." Stringfy(APT_PKG_RELEASE); diff --git a/apt-pkg/makefile b/apt-pkg/makefile index e1f69dd65..27d7ead24 100644 --- a/apt-pkg/makefile +++ b/apt-pkg/makefile @@ -14,7 +14,13 @@ include ../buildlib/libversion.mak LIBRARY=apt-pkg MAJOR=$(LIBAPTPKG_MAJOR) MINOR=$(LIBAPTPKG_RELEASE) -SLIBS=$(PTHREADLIB) $(INTLLIBS) -lutil -ldl -lz +SLIBS=$(PTHREADLIB) $(INTLLIBS) -lutil -ldl +ifeq ($(HAVE_ZLIB),yes) +SLIBS+= -lz +endif +ifeq ($(HAVE_BZ2),yes) +SLIBS+= -lbz2 +endif APT_DOMAIN:=libapt-pkg$(LIBAPTPKG_MAJOR) # Source code for the contributed non-core things diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc index dd8f306f2..46fc499c6 100644 --- a/apt-pkg/packagemanager.cc +++ b/apt-pkg/packagemanager.cc @@ -25,9 +25,10 @@ #include #include -#include #include #include + +#include /*}}}*/ using namespace std; @@ -337,7 +338,7 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) however if there is a loop (A depends on B, B depends on A) this will not be the case, so check for dependencies before configuring. */ bool Bad = false, Changed = false; - const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 100); + const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 500); unsigned int i=0; do { @@ -601,8 +602,8 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c This will be either dealt with if the package is configured as a dependency of Pkg (if and when Pkg is configured), or by the ConfigureAll call at the end of the for loop in OrderInstall. */ bool Changed = false; - const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 100); - unsigned int i; + const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 500); + unsigned int i = 0; do { Changed = false; @@ -621,7 +622,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c // Look for easy targets: packages that are already okay for (DepIterator Cur = Start; Bad == true; ++Cur) { - SPtrArray VList = Start.AllTargets(); + SPtrArray VList = Cur.AllTargets(); for (Version **I = VList; *I != 0; ++I) { VerIterator Ver(Cache,*I); @@ -644,7 +645,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c // Look for something that could be configured. 
for (DepIterator Cur = Start; Bad == true; ++Cur) { - SPtrArray VList = Start.AllTargets(); + SPtrArray VList = Cur.AllTargets(); for (Version **I = VList; *I != 0; ++I) { VerIterator Ver(Cache,*I); @@ -784,7 +785,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c VerIterator V(Cache,*I); PkgIterator P = V.ParentPkg(); // we are checking for installation as an easy 'protection' against or-groups and (unchosen) providers - if (P->CurrentVer == 0 || P != Pkg || (P.CurrentVer() != V && Cache[P].InstallVer != V)) + if (P != Pkg || (P.CurrentVer() != V && Cache[P].InstallVer != V)) continue; circle = true; break; @@ -830,7 +831,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c } } if (i++ > max_loops) - return _error->Error("Internal error: MaxLoopCount reached in SmartConfigure for %s, aborting", Pkg.FullName().c_str()); + return _error->Error("Internal error: APT::pkgPackageManager::MaxLoopCount reached in SmartConfigure for %s, aborting", Pkg.FullName().c_str()); } while (Changed == true); // Check for reverse conflicts. diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc index 997c70768..9acb7da72 100644 --- a/apt-pkg/pkgcache.cc +++ b/apt-pkg/pkgcache.cc @@ -708,6 +708,21 @@ bool pkgCache::DepIterator::IsIgnorable(PrvIterator const &Prv) const return false; } /*}}}*/ +// DepIterator::IsMultiArchImplicit - added by the cache generation /*{{{*/ +// --------------------------------------------------------------------- +/* MultiArch can be translated to SingleArch for an resolver and we did so, + by adding dependencies to help the resolver understand the problem, but + sometimes it is needed to identify these to ignore them… */ +bool pkgCache::DepIterator::IsMultiArchImplicit() const +{ + if (ParentPkg()->Arch != TargetPkg()->Arch && + (S->Type == pkgCache::Dep::Replaces || + S->Type == pkgCache::Dep::DpkgBreaks || + S->Type == pkgCache::Dep::Conflicts)) + return true; + return false; +} + /*}}}*/ // ostream operator to handle string representation of a dependecy /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -946,3 +961,17 @@ pkgCache::DescIterator pkgCache::VerIterator::TranslatedDescription() const }; /*}}}*/ +// PrvIterator::IsMultiArchImplicit - added by the cache generation /*{{{*/ +// --------------------------------------------------------------------- +/* MultiArch can be translated to SingleArch for an resolver and we did so, + by adding provides to help the resolver understand the problem, but + sometimes it is needed to identify these to ignore them… */ +bool pkgCache::PrvIterator::IsMultiArchImplicit() const +{ + pkgCache::PkgIterator const Owner = OwnerPkg(); + pkgCache::PkgIterator const Parent = ParentPkg(); + if (strcmp(Owner.Arch(), Parent.Arch()) != 0 || Owner->Name == Parent->Name) + return true; + return false; +} + /*}}}*/ diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc index ec072fddd..f70cbd02a 100644 --- a/apt-pkg/pkgcachegen.cc +++ b/apt-pkg/pkgcachegen.cc @@ -38,7 +38,7 @@ typedef std::vector::iterator FileIterator; template std::vector pkgCacheGenerator::Dynamic::toReMap; -bool IsDuplicateDescription(pkgCache::DescIterator Desc, +static bool IsDuplicateDescription(pkgCache::DescIterator Desc, MD5SumValue const &CurMd5, std::string const &CurLang); using std::string; @@ -286,7 +286,7 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator pkgCache::DescIterator Desc = Ver.DescriptionList(); // a version can only 
have one md5 describing it - if (MD5SumValue(Desc.md5()) != CurMd5) + if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5) continue; // don't add a new description if we have one for the given @@ -304,6 +304,9 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator void const * const oldMap = Map.Data(); map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc); + if (unlikely(descindex == 0 && _error->PendingError())) + return _error->Error(_("Error occurred while processing %s (%s%d)"), + Pkg.Name(), "NewDescription", 1); if (oldMap != Map.Data()) LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap; *LastDesc = descindex; @@ -456,6 +459,9 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator oldMap = Map.Data(); map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, *LastDesc); + if (unlikely(descindex == 0 && _error->PendingError())) + return _error->Error(_("Error occurred while processing %s (%s%d)"), + Pkg.Name(), "NewDescription", 2); if (oldMap != Map.Data()) LastDesc += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap; *LastDesc = descindex; @@ -1310,10 +1316,11 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress } _error->RevertToStack(); } - else if (Debug == true) + else { _error->MergeWithStack(); - std::clog << "Open filebased MMap" << std::endl; + if (Debug == true) + std::clog << "Open filebased MMap" << std::endl; } } if (Writeable == false || CacheFile.empty() == true) @@ -1449,11 +1456,11 @@ bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **O } /*}}}*/ // IsDuplicateDescription /*{{{*/ -bool IsDuplicateDescription(pkgCache::DescIterator Desc, +static bool IsDuplicateDescription(pkgCache::DescIterator Desc, MD5SumValue const &CurMd5, std::string const &CurLang) { // Descriptions in the same link-list have all the same md5 - if (MD5SumValue(Desc.md5()) != CurMd5) + if (Desc.end() == true || MD5SumValue(Desc.md5()) != CurMd5) return false; for (; Desc.end() == false; ++Desc) if (Desc.LanguageCode() == CurLang) diff --git a/apt-pkg/srcrecords.h b/apt-pkg/srcrecords.h index 06f0dce6c..ed69d0d72 100644 --- a/apt-pkg/srcrecords.h +++ b/apt-pkg/srcrecords.h @@ -71,6 +71,7 @@ class pkgSrcRecords virtual std::string Section() const = 0; virtual const char **Binaries() = 0; // Ownership does not transfer + //FIXME: Add a parameter to specify which architecture to use for [wildcard] matching virtual bool BuildDepends(std::vector &BuildDeps, bool const &ArchOnly, bool const &StripMultiArch = true) = 0; static const char *BuildDepType(unsigned char const &Type); diff --git a/buildlib/config.h.in b/buildlib/config.h.in index 4798fe3f5..85d3883fc 100644 --- a/buildlib/config.h.in +++ b/buildlib/config.h.in @@ -2,23 +2,15 @@ byte first (like Motorola and SPARC, unlike Intel and VAX). */ #undef WORDS_BIGENDIAN -/* The following 4 are only used by inttypes.h shim if the system lacks - inttypes.h */ -/* The number of bytes in a usigned char. */ -#undef SIZEOF_CHAR - -/* The number of bytes in a unsigned int. */ -#undef SIZEOF_INT - -/* The number of bytes in a unsigned long. */ -#undef SIZEOF_LONG - -/* The number of bytes in a unsigned short. 
*/ -#undef SIZEOF_SHORT - /* Define if we have the timegm() function */ #undef HAVE_TIMEGM +/* Define if we have the zlib library for gzip */ +#undef HAVE_ZLIB + +/* Define if we have the bz2 library for bzip2 */ +#undef HAVE_BZ2 + /* These two are used by the statvfs shim for glibc2.0 and bsd */ /* Define if we have sys/vfs.h */ #undef HAVE_VFS_H @@ -39,10 +31,13 @@ /* Define the arch name string */ #undef COMMON_ARCH -/* The version number string */ -#undef VERSION - /* The package name string */ #undef PACKAGE +/* The version number string */ +#undef PACKAGE_VERSION + +/* The mail address to reach upstream */ +#undef PACKAGE_MAIL + #define APT_8_CLEANER_HEADERS diff --git a/buildlib/configure.mak b/buildlib/configure.mak index 310c2600c..68d0535b4 100644 --- a/buildlib/configure.mak +++ b/buildlib/configure.mak @@ -12,24 +12,50 @@ # It would be a fairly good idea to run this after a cvs checkout. BUILDDIR=build -.PHONY: startup +.PHONY: startup missing-config-files startup: configure $(BUILDDIR)/config.status $(addprefix $(BUILDDIR)/,$(CONVERTED)) # use the files provided from the system instead of carry around # and use (most of the time outdated) copycats +ifeq (file-okay,$(shell test -r buildlib/config.sub && echo 'file-okay')) +buildlib/config.sub: +else + ifeq (file-okay,$(shell test -r /usr/share/misc/config.sub && echo 'file-okay')) buildlib/config.sub: ln -sf /usr/share/misc/config.sub buildlib/config.sub + else +buildlib/config.sub: missing-config-files + endif +endif + +ifeq (file-okay,$(shell test -r buildlib/config.guess && echo 'file-okay')) buildlib/config.guess: - ln -sf /usr/share/misc/config.guess buildlib/config.guess +else + ifeq (file-okay,$(shell test -r /usr/share/misc/config.guess && echo 'file-okay')) +buildlib/config.guess: + ln -sf /usr/share/misc/config.guess buildlib/config.guess + else +buildlib/config.guess: missing-config-files + endif +endif + +missing-config-files: + @echo "APT needs 'config.guess' and 'config.sub' in buildlib/ for configuration." + @echo "On Debian systems these are available in the 'autotools-dev' package." 
+ @echo + @echo "The latest versions can be acquired from the upstream git repository:" + @echo "http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD" + @echo "http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD" + exit 100 + configure: aclocal.m4 configure.in buildlib/config.guess buildlib/config.sub autoconf aclocal.m4: $(wildcard buildlib/*.m4) aclocal -I buildlib - + $(BUILDDIR)/config.status: configure - /usr/bin/test -e $(BUILDDIR) || mkdir $(BUILDDIR) + /usr/bin/test -e $(BUILDDIR) || mkdir $(BUILDDIR) (HERE=`pwd`; cd $(BUILDDIR) && $$HERE/configure) - -$(addprefix $(BUILDDIR)/,$(CONVERTED)): - (cd $(BUILDDIR) && ./config.status) + +$(addprefix $(BUILDDIR)/,$(CONVERTED)): $(BUILDDIR)/config.status diff --git a/buildlib/copy.mak b/buildlib/copy.mak index 3ae11a7eb..e8fe43deb 100644 --- a/buildlib/copy.mak +++ b/buildlib/copy.mak @@ -21,7 +21,7 @@ veryclean: veryclean/$(LOCAL) MKDIRS += $(dir $($(LOCAL)-LIST)) -$($(LOCAL)-LIST) : $(TO)/% : % +$($(LOCAL)-LIST) : $(TO)/% : % dirs echo Installing $< to $(@D) cp $< $(@D) diff --git a/buildlib/debiandoc.mak b/buildlib/debiandoc.mak index a97af0caf..7e22467cf 100644 --- a/buildlib/debiandoc.mak +++ b/buildlib/debiandoc.mak @@ -14,13 +14,15 @@ LOCAL := debiandoc-$(firstword $(SOURCE)) $(LOCAL)-HTML := $(addsuffix .html,$(addprefix $(DOC)/,$(basename $(SOURCE)))) $(LOCAL)-TEXT := $(addsuffix .text,$(addprefix $(DOC)/,$(basename $(SOURCE)))) +debiandoc: + #--------- # Rules to build HTML documentations ifdef DEBIANDOC_HTML # Install generation hooks -doc: $($(LOCAL)-HTML) +debiandoc: $($(LOCAL)-HTML) veryclean: veryclean/html/$(LOCAL) vpath %.sgml $(SUBDIRS) @@ -42,7 +44,7 @@ endif ifdef DEBIANDOC_TEXT # Install generation hooks -doc: $($(LOCAL)-TEXT) +debiandoc: $($(LOCAL)-TEXT) veryclean: veryclean/text/$(LOCAL) vpath %.sgml $(SUBDIRS) diff --git a/buildlib/defaults.mak b/buildlib/defaults.mak index edb089160..5b970876a 100644 --- a/buildlib/defaults.mak +++ b/buildlib/defaults.mak @@ -121,7 +121,7 @@ MKDIRS := $(BIN) all: dirs binary doc binary: library program maintainer-clean dist-clean distclean pristine sanity: veryclean -headers library clean veryclean program test: +startup headers library clean veryclean program test update-po manpages debiandoc: veryclean: echo Very Clean done for $(SUBDIR) diff --git a/buildlib/environment.mak.in b/buildlib/environment.mak.in index fdac3e6c3..fc859951e 100644 --- a/buildlib/environment.mak.in +++ b/buildlib/environment.mak.in @@ -2,6 +2,8 @@ # if you want you can edit it, just don't re-run configure. PACKAGE = @PACKAGE@ +PACKAGE_VERSION = @PACKAGE_VERSION@ +PACKAGE_MAIL = @PACKAGE_MAIL@ # C++ compiler options CC = @CC@ @@ -52,9 +54,9 @@ BDBLIB = @BDBLIB@ INTLLIBS = @INTLLIBS@ # Shim Headerfile control -HAVE_C9X = @HAVE_C9X@ HAVE_STATVFS = @HAVE_STATVFS@ -HAVE_TIMEGM = @HAVE_TIMEGM@ +HAVE_ZLIB = @HAVE_ZLIB@ +HAVE_BZ2 = @HAVE_BZ2@ NEED_SOCKLEN_T_DEFINE = @NEED_SOCKLEN_T_DEFINE@ # Shared library things diff --git a/buildlib/inttypes.h.in b/buildlib/inttypes.h.in deleted file mode 100644 index 3b43b7672..000000000 --- a/buildlib/inttypes.h.in +++ /dev/null @@ -1,50 +0,0 @@ -/* This is an ISO C 9X header file. 
We omit this copy to the include - directory if the local platform does not have inttypes.h, it contains - [u]int[8,16,32]_t fixed width types */ - -#include - -#undef int32_t -#undef uint32_t -#undef int16_t -#undef uint16_t -#undef int8_t -#undef uint8_t - -/* Generate the fixed bit size types */ -#if SIZEOF_INT == 4 - typedef int int32_t; - typedef unsigned int uint32_t; -#else -# if SIZEOF_LONG == 4 - typedef long int32_t; - typedef unsigned long uint32_t; -# else -# if SIZEOF_SHORT == 4 - typedef short int32_t; - typedef unsigned short uint32_t; -# else -# error Must have a form of 32-bit integer -# endif -# endif -#endif - -#if SIZEOF_INT == 2 - typedef int int16_t; - typedef unsigned int uint16_t; -#else -# if SIZEOF_LONG == 2 - typedef long int16_t; - typedef unsigned long uint16_t; -# else -# if SIZEOF_SHORT == 2 - typedef short int16_t; - typedef unsigned short uint16_t; -# else -# error Must have a form of 16-bit integer -# endif -# endif -#endif - -typedef signed char int8_t; -typedef unsigned char uint8_t; diff --git a/buildlib/makefile.in b/buildlib/makefile.in index 756565f40..66144cfc6 100644 --- a/buildlib/makefile.in +++ b/buildlib/makefile.in @@ -31,11 +31,6 @@ maintainer-clean dist-clean pristine sanity distclean: .PHONY: dirs dirs: $(MAKE) -C $(SRCDIR) -f Makefile $@ -ifeq ($(HAVE_C9X),yes) - @rm -f include/inttypes.h > /dev/null 2>&1 -else - @cp -p $(SRCDIR)/buildlib/inttypes.h.in include/inttypes.h -endif ifeq ($(HAVE_STATVFS),yes) @rm -f include/statvfs.h > /dev/null 2>&1 else diff --git a/buildlib/manpage.mak b/buildlib/manpage.mak index 6cdf73ccf..063841d86 100644 --- a/buildlib/manpage.mak +++ b/buildlib/manpage.mak @@ -14,7 +14,8 @@ LOCAL := manpage-$(firstword $(SOURCE)) $(LOCAL)-LIST := $(addprefix $(DOC)/,$(SOURCE)) # Install generation hooks -doc: $($(LOCAL)-LIST) +doc: manpages +manpages: $($(LOCAL)-LIST) veryclean: veryclean/$(LOCAL) MKDIRS += $(DOC) diff --git a/buildlib/mkChangeLog b/buildlib/mkChangeLog deleted file mode 100755 index 4164ec9d5..000000000 --- a/buildlib/mkChangeLog +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh - -NAMES="`sed -ne 's/'\''/'\''\\\\'\'''\''/g; - s/^.*CVS:\([^ ]\+\) \([^<]\+\) <\([^>]*\)>/\ - -u '\''\1:\2:\3'\''/gp' AUTHORS`" -OPTIONS="-l 78" - -# Generate the standard ChangeLog -echo CVSIGNORE=po rcs2log $OPTIONS $NAMES -eval CVSIGNORE=po rcs2log $OPTIONS $NAMES >> ChangeLog - -# Generate the po ChangeLog -#echo rcs2log $OPTIONS $NAMES po -#eval rcs2log $OPTIONS $NAMES po >> po/ChangeLog diff --git a/buildlib/po4a_manpage.mak b/buildlib/po4a_manpage.mak index 404bb57a5..09eca0ec2 100644 --- a/buildlib/po4a_manpage.mak +++ b/buildlib/po4a_manpage.mak @@ -13,22 +13,28 @@ SOURCE = $(patsubst %.xml,%,$(wildcard *.$(LC).?.xml)) INCLUDES = apt.ent apt-verbatim.ent +manpages: + # Do not use XMLTO, build the manpages directly with XSLTPROC ifdef XSLTPROC -STYLESHEET=../manpage-style.xsl +STYLESHEET=manpage-style.xsl LOCAL := po4a-manpage-$(firstword $(SOURCE)) $(LOCAL)-LIST := $(SOURCE) # Install generation hooks -doc: $($(LOCAL)-LIST) +manpages: $($(LOCAL)-LIST) veryclean: veryclean/$(LOCAL) apt-verbatim.ent: ../apt-verbatim.ent cp ../apt-verbatim.ent . -$($(LOCAL)-LIST) :: % : %.xml $(INCLUDES) +manpage-style.xsl: ../manpage-style.xsl + sed "// i\ +" ../manpage-style.xsl > manpage-style.xsl + +$($(LOCAL)-LIST) :: % : %.xml $(STYLESHEET) $(INCLUDES) echo Creating man page $@ $(XSLTPROC) -o $@ $(STYLESHEET) $< || exit 200 # why xsltproc doesn't respect the -o flag here??? 
test -f $(subst .$(LC),,$@) || echo FIXME: xsltproc respect the -o flag now, workaround can be removed diff --git a/buildlib/podomain.mak b/buildlib/podomain.mak index 511a5cae2..cca7d55be 100644 --- a/buildlib/podomain.mak +++ b/buildlib/podomain.mak @@ -12,9 +12,9 @@ endif MKDIRS += $(PO_DOMAINS)/$(MY_DOMAIN) $(PO_DOMAINS)/$(MY_DOMAIN)/$(LOCAL).$(TYPE)list: SRC := $(addprefix $(SUBDIR)/,$(SOURCE)) -$(PO_DOMAINS)/$(MY_DOMAIN)/$(LOCAL).$(TYPE)list: makefile +$(PO_DOMAINS)/$(MY_DOMAIN)/$(LOCAL).$(TYPE)list: makefile dirs (echo $(SRC) | xargs -n1 echo) > $@ -binary program clean: $(PO_DOMAINS)/$(MY_DOMAIN)/$(LOCAL).$(TYPE)list +startup binary program clean update-po: $(PO_DOMAINS)/$(MY_DOMAIN)/$(LOCAL).$(TYPE)list veryclean: veryclean/$(LOCAL) veryclean/po/$(LOCAL): LIST := $(PO_DOMAINS)/$(MY_DOMAIN)/$(LOCAL).$(TYPE)list diff --git a/buildlib/sizetable b/buildlib/sizetable deleted file mode 100644 index 372ddd091..000000000 --- a/buildlib/sizetable +++ /dev/null @@ -1,25 +0,0 @@ -# -# This file lists common architectures for cross-compilation (CPUs, not -# OSs), and the endian-ness and relative type sizes. It is not needed for -# native compilation. -# -# If you wish to cross-compile APT, and your architecture is not listed -# here, you should add it, and submit it by email to the APT team at -# . -# -# This is used primarily for the MD5 algorithm. -# The format is:- -# CPU endian sizeof: char, int, short, long -i386 little 1 4 2 4 -amd64 little 1 4 2 8 -armeb big 1 4 2 4 -arm little 1 4 2 4 -alpha little 1 4 2 8 -mipsel little 1 4 2 4 -sparc big 1 4 2 4 -sparc64 big 1 4 2 8 -m68k big 1 4 2 4 -powerpc big 1 4 2 4 -mips big 1 4 2 4 -hppa big 1 4 2 4 -m32r big 1 4 2 4 diff --git a/cmdline/apt-cache.cc b/cmdline/apt-cache.cc index 94654ffd4..ce869581b 100644 --- a/cmdline/apt-cache.cc +++ b/cmdline/apt-cache.cc @@ -1676,7 +1676,7 @@ bool GenCaches(CommandLine &Cmd) /* */ bool ShowHelp(CommandLine &Cmd) { - ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION, + ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, COMMON_ARCH,__DATE__,__TIME__); if (_config->FindB("version") == true) diff --git a/cmdline/apt-cdrom.cc b/cmdline/apt-cdrom.cc index 0017d954e..2551f4916 100644 --- a/cmdline/apt-cdrom.cc +++ b/cmdline/apt-cdrom.cc @@ -195,7 +195,7 @@ bool DoIdent(CommandLine &) /* */ int ShowHelp() { - ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION, + ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, COMMON_ARCH,__DATE__,__TIME__); if (_config->FindB("version") == true) return 0; diff --git a/cmdline/apt-config.cc b/cmdline/apt-config.cc index 47bedfe3f..397ce32df 100644 --- a/cmdline/apt-config.cc +++ b/cmdline/apt-config.cc @@ -63,7 +63,13 @@ bool DoShell(CommandLine &CmdL) /* */ bool DoDump(CommandLine &CmdL) { - _config->Dump(cout); + bool const empty = _config->FindB("APT::Config::Dump::EmptyValue", true); + std::string const format = _config->Find("APT::Config::Dump::Format", "%f \"%v\";\n"); + if (CmdL.FileSize() == 1) + _config->Dump(cout, NULL, format.c_str(), empty); + else + for (const char **I = CmdL.FileList + 1; *I != 0; ++I) + _config->Dump(cout, *I, format.c_str(), empty); return true; } /*}}}*/ @@ -72,7 +78,7 @@ bool DoDump(CommandLine &CmdL) /* */ int ShowHelp() { - ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION, + ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, COMMON_ARCH,__DATE__,__TIME__); if (_config->FindB("version") == true) return 0; @@ 
-100,6 +106,8 @@ int main(int argc,const char *argv[]) /*{{{*/ {'v',"version","version",0}, {'c',"config-file",0,CommandLine::ConfigFile}, {'o',"option",0,CommandLine::ArbItem}, + {0,"empty","APT::Config::Dump::EmptyValue",CommandLine::Boolean}, + {0,"format","APT::Config::Dump::Format",CommandLine::HasArg}, {0,0,0,0}}; CommandLine::Dispatch Cmds[] = {{"shell",&DoShell}, {"dump",&DoDump}, diff --git a/cmdline/apt-dump-solver.cc b/cmdline/apt-dump-solver.cc index e82e15c6e..aa16b1271 100644 --- a/cmdline/apt-dump-solver.cc +++ b/cmdline/apt-dump-solver.cc @@ -21,7 +21,7 @@ bool ShowHelp() { std::cout << - PACKAGE " " VERSION " for " COMMON_ARCH " compiled on " __DATE__ " " __TIME__ << std::endl << + PACKAGE " " PACKAGE_VERSION " for " COMMON_ARCH " compiled on " __DATE__ " " __TIME__ << std::endl << "Usage: apt-dump-resolver\n" "\n" "apt-dump-resolver is a dummy solver who just dumps its input to the\n" diff --git a/cmdline/apt-extracttemplates.cc b/cmdline/apt-extracttemplates.cc index dc4c110a1..8fe15fdf9 100644 --- a/cmdline/apt-extracttemplates.cc +++ b/cmdline/apt-extracttemplates.cc @@ -36,11 +36,13 @@ #include #include #include +#include + #include -#include -#include #include "apt-extracttemplates.h" + +#include /*}}}*/ using namespace std; @@ -224,7 +226,7 @@ bool DebFile::ParseInfo() /* */ int ShowHelp(void) { - ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION, + ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, COMMON_ARCH,__DATE__,__TIME__); if (_config->FindB("version") == true) diff --git a/cmdline/apt-get.cc b/cmdline/apt-get.cc index f4ad75d1c..d4c7f4200 100644 --- a/cmdline/apt-get.cc +++ b/cmdline/apt-get.cc @@ -254,6 +254,9 @@ bool ShowList(ostream &out,string Title,string List,string VersionsList) */ void ShowBroken(ostream &out,CacheFile &Cache,bool Now) { + if (Cache->BrokenCount() == 0) + return; + out << _("The following packages have unmet dependencies:") << endl; for (unsigned J = 0; J < Cache->Head().PackageCount; J++) { @@ -1415,7 +1418,7 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true, "all files have been overwritten by other packages:", "The following packages disappeared from your system as\n" "all files have been overwritten by other packages:", disappearedPkgs.size()), disappear, ""); - c0out << _("Note: This is done automatic and on purpose by dpkg.") << std::endl; + c0out << _("Note: This is done automatically and on purpose by dpkg.") << std::endl; return true; } @@ -1679,10 +1682,13 @@ bool DoUpdate(CommandLine &CmdL) ListUpdate(Stat, *List); // Rebuild the cache. - pkgCacheFile::RemoveCaches(); - if (Cache.BuildCaches() == false) - return false; - + if (_config->FindB("pkgCacheFile::Generate", true) == true) + { + pkgCacheFile::RemoveCaches(); + if (Cache.BuildCaches() == false) + return false; + } + return true; } /*}}}*/ @@ -1711,12 +1717,13 @@ bool DoAutomaticRemove(CacheFile &Cache) bool smallList = (hideAutoRemove == false && strcasecmp(_config->Find("APT::Get::HideAutoRemove","").c_str(),"small") == 0); - string autoremovelist, autoremoveversions; unsigned long autoRemoveCount = 0; APT::PackageSet tooMuch; + APT::PackageList autoRemoveList; // look over the cache to see what can be removed - for (pkgCache::PkgIterator Pkg = Cache->PkgBegin(); ! 
Pkg.end(); ++Pkg) + for (unsigned J = 0; J < Cache->Head().PackageCount; ++J) { + pkgCache::PkgIterator Pkg(Cache,Cache.List[J]); if (Cache[Pkg].Garbage) { if(Pkg.CurrentVer() != 0 || Cache[Pkg].Install()) @@ -1733,6 +1740,8 @@ bool DoAutomaticRemove(CacheFile &Cache) } else { + if (hideAutoRemove == false && Cache[Pkg].Delete() == false) + autoRemoveList.insert(Pkg); // if the package is a new install and already garbage we don't need to // install it in the first place, so nuke it instead of show it if (Cache[Pkg].Install() == true && Pkg.CurrentVer() == 0) @@ -1742,16 +1751,8 @@ bool DoAutomaticRemove(CacheFile &Cache) Cache->MarkDelete(Pkg, false); } // only show stuff in the list that is not yet marked for removal - else if(hideAutoRemove == false && Cache[Pkg].Delete() == false) - { + else if(hideAutoRemove == false && Cache[Pkg].Delete() == false) ++autoRemoveCount; - // we don't need to fill the strings if we don't need them - if (smallList == false) - { - autoremovelist += Pkg.FullName(true) + " "; - autoremoveversions += string(Cache[Pkg].CandVersion) + "\n"; - } - } } } } @@ -1786,14 +1787,7 @@ bool DoAutomaticRemove(CacheFile &Cache) std::clog << "Save " << Pkg << " as another installed garbage package depends on it" << std::endl; Cache->MarkInstall(Pkg, false); if (hideAutoRemove == false) - { ++autoRemoveCount; - if (smallList == false) - { - autoremovelist += Pkg.FullName(true) + " "; - autoremoveversions += string(Cache[Pkg].CandVersion) + "\n"; - } - } tooMuch.erase(Pkg); Changed = true; break; @@ -1803,6 +1797,18 @@ bool DoAutomaticRemove(CacheFile &Cache) } while (Changed == true); } + std::string autoremovelist, autoremoveversions; + if (smallList == false && autoRemoveCount != 0) + { + for (APT::PackageList::const_iterator Pkg = autoRemoveList.begin(); Pkg != autoRemoveList.end(); ++Pkg) + { + if (Cache[Pkg].Garbage == false) + continue; + autoremovelist += Pkg.FullName(true) + " "; + autoremoveversions += string(Cache[Pkg].CandVersion) + "\n"; + } + } + // Now see if we had destroyed anything (if we had done anything) if (Cache->BrokenCount() != 0) { @@ -1826,7 +1832,7 @@ bool DoAutomaticRemove(CacheFile &Cache) else ioprintf(c1out, P_("%lu package was automatically installed and is no longer required.\n", "%lu packages were automatically installed and are no longer required.\n", autoRemoveCount), autoRemoveCount); - c1out << _("Use 'apt-get autoremove' to remove them.") << std::endl; + c1out << P_("Use 'apt-get autoremove' to remove it.", "Use 'apt-get autoremove' to remove them.", autoRemoveCount) << std::endl; } return true; } @@ -2356,6 +2362,8 @@ bool DoDownload(CommandLine &CmdL) pkgRecords Recs(Cache); pkgSourceList *SrcList = Cache.GetSourceList(); + bool gotAll = true; + for (APT::VersionList::const_iterator Ver = verset.begin(); Ver != verset.end(); ++Ver) @@ -2366,11 +2374,19 @@ bool DoDownload(CommandLine &CmdL) pkgRecords::Parser &rec=Recs.Lookup(Ver.FileList()); pkgCache::VerFileIterator Vf = Ver.FileList(); if (Vf.end() == true) - return _error->Error("Can not find VerFile"); + { + _error->Error("Can not find VerFile for %s in version %s", Pkg.FullName().c_str(), Ver.VerStr()); + gotAll = false; + continue; + } pkgCache::PkgFileIterator F = Vf.File(); pkgIndexFile *index; if(SrcList->FindIndex(F, index) == false) - return _error->Error("FindIndex failed"); + { + _error->Error(_("Can't find a source to download version '%s' of '%s'"), Ver.VerStr(), Pkg.FullName().c_str()); + gotAll = false; + continue; + } string uri = index->ArchiveURI(rec.FileName()); 
strprintf(descr, _("Downloading %s %s"), Pkg.Name(), Ver.VerStr()); // get the most appropriate hash @@ -2386,6 +2402,8 @@ bool DoDownload(CommandLine &CmdL) // get the file new pkgAcqFile(&Fetcher, uri, hash.toStr(), (*Ver)->Size, descr, Pkg.Name(), "."); } + if (gotAll == false) + return false; // Just print out the uris and exit if the --print-uris flag was used if (_config->FindB("APT::Get::Print-URIs") == true) @@ -2493,7 +2511,7 @@ bool DoSource(CommandLine &CmdL) Src.c_str(), vcs.c_str(), uri.c_str()); if(vcs == "Bzr") ioprintf(c1out,_("Please use:\n" - "bzr get %s\n" + "bzr branch %s\n" "to retrieve the latest (possibly unreleased) " "updates to the package.\n"), uri.c_str()); @@ -2786,8 +2804,18 @@ bool DoBuildDep(CommandLine &CmdL) // Process the build-dependencies vector BuildDeps; - if (Last->BuildDepends(BuildDeps, _config->FindB("APT::Get::Arch-Only", false), StripMultiArch) == false) - return _error->Error(_("Unable to get build-dependency information for %s"),Src.c_str()); + // FIXME: Can't specify architecture to use for [wildcard] matching, so switch default arch temporary + if (hostArch.empty() == false) + { + std::string nativeArch = _config->Find("APT::Architecture"); + _config->Set("APT::Architecture", hostArch); + bool Success = Last->BuildDepends(BuildDeps, _config->FindB("APT::Get::Arch-Only", false), StripMultiArch); + _config->Set("APT::Architecture", nativeArch); + if (Success == false) + return _error->Error(_("Unable to get build-dependency information for %s"),Src.c_str()); + } + else if (Last->BuildDepends(BuildDeps, _config->FindB("APT::Get::Arch-Only", false), StripMultiArch) == false) + return _error->Error(_("Unable to get build-dependency information for %s"),Src.c_str()); // Also ensure that build-essential packages are present Configuration::Item const *Opts = _config->Tree("APT::Build-Essential"); @@ -2864,7 +2892,7 @@ bool DoBuildDep(CommandLine &CmdL) pkgCache::PkgIterator Pkg; // Cross-Building? 
- if (StripMultiArch == false) + if (StripMultiArch == false && D->Type != pkgSrcRecords::Parser::BuildDependIndep) { size_t const colon = D->Package.find(":"); if (colon != string::npos && @@ -2873,39 +2901,48 @@ bool DoBuildDep(CommandLine &CmdL) else Pkg = Cache->FindPkg(D->Package); - // We need to decide if host or build arch, so find a version we can look at - pkgCache::VerIterator Ver; - // a bad version either is invalid or doesn't satify dependency - #define BADVER(Ver) Ver.end() == true || \ - (Ver.end() == false && D->Version.empty() == false && \ - Cache->VS().CheckDep(Ver.VerStr(),D->Op,D->Version.c_str()) == false) + #define BADVER(Ver) (Ver.end() == true || \ + (D->Version.empty() == false && \ + Cache->VS().CheckDep(Ver.VerStr(),D->Op,D->Version.c_str()) == false)) + APT::VersionList verlist; if (Pkg.end() == false) { - Ver = (*Cache)[Pkg].InstVerIter(*Cache); - if (BADVER(Ver)) - Ver = (*Cache)[Pkg].CandidateVerIter(*Cache); + pkgCache::VerIterator Ver = (*Cache)[Pkg].InstVerIter(*Cache); + if (BADVER(Ver) == false) + verlist.insert(Ver); + Ver = (*Cache)[Pkg].CandidateVerIter(*Cache); + if (BADVER(Ver) == false) + verlist.insert(Ver); } - if (BADVER(Ver)) + if (verlist.empty() == true) { pkgCache::PkgIterator HostPkg = Cache->FindPkg(D->Package, hostArch); if (HostPkg.end() == false) { - Ver = (*Cache)[HostPkg].InstVerIter(*Cache); - if (BADVER(Ver)) - Ver = (*Cache)[HostPkg].CandidateVerIter(*Cache); + pkgCache::VerIterator Ver = (*Cache)[HostPkg].InstVerIter(*Cache); + if (BADVER(Ver) == false) + verlist.insert(Ver); + Ver = (*Cache)[HostPkg].CandidateVerIter(*Cache); + if (BADVER(Ver) == false) + verlist.insert(Ver); } } - if ((BADVER(Ver)) == false) + #undef BADVER + + string forbidden; + // We need to decide if host or build arch, so find a version we can look at + APT::VersionList::const_iterator Ver = verlist.begin(); + for (; Ver != verlist.end(); ++Ver) { - string forbidden; + forbidden.clear(); if (Ver->MultiArch == pkgCache::Version::None || Ver->MultiArch == pkgCache::Version::All) { if (colon == string::npos) - { Pkg = Ver.ParentPkg().Group().FindPkg(hostArch); - } + else if (strcmp(D->Package.c_str() + colon, ":any") == 0) + forbidden = "Multi-Arch: none"; } else if (Ver->MultiArch == pkgCache::Version::Same) { @@ -2937,21 +2974,32 @@ bool DoBuildDep(CommandLine &CmdL) } // native gets buildArch } + if (forbidden.empty() == false) { if (_config->FindB("Debug::BuildDeps",false) == true) - cout << " :any is not allowed from M-A: same package " << (*D).Package << endl; + cout << D->Package.substr(colon, string::npos) << " is not allowed from " << forbidden << " package " << (*D).Package << " (" << Ver.VerStr() << ")" << endl; + continue; + } + + //we found a good version + break; + } + if (Ver == verlist.end()) + { + if (_config->FindB("Debug::BuildDeps",false) == true) + cout << " No multiarch info as we have no satisfying installed nor candidate for " << D->Package << " on build or host arch" << endl; + + if (forbidden.empty() == false) + { if (hasAlternatives) continue; return _error->Error(_("%s dependency for %s can't be satisfied " "because %s is not allowed on '%s' packages"), Last->BuildDepType(D->Type), Src.c_str(), - D->Package.c_str(), "Multi-Arch: same"); + D->Package.c_str(), forbidden.c_str()); } } - else if (_config->FindB("Debug::BuildDeps",false) == true) - cout << " No multiarch info as we have no satisfying installed nor candidate for " << D->Package << " on build or host arch" << endl; - #undef BADVER } else Pkg = Cache->FindPkg(D->Package); 
@@ -3228,9 +3276,13 @@ bool DoChangelog(CommandLine &CmdL) pkgAcquire Fetcher; if (_config->FindB("APT::Get::Print-URIs", false) == true) + { + bool Success = true; for (APT::VersionList::const_iterator Ver = verset.begin(); Ver != verset.end(); ++Ver) - return DownloadChangelog(Cache, Fetcher, Ver, ""); + Success &= DownloadChangelog(Cache, Fetcher, Ver, ""); + return Success; + } AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0)); Fetcher.Setup(&Stat); @@ -3295,7 +3347,7 @@ bool DoMoo(CommandLine &CmdL) /* */ bool ShowHelp(CommandLine &CmdL) { - ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION, + ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, COMMON_ARCH,__DATE__,__TIME__); if (_config->FindB("version") == true) diff --git a/cmdline/apt-internal-solver.cc b/cmdline/apt-internal-solver.cc index 1b636e4d5..aef7636e9 100644 --- a/cmdline/apt-internal-solver.cc +++ b/cmdline/apt-internal-solver.cc @@ -7,6 +7,8 @@ ##################################################################### */ /*}}}*/ // Include Files /*{{{*/ +#include + #include #include #include @@ -18,33 +20,30 @@ #include #include -#include -#include - #include #include + +#include /*}}}*/ // ShowHelp - Show a help screen /*{{{*/ // --------------------------------------------------------------------- /* */ bool ShowHelp(CommandLine &CmdL) { - ioprintf(std::cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION, + ioprintf(std::cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, COMMON_ARCH,__DATE__,__TIME__); std::cout << - _("Usage: apt-internal-resolver\n" + _("Usage: apt-internal-solver\n" "\n" - "apt-internal-resolver is an interface to use the current internal\n" + "apt-internal-solver is an interface to use the current internal\n" "like an external resolver for the APT family for debugging or alike\n" "\n" "Options:\n" " -h This help text.\n" " -q Loggable output - no progress indicator\n" " -c=? Read this configuration file\n" - " -o=? Set an arbitrary configuration option, eg -o dir::cache=/tmp\n" - "apt.conf(5) manual pages for more information and options.\n" - " This APT has Super Cow Powers.\n"); + " -o=? Set an arbitrary configuration option, eg -o dir::cache=/tmp\n"); return true; } /*}}}*/ diff --git a/cmdline/apt-key b/cmdline/apt-key index 97d6e0323..c184e3e75 100755 --- a/cmdline/apt-key +++ b/cmdline/apt-key @@ -11,7 +11,8 @@ GPG_CMD="gpg --ignore-time-conflict --no-options --no-default-keyring --secret-k if [ "$(id -u)" -eq 0 ]; then # we could use a tmpfile here too, but creation of this tends to be time-consuming - GPG_CMD="$GPG_CMD --trustdb-name /etc/apt/trustdb.gpg" + eval $(apt-config shell TRUSTDBDIR Dir::Etc/d) + GPG_CMD="$GPG_CMD --trustdb-name ${TRUSTDBDIR}/trustdb.gpg" fi GPG="$GPG_CMD" diff --git a/cmdline/apt-mark.cc b/cmdline/apt-mark.cc index ef4331714..2a093c55a 100644 --- a/cmdline/apt-mark.cc +++ b/cmdline/apt-mark.cc @@ -360,14 +360,14 @@ bool ShowHold(CommandLine &CmdL) /* */ bool ShowHelp(CommandLine &CmdL) { - ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION, + ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, COMMON_ARCH,__DATE__,__TIME__); cout << _("Usage: apt-mark [options] {auto|manual} pkg1 [pkg2 ...]\n" "\n" "apt-mark is a simple command line interface for marking packages\n" - "as manual or automatical installed. It can also list marks.\n" + "as manually or automatically installed. 
It can also list marks.\n" "\n" "Commands:\n" " auto - Mark the given packages as automatically installed\n" diff --git a/cmdline/apt-sortpkgs.cc b/cmdline/apt-sortpkgs.cc index 20ae14f2a..46989044e 100644 --- a/cmdline/apt-sortpkgs.cc +++ b/cmdline/apt-sortpkgs.cc @@ -144,7 +144,7 @@ bool DoIt(string InFile) /* */ int ShowHelp() { - ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION, + ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, COMMON_ARCH,__DATE__,__TIME__); if (_config->FindB("version") == true) return 0; diff --git a/cmdline/makefile b/cmdline/makefile index 07e9eb8ca..f3712232a 100644 --- a/cmdline/makefile +++ b/cmdline/makefile @@ -33,20 +33,6 @@ LIB_MAKES = apt-pkg/makefile SOURCE = apt-cdrom.cc include $(PROGRAM_H) -# The apt-sortpkgs program -PROGRAM=apt-sortpkgs -SLIBS = -lapt-pkg $(INTLLIBS) -LIB_MAKES = apt-pkg/makefile -SOURCE = apt-sortpkgs.cc -include $(PROGRAM_H) - -# The apt-extracttemplates program -PROGRAM=apt-extracttemplates -SLIBS = -lapt-pkg -lapt-inst $(INTLLIBS) -LIB_MAKES = apt-pkg/makefile -SOURCE = apt-extracttemplates.cc -include $(PROGRAM_H) - # The apt-key program SOURCE=apt-key TO=$(BIN) @@ -66,6 +52,25 @@ include $(PROGRAM_H) #TARGET=program #include $(COPY_H) +# +# the following programs are shipped in apt-utils +# +APT_DOMAIN:=apt-utils + +# The apt-sortpkgs program +PROGRAM=apt-sortpkgs +SLIBS = -lapt-pkg $(INTLLIBS) +LIB_MAKES = apt-pkg/makefile +SOURCE = apt-sortpkgs.cc +include $(PROGRAM_H) + +# The apt-extracttemplates program +PROGRAM=apt-extracttemplates +SLIBS = -lapt-pkg -lapt-inst $(INTLLIBS) +LIB_MAKES = apt-pkg/makefile +SOURCE = apt-extracttemplates.cc +include $(PROGRAM_H) + # The internal solver acting as an external PROGRAM=apt-internal-solver SLIBS = -lapt-pkg $(INTLLIBS) diff --git a/configure.in b/configure.in index 065afccf1..56eeaccbe 100644 --- a/configure.in +++ b/configure.in @@ -17,11 +17,15 @@ AC_INIT(configure.in) AC_CONFIG_AUX_DIR(buildlib) AC_CONFIG_HEADER(include/config.h:buildlib/config.h.in include/apti18n.h:buildlib/apti18n.h.in) -dnl -- SET THIS TO THE RELEASE VERSION -- -AC_DEFINE_UNQUOTED(VERSION,"0.8.16~exp9") PACKAGE="apt" +PACKAGE_VERSION="0.9.7.1" +PACKAGE_MAIL="APT Development Team " AC_DEFINE_UNQUOTED(PACKAGE,"$PACKAGE") +AC_DEFINE_UNQUOTED(PACKAGE_VERSION,"$PACKAGE_VERSION") +AC_DEFINE_UNQUOTED(PACKAGE_MAIL,"$PACKAGE_MAIL") AC_SUBST(PACKAGE) +AC_SUBST(PACKAGE_VERSION) +AC_SUBST(PACKAGE_MAIL) dnl Check the archs, we want the target type. AC_CANONICAL_SYSTEM @@ -87,9 +91,21 @@ AC_CHECK_LIB(curl, curl_easy_init, AC_SUBST(BDBLIB) +HAVE_ZLIB=no AC_CHECK_LIB(z, gzopen, - [AC_CHECK_HEADER(zlib.h, [], AC_MSG_ERROR([failed: zlib.h not found]))], + [AC_CHECK_HEADER(zlib.h, [HAVE_ZLIB=yes], AC_MSG_ERROR([failed: zlib.h not found]))], AC_MSG_ERROR([failed: Need libz])) +AC_SUBST(HAVE_ZLIB) +if test "x$HAVE_ZLIB" = "xyes"; then + AC_DEFINE(HAVE_ZLIB) +fi + +HAVE_BZ2=no +AC_CHECK_LIB(bz2, BZ2_bzopen,[AC_CHECK_HEADER(bzlib.h, [HAVE_BZ2=yes], [])], []) +AC_SUBST(HAVE_BZ2) +if test "x$HAVE_BZ2" = "xyes"; then + AC_DEFINE(HAVE_BZ2) +fi dnl Converts the ARCH to be something singular for this general CPU family dnl This is often the dpkg architecture string. 
@@ -103,12 +119,6 @@ fi AC_MSG_RESULT($archset) AC_DEFINE_UNQUOTED(COMMON_ARCH,"$archset") -dnl We use C99 types if at all possible -AC_CACHE_CHECK([for C99 integer types],apt_cv_c9x_ints,[ - AC_TRY_COMPILE([#include ], - [uint8_t Foo1;uint16_t Foo2;uint32_t Foo3;], - apt_cv_c9x_ints=yes,apt_cv_c9x_ints=no)]) - dnl Single Unix Spec statvfs AC_CHECK_FUNC(statvfs,[HAVE_STATVFS=yes]) AC_SUBST(HAVE_STATVFS) @@ -128,46 +138,9 @@ dnl We should use the real timegm function if we have it. AC_CHECK_FUNC(timegm,AC_DEFINE(HAVE_TIMEGM)) AC_SUBST(HAVE_TIMEGM) -dnl Check the sizes etc. of the architecture -dnl This is stupid, it should just use the AC macros like it does below -dnl Cross compilers can either get a real C library or preload the cache -dnl with their size values. -changequote(,) -archline="`awk \" ! /^#|^\\\$/ {if (match(\\\"$archset\\\",\\\$1)) {print; exit}}\" $srcdir/buildlib/sizetable | cut -f 2- -d ' '`" -if test "x$archline" != "x"; then - changequote([,]) - set $archline - if test "$1" = "little"; then - ac_cv_c_bigendian=no - else - ac_cv_c_bigendian=yes - fi - size_char=$2 - size_int=$3 - size_short=$4 - size_long=$5 -fi - -dnl I wonder what AC_C_BIGENDIAN does if you cross compile... -dnl This is probably bogus, as above we only care if we have to build our own -dnl C9x types. -if test "$cross_compiling" = "yes" -a "x$archline" = "x"; then - AC_MSG_ERROR(When cross compiling, architecture must be present in sizetable) -fi +dnl Check the architecture AC_C_BIGENDIAN -dnl We do not need this if we have inttypes! -HAVE_C9X=yes -if test x"$apt_cv_c9x_ints" = x"no"; then - AC_CHECK_SIZEOF(char,$size_char) - AC_CHECK_SIZEOF(int,$size_int) - AC_CHECK_SIZEOF(short,$size_short) - AC_CHECK_SIZEOF(long,$size_long) - - HAVE_C9X= - AC_SUBST(HAVE_C9X) -fi - dnl HP-UX sux.. 
AC_MSG_CHECKING(for missing socklen_t) AC_EGREP_HEADER(socklen_t, sys/socket.h,[AC_MSG_RESULT(no)],[ @@ -218,4 +191,4 @@ fi AC_SUBST(USE_NLS) AC_PATH_PROG(BASH, bash) -AC_OUTPUT(environment.mak:buildlib/environment.mak.in makefile:buildlib/makefile.in doc/Doxyfile,make -s dirs) +AC_OUTPUT(environment.mak:buildlib/environment.mak.in makefile:buildlib/makefile.in doc/Doxyfile:doc/Doxyfile.in,make -s dirs) diff --git a/debian/apt-utils.install b/debian/apt-utils.install index 8b1378917..cdbcb2f37 100644 --- a/debian/apt-utils.install +++ b/debian/apt-utils.install @@ -1 +1 @@ - +usr/share/locale/*/*/apt-utils.mo diff --git a/debian/apt-utils.links b/debian/apt-utils.links deleted file mode 100644 index 5bf138c4a..000000000 --- a/debian/apt-utils.links +++ /dev/null @@ -1 +0,0 @@ -usr/bin/apt-internal-solver usr/lib/apt/solvers/apt diff --git a/debian/changelog b/debian/changelog index 50d6ea3ce..7a8c63d04 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,13 +1,386 @@ -apt (0.8.16~exp15) unstable; urgency=low +apt (0.9.8~exp1) UNRELEASED; urgency=low + [ David Kalnischkies ] + * apt-pkg/contrib/strutl.cc: + - support \n and \r\n line endings in ReadMessages + + -- David Kalnischkies Mon, 09 Jul 2012 17:36:40 +0200 + +apt (0.9.7.1) unstable; urgency=low + + [ Program translation updates ] + * Bulgarian (Damyan Ivanov) (Closes: #678983) + * Hungarian (Gabor Kelemen) + * Italian (Milo Casagrande) + * Slovenian (Andrej Znidarsic) + * German (Holger Wansing) (Closes: #679314) + * Slovak (Ivan Masár) (Closes: #679448) + + [ David Kalnischkies ] + * cmdline/apt-internal-solver.cc, cmdline/apt-mark.cc: + - typo fixes and unfuzzy translations + * debian/control: + - libapt-{pkg,inst} packages should be in section 'libs' instead + of 'admin' as by ftp-master override request in #677596 + - demote debiandoc-sgml to Build-Depends-Indep + * doc/makefile: + - separate translation building of debiandoc from manpages + so that we don't need to build debiandoc for binary packages + + -- Michael Vogt Fri, 29 Jun 2012 14:26:32 +0200 + +apt (0.9.7) unstable; urgency=low + + [ Julian Andres Klode ] + * apt-pkg/contrib/mmap.cc: + - Fix the Fallback option to work correctly, by not calling + realloc() on a map mapped by mmap(), and by using malloc + and friends instead of new[]. + - Zero out the new memory allocated with realloc(). 
+ + [ Daniel Hartwig ] + * apt-pkg/pkgcachegen.cc: + - always reset _error->StackCount in MakeStatusCache (Closes: #677175) + + [ David Kalnischkies ] + * apt-pkg/deb/deblistparser.cc: + - ensure that mixed positive/negative architecture wildcards + are handled in the same way as dpkg handles them + - use PackageArchitectureMatchesSpecification filter + * apt-pkg/cachefilter.cc: + - add PackageArchitectureMatchesSpecification (Closes: #672603) + * apt-pkg/cacheset.cc: + - add PackageContainerInterface::FromGroup to support + architecture specifications with wildcards on the commandline + * apt-pkg/pkgcache.cc: + - do a string comparision for architecture checking in IsMultiArchImplicit + as 'unique' strings in the pkgcache aren't unique (Closes: #677454) + * buildlib/configure.mak: + - print a message detailing how to get config.guess and config.sub + in case they are not in /usr/share/misc (Closes: #677312) + * cmdline/apt-get.cc: + - print a friendly message in 'download' if a package can't be + downloaded (Closes: #677887) + + -- Michael Vogt Tue, 19 Jun 2012 16:42:43 +0200 + +apt (0.9.6) unstable; urgency=low + + [ David Kalnischkies ] + * apt-pkg/cdrom.cc: + - fix regression from 0.9.3 which dumped the main configuration + _config instead of the cdrom settings (Cnf) as identified and + tested by Milan Kupcevic, thanks! (Closes: #674100) + * cmdline/apt-get.cc: + - do not show 'list of broken packages' header if no package + is broken as it happens e.g. for external resolver errors + - print URIs for all changelogs in case of --print-uris, + thanks to Daniel Hartwig for the patch! (Closes: #674897) + - show 'bzr branch' as 'bzr get' is deprecated (LP: #1011032) + - check build-dep candidate if install is forbidden + * debian/apt-utils.links: + - the internal resolver 'apt' is now directly installed in + /usr/lib/apt/solvers, so don't instruct dh to create a broken link + * doc/apt-verbatim.ent: + - APT doesn't belong to the product 'Linux', so use 'APT' instead + as after all APT is a big suite of applications + * doc/examples/sources.list: + - use the codename instead of 'stable' in the examples sources.list + as we do in the manpage and as the debian-installer does + * doc/apt-get.8.xml: + - use apt-utils as package example instead of libc6 + * apt-pkg/contrib/cmdline.cc: + - apply patch from Daniel Hartwig to fix a segfault in case + the LongOpt is empty (Closes: #676331) + - fix segfault with empty LongOpt in --no-* branch + * ftparchive/apt-ftparchive.cc: + - default to putting the Contents-* files below $(SECTION) as apt-file + expects them there - thanks Martin-Éric Racine! (Closes: #675827) + * apt-pkg/deb/deblistparser.cc: + - set pkgCacheGen::Essential to "all" again (Closes: #675449) + * apt-pkg/algorithms.cc: + - force install only for one essential package out of a group + * apt-pkg/aptconfiguration.cc: + - if APT::Languages=none save "none" in allCodes so that the detected + configuration is cached as intended (Closes: #674690, LP: #1004947) + * apt-pkg/cacheiterators.h: + - add an IsMultiArchImplicit() method for Dep- and PrvIterator + + [ Justin B Rye ] + * doc/apt-cdrom.8.xml: + - replace CDROM with the proper CD-ROM in text + - correct disc vs. 
disk issues + * doc/apt-extracttemplates.1.xml: + - debconf is not DebConf + * doc/apt-get.8.xml: + - move dselect-upgrade below dist-upgrade + - review and fix spelling issues + * doc/apt-ftparchive.8.xml, doc/apt-config.8.xml, + doc/apt-key.8.xml, doc/apt-mark.8.xml, + doc/apt_preferences.5.xml, doc/apt-secure.8.xml, + doc/apt-sortpkgs.1.xml, sources.list.5.xml: + - review and fix typo, grammar and style issues + * doc/apt.conf.5.xml: + - review and fix typo, grammar and style issues + - rephrase APT::Immediate-Configuration and many others + + [ Sebastian Heinlein ] + * cmdline/apt-key: + - do not hardcode /etc but use Dir::Etc instead + + [ Robert Luberda ] + * Polish manpage translation update (Closes: #675603) + * doc/apt-mark.8.xml: + - in hold, the option name is --file not --filename + + [ Christian Perrier ] + * French program and manpage translation update + * Danish program translation by Joe Hansen. Closes: #675605 + + [ Thibaut Girka ] + * cmdline/apt-get.cc: + - complain correctly about :any build-dep on M-A:none packages + * apt-pkg/deb/deblistparser.cc: + - add support for arch-specific qualifiers in dependencies + + -- Michael Vogt Mon, 11 Jun 2012 16:21:53 +0200 + +apt (0.9.5.1) unstable; urgency=low + + [ David Kalnischkies ] + * apt-pkg/contrib/fileutl.cc: + - dup() given compressed fd in OpenDescriptor if AutoClose + is disabled as otherwise gzclose() and co will close it + * doc/*.xml: + - mark even more stuff as untranslateable and improve the + markup here and there (no real text change) + - use docbook DTD 4.5 instead of 4.2 to have valid docs + + [ Justin B Rye ] + * doc/*.xml: + - remove 'GNU/Linux' from 'Debian systems' strings as Debian + has more systems than just GNU/Linux nowadays + * doc/apt-cache.8.xml: + - fix a typo as well as adding missing literal markup + - three small rewordings for better english sentences + + -- Michael Vogt Thu, 24 May 2012 17:16:34 +0200 + +apt (0.9.5) unstable; urgency=low + + [ Chris Leick ] + * proofreading of the manpage pot + * German manpage translation update (Closes: #673294) + + [ David Kalnischkies ] + * buildlib/podomain.mak: + - ensure that all sources end up in the srclist so that we don't + forget to extract half of the translation strings + * buildlib/inttypes.h.in: + - remove inttypes.h compatibility as providing such a c99 types + compatibility conflicts with the usage of c99 type long long + * apt-pkg/contrib/mmap.cc: + - have a dummy SyncToFd around in case of ReadOnly access to a + compressed file as we otherwise on Close() do not delete[] the + char buffer but munmap() it… (Closes: #673815) + * debian/control: + - moving debiandoc-sgml to Build-Depends-Indep was one step too much + for the buildds as we still build two sgml files in arch:any + * debian/rules: + - move internal-solver as 'apt' to his friend dump-solver in + /usr/lib/apt/solvers to avoid writing a manpage for it + + -- Michael Vogt Tue, 22 May 2012 16:14:22 +0200 + +apt (0.9.4) unstable; urgency=low + + [ David Kalnischkies ] + * methods/http.cc: + - after many years of pointless discussions disable http/1.1 pipelining + by default as many webservers and proxies seem to be unable to conform + to specification must's (rfc2616 section 8.1.2.2) (LP: #996151) + - add spaces around PACKAGE_VERSION to fix FTBFS with -std=c++11 + * apt-pkg/pkgcachegen.cc: + - make IsDuplicatedDescription static so that it is really private + as we don't need a symbol for it as it is not in a header + * Makefile, buildlib/*.mak: + - reshuffle dependencies so that 
parallel building seems to work + - separate manpages from the rest of the doc building + * prepare-release: + - apt-inst version isn't apt versions, so don't override variable + * debian/rules: + - apt-utils packages manpages, so it should depend on build-doc + - make apt and apt-utils packages depend on manpages instead of full doc + * debian/control: + - move doxygen and debiandoc-sgml to Build-Depends-Indep as docs + are no longer build in the same target as the manpages + * apt-pkg/acquire-methods.cc: + - factor out into private Dequeue() to fix access to deleted pointer + * apt-pkg/contrib/fileutl.cc: + - ensure that we close compressed fds, wait for forks and such even if + the FileFd itself is set to not autoclose the given Fd + * cmdline/apt-get.cc: + - use the host architecture, not the build architecture for matching + of [architecture restrictions] in Build-Depends (Closes: #672927) + * doc/makefile: + - build manpages with the correct l10n.gentext.default.language setting + to get the correct section titles provided by docbook + * doc/po/de.po: + - updated german manpage translation by Chris Leick, thanks! + * apt-pkg/packagemanager.cc: + - do not run into loop on new-pre-depends-breaks (Closes: #673536) + * doc/*.xml: + - add a few translator notes and reword some paragraphs to ensure that + translators and users alike can better understand them (Closes: #669409) + - in mark all options with