From: Michael Vogt Date: Tue, 22 May 2012 14:07:49 +0000 (+0200) Subject: merged (and resolved a bunch of conflicts) of the debian-sid branch X-Git-Tag: 0.9.13.exp1ubuntu1~73 X-Git-Url: https://git.saurik.com/apt.git/commitdiff_plain/800694cbf33e206935dc9e6e8fa6e9a036b8efad?hp=-c merged (and resolved a bunch of conflicts) of the debian-sid branch --- 800694cbf33e206935dc9e6e8fa6e9a036b8efad diff --combined apt-pkg/deb/dpkgpm.cc index 653e14190,296426c80..7e937a85b --- a/apt-pkg/deb/dpkgpm.cc +++ b/apt-pkg/deb/dpkgpm.cc @@@ -29,6 -29,7 +29,6 @@@ #include #include #include -#include #include #include #include @@@ -424,7 -425,7 +424,7 @@@ void pkgDPkgPM::DoStdin(int master unsigned char input_buf[256] = {0,}; ssize_t len = read(0, input_buf, sizeof(input_buf)); if (len) - write(master, input_buf, len); + FileFd::Write(master, input_buf, len); else d->stdin_is_dev_null = true; } @@@ -450,7 -451,7 +450,7 @@@ void pkgDPkgPM::DoTerminalPty(int maste } if(len <= 0) return; - write(1, term_buf, len); + FileFd::Write(1, term_buf, len); if(d->term_out) fwrite(term_buf, len, sizeof(char), d->term_out); } @@@ -525,7 -526,7 +525,7 @@@ void pkgDPkgPM::ProcessDpkgStatusLine(i << ":" << s << endl; if(OutStatusFd > 0) - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); if (Debug == true) std::clog << "send: '" << status.str() << "'" << endl; @@@ -549,7 -550,7 +549,7 @@@ << ":" << list[3] << endl; if(OutStatusFd > 0) - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); if (Debug == true) std::clog << "send: '" << status.str() << "'" << endl; pkgFailures++; @@@ -563,7 -564,7 +563,7 @@@ << ":" << list[3] << endl; if(OutStatusFd > 0) - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); if (Debug == true) std::clog << "send: '" << status.str() << "'" << endl; return; @@@ -591,7 -592,7 +591,7 @@@ << ":" << s << endl; if(OutStatusFd > 0) - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); if (Debug == true) std::clog << "send: '" << status.str() << "'" << endl; } @@@ -862,6 -863,8 +862,8 @@@ static int racy_pselect(int nfds, fd_se */ bool pkgDPkgPM::Go(int OutStatusFd) { + pkgPackageManager::SigINTStop = false; + // Generate the base argument list for dpkg std::vector Args; unsigned long StartSize = 0; @@@ -907,7 -910,7 +909,7 @@@ dup2(nullfd, STDIN_FILENO); dup2(nullfd, STDOUT_FILENO); dup2(nullfd, STDERR_FILENO); - execv(Args[0], (char**) &Args[0]); + execvp(Args[0], (char**) &Args[0]); _error->WarningE("dpkgGo", "Can't detect if dpkg supports multi-arch!"); _exit(2); } @@@ -1053,7 -1056,8 +1055,8 @@@ } int fd[2]; - pipe(fd); + if (pipe(fd) != 0) + return _error->Errno("pipe","Failed to create IPC pipe to dpkg"); #define ADDARG(X) Args.push_back(X); Size += strlen(X) #define ADDARGC(X) Args.push_back(X); Size += sizeof(X) - 1 @@@ -1234,7 -1238,7 +1237,7 @@@ << (PackagesDone/float(PackagesTotal)*100.0) << ":" << _("Running dpkg") << endl; - write(OutStatusFd, status.str().c_str(), status.str().size()); + FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size()); } Child = ExecFork(); @@@ -1431,9 -1435,8 +1434,8 @@@ } void SigINT(int sig) { - if (_config->FindB("APT::Immediate-Configure-All",false)) - pkgPackageManager::SigINTStop = true; - } + 
pkgPackageManager::SigINTStop = true; + } /*}}}*/ // pkgDpkgPM::Reset - Dump the contents of the command list /*{{{*/ // --------------------------------------------------------------------- @@@ -1448,11 -1451,17 +1450,17 @@@ void pkgDPkgPM::Reset( /* */ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg) { + // If apport doesn't exist or isn't installed do nothing + // This e.g. prevents messages in 'universes' without apport + pkgCache::PkgIterator apportPkg = Cache.FindPkg("apport"); + if (apportPkg.end() == true || apportPkg->CurrentVer == 0) + return; + string pkgname, reportfile, srcpkgname, pkgver, arch; string::size_type pos; FILE *report; - if (_config->FindB("Dpkg::ApportFailureReport", false) == false) + if (_config->FindB("Dpkg::ApportFailureReport", true) == false) { std::clog << "configured to not write apport reports" << std::endl; return; @@@ -1479,49 -1488,18 +1487,49 @@@ } // do not report out-of-memory failures - if(strstr(errormsg, strerror(ENOMEM)) != NULL) { + if(strstr(errormsg, strerror(ENOMEM)) != NULL || + strstr(errormsg, "failed to allocate memory") != NULL) { std::clog << _("No apport report written because the error message indicates a out of memory error") << std::endl; return; } - // do not report dpkg I/O errors - // XXX - this message is localized, but this only matches the English version. This is better than nothing. - if(strstr(errormsg, "short read in buffer_copy (")) { - std::clog << _("No apport report written because the error message indicates a dpkg I/O error") << std::endl; + // do not report bugs regarding inaccessible local files + if(strstr(errormsg, strerror(ENOENT)) != NULL || + strstr(errormsg, "cannot access archive") != NULL) { + std::clog << _("No apport report written because the error message indicates an issue on the local system") << std::endl; + return; + } + + // do not report errors encountered when decompressing packages + if(strstr(errormsg, "--fsys-tarfile returned error exit status 2") != NULL) { + std::clog << _("No apport report written because the error message indicates an issue on the local system") << std::endl; return; } + // do not report dpkg I/O errors, this is a format string, so we compare + // the prefix and the suffix of the error with the dpkg error message + vector io_errors; + io_errors.push_back(string("failed to read on buffer copy for %s")); + io_errors.push_back(string("failed in write on buffer copy for %s")); + io_errors.push_back(string("short read on buffer copy for %s")); + + for (vector::iterator I = io_errors.begin(); I != io_errors.end(); I++) + { + vector list = VectorizeString(dgettext("dpkg", (*I).c_str()), '%'); + if (list.size() > 1) { + // we need to split %s, VectorizeString only allows char so we need + // to kill the "s" manually + if (list[1].size() > 1) { + list[1].erase(0, 1); + if(strstr(errormsg, list[0].c_str()) && + strstr(errormsg, list[1].c_str())) { + std::clog << _("No apport report written because the error message indicates a dpkg I/O error") << std::endl; + return; + } + } + } + } + // get the pkgname and reportfile pkgname = flNotDir(pkgpath); pos = pkgname.find('_'); @@@ -1566,7 -1544,7 +1574,7 @@@ if(strstr(strbuf,"Package:") == strbuf) { char pkgname[255], version[255]; - if(sscanf(strbuf, "Package: %s %s", pkgname, version) == 2) + if(sscanf(strbuf, "Package: %254s %254s", pkgname, version) == 2) if(strcmp(pkgver.c_str(), version) == 0) { fclose(report); @@@ -1608,24 -1586,6 +1616,24 @@@ fprintf(report, "DpkgTerminalLog:\n"); log = 
fopen(logfile_name.c_str(),"r"); if(log != NULL) + { + while( fgets(buf, sizeof(buf), log) != NULL) + fprintf(report, " %s", buf); + fprintf(report, " \n"); + fclose(log); + } + } + + // attach history log it if we have it + string histfile_name = _config->FindFile("Dir::Log::History"); + if (!histfile_name.empty()) + { + FILE *log = NULL; + char buf[1024]; + + fprintf(report, "DpkgHistoryLog:\n"); + log = fopen(histfile_name.c_str(),"r"); + if(log != NULL) { while( fgets(buf, sizeof(buf), log) != NULL) fprintf(report, " %s", buf); @@@ -1637,7 -1597,10 +1645,10 @@@ const char *ops_str[] = {"Install", "Configure","Remove","Purge"}; fprintf(report, "AptOrdering:\n"); for (vector::iterator I = List.begin(); I != List.end(); ++I) - fprintf(report, " %s: %s\n", (*I).Pkg.Name(), ops_str[(*I).Op]); + if ((*I).Pkg != NULL) + fprintf(report, " %s: %s\n", (*I).Pkg.Name(), ops_str[(*I).Op]); + else + fprintf(report, " %s: %s\n", "NULL", ops_str[(*I).Op]); // attach dmesg log (to learn about segfaults) if (FileExists("/bin/dmesg")) diff --combined apt-pkg/depcache.cc index fdfd1c713,2656e9b42..56194e244 --- a/apt-pkg/depcache.cc +++ b/apt-pkg/depcache.cc @@@ -871,7 -871,6 +871,7 @@@ bool pkgDepCache::IsDeleteOk(PkgIterato if (FromUser == false && Pkg->CurrentVer == 0) { StateCache &P = PkgState[Pkg->ID]; + // Status == 2 means this applies for new installs only if (P.InstallVer != 0 && P.Status == 2 && (P.Flags & Flag::Auto) != Flag::Auto) { if (DebugMarker == true) @@@ -964,6 -963,13 +964,13 @@@ struct CompareProviders else if ((B->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential) return true; } + if ((A->Flags & pkgCache::Flag::Important) != (B->Flags & pkgCache::Flag::Important)) + { + if ((A->Flags & pkgCache::Flag::Important) == pkgCache::Flag::Important) + return false; + else if ((B->Flags & pkgCache::Flag::Important) == pkgCache::Flag::Important) + return true; + } // higher priority seems like a good idea if (AV->Priority != BV->Priority) return AV->Priority < BV->Priority; @@@ -1146,9 -1152,8 +1153,8 @@@ bool pkgDepCache::MarkInstall(PkgIterat } /* This bit is for processing the possibilty of an install/upgrade - fixing the problem */ - if (Start->Type != Dep::DpkgBreaks && - (DepState[Start->ID] & DepCVer) == DepCVer) + fixing the problem for "positive" dependencies */ + if (Start.IsNegative() == false && (DepState[Start->ID] & DepCVer) == DepCVer) { APT::VersionList verlist; pkgCache::VerIterator Cand = PkgState[Start.TargetPkg()->ID].CandidateVerIter(*this); @@@ -1159,7 -1164,7 +1165,7 @@@ pkgCache::VerIterator V = Prv.OwnerVer(); pkgCache::VerIterator Cand = PkgState[Prv.OwnerPkg()->ID].CandidateVerIter(*this); if (Cand.end() == true || V != Cand || - VS().CheckDep(Cand.VerStr(), Start->CompareOp, Start.TargetVer()) == false) + VS().CheckDep(Prv.ProvideVersion(), Start->CompareOp, Start.TargetVer()) == false) continue; verlist.insert(Cand); } @@@ -1192,13 -1197,13 +1198,13 @@@ } continue; } - - /* For conflicts we just de-install the package and mark as auto, - Conflicts may not have or groups. For dpkg's Breaks we try to - upgrade the package. */ - if (Start.IsNegative() == true) + /* Negative dependencies have no or-group + If the dependency isn't versioned, we try if an upgrade might solve the problem. 
+ Otherwise we remove the offender if needed */ + else if (Start.IsNegative() == true && Start->Type != pkgCache::Dep::Obsoletes) { SPtrArray List = Start.AllTargets(); + pkgCache::PkgIterator TrgPkg = Start.TargetPkg(); for (Version **I = List; *I != 0; I++) { VerIterator Ver(*this,*I); @@@ -1209,15 -1214,17 +1215,17 @@@ if (PkgState[Pkg->ID].InstallVer == 0) continue; - if (PkgState[Pkg->ID].CandidateVer != *I && - Start->Type == Dep::DpkgBreaks && + if ((Start->Version != 0 || TrgPkg != Pkg) && + PkgState[Pkg->ID].CandidateVer != PkgState[Pkg->ID].InstallVer && + PkgState[Pkg->ID].CandidateVer != *I && MarkInstall(Pkg,true,Depth + 1, false, ForceImportantDeps) == true) continue; - else if (MarkDelete(Pkg,false,Depth + 1, false) == false) + else if ((Start->Type == pkgCache::Dep::Conflicts || Start->Type == pkgCache::Dep::DpkgBreaks) && + MarkDelete(Pkg,false,Depth + 1, false) == false) break; } continue; - } + } } return Dep.end() == true; @@@ -1642,6 -1649,7 +1650,7 @@@ bool pkgDepCache::MarkRequired(InRootSe { if(!(PkgState[p->ID].Flags & Flag::Auto) || (p->Flags & Flag::Essential) || + (p->Flags & Flag::Important) || userFunc.InRootSet(p) || // be nice even then a required package violates the policy (#583517) // and do the full mark process also for required packages diff --combined apt-pkg/init.cc index 4c15858a1,76278921f..4818174ac --- a/apt-pkg/init.cc +++ b/apt-pkg/init.cc @@@ -24,7 -24,7 +24,7 @@@ #define Stringfy_(x) # x #define Stringfy(x) Stringfy_(x) - const char *pkgVersion = VERSION; + const char *pkgVersion = PACKAGE_VERSION; const char *pkgLibVersion = Stringfy(APT_PKG_MAJOR) "." Stringfy(APT_PKG_MINOR) "." Stringfy(APT_PKG_RELEASE); @@@ -92,7 -92,6 +92,7 @@@ bool pkgInitConfig(Configuration &Cnf Cnf.Set("Dir::Ignore-Files-Silently::", "\\.dpkg-[a-z]+$"); Cnf.Set("Dir::Ignore-Files-Silently::", "\\.save$"); Cnf.Set("Dir::Ignore-Files-Silently::", "\\.orig$"); + Cnf.Set("Dir::Ignore-Files-Silently::", "\\.distUpgrade$"); // Default cdrom mount point Cnf.CndSet("Acquire::cdrom::mount", "/media/cdrom/"); diff --combined apt-pkg/packagemanager.cc index cefb760af,46fc499c6..80369dd1a --- a/apt-pkg/packagemanager.cc +++ b/apt-pkg/packagemanager.cc @@@ -25,9 -25,10 +25,10 @@@ #include #include - #include #include #include + + #include /*}}}*/ using namespace std; @@@ -183,8 -184,7 +184,7 @@@ bool pkgPackageManager::CreateOrderList continue; // Mark the package and its dependends for immediate configuration - if ((((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential || - (I->Flags & pkgCache::Flag::Important) == pkgCache::Flag::Important) && + if ((((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential) && NoImmConfigure == false) || ImmConfigureAll) { if(Debug && !ImmConfigureAll) @@@ -338,7 -338,7 +338,7 @@@ bool pkgPackageManager::SmartConfigure( however if there is a loop (A depends on B, B depends on A) this will not be the case, so check for dependencies before configuring. 
*/ bool Bad = false, Changed = false; - const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 500); + const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 5000); unsigned int i=0; do { @@@ -523,7 -523,8 +523,8 @@@ bool pkgPackageManager::EarlyRemove(Pkg // Essential packages get special treatment bool IsEssential = false; - if ((Pkg->Flags & pkgCache::Flag::Essential) != 0) + if ((Pkg->Flags & pkgCache::Flag::Essential) != 0 || + (Pkg->Flags & pkgCache::Flag::Important) != 0) IsEssential = true; /* Check for packages that are the dependents of essential packages and @@@ -533,7 -534,8 +534,8 @@@ for (DepIterator D = Pkg.RevDependsList(); D.end() == false && IsEssential == false; ++D) if (D->Type == pkgCache::Dep::Depends || D->Type == pkgCache::Dep::PreDepends) - if ((D.ParentPkg()->Flags & pkgCache::Flag::Essential) != 0) + if ((D.ParentPkg()->Flags & pkgCache::Flag::Essential) != 0 || + (D.ParentPkg()->Flags & pkgCache::Flag::Important) != 0) IsEssential = true; } @@@ -600,8 -602,8 +602,8 @@@ bool pkgPackageManager::SmartUnPack(Pkg This will be either dealt with if the package is configured as a dependency of Pkg (if and when Pkg is configured), or by the ConfigureAll call at the end of the for loop in OrderInstall. */ bool Changed = false; - const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 500); + const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 5000); - unsigned int i=0; + unsigned int i = 0; do { Changed = false; @@@ -783,7 -785,7 +785,7 @@@ VerIterator V(Cache,*I); PkgIterator P = V.ParentPkg(); // we are checking for installation as an easy 'protection' against or-groups and (unchosen) providers - if (P->CurrentVer == 0 || P != Pkg || (P.CurrentVer() != V && Cache[P].InstallVer != V)) + if (P != Pkg || (P.CurrentVer() != V && Cache[P].InstallVer != V)) continue; circle = true; break; diff --combined cmdline/apt-get.cc index 7c7e94eba,c996017b8..1c6d39824 --- a/cmdline/apt-get.cc +++ b/cmdline/apt-get.cc @@@ -713,11 -713,32 +713,32 @@@ public } virtual pkgCache::VerIterator canNotFindNewestVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) { - APT::VersionSet const verset = tryVirtualPackage(Cache, Pkg, APT::VersionSet::NEWEST); - if (verset.empty() == false) - return *(verset.begin()); - if (ShowError == true) - ioprintf(out, _("Virtual packages like '%s' can't be removed\n"), Pkg.FullName(true).c_str()); + if (Pkg->ProvidesList != 0) + { + APT::VersionSet const verset = tryVirtualPackage(Cache, Pkg, APT::VersionSet::NEWEST); + if (verset.empty() == false) + return *(verset.begin()); + if (ShowError == true) + ioprintf(out, _("Virtual packages like '%s' can't be removed\n"), Pkg.FullName(true).c_str()); + } + else + { + pkgCache::GrpIterator Grp = Pkg.Group(); + pkgCache::PkgIterator P = Grp.PackageList(); + for (; P.end() != true; P = Grp.NextPkg(P)) + { + if (P == Pkg) + continue; + if (P->CurrentVer != 0) { + // TRANSLATORS: Note, this is not an interactive question + ioprintf(c1out,_("Package '%s' is not installed, so not removed. 
Did you mean '%s'?\n"), + Pkg.FullName(true).c_str(), P.FullName(true).c_str()); + break; + } + } + if (P.end() == true) + ioprintf(c1out,_("Package '%s' is not installed, so not removed\n"),Pkg.FullName(true).c_str()); + } return pkgCache::VerIterator(Cache, 0); } @@@ -904,7 -925,23 +925,23 @@@ struct TryToRemove if ((Pkg->CurrentVer == 0 && PurgePkgs == false) || (PurgePkgs == true && Pkg->CurrentState == pkgCache::State::NotInstalled)) { - ioprintf(c1out,_("Package %s is not installed, so not removed\n"),Pkg.FullName(true).c_str()); + pkgCache::GrpIterator Grp = Pkg.Group(); + pkgCache::PkgIterator P = Grp.PackageList(); + for (; P.end() != true; P = Grp.NextPkg(P)) + { + if (P == Pkg) + continue; + if (P->CurrentVer != 0 || (PurgePkgs == true && P->CurrentState != pkgCache::State::NotInstalled)) + { + // TRANSLATORS: Note, this is not an interactive question + ioprintf(c1out,_("Package '%s' is not installed, so not removed. Did you mean '%s'?\n"), + Pkg.FullName(true).c_str(), P.FullName(true).c_str()); + break; + } + } + if (P.end() == true) + ioprintf(c1out,_("Package '%s' is not installed, so not removed\n"),Pkg.FullName(true).c_str()); + // MarkInstall refuses to install packages on hold Pkg->SelectedState = pkgCache::State::Hold; } @@@ -1378,7 -1415,7 +1415,7 @@@ bool InstallPackages(CacheFile &Cache,b "all files have been overwritten by other packages:", "The following packages disappeared from your system as\n" "all files have been overwritten by other packages:", disappearedPkgs.size()), disappear, ""); - c0out << _("Note: This is done automatic and on purpose by dpkg.") << std::endl; + c0out << _("Note: This is done automatically and on purpose by dpkg.") << std::endl; return true; } @@@ -1642,10 -1679,13 +1679,13 @@@ bool DoUpdate(CommandLine &CmdL ListUpdate(Stat, *List); // Rebuild the cache. - pkgCacheFile::RemoveCaches(); - if (Cache.BuildCaches() == false) - return false; - + if (_config->FindB("pkgCacheFile::Generate", true) == true) + { + pkgCacheFile::RemoveCaches(); + if (Cache.BuildCaches() == false) + return false; + } + return true; } /*}}}*/ @@@ -1674,12 -1714,13 +1714,13 @@@ bool DoAutomaticRemove(CacheFile &Cache bool smallList = (hideAutoRemove == false && strcasecmp(_config->Find("APT::Get::HideAutoRemove","").c_str(),"small") == 0); - string autoremovelist, autoremoveversions; unsigned long autoRemoveCount = 0; APT::PackageSet tooMuch; + APT::PackageList autoRemoveList; // look over the cache to see what can be removed - for (pkgCache::PkgIterator Pkg = Cache->PkgBegin(); ! 
Pkg.end(); ++Pkg) + for (unsigned J = 0; J < Cache->Head().PackageCount; ++J) { + pkgCache::PkgIterator Pkg(Cache,Cache.List[J]); if (Cache[Pkg].Garbage) { if(Pkg.CurrentVer() != 0 || Cache[Pkg].Install()) @@@ -1696,6 -1737,8 +1737,8 @@@ } else { + if (hideAutoRemove == false && Cache[Pkg].Delete() == false) + autoRemoveList.insert(Pkg); // if the package is a new install and already garbage we don't need to // install it in the first place, so nuke it instead of show it if (Cache[Pkg].Install() == true && Pkg.CurrentVer() == 0) @@@ -1705,16 -1748,8 +1748,8 @@@ Cache->MarkDelete(Pkg, false); } // only show stuff in the list that is not yet marked for removal - else if(hideAutoRemove == false && Cache[Pkg].Delete() == false) - { + else if(hideAutoRemove == false && Cache[Pkg].Delete() == false) ++autoRemoveCount; - // we don't need to fill the strings if we don't need them - if (smallList == false) - { - autoremovelist += Pkg.FullName(true) + " "; - autoremoveversions += string(Cache[Pkg].CandVersion) + "\n"; - } - } } } } @@@ -1749,14 -1784,7 +1784,7 @@@ std::clog << "Save " << Pkg << " as another installed garbage package depends on it" << std::endl; Cache->MarkInstall(Pkg, false); if (hideAutoRemove == false) - { ++autoRemoveCount; - if (smallList == false) - { - autoremovelist += Pkg.FullName(true) + " "; - autoremoveversions += string(Cache[Pkg].CandVersion) + "\n"; - } - } tooMuch.erase(Pkg); Changed = true; break; @@@ -1766,6 -1794,18 +1794,18 @@@ } while (Changed == true); } + std::string autoremovelist, autoremoveversions; + if (smallList == false && autoRemoveCount != 0) + { + for (APT::PackageList::const_iterator Pkg = autoRemoveList.begin(); Pkg != autoRemoveList.end(); ++Pkg) + { + if (Cache[Pkg].Garbage == false) + continue; + autoremovelist += Pkg.FullName(true) + " "; + autoremoveversions += string(Cache[Pkg].CandVersion) + "\n"; + } + } + // Now see if we had destroyed anything (if we had done anything) if (Cache->BrokenCount() != 0) { @@@ -1789,7 -1829,7 +1829,7 @@@ else ioprintf(c1out, P_("%lu package was automatically installed and is no longer required.\n", "%lu packages were automatically installed and are no longer required.\n", autoRemoveCount), autoRemoveCount); - c1out << _("Use 'apt-get autoremove' to remove them.") << std::endl; + c1out << P_("Use 'apt-get autoremove' to remove it.", "Use 'apt-get autoremove' to remove them.", autoRemoveCount) << std::endl; } return true; } @@@ -2456,7 -2496,7 +2496,7 @@@ bool DoSource(CommandLine &CmdL Src.c_str(), vcs.c_str(), uri.c_str()); if(vcs == "Bzr") ioprintf(c1out,_("Please use:\n" - "bzr get %s\n" + "bzr branch %s\n" "to retrieve the latest (possibly unreleased) " "updates to the package.\n"), uri.c_str()); @@@ -2749,8 -2789,18 +2789,18 @@@ bool DoBuildDep(CommandLine &CmdL // Process the build-dependencies vector BuildDeps; - if (Last->BuildDepends(BuildDeps, _config->FindB("APT::Get::Arch-Only", false), StripMultiArch) == false) - return _error->Error(_("Unable to get build-dependency information for %s"),Src.c_str()); + // FIXME: Can't specify architecture to use for [wildcard] matching, so switch default arch temporary + if (hostArch.empty() == false) + { + std::string nativeArch = _config->Find("APT::Architecture"); + _config->Set("APT::Architecture", hostArch); + bool Success = Last->BuildDepends(BuildDeps, _config->FindB("APT::Get::Arch-Only", false), StripMultiArch); + _config->Set("APT::Architecture", nativeArch); + if (Success == false) + return _error->Error(_("Unable to get build-dependency information for 
%s"),Src.c_str()); + } + else if (Last->BuildDepends(BuildDeps, _config->FindB("APT::Get::Arch-Only", false), StripMultiArch) == false) + return _error->Error(_("Unable to get build-dependency information for %s"),Src.c_str()); // Also ensure that build-essential packages are present Configuration::Item const *Opts = _config->Tree("APT::Build-Essential"); @@@ -2863,7 -2913,7 +2913,7 @@@ if ((BADVER(Ver)) == false) { string forbidden; - if (Ver->MultiArch == pkgCache::Version::None || Ver->MultiArch == pkgCache::Version::All) + if (Ver->MultiArch == pkgCache::Version::None) { if (colon == string::npos) { @@@ -2878,7 -2928,7 +2928,7 @@@ forbidden = "Multi-Arch: same"; // :native gets the buildArch } - else if ((Ver->MultiArch & pkgCache::Version::Foreign) == pkgCache::Version::Foreign) + else if ((Ver->MultiArch & pkgCache::Version::Foreign) == pkgCache::Version::Foreign || Ver->MultiArch == pkgCache::Version::All) { if (colon != string::npos) forbidden = "Multi-Arch: foreign"; @@@ -3154,10 -3204,7 +3204,10 @@@ bool DownloadChangelog(CacheFile &Cache return true; // error - return _error->Error("changelog download failed"); + pkgRecords Recs(CacheFile); + pkgRecords::Parser &rec=Recs.Lookup(Ver.FileList()); + string srcpkg = rec.SourcePkg().empty() ? Pkg.Name() : rec.SourcePkg(); + return _error->Error("changelog for this version is not (yet) available; try https://launchpad.net/ubuntu/+source/%s/+changelog", srcpkg.c_str()); } /*}}}*/ // DisplayFileInPager - Display File with pager /*{{{*/ @@@ -3261,7 -3308,7 +3311,7 @@@ bool DoMoo(CommandLine &CmdL /* */ bool ShowHelp(CommandLine &CmdL) { - ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION, + ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION, COMMON_ARCH,__DATE__,__TIME__); if (_config->FindB("version") == true) diff --combined configure.in index 3e9692a53,d574c18e5..e27825dc8 --- a/configure.in +++ b/configure.in @@@ -17,11 -17,15 +17,15 @@@ AC_INIT(configure.in AC_CONFIG_AUX_DIR(buildlib) AC_CONFIG_HEADER(include/config.h:buildlib/config.h.in include/apti18n.h:buildlib/apti18n.h.in) - dnl -- SET THIS TO THE RELEASE VERSION -- - AC_DEFINE_UNQUOTED(VERSION,"0.8.16~exp12ubuntu7") PACKAGE="apt" -PACKAGE_VERSION="0.9.4" ++PACKAGE_VERSION="0.9.5ubuntu1~20120522" + PACKAGE_MAIL="APT Development Team " AC_DEFINE_UNQUOTED(PACKAGE,"$PACKAGE") + AC_DEFINE_UNQUOTED(PACKAGE_VERSION,"$PACKAGE_VERSION") + AC_DEFINE_UNQUOTED(PACKAGE_MAIL,"$PACKAGE_MAIL") AC_SUBST(PACKAGE) + AC_SUBST(PACKAGE_VERSION) + AC_SUBST(PACKAGE_MAIL) dnl Check the archs, we want the target type. AC_CANONICAL_SYSTEM @@@ -87,9 -91,21 +91,21 @@@ AC_CHECK_LIB(curl, curl_easy_init AC_SUBST(BDBLIB) + HAVE_ZLIB=no AC_CHECK_LIB(z, gzopen, - [AC_CHECK_HEADER(zlib.h, [], AC_MSG_ERROR([failed: zlib.h not found]))], + [AC_CHECK_HEADER(zlib.h, [HAVE_ZLIB=yes], AC_MSG_ERROR([failed: zlib.h not found]))], AC_MSG_ERROR([failed: Need libz])) + AC_SUBST(HAVE_ZLIB) + if test "x$HAVE_ZLIB" = "xyes"; then + AC_DEFINE(HAVE_ZLIB) + fi + + HAVE_BZ2=no + AC_CHECK_LIB(bz2, BZ2_bzopen,[AC_CHECK_HEADER(bzlib.h, [HAVE_BZ2=yes], [])], []) + AC_SUBST(HAVE_BZ2) + if test "x$HAVE_BZ2" = "xyes"; then + AC_DEFINE(HAVE_BZ2) + fi dnl Converts the ARCH to be something singular for this general CPU family dnl This is often the dpkg architecture string. 
@@@ -103,12 -119,6 +119,6 @@@ f AC_MSG_RESULT($archset) AC_DEFINE_UNQUOTED(COMMON_ARCH,"$archset") - dnl We use C99 types if at all possible - AC_CACHE_CHECK([for C99 integer types],apt_cv_c9x_ints,[ - AC_TRY_COMPILE([#include ], - [uint8_t Foo1;uint16_t Foo2;uint32_t Foo3;], - apt_cv_c9x_ints=yes,apt_cv_c9x_ints=no)]) - dnl Single Unix Spec statvfs AC_CHECK_FUNC(statvfs,[HAVE_STATVFS=yes]) AC_SUBST(HAVE_STATVFS) @@@ -128,46 -138,9 +138,9 @@@ dnl We should use the real timegm funct AC_CHECK_FUNC(timegm,AC_DEFINE(HAVE_TIMEGM)) AC_SUBST(HAVE_TIMEGM) - dnl Check the sizes etc. of the architecture - dnl This is stupid, it should just use the AC macros like it does below - dnl Cross compilers can either get a real C library or preload the cache - dnl with their size values. - changequote(,) - archline="`awk \" ! /^#|^\\\$/ {if (match(\\\"$archset\\\",\\\$1)) {print; exit}}\" $srcdir/buildlib/sizetable | cut -f 2- -d ' '`" - if test "x$archline" != "x"; then - changequote([,]) - set $archline - if test "$1" = "little"; then - ac_cv_c_bigendian=no - else - ac_cv_c_bigendian=yes - fi - size_char=$2 - size_int=$3 - size_short=$4 - size_long=$5 - fi - - dnl I wonder what AC_C_BIGENDIAN does if you cross compile... - dnl This is probably bogus, as above we only care if we have to build our own - dnl C9x types. - if test "$cross_compiling" = "yes" -a "x$archline" = "x"; then - AC_MSG_ERROR(When cross compiling, architecture must be present in sizetable) - fi + dnl Check the architecture AC_C_BIGENDIAN - dnl We do not need this if we have inttypes! - HAVE_C9X=yes - if test x"$apt_cv_c9x_ints" = x"no"; then - AC_CHECK_SIZEOF(char,$size_char) - AC_CHECK_SIZEOF(int,$size_int) - AC_CHECK_SIZEOF(short,$size_short) - AC_CHECK_SIZEOF(long,$size_long) - - HAVE_C9X= - AC_SUBST(HAVE_C9X) - fi - dnl HP-UX sux.. 
AC_MSG_CHECKING(for missing socklen_t)
AC_EGREP_HEADER(socklen_t, sys/socket.h,[AC_MSG_RESULT(no)],[
@@@ -218,4 -191,4 +191,4 @@@ f
AC_SUBST(USE_NLS)
AC_PATH_PROG(BASH, bash)
- AC_OUTPUT(environment.mak:buildlib/environment.mak.in makefile:buildlib/makefile.in doc/Doxyfile,make -s dirs)
+ AC_OUTPUT(environment.mak:buildlib/environment.mak.in makefile:buildlib/makefile.in doc/Doxyfile:doc/Doxyfile.in,make -s dirs)
diff --combined debian/changelog
index c82c73d0e,568590edb..5a21fb8b4
--- a/debian/changelog
+++ b/debian/changelog
@@@ -1,149 -1,411 +1,570 @@@
++apt (0.9.5ubuntu1) UNRELEASED; urgency=low
++
++  * merged from debian, remaining changes:
++    - use ubuntu keyring and ubuntu archive keyring in apt-key
++    - run update-apt-xapian-index in apt.cron
++    - support apt-key net-update and verify keys against master-keyring
++    - run apt-key net-update in cron.daily
++    - different example sources.list
++    - APT::pkgPackageManager::MaxLoopCount set to 5000
++    - apport pkgfailure handling
++
++ -- Michael Vogt Tue, 22 May 2012 15:57:26 +0200
++
+ apt (0.9.5) UNRELEASED; urgency=low
+
+   [ Chris Leick ]
+   * proofreading of the manpage pot
+   * German manpage translation update (Closes: #673294)
+
+   [ David Kalnischkies ]
+   * buildlib/podomain.mak:
+     - ensure that all sources end up in the srclist so that we don't
+       forget to extract half of the translation strings
+   * buildlib/inttypes.h.in:
+     - remove inttypes.h compatibility as providing such a c99 types
+       compatibility conflicts with the usage of c99 type long long
+   * apt-pkg/contrib/mmap.cc:
+     - have a dummy SyncToFd around in case of ReadOnly access to a
+       compressed file as we otherwise on Close() do not delete[] the
+       char buffer but munmap() it… (Closes: #673815)
+   * debian/control:
+     - moving debiandoc-sgml to Build-Depends-Indep was one step too much
+       for the buildds as we still build two sgml files in arch:any
+   * debian/rules:
+     - move internal-solver as 'apt' to his friend dump-solver in
+       /usr/lib/apt/solvers to avoid writing a manpage for it
+
+  -- David Kalnischkies Mon, 21 May 2012 15:10:49 +0200
+
+ apt (0.9.4) unstable; urgency=low
+
+   [ David Kalnischkies ]
+   * methods/http.cc:
+     - after many years of pointless discussions disable http/1.1 pipelining
+       by default as many webservers and proxies seem to be unable to conform
+       to specification must's (rfc2616 section 8.1.2.2) (LP: #996151)
+     - add spaces around PACKAGE_VERSION to fix FTBFS with -std=c++11
+   * apt-pkg/pkgcachegen.cc:
+     - make IsDuplicatedDescription static so that it is really private
+       as we don't need a symbol for it as it is not in a header
+   * Makefile, buildlib/*.mak:
+     - reshuffle dependencies so that parallel building seems to work
+     - separate manpages from the rest of the doc building
+   * prepare-release:
+     - apt-inst version isn't apt versions, so don't override variable
+   * debian/rules:
+     - apt-utils packages manpages, so it should depend on build-doc
+     - make apt and apt-utils packages depend on manpages instead of full doc
+   * debian/control:
+     - move doxygen and debiandoc-sgml to Build-Depends-Indep as docs
+       are no longer build in the same target as the manpages
+   * apt-pkg/acquire-methods.cc:
+     - factor out into private Dequeue() to fix access to deleted pointer
+   * apt-pkg/contrib/fileutl.cc:
+     - ensure that we close compressed fds, wait for forks and such even if
+       the FileFd itself is set to not autoclose the given Fd
+   * cmdline/apt-get.cc:
+     - use the host architecture, not the build architecture for matching
+       of [architecture restrictions] in Build-Depends (Closes: #672927)
+   * doc/makefile:
+     - build manpages with the correct l10n.gentext.default.language setting
+       to get the correct section titles provided by docbook
+   * doc/po/de.po:
+     - updated german manpage translation by Chris Leick, thanks!
+   * apt-pkg/packagemanager.cc:
+     - do not run into loop on new-pre-depends-breaks (Closes: #673536)
+   * doc/*.xml:
+     - add a few translator notes and reword some paragraphs to ensure that
+       translators and users alike can better understand them (Closes: #669409)
+     - in mark all options with