builtin.push_back(c);
}
}
+ closedir(D);
// get the environment language codes: LC_MESSAGES (and later LANGUAGE)
// we extract both, a long and a short code and then we will
environment.push_back(envLong);
environment.push_back(envShort);
// take care of LANGUAGE
- string envLang = Locale == 0 ? getenv("LANGUAGE") : *(Locale+1);
+ const char *language_env = getenv("LANGUAGE") == 0 ? "" : getenv("LANGUAGE");
+ string envLang = Locale == 0 ? language_env : *(Locale+1);
if (envLang.empty() == false) {
std::vector<string> env = ExplodeString(envLang,':');
short addedLangs = 0; // add a maximum of 3 fallbacks from the environment
return codes;
}
/*}}}*/
+ // getArchitectures - Return Vector of preferred Architectures /*{{{*/
+ std::vector<std::string> const Configuration::getArchitectures(bool const &Cached) {
+ using std::string;
+
+ std::vector<string> static archs;
+ if (likely(Cached == true) && archs.empty() == false)
+ return archs;
+
+ string const arch = _config->Find("APT::Architecture");
+ archs = _config->FindVector("APT::Architectures");
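+ // make sure the native architecture is always part of the returned vector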
+ if (archs.empty() == true ||
+ std::find(archs.begin(), archs.end(), arch) == archs.end())
+ archs.push_back(arch);
+ return archs;
+ }
+ /*}}}*/
+ // checkArchitecture - are we interested in the given Architecture? /*{{{*/
+ bool const Configuration::checkArchitecture(std::string const &Arch) {
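+ // arch "all" packages are interesting for every configured architecture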
+ if (Arch == "all")
+ return true;
+ std::vector<std::string> const archs = getArchitectures(true);
+ return (std::find(archs.begin(), archs.end(), Arch) != archs.end());
+ }
+ /*}}}*/
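+ // Minimal usage sketch for the two helpers above (hypothetical caller, not part of this patch):
+ //    std::vector<std::string> const archs = APT::Configuration::getArchitectures();
+ //    if (APT::Configuration::checkArchitecture("i386") == true)
+ //       ; // then index files for i386 should be acquired and parsed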
}
#include<apt-pkg/cdromutl.h>
#include<apt-pkg/strutl.h>
#include<apt-pkg/cdrom.h>
+ #include<apt-pkg/aptconfiguration.h>
+
#include<sstream>
#include<fstream>
#include<config.h>
/* Here we drop everything that is not this machines arch */
bool pkgCdrom::DropBinaryArch(vector<string> &List)
{
- char S[300];
- snprintf(S,sizeof(S),"/binary-%s/",
- _config->Find("Apt::Architecture").c_str());
-
+
for (unsigned int I = 0; I < List.size(); I++)
{
const char *Str = List[I].c_str();
-
- const char *Res;
- if ((Res = strstr(Str,"/binary-")) == 0)
+ const char *Start, *End;
+ if ((Start = strstr(Str,"/binary-")) == 0)
continue;
- // Weird, remove it.
- if (strlen(Res) < strlen(S))
- {
- List.erase(List.begin() + I);
- I--;
- continue;
- }
-
- // See if it is our arch
- if (stringcmp(Res,Res + strlen(S),S) == 0)
- continue;
-
- // Erase it
+ // Between Start and End is the architecture
+ Start += 8;
+ if ((End = strstr(Start,"/")) != 0 && Start != End &&
+ APT::Configuration::checkArchitecture(string(Start, End)) == true)
+ continue; // okay, architecture is accepted
+
+ // not accepted -> Erase it
List.erase(List.begin() + I);
- I--;
+ --I; // the next entry is at the same index after the erase
}
return true;
List[J] = string();
}
}
-
+ delete[] Inodes;
+
// Wipe erased entries
for (unsigned int I = 0; I < List.size();)
{
// SourcesIndex::SourceInfo - Short 1 liner describing a source /*{{{*/
// ---------------------------------------------------------------------
/* The result looks like:
- http://foo/ stable/main src 1.1.1 (dsc) */
+ http://foo/debian/ stable/main src 1.1.1 (dsc) */
string debSourcesIndex::SourceInfo(pkgSrcRecords::Parser const &Record,
pkgSrcRecords::File const &File) const
{
string Res;
- Res = ::URI::SiteOnly(URI) + ' ';
+ Res = ::URI::NoUserPassword(URI) + ' ';
if (Dist[Dist.size() - 1] == '/')
{
if (Dist != "/")
/* */
string debSourcesIndex::Info(const char *Type) const
{
- string Info = ::URI::SiteOnly(URI) + ' ';
+ string Info = ::URI::NoUserPassword(URI) + ' ';
if (Dist[Dist.size() - 1] == '/')
{
if (Dist != "/")
// PackagesIndex::debPackagesIndex - Contructor /*{{{*/
// ---------------------------------------------------------------------
/* */
- debPackagesIndex::debPackagesIndex(string URI,string Dist,string Section,bool Trusted) :
- pkgIndexFile(Trusted), URI(URI), Dist(Dist), Section(Section)
+ debPackagesIndex::debPackagesIndex(string const &URI, string const &Dist, string const &Section,
+ bool const &Trusted, string const &Arch) :
+ pkgIndexFile(Trusted), URI(URI), Dist(Dist), Section(Section), Architecture(Arch)
{
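+ // resolve the special value "native" to the native architecture (APT::Architecture)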
+ if (Architecture == "native")
+ Architecture = _config->Find("APT::Architecture");
}
/*}}}*/
// PackagesIndex::ArchiveInfo - Short version of the archive url /*{{{*/
/* This is a shorter version that is designed to be < 60 chars or so */
string debPackagesIndex::ArchiveInfo(pkgCache::VerIterator Ver) const
{
- string Res = ::URI::SiteOnly(URI) + ' ';
+ string Res = ::URI::NoUserPassword(URI) + ' ';
if (Dist[Dist.size() - 1] == '/')
{
if (Dist != "/")
Res += " ";
Res += Ver.ParentPkg().Name();
Res += " ";
+ Res += Ver.Arch();
+ Res += " ";
Res += Ver.VerStr();
return Res;
}
/* */
string debPackagesIndex::Info(const char *Type) const
{
- string Info = ::URI::SiteOnly(URI) + ' ';
+ string Info = ::URI::NoUserPassword(URI) + ' ';
if (Dist[Dist.size() - 1] == '/')
{
if (Dist != "/")
else
Info += Dist + '/' + Section;
Info += " ";
+ Info += Architecture;
+ Info += " ";
Info += Type;
return Info;
}
}
else
Res = URI + "dists/" + Dist + '/' + Section +
- "/binary-" + _config->Find("APT::Architecture") + '/';
+ "/binary-" + Architecture + '/';
Res += Type;
return Res;
{
string PackageFile = IndexFile("Packages");
FileFd Pkg(PackageFile,FileFd::ReadOnly);
- debListParser Parser(&Pkg);
+ debListParser Parser(&Pkg, Architecture);
if (_error->PendingError() == true)
return _error->Error("Problem opening %s",PackageFile.c_str());
/* */
string debTranslationsIndex::Info(const char *Type) const
{
- string Info = ::URI::SiteOnly(URI) + ' ';
+ string Info = ::URI::NoUserPassword(URI) + ' ';
if (Dist[Dist.size() - 1] == '/')
{
if (Dist != "/")
#include <apt-pkg/fileutl.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/configuration.h>
+ #include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/pkgsystem.h>
#include <apt-pkg/tagfile.h>
bool pkgDepCache::readStateFile(OpProgress *Prog) /*{{{*/
{
FileFd state_file;
- string state = _config->FindDir("Dir::State") + "extended_states";
+ string const state = _config->FindDir("Dir::State") + "extended_states";
if(FileExists(state)) {
state_file.Open(state, FileFd::ReadOnly);
- int file_size = state_file.Size();
+ int const file_size = state_file.Size();
if(Prog != NULL)
Prog->OverallProgress(0, file_size, 1,
_("Reading state information"));
pkgTagFile tagfile(&state_file);
pkgTagSection section;
- int amt=0;
- bool debug_autoremove=_config->FindB("Debug::pkgAutoRemove",false);
+ int amt = 0;
+ bool const debug_autoremove = _config->FindB("Debug::pkgAutoRemove",false);
while(tagfile.Step(section)) {
- string pkgname = section.FindS("Package");
- pkgCache::PkgIterator pkg=Cache->FindPkg(pkgname);
- // Silently ignore unknown packages and packages with no actual
- // version.
- if(!pkg.end() && !pkg.VersionList().end()) {
- short reason = section.FindI("Auto-Installed", 0);
- if(reason > 0)
- PkgState[pkg->ID].Flags |= Flag::Auto;
- if(debug_autoremove)
- std::cout << "Auto-Installed : " << pkgname << std::endl;
- amt+=section.size();
- if(Prog != NULL)
- Prog->OverallProgress(amt, file_size, 1,
- _("Reading state information"));
+ string const pkgname = section.FindS("Package");
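+ // old state entries carry no Architecture field: fall back to matching any architecture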
+ string pkgarch = section.FindS("Architecture");
+ if (pkgarch.empty() == true)
+ pkgarch = "any";
+ pkgCache::PkgIterator pkg = Cache->FindPkg(pkgname, pkgarch);
+ // Silently ignore unknown packages and packages with no actual version.
+ if(pkg.end() == true || pkg->VersionList == 0)
+ continue;
+
+ short const reason = section.FindI("Auto-Installed", 0);
+ if(reason > 0)
+ {
+ PkgState[pkg->ID].Flags |= Flag::Auto;
+ if (unlikely(debug_autoremove))
+ std::cout << "Auto-Installed : " << pkg.FullName() << std::endl;
+ if (pkgarch == "any")
+ {
+ pkgCache::GrpIterator G = pkg.Group();
+ for (pkg = G.NextPkg(pkg); pkg.end() != true; pkg = G.NextPkg(pkg))
+ if (pkg->VersionList != 0)
+ PkgState[pkg->ID].Flags |= Flag::Auto;
+ }
}
+ amt += section.size();
+ if(Prog != NULL)
+ Prog->OverallProgress(amt, file_size, 1,
+ _("Reading state information"));
}
if(Prog != NULL)
Prog->OverallProgress(file_size, file_size, 1,
/*}}}*/
bool pkgDepCache::writeStateFile(OpProgress *prog, bool InstalledOnly) /*{{{*/
{
- bool debug_autoremove = _config->FindB("Debug::pkgAutoRemove",false);
+ bool const debug_autoremove = _config->FindB("Debug::pkgAutoRemove",false);
if(debug_autoremove)
std::clog << "pkgDepCache::writeStateFile()" << std::endl;
FileFd StateFile;
- string state = _config->FindDir("Dir::State") + "extended_states";
+ string const state = _config->FindDir("Dir::State") + "extended_states";
// if it does not exist, create a empty one
if(!FileExists(state))
state.c_str());
FILE *OutFile;
- string outfile = state + ".tmp";
+ string const outfile = state + ".tmp";
if((OutFile = fopen(outfile.c_str(),"w")) == NULL)
return _error->Error(_("Failed to write temporary StateFile %s"),
outfile.c_str());
const char *nullreorderlist[] = {0};
while(tagfile.Step(section)) {
string const pkgname = section.FindS("Package");
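+ // entries without an Architecture field are treated as native packages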
+ string pkgarch = section.FindS("Architecture");
+ if (pkgarch.empty() == true)
+ pkgarch = "native";
// Silently ignore unknown packages and packages with no actual
// version.
- pkgCache::PkgIterator pkg=Cache->FindPkg(pkgname);
+ pkgCache::PkgIterator pkg = Cache->FindPkg(pkgname, pkgarch);
if(pkg.end() || pkg.VersionList().end())
continue;
- bool const newAuto = (PkgState[pkg->ID].Flags & Flag::Auto);
+ StateCache const &P = PkgState[pkg->ID];
+ bool newAuto = (P.Flags & Flag::Auto);
+ // skip not installed or now-removed ones if requested
+ if (InstalledOnly && (
+ (pkg->CurrentVer == 0 && P.Mode != ModeInstall) ||
+ (pkg->CurrentVer != 0 && P.Mode == ModeDelete)))
+ {
+ // The section is obsolete if it contains no other tag
+ unsigned int const count = section.Count();
+ if (count < 2 ||
+ (count == 2 && section.Exists("Auto-Installed")) ||
+ (count == 3 && section.Exists("Auto-Installed") && section.Exists("Architecture")))
+ continue;
+ else
+ newAuto = false;
+ }
if(_config->FindB("Debug::pkgAutoRemove",false))
std::clog << "Update existing AutoInstall info: "
- << pkgname << std::endl;
- TFRewriteData rewrite[2];
- rewrite[0].Tag = "Auto-Installed";
- rewrite[0].Rewrite = newAuto ? "1" : "0";
+ << pkg.FullName() << std::endl;
+ TFRewriteData rewrite[3];
+ rewrite[0].Tag = "Architecture";
+ rewrite[0].Rewrite = pkg.Arch();
rewrite[0].NewTag = 0;
- rewrite[1].Tag = 0;
+ rewrite[1].Tag = "Auto-Installed";
+ rewrite[1].Rewrite = newAuto ? "1" : "0";
+ rewrite[1].NewTag = 0;
+ rewrite[2].Tag = 0;
TFRewrite(OutFile, section, nullreorderlist, rewrite);
fprintf(OutFile,"\n");
- pkgs_seen.insert(pkgname);
+ pkgs_seen.insert(pkg.FullName());
}
// then write the ones we have not seen yet
std::ostringstream ostr;
for(pkgCache::PkgIterator pkg=Cache->PkgBegin(); !pkg.end(); pkg++) {
- if(PkgState[pkg->ID].Flags & Flag::Auto) {
+ StateCache const &P = PkgState[pkg->ID];
+ if(P.Flags & Flag::Auto) {
- if (pkgs_seen.find(pkg.Name()) != pkgs_seen.end()) {
+ if (pkgs_seen.find(pkg.FullName()) != pkgs_seen.end()) {
if(debug_autoremove)
- std::clog << "Skipping already written " << pkg.Name() << std::endl;
+ std::clog << "Skipping already written " << pkg.FullName() << std::endl;
continue;
}
// skip not installed ones if requested
- if(InstalledOnly && pkg->CurrentVer == 0)
+ if (InstalledOnly && (
+ (pkg->CurrentVer == 0 && P.Mode != ModeInstall) ||
+ (pkg->CurrentVer != 0 && P.Mode == ModeDelete)))
continue;
+ const char* const pkgarch = pkg.Arch();
+ if (strcmp(pkgarch, "all") == 0)
+ continue;
if(debug_autoremove)
- std::clog << "Writing new AutoInstall: "
- << pkg.Name() << std::endl;
+ std::clog << "Writing new AutoInstall: " << pkg.FullName() << std::endl;
ostr.str(string(""));
- ostr << "Package: " << pkg.Name()
+ ostr << "Package: " << pkg.Name()
+ << "\nArchitecture: " << pkgarch
<< "\nAuto-Installed: 1\n\n";
fprintf(OutFile,"%s",ostr.str().c_str());
- fprintf(OutFile,"\n");
}
}
fclose(OutFile);
}
}
/*}}}*/
+ // DepCache::RemovePseudoInstalledPkg - MultiArch helper for Update() /*{{{*/
+ // ---------------------------------------------------------------------
+ /* We "install" arch all packages for all archs if it is installed. Many
+ of these will be broken. This method will look at these broken Pkg and
+ "remove" it. */
+ bool pkgDepCache::RemovePseudoInstalledPkg(PkgIterator &Pkg, std::set<unsigned long> &recheck) {
+ if (unlikely(Pkg->CurrentVer == 0))
+ return false;
+
+ VerIterator V = Pkg.CurrentVer();
+ if (V->MultiArch != Version::All)
+ return false;
+
+ // Never ever kill an "all" package - they have no dependency so they can't be broken
+ if (strcmp(Pkg.Arch(),"all") == 0)
+ return false;
+
+ unsigned char const CurDepState = VersionState(V.DependsList(),DepInstall,DepInstMin,DepInstPolicy);
+ if ((CurDepState & DepInstMin) == DepInstMin) {
+ // okay, the package isn't broken, but is the package also required?
+ // If it has no real dependencies, no installed rdepends and doesn't
+ // provide something of value, we will kill it as not required.
+ // These pseudopackages have otherwise interesting effects if they get
+ // a new dependency in a newer version…
+ for (pkgCache::DepIterator D = V.DependsList();
+ D.end() != true; ++D)
+ if (D.IsCritical() == true && D.ParentPkg()->Group != Pkg->Group)
+ return false;
+ for (DepIterator D = Pkg.RevDependsList(); D.end() != true; ++D)
+ {
+ if (D.IsCritical() == false)
+ continue;
+ PkgIterator const P = D.ParentPkg();
+ if (P->Group == Pkg->Group)
+ continue;
+ if (P->CurrentVer != 0)
+ return false;
+ }
+ for (PrvIterator Prv = V.ProvidesList(); Prv.end() != true; Prv++)
+ for (DepIterator d = Prv.ParentPkg().RevDependsList();
+ d.end() != true; ++d)
+ {
+ PkgIterator const P = d.ParentPkg();
+ if (P->CurrentVer != 0 &&
+ P->Group != Pkg->Group)
+ return false;
+ }
+ }
+
+ // The dependencies of this arch all package are not satisfied (or nothing needs it),
+ // so we installed it only for our convenience: get rid of it now.
+ RemoveSizes(Pkg);
+ RemoveStates(Pkg);
+
+ Pkg->CurrentVer = 0;
+ PkgState[Pkg->ID].InstallVer = 0;
+
+ AddStates(Pkg);
+ Update(Pkg);
+ AddSizes(Pkg);
+
+ // After the removal, previously satisfied pseudo packages could now be
+ // no longer satisfied, so we need to recheck the reverse dependencies
+ for (DepIterator d = Pkg.RevDependsList(); d.end() != true; ++d)
+ {
+ PkgIterator const P = d.ParentPkg();
+ if (P->CurrentVer != 0)
+ recheck.insert(P.Index());
+ }
+
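+ // the targets of our own dependencies (and their providers) need a recheck as well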
+ for (DepIterator d = V.DependsList(); d.end() != true; ++d)
+ {
+ PkgIterator const P = d.TargetPkg();
+ for (PrvIterator Prv = P.ProvidesList(); Prv.end() != true; ++Prv)
+ {
+ PkgIterator const O = Prv.OwnerPkg();
+ if (O->CurrentVer != 0)
+ recheck.insert(O.Index());
+ }
+
+ if (P->CurrentVer != 0)
+ recheck.insert(P.Index());
+ }
+
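+ // the reverse dependencies of everything this version provides might be affected too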
+ for (PrvIterator Prv = V.ProvidesList(); Prv.end() != true; Prv++)
+ {
+ for (DepIterator d = Prv.ParentPkg().RevDependsList();
+ d.end() != true; ++d)
+ {
+ PkgIterator const P = d.ParentPkg();
+ if (P->CurrentVer == 0)
+ continue;
+
+ recheck.insert(P.Index());
+ }
+ }
+
+
+ return true;
+ }
+ /*}}}*/
// DepCache::Update - Figure out all the state information /*{{{*/
// ---------------------------------------------------------------------
/* This will figure out the state of all the packages and all the
iKeepCount = 0;
iBrokenCount = 0;
iBadCount = 0;
-
+
+ std::set<unsigned long> recheck;
+
// Perform the depends pass
int Done = 0;
+ bool const checkMultiArch = APT::Configuration::getArchitectures().size() > 1;
+ unsigned long killed = 0;
for (PkgIterator I = PkgBegin(); I.end() != true; I++,Done++)
{
if (Prog != 0 && Done%20 == 0)
for (VerIterator V = I.VersionList(); V.end() != true; V++)
{
unsigned char Group = 0;
-
+
for (DepIterator D = V.DependsList(); D.end() != true; D++)
{
// Build the dependency state.
D->Type == Dep::DpkgBreaks ||
D->Type == Dep::Obsoletes)
State = ~State;
- }
+ }
}
- // Compute the pacakge dependency state and size additions
+ // Compute the package dependency state and size additions
AddSizes(I);
UpdateVerState(I);
AddStates(I);
+
+ if (checkMultiArch != true || I->CurrentVer == 0)
+ continue;
+
+ VerIterator const V = I.CurrentVer();
+ if (V->MultiArch != Version::All)
+ continue;
+
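+ // defer the check: it needs the dependency states calculated in this pass for all packages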
+ recheck.insert(I.Index());
+ --Done; // no progress if we need to recheck the package
}
- if (Prog != 0)
+ if (checkMultiArch == true) {
+ /* FIXME: recheck breaks proper progress reporting as we don't know
+ how many packages we need to recheck. To lessen the effect
+ a bit we increase Done with each kill, but we should do something more clever… */
+ for(std::set<unsigned long>::const_iterator p = recheck.begin();
+ p != recheck.end(); ++p) {
+ if (Prog != 0 && Done%20 == 0)
+ Prog->Progress(Done);
+ PkgIterator P = PkgIterator(*Cache, Cache->PkgP + *p);
+ if (RemovePseudoInstalledPkg(P, recheck) == true) {
+ ++killed;
+ ++Done;
+ }
+ recheck.erase(p);
+ }
+
+ /* Okay, we may have killed so many pseudo packages that some
+ arch "all" packages are now left without any installed pseudo
+ package, but we NEED an installed pseudo package, so we will
+ now search for a pseudo package we can install without
+ breaking everything. */
+ for (GrpIterator G = Cache->GrpBegin(); G.end() != true; ++G)
+ {
+ PkgIterator P = G.FindPkg("all");
+ if (P.end() == true)
+ continue;
+ if (P->CurrentVer == 0)
+ continue;
+ bool installed = false;
+ for (P = G.FindPkg("any"); P.end() != true; P = G.NextPkg(P))
+ {
+ if (strcmp(P.Arch(), "all") == 0)
+ continue;
+ if (P->CurrentVer == 0)
+ continue;
+ installed = true;
+ break;
+ }
+ if (installed == false)
+ recheck.insert(G.Index());
+ }
+ std::vector<std::string> Archs = APT::Configuration::getArchitectures();
+ bool checkChanged = false;
+ do {
+ for(std::set<unsigned long>::const_iterator g = recheck.begin();
+ g != recheck.end(); ++g) {
+ GrpIterator G = GrpIterator(*Cache, Cache->GrpP + *g);
+ VerIterator allV = G.FindPkg("all").CurrentVer();
+ for (std::vector<std::string>::const_iterator a = Archs.begin();
+ a != Archs.end(); ++a)
+ {
+ PkgIterator P = G.FindPkg(*a);
+ if (P.end() == true) continue;
+ for (VerIterator V = P.VersionList(); V.end() != true; ++V)
+ {
+ // FIXME: String comparison isn't a safe indicator!
+ if (strcmp(allV.VerStr(),V.VerStr()) != 0)
+ continue;
+ unsigned char const CurDepState = VersionState(V.DependsList(),DepInstall,DepInstMin,DepInstPolicy);
+ if ((CurDepState & DepInstMin) != DepInstMin)
+ break; // we found the correct version, but it is broken. Better try another arch or later again
+ P->CurrentVer = V.Index();
+ AddStates(P);
+ Update(P);
+ AddSizes(P);
+ checkChanged = true;
+ break;
+ }
+ }
+ recheck.erase(g);
+ }
+ } while (checkChanged == true && recheck.empty() == false);
+
+ if (_config->FindB("Debug::MultiArchKiller", false) == true)
+ for(std::set<unsigned long>::const_iterator g = recheck.begin();
+ g != recheck.end(); ++g)
+ std::cout << "No pseudo package for »" << GrpIterator(*Cache, Cache->GrpP + *g).Name() << "« installed" << std::endl;
+ }
+
+ if (Prog != 0)
Prog->Progress(Done);
readStateFile(Prog);
AddStates(Pkg);
Update(Pkg);
AddSizes(Pkg);
+
+ // if we remove the pseudo package, we also need to remove the "real" one
+ if (Pkg->CurrentVer != 0 && Pkg.CurrentVer().Pseudo() == true)
+ MarkDelete(Pkg.Group().FindPkg("all"), rPurge, Depth+1, FromUser);
}
/*}}}*/
// DepCache::IsDeleteOk - check if it is ok to remove this package /*{{{*/
// debug output
if(debug_autoremove && PkgState[p->ID].Flags & Flag::Auto)
- std::clog << "AutoDep: " << p.Name() << std::endl;
+ std::clog << "AutoDep: " << p.FullName() << std::endl;
}
// init vars
// MarkPackage - mark a single package in Mark-and-Sweep /*{{{*/
void pkgDepCache::MarkPackage(const pkgCache::PkgIterator &pkg,
const pkgCache::VerIterator &ver,
- bool follow_recommends,
- bool follow_suggests)
+ bool const &follow_recommends,
+ bool const &follow_suggests)
{
pkgDepCache::StateCache &state = PkgState[pkg->ID];
- VerIterator currver = pkg.CurrentVer();
- VerIterator candver = state.CandidateVerIter(*this);
- VerIterator instver = state.InstVerIter(*this);
+
+ // if we are marked already we are done
+ if(state.Marked)
+ return;
+
+ VerIterator const currver = pkg.CurrentVer();
+ VerIterator const candver = state.CandidateVerIter(*this);
+ VerIterator const instver = state.InstVerIter(*this);
#if 0
// If a package was garbage-collected but is now being marked, we
!(ver == currver && instver.end() && !ver.end()))
return;
- // if we are marked already we are done
- if(state.Marked)
- return;
+ bool const debug_autoremove = _config->FindB("Debug::pkgAutoRemove", false);
- bool debug_autoremove = _config->FindB("Debug::pkgAutoRemove", false);
-
if(debug_autoremove)
{
- std::clog << "Marking: " << pkg.Name();
+ std::clog << "Marking: " << pkg.FullName();
if(!ver.end())
std::clog << " " << ver.VerStr();
if(!currver.end())
state.Marked=true;
- if(!ver.end())
+ if(ver.end() == true)
+ return;
+
+ // If the version belongs to a Multi-Arch all package
+ // we will mark all others in this Group with this version also
+ // Beware: We compare versions here the lazy way: by string comparison;
+ // this is bad if multiple repositories provide different versions
+ // of the package with an identical version number - but even in this
+ // case the dependencies are likely the same.
+ if (ver->MultiArch == pkgCache::Version::All &&
+ strcmp(ver.Arch(true), "all") == 0)
{
+ GrpIterator G = pkg.Group();
+ const char* const VerStr = ver.VerStr();
+ for (PkgIterator P = G.FindPkg("any");
+ P.end() != true; P = G.NextPkg(P))
+ {
+ for (VerIterator V = P.VersionList();
+ V.end() != true; ++V)
+ {
+ if (strcmp(VerStr, V.VerStr()) != 0)
+ continue;
+ MarkPackage(P, V, follow_recommends, follow_suggests);
+ break;
+ }
+ }
+ }
+
for(DepIterator d = ver.DependsList(); !d.end(); ++d)
{
if(d->Type == Dep::Depends ||
{
if(debug_autoremove)
{
- std::clog << "Following dep: " << d.ParentPkg().Name()
+ std::clog << "Following dep: " << d.ParentPkg().FullName()
<< " " << d.ParentVer().VerStr() << " "
- << d.DepType() << " "
- << d.TargetPkg().Name();
+ << d.DepType() << " " << d.TargetPkg().FullName();
if((d->CompareOp & ~pkgCache::Dep::Or) != pkgCache::Dep::NoOp)
{
std::clog << " (" << d.CompType() << " "
}
std::clog << std::endl;
}
- MarkPackage(V.ParentPkg(), V,
+ MarkPackage(V.ParentPkg(), V,
follow_recommends, follow_suggests);
}
}
{
if(debug_autoremove)
{
- std::clog << "Following dep: " << d.ParentPkg().Name()
- << " " << d.ParentVer().VerStr() << " "
- << d.DepType() << " "
- << d.TargetPkg().Name();
+ std::clog << "Following dep: " << d.ParentPkg().FullName() << " "
+ << d.ParentVer().VerStr() << " "
+ << d.DepType() << " " << d.TargetPkg().FullName() << " ";
if((d->CompareOp & ~pkgCache::Dep::Or) != pkgCache::Dep::NoOp)
{
std::clog << " (" << d.CompType() << " "
<< d.TargetVer() << ")";
}
std::clog << ", provided by "
- << prv.OwnerPkg().Name() << " "
+ << prv.OwnerPkg().FullName() << " "
<< prv.OwnerVer().VerStr()
<< std::endl;
}
}
}
}
- }
}
/*}}}*/
bool pkgDepCache::Sweep() /*{{{*/
{
state.Garbage=true;
if(debug_autoremove)
- std::cout << "Garbage: " << p.Name() << std::endl;
+ std::cout << "Garbage: " << p.FullName() << std::endl;
}
}
#include <vector>
#include <memory>
+ #include <set>
class pkgDepCache : protected pkgCache::Namespace
{
*/
void MarkPackage(const pkgCache::PkgIterator &pkg,
const pkgCache::VerIterator &ver,
- bool follow_recommends,
- bool follow_suggests);
+ bool const &follow_recommends,
+ bool const &follow_suggests);
/** \brief Update the Marked field of all packages.
*
inline operator pkgCache &() {return *Cache;};
inline Header &Head() {return *Cache->HeaderP;};
inline PkgIterator PkgBegin() {return Cache->PkgBegin();};
+ inline GrpIterator FindGrp(string const &Name) {return Cache->FindGrp(Name);};
inline PkgIterator FindPkg(string const &Name) {return Cache->FindPkg(Name);};
+ inline PkgIterator FindPkg(string const &Name, string const &Arch) {return Cache->FindPkg(Name, Arch);};
inline pkgCache &GetCache() {return *Cache;};
inline pkgVersioningSystem &VS() {return *Cache->VS;};
virtual bool IsDeleteOk(const PkgIterator &Pkg,bool Purge = false,
unsigned long Depth = 0, bool FromUser = true);
- // This is for debuging
- void Update(OpProgress *Prog = 0);
-
// read persistent states
bool readStateFile(OpProgress *prog);
- bool writeStateFile(OpProgress *prog, bool InstalledOnly=false);
+ bool writeStateFile(OpProgress *prog, bool InstalledOnly=true);
// Size queries
inline double UsrSize() {return iUsrSize;};
inline unsigned long BadCount() {return iBadCount;};
bool Init(OpProgress *Prog);
-
+ // Generate all state information
+ void Update(OpProgress *Prog = 0);
+
pkgDepCache(pkgCache *Cache,Policy *Plcy = 0);
virtual ~pkgDepCache();
+
+ private:
+ // Helper for Update(OpProgress) to remove pseudoinstalled arch all packages
+ bool RemovePseudoInstalledPkg(PkgIterator &Pkg, std::set<unsigned long> &recheck);
};
#endif
#include <apt-pkg/progress.h>
#include <apt-pkg/sourcelist.h>
#include <apt-pkg/configuration.h>
+ #include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
// CacheGenerator::pkgCacheGenerator - Constructor /*{{{*/
// ---------------------------------------------------------------------
- /* We set the diry flag and make sure that is written to the disk */
+ /* We set the dirty flag and make sure that is written to the disk */
pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) :
Map(*pMap), Cache(pMap,false), Progress(Prog),
FoundFileDeps(0)
unsigned int Counter = 0;
while (List.Step() == true)
{
- // Get a pointer to the package structure
- string PackageName = List.Package();
+ string const PackageName = List.Package();
if (PackageName.empty() == true)
return false;
-
+
+ /* As we handle Arch all packages as architecture-bound packages
+ we add all information to every (simulated) arch package */
+ std::vector<string> genArch;
+ if (List.ArchitectureAll() == true) {
+ genArch = APT::Configuration::getArchitectures();
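+ // with only a single configured architecture no extra "all" pseudo package is needed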
+ if (genArch.size() != 1)
+ genArch.push_back("all");
+ } else
+ genArch.push_back(List.Architecture());
+
+ for (std::vector<string>::const_iterator arch = genArch.begin();
+ arch != genArch.end(); ++arch)
+ {
+ // Get a pointer to the package structure
pkgCache::PkgIterator Pkg;
- if (NewPackage(Pkg,PackageName) == false)
+ if (NewPackage(Pkg, PackageName, *arch) == false)
return _error->Error(_("Error occurred while processing %s (NewPackage)"),PackageName.c_str());
Counter++;
if (Counter % 100 == 0 && Progress != 0)
pkgCache::VerIterator Ver = Pkg.VersionList();
map_ptrloc *LastVer = &Pkg->VersionList;
int Res = 1;
+ unsigned long const Hash = List.VersionHash();
for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
{
Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
- if (Res >= 0)
+ // Version is higher than the current one - insert here
+ if (Res > 0)
break;
+ // Versionstrings are equal - is hash also equal?
+ if (Res == 0 && Ver->Hash == Hash)
+ break;
+ // proceed with the next entry till we have either found the right
+ // one or a lower version (which marks the insertion point)
}
-
- /* We already have a version for this item, record that we
- saw it */
- unsigned long Hash = List.VersionHash();
- if (Res == 0 && Ver->Hash == Hash)
+
+ /* We already have a version for this item, record that we saw it */
+ if (Res == 0 && Ver.end() == false && Ver->Hash == Hash)
{
if (List.UsePackage(Pkg,Ver) == false)
return _error->Error(_("Error occurred while processing %s (UsePackage2)"),
}
continue;
- }
-
- // Skip to the end of the same version set.
- if (Res == 0)
- {
- for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
- {
- Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
- if (Res != 0)
- break;
- }
}
// Add a new version
if ((*LastDesc == 0 && _error->PendingError()) || NewFileDesc(Desc,List) == false)
return _error->Error(_("Error occurred while processing %s (NewFileDesc2)"),PackageName.c_str());
+ }
}
FoundFileDeps |= List.HasFileDeps();
return true;
}
/*}}}*/
+ // CacheGenerator::NewGroup - Add a new group /*{{{*/
+ // ---------------------------------------------------------------------
+ /* This creates a new group structure and adds it to the hash table */
+ bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name) {
+ Grp = Cache.FindGrp(Name);
+ if (Grp.end() == false)
+ return true;
+
+ // Get a structure
+ unsigned long const Group = Map.Allocate(sizeof(pkgCache::Group));
+ if (unlikely(Group == 0))
+ return false;
+
+ Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group);
+ Grp->Name = Map.WriteString(Name);
+ if (unlikely(Grp->Name == 0))
+ return false;
+
+ // Insert it into the hash table
+ unsigned long const Hash = Cache.Hash(Name);
+ Grp->Next = Cache.HeaderP->GrpHashTable[Hash];
+ Cache.HeaderP->GrpHashTable[Hash] = Group;
+
+ Cache.HeaderP->GroupCount++;
+
+ return true;
+ }
+ /*}}}*/
// CacheGenerator::NewPackage - Add a new package /*{{{*/
// ---------------------------------------------------------------------
/* This creates a new package structure and adds it to the hash table */
- bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name)
- {
- Pkg = Cache.FindPkg(Name);
- if (Pkg.end() == false)
- return true;
+ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name,
+ const string &Arch) {
+ pkgCache::GrpIterator Grp;
+ if (unlikely(NewGroup(Grp, Name) == false))
+ return false;
+
+ Pkg = Grp.FindPkg(Arch);
+ if (Pkg.end() == false)
+ return true;
// Get a structure
- unsigned long Package = Map.Allocate(sizeof(pkgCache::Package));
- if (Package == 0)
+ unsigned long const Package = Map.Allocate(sizeof(pkgCache::Package));
+ if (unlikely(Package == 0))
return false;
-
Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package);
-
+
// Insert it into the hash table
- unsigned long Hash = Cache.Hash(Name);
- Pkg->NextPackage = Cache.HeaderP->HashTable[Hash];
- Cache.HeaderP->HashTable[Hash] = Package;
-
- // Set the name and the ID
- Pkg->Name = Map.WriteString(Name);
- if (Pkg->Name == 0)
+ unsigned long const Hash = Cache.Hash(Name);
+ Pkg->NextPackage = Cache.HeaderP->PkgHashTable[Hash];
+ Cache.HeaderP->PkgHashTable[Hash] = Package;
+
+ // remember the packages in the group
+ Grp->FirstPackage = Package;
+ if (Grp->LastPackage == 0)
+ Grp->LastPackage = Package;
+
+ // Set the name, arch and the ID
+ Pkg->Name = Grp->Name;
+ Pkg->Group = Grp.Index();
+ Pkg->Arch = WriteUniqString(Arch.c_str());
+ if (unlikely(Pkg->Arch == 0))
return false;
Pkg->ID = Cache.HeaderP->PackageCount++;
-
+
return true;
}
/*}}}*/
return Description;
}
/*}}}*/
- // ListParser::NewDepends - Create a dependency element /*{{{*/
+ // CacheGenerator::FinishCache - do various finish operations /*{{{*/
+ // ---------------------------------------------------------------------
+ /* This prepares the Cache for delivery */
+ bool pkgCacheGenerator::FinishCache(OpProgress &Progress) {
+ // FIXME: add progress reporting for this operation
+ // Do we have different architectures in our groups?
+ vector<string> archs = APT::Configuration::getArchitectures();
+ if (archs.size() > 1) {
+ // Create Conflicts in between the group members
+ for (pkgCache::GrpIterator G = GetCache().GrpBegin(); G.end() != true; G++) {
+ string const PkgName = G.Name();
+ for (pkgCache::PkgIterator P = G.PackageList(); P.end() != true; P = G.NextPkg(P)) {
+ if (strcmp(P.Arch(),"all") == 0)
+ continue;
+ pkgCache::PkgIterator allPkg;
+ for (pkgCache::VerIterator V = P.VersionList(); V.end() != true; V++) {
+ string const Arch = V.Arch(true);
+ map_ptrloc *OldDepLast = NULL;
+ /* MultiArch handling introduces a lot of implicit Dependencies:
+ - MultiArch: same → Co-Installable if they have the same version
+ - Architecture: all → Need to be Co-Installable for internal reasons
+ - All others conflict with all other group members */
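+ // e.g. a "MultiArch: same" foo:i386 1.0 gets "Replaces: foo:amd64 (<< 1.0)" and
+ // "Breaks: foo:amd64 (<< 1.0), foo:amd64 (>> 1.0)", while a normal foo:i386
+ // simply gets "Conflicts: foo:amd64" (illustrative example, not generated by the patch)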
+ bool const coInstall = (V->MultiArch == pkgCache::Version::All ||
+ V->MultiArch == pkgCache::Version::Same);
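+ // look up the arch "all" pseudo package of this group only once (cached in allPkg)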
+ if (V->MultiArch == pkgCache::Version::All && allPkg.end() == true)
+ allPkg = G.FindPkg("all");
+ for (vector<string>::const_iterator A = archs.begin(); A != archs.end(); ++A) {
+ if (*A == Arch)
+ continue;
+ /* We allow only one installed arch at a time
+ per group, therefore each group member conflicts
+ with all other group members */
+ pkgCache::PkgIterator D = G.FindPkg(*A);
+ if (D.end() == true)
+ continue;
+ if (coInstall == true) {
+ // Replaces: ${self}:other ( << ${binary:Version})
+ NewDepends(D, V, V.VerStr(),
+ pkgCache::Dep::Less, pkgCache::Dep::Replaces,
+ OldDepLast);
+ // Breaks: ${self}:other (!= ${binary:Version})
+ NewDepends(D, V, V.VerStr(),
+ pkgCache::Dep::Less, pkgCache::Dep::DpkgBreaks,
+ OldDepLast);
+ NewDepends(D, V, V.VerStr(),
+ pkgCache::Dep::Greater, pkgCache::Dep::DpkgBreaks,
+ OldDepLast);
+ if (V->MultiArch == pkgCache::Version::All) {
+ // Depend on ${self}:all which does depend on nothing
+ NewDepends(allPkg, V, V.VerStr(),
+ pkgCache::Dep::Equals, pkgCache::Dep::Depends,
+ OldDepLast);
+ }
+ } else {
+ // Conflicts: ${self}:other
+ NewDepends(D, V, "",
+ pkgCache::Dep::NoOp, pkgCache::Dep::Conflicts,
+ OldDepLast);
+ }
+ }
+ }
+ }
+ }
+ }
+ return true;
+ }
+ /*}}}*/
+ // CacheGenerator::NewDepends - Create a dependency element /*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
version and to the package that it is pointing to. */
- bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator Ver,
- const string &PackageName,
- const string &Version,
- unsigned int Op,
- unsigned int Type)
+ bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg,
+ pkgCache::VerIterator &Ver,
+ string const &Version,
+ unsigned int const &Op,
+ unsigned int const &Type,
+ map_ptrloc *OldDepLast)
{
- pkgCache &Cache = Owner->Cache;
-
// Get a structure
- unsigned long Dependency = Owner->Map.Allocate(sizeof(pkgCache::Dependency));
- if (Dependency == 0)
+ unsigned long const Dependency = Map.Allocate(sizeof(pkgCache::Dependency));
+ if (unlikely(Dependency == 0))
return false;
// Fill it in
Dep->Type = Type;
Dep->CompareOp = Op;
Dep->ID = Cache.HeaderP->DependsCount++;
-
- // Locate the target package
- pkgCache::PkgIterator Pkg;
- if (Owner->NewPackage(Pkg,PackageName) == false)
- return false;
-
+
// Probe the reverse dependency list for a version string that matches
if (Version.empty() == false)
{
if (I->Version != 0 && I.TargetVer() == Version)
Dep->Version = I->Version;*/
if (Dep->Version == 0)
- if ((Dep->Version = WriteString(Version)) == 0)
+ if (unlikely((Dep->Version = Map.WriteString(Version)) == 0))
return false;
}
-
+
// Link it to the package
Dep->Package = Pkg.Index();
Dep->NextRevDepends = Pkg->RevDepends;
Pkg->RevDepends = Dep.Index();
-
- /* Link it to the version (at the end of the list)
- Caching the old end point speeds up generation substantially */
- if (OldDepVer != Ver)
+
+ // Do we know where to link the Dependency to?
+ if (OldDepLast == NULL)
{
OldDepLast = &Ver->DependsList;
for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; D++)
OldDepLast = &D->NextDepends;
- OldDepVer = Ver;
}
- // Is it a file dependency?
- if (PackageName[0] == '/')
- FoundFileDeps = true;
-
Dep->NextDepends = *OldDepLast;
*OldDepLast = Dep.Index();
OldDepLast = &Dep->NextDepends;
return true;
}
/*}}}*/
+ // ListParser::NewDepends - Create the environment for a new dependency /*{{{*/
+ // ---------------------------------------------------------------------
+ /* This creates a Group and the Package to link this dependency to if
+ needed and also handles the caching of the old endpoint */
+ bool pkgCacheGenerator::ListParser::NewDepends(pkgCache::VerIterator Ver,
+ const string &PackageName,
+ const string &Arch,
+ const string &Version,
+ unsigned int Op,
+ unsigned int Type)
+ {
+ pkgCache::GrpIterator Grp;
+ if (unlikely(Owner->NewGroup(Grp, PackageName) == false))
+ return false;
+
+ // Locate the target package
+ pkgCache::PkgIterator Pkg = Grp.FindPkg(Arch);
+ if (Pkg.end() == true) {
+ if (unlikely(Owner->NewPackage(Pkg, PackageName, Arch) == false))
+ return false;
+ }
+
+ // Is it a file dependency?
+ if (unlikely(PackageName[0] == '/'))
+ FoundFileDeps = true;
+
+ /* Caching the old end point speeds up generation substantially */
+ if (OldDepVer != Ver) {
+ OldDepLast = NULL;
+ OldDepVer = Ver;
+ }
+
+ return Owner->NewDepends(Pkg, Ver, Version, Op, Type, OldDepLast);
+ }
+ /*}}}*/
// ListParser::NewProvides - Create a Provides element /*{{{*/
// ---------------------------------------------------------------------
/* */
bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator Ver,
- const string &PackageName,
+ const string &PkgName,
+ const string &PkgArch,
const string &Version)
{
pkgCache &Cache = Owner->Cache;
// We do not add self referencing provides
- if (Ver.ParentPkg().Name() == PackageName)
+ if (Ver.ParentPkg().Name() == PkgName && PkgArch == Ver.Arch(true))
return true;
// Get a structure
- unsigned long Provides = Owner->Map.Allocate(sizeof(pkgCache::Provides));
- if (Provides == 0)
+ unsigned long const Provides = Owner->Map.Allocate(sizeof(pkgCache::Provides));
+ if (unlikely(Provides == 0))
return false;
Cache.HeaderP->ProvidesCount++;
Prv->Version = Ver.Index();
Prv->NextPkgProv = Ver->ProvidesList;
Ver->ProvidesList = Prv.Index();
- if (Version.empty() == false && (Prv->ProvideVersion = WriteString(Version)) == 0)
+ if (Version.empty() == false && unlikely((Prv->ProvideVersion = WriteString(Version)) == 0))
return false;
// Locate the target package
pkgCache::PkgIterator Pkg;
- if (Owner->NewPackage(Pkg,PackageName) == false)
+ if (unlikely(Owner->NewPackage(Pkg,PkgName, PkgArch) == false))
return false;
// Link it to the package
if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
Files.begin()+EndOfSource,Files.end()) == false)
return false;
+
+ // FIXME: move me to a better place
+ Gen.FinishCache(Progress);
}
else
{
if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
Files.begin()+EndOfSource,Files.end()) == false)
return false;
+
+ // FIXME: move me to a better place
+ Gen.FinishCache(Progress);
}
if (Debug == true)
std::clog << "Caches are ready for shipping" << std::endl;
if (BuildCache(Gen,Progress,CurrentSize,TotalSize,
Files.begin()+EndOfSource,Files.end()) == false)
return false;
-
+
+ // FIXME: move me to a better place
+ Gen.FinishCache(Progress);
+
if (_error->PendingError() == true)
return false;
*OutMap = Map.UnGuard();
}
// Print out each package and the failed dependencies
- out <<" " << I.Name() << ":";
- unsigned Indent = strlen(I.Name()) + 3;
+ out << " " << I.FullName(true) << " :";
+ unsigned const Indent = I.FullName(true).size() + 3;
bool First = true;
pkgCache::VerIterator Ver;
out << ' ' << End.DepType() << ": ";
FirstOr = false;
- out << Start.TargetPkg().Name();
+ out << Start.TargetPkg().FullName(true);
// Show a quick summary of the version requirements
if (Start.TargetVer() != 0)
{
pkgCache::PkgIterator I(Cache,Cache.List[J]);
if (Cache[I].NewInstall() == true) {
- List += string(I.Name()) + " ";
+ if (Cache[I].CandidateVerIter(Cache).Pseudo() == true)
+ continue;
+ List += I.FullName(true) + " ";
VersionsList += string(Cache[I].CandVersion) + "\n";
}
}
pkgCache::PkgIterator I(Cache,Cache.List[J]);
if (Cache[I].Delete() == true)
{
+ if (Cache[I].CandidateVerIter(Cache).Pseudo() == true)
+ continue;
if ((Cache[I].iFlags & pkgDepCache::Purge) == pkgDepCache::Purge)
- List += string(I.Name()) + "* ";
+ List += I.FullName(true) + "* ";
else
- List += string(I.Name()) + " ";
+ List += I.FullName(true) + " ";
VersionsList += string(Cache[I].CandVersion)+ "\n";
}
I->CurrentVer == 0 || Cache[I].Delete() == true)
continue;
- List += string(I.Name()) + " ";
+ List += I.FullName(true) + " ";
VersionsList += string(Cache[I].CurVersion) + " => " + Cache[I].CandVersion + "\n";
}
ShowList(out,_("The following packages have been kept back:"),List,VersionsList);
// Not interesting
if (Cache[I].Upgrade() == false || Cache[I].NewInstall() == true)
continue;
-
- List += string(I.Name()) + " ";
+ if (Cache[I].CandidateVerIter(Cache).Pseudo() == true)
+ continue;
+
+ List += I.FullName(true) + " ";
VersionsList += string(Cache[I].CurVersion) + " => " + Cache[I].CandVersion + "\n";
}
ShowList(out,_("The following packages will be upgraded:"),List,VersionsList);
// Not interesting
if (Cache[I].Downgrade() == false || Cache[I].NewInstall() == true)
continue;
-
- List += string(I.Name()) + " ";
+ if (Cache[I].CandidateVerIter(Cache).Pseudo() == true)
+ continue;
+
+ List += I.FullName(true) + " ";
VersionsList += string(Cache[I].CurVersion) + " => " + Cache[I].CandVersion + "\n";
}
return ShowList(out,_("The following packages will be DOWNGRADED:"),List,VersionsList);
pkgCache::PkgIterator I(Cache,Cache.List[J]);
if (Cache[I].InstallVer != (pkgCache::Version *)I.CurrentVer() &&
I->SelectedState == pkgCache::State::Hold) {
- List += string(I.Name()) + " ";
+ List += I.FullName(true) + " ";
VersionsList += string(Cache[I].CurVersion) + " => " + Cache[I].CandVersion + "\n";
}
}
if (Added[I->ID] == false)
{
Added[I->ID] = true;
- List += string(I.Name()) + " ";
+ List += I.FullName(true) + " ";
//VersionsList += string(Cache[I].CurVersion) + "\n"; ???
}
}
Added[P->ID] = true;
char S[300];
- snprintf(S,sizeof(S),_("%s (due to %s) "),P.Name(),I.Name());
+ snprintf(S,sizeof(S),_("%s (due to %s) "),P.FullName(true).c_str(),I.FullName(true).c_str());
List += S;
//VersionsList += "\n"; ???
}
unsigned long ReInstall = 0;
for (pkgCache::PkgIterator I = Dep.PkgBegin(); I.end() == false; I++)
{
+ if (pkgCache::VerIterator(Dep, Dep[I].CandidateVer).Pseudo() == true)
+ continue;
+
if (Dep[I].NewInstall() == true)
Install++;
else
if (found_one == true)
{
ioprintf(c1out,_("Note, selecting %s instead of %s\n"),
- Prov.Name(),Pkg.Name());
+ Prov.FullName(true).c_str(),Pkg.FullName(true).c_str());
Pkg = Prov;
}
}
{
if (AllowFail == true)
ioprintf(c1out,_("Skipping %s, it is already installed and upgrade is not set.\n"),
- Pkg.Name());
+ Pkg.FullName(true).c_str());
return true;
}
-
+
+ // Ignore request for install if package would be new
+ if (_config->FindB("APT::Get::Only-Upgrade", false) == true &&
+ Pkg->CurrentVer == 0)
+ {
+ if (AllowFail == true)
+ ioprintf(c1out,_("Skipping %s, it is not installed and only upgrades are requested.\n"),
+ Pkg.Name());
+ return true;
+ }
+
// Check if there is something at all to install
pkgDepCache::StateCache &State = Cache[Pkg];
if (Remove == true && Pkg->CurrentVer == 0)
if (AllowFail == false)
return false;
- ioprintf(c1out,_("Package %s is not installed, so not removed\n"),Pkg.Name());
+ ioprintf(c1out,_("Package %s is not installed, so not removed\n"),Pkg.FullName(true).c_str());
return true;
}
if (Pkg->ProvidesList != 0)
{
ioprintf(c1out,_("Package %s is a virtual package provided by:\n"),
- Pkg.Name());
+ Pkg.FullName(true).c_str());
pkgCache::PrvIterator I = Pkg.ProvidesList();
for (; I.end() == false; I++)
if (Cache[Pkg].CandidateVerIter(Cache) == I.OwnerVer())
{
if (Cache[Pkg].Install() == true && Cache[Pkg].NewInstall() == false)
- c1out << " " << Pkg.Name() << " " << I.OwnerVer().VerStr() <<
+ c1out << " " << Pkg.FullName(true) << " " << I.OwnerVer().VerStr() <<
_(" [Installed]") << endl;
else
- c1out << " " << Pkg.Name() << " " << I.OwnerVer().VerStr() << endl;
+ c1out << " " << Pkg.FullName(true) << " " << I.OwnerVer().VerStr() << endl;
}
}
c1out << _("You should explicitly select one to install.") << endl;
ioprintf(c1out,
_("Package %s is not available, but is referred to by another package.\n"
"This may mean that the package is missing, has been obsoleted, or\n"
- "is only available from another source\n"),Pkg.Name());
+ "is only available from another source\n"),Pkg.FullName(true).c_str());
string List;
string VersionsList;
if (Seen[Dep.ParentPkg()->ID] == true)
continue;
Seen[Dep.ParentPkg()->ID] = true;
- List += string(Dep.ParentPkg().Name()) + " ";
+ List += Dep.ParentPkg().FullName(true) + " ";
//VersionsList += string(Dep.ParentPkg().CurVersion) + "\n"; ???
}
ShowList(c1out,_("However the following packages replace it:"),List,VersionsList);
}
- _error->Error(_("Package %s has no installation candidate"),Pkg.Name());
+ _error->Error(_("Package %s has no installation candidate"),Pkg.FullName(true).c_str());
return false;
}
{
if (Pkg->CurrentVer == 0 || Pkg.CurrentVer().Downloadable() == false)
ioprintf(c1out,_("Reinstallation of %s is not possible, it cannot be downloaded.\n"),
- Pkg.Name());
+ Pkg.FullName(true).c_str());
else
Cache.SetReInstall(Pkg,true);
}
{
if (AllowFail == true)
ioprintf(c1out,_("%s is already the newest version.\n"),
- Pkg.Name());
+ Pkg.FullName(true).c_str());
}
}
else
{
if (IsRel == true)
return _error->Error(_("Release '%s' for '%s' was not found"),
- VerTag,Pkg.Name());
+ VerTag,Pkg.FullName(true).c_str());
return _error->Error(_("Version '%s' for '%s' was not found"),
- VerTag,Pkg.Name());
+ VerTag,Pkg.FullName(true).c_str());
}
if (strcmp(VerTag,Ver.VerStr()) != 0)
{
ioprintf(c1out,_("Selected version %s (%s) for %s\n"),
- Ver.VerStr(),Ver.RelStr().c_str(),Pkg.Name());
+ Ver.VerStr(),Ver.RelStr().c_str(),Pkg.FullName(true).c_str());
}
Cache.SetCandidateVersion(Ver);
{
if(Pkg.CurrentVer() != 0 || Cache[Pkg].Install())
if(Debug)
- std::cout << "We could delete %s" << Pkg.Name() << std::endl;
+ std::cout << "We could delete %s" << Pkg.FullName(true).c_str() << std::endl;
if (doAutoRemove)
{
// we don't need to fill the strings if we don't need them
if (smallList == false)
{
- autoremovelist += string(Pkg.Name()) + " ";
+ autoremovelist += Pkg.FullName(true) + " ";
autoremoveversions += string(Cache[Pkg].CandVersion) + "\n";
}
}
Cache[Pkg].Install() == false &&
(Cache[Pkg].Flags & pkgCache::Flag::Auto) &&
_config->FindB("APT::Get::ReInstall",false) == false &&
+ _config->FindB("APT::Get::Only-Upgrade",false) == false &&
_config->FindB("APT::Get::Download-Only",false) == false)
{
ioprintf(c1out,_("%s set to manually installed.\n"),
- Pkg.Name());
+ Pkg.FullName(true).c_str());
Cache->MarkAuto(Pkg,false);
AutoMarkChanged++;
}
break;
if (*J == 0) {
- List += string(I.Name()) + " ";
+ List += I.FullName(true) + " ";
VersionsList += string(Cache[I].CandVersion) + "\n";
}
}
for(;;)
{
/* Skip if package is installed already, or is about to be */
- string target = string(Start.TargetPkg().Name()) + " ";
+ string target = Start.TargetPkg().FullName(true) + " ";
if ((*Start.TargetPkg()).SelectedState == pkgCache::State::Install
|| Cache[Start.TargetPkg()].Install())
{
for (unsigned I = 0; I != J; I++)
ioprintf(cout,_("Fetch source %s\n"),Dsc[I].Package.c_str());
+ delete[] Dsc;
return true;
}
for (; I != Fetcher.UriEnd(); I++)
cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
I->Owner->FileSize << ' ' << I->Owner->HashSum() << endl;
+ delete[] Dsc;
return true;
}
if (_config->FindB("APT::Get::Download-only",false) == true)
{
c1out << _("Download complete and in download only mode") << endl;
+ delete[] Dsc;
return true;
}
_exit(0);
}
-
+ delete[] Dsc;
+
// Wait for the subprocess
int Status = 0;
while (waitpid(Process,&Status,0) != Process)
// Process the build-dependencies
vector<pkgSrcRecords::Parser::BuildDepRec> BuildDeps;
- if (Last->BuildDepends(BuildDeps, _config->FindB("APT::Get::Arch-Only",false)) == false)
+ if (Last->BuildDepends(BuildDeps, _config->FindB("APT::Get::Arch-Only",true)) == false)
return _error->Error(_("Unable to get build-dependency information for %s"),Src.c_str());
// Also ensure that build-essential packages are present
for (; Prv.end() != true; Prv++)
{
if (_config->FindB("Debug::BuildDeps",false) == true)
- cout << " Checking provider " << Prv.OwnerPkg().Name() << endl;
+ cout << " Checking provider " << Prv.OwnerPkg().FullName() << endl;
if ((*Cache)[Prv.OwnerPkg()].InstVerIter(*Cache).end() == false)
break;
if (Prv.end() == false)
{
if (_config->FindB("Debug::BuildDeps",false) == true)
- cout << " Is provided by installed package " << Prv.OwnerPkg().Name() << endl;
+ cout << " Is provided by installed package " << Prv.OwnerPkg().FullName() << endl;
skipAlternatives = hasAlternatives;
continue;
}
return _error->Error(_("Failed to satisfy %s dependency for %s: Installed package %s is too new"),
Last->BuildDepType((*D).Type),
Src.c_str(),
- Pkg.Name());
+ Pkg.FullName(true).c_str());
}
}
return true;
}
/*}}}*/
-
// DoMoo - Never Ask, Never Tell /*{{{*/
// ---------------------------------------------------------------------
/* */
{0,"fix-missing","APT::Get::Fix-Missing",0},
{0,"ignore-hold","APT::Ignore-Hold",0},
{0,"upgrade","APT::Get::upgrade",0},
+ {0,"only-upgrade","APT::Get::Only-Upgrade",0},
{0,"force-yes","APT::Get::force-yes",0},
{0,"print-uris","APT::Get::Print-URIs",0},
{0,"diff-only","APT::Get::Diff-Only",0},
apt (0.7.26) UNRELEASED; urgency=low
+ [ Christian Perrier ]
+ * German translation update. Closes: #571037
+ * Spanish manpages translation update. Closes: #573293
+
[ David Kalnischkies ]
- * [BREAK] add possibility to download and use multiply
- Translation files, configurable with Acquire::Translation
- (Closes: #444222, #448216, #550564)
+ * [BREAK] merge MultiArch-ABI. We don't support MultiArch,
+ but we support the usage of the new ABI so libapt users
+ can start to prepare for MultiArch (Closes: #536029)
+ * Ignore :qualifiers after package name in build dependencies
+ in the library by default, but try to honour them in apt-get
+ as we have some sort of MultiArch support ready (Closes: #558103)
+ * Switch to dpkg-source 3.0 (native) format
+ * apt-pkg/depcache.cc:
+ - remove Auto-Installed information from extended_states
+ together with the package itself (Closes: #572364)
+ * cmdline/apt-mark:
+ - don't crash if no arguments are given (Closes: #570962)
+ * debian/control:
+ - remove some years old and obsolete Replaces
+ - add automake/conf build-depends/conflicts as recommend by
+ the autotools-dev README (Closes: #572615)
+ * apt-pkg/contrib/mmap.{h,cc}:
+ - add char[] fallback for filesystems without shared writable
+ mmap() like JFFS2. Thanks to Marius Vollmer for writing
+ and to Loïc Minier for pointing to the patch! (Closes: #314334)
+ * doc/apt_preferences.5.xml:
+ - fix two typos and be more verbose in the novice warning.
+ Thanks to Osamu Aoki for pointing it out! (Closes: #567669)
+ * apt-pkg/deb/dpkgpm.cc:
+ - fix error message construction in OpenLog()
+ - if available store the Commandline in the history
+ * cmdline/apt-get.cc:
+ - add a --only-upgrade flag to install command (Closes: #572259)
+ - fix memory leaks in error conditions in DoSource()
+ * apt-pkg/contrib/cmndline.cc:
+ - save Commandline in Commandline::AsString for logging
+ * apt-pkg/deb/debversion.cc:
+ - consider absent of debian revision equivalent to 0 (Closes: #573592)
+ * doc/makefile, doc/*:
+ - generate subdirectories for building the manpages in on the fly
+ depending on the po files we have.
+ * apt-pkg/pkgcachegen.cc:
+ - merge versions correctly even if multiple different versions
+ with the same version number are available.
+ Thanks to Magnus Holmgren for the patch! (Closes: #351056)
+
+ [ Julian Andres Klode ]
+ * cmdline/apt-mark:
+ - Use the new python-apt API (and conflict with python-apt << 0.7.93.2).
+ * apt-inst/contrib/arfile.h:
+ - Add public ARArchive::Members() which returns the list of members.
+ * debian/rules:
+ - Fix the libraries name to be e.g. libapt-pkg4.9 instead of
+ libapt-pkg-4.9.
+
+ -- David Kalnischkies <kalnischkies@gmail.com> Sun, 14 Mar 2010 16:47:07 +0100
+
+apt (0.7.26~exp2) experimental; urgency=low
+
+ * fix crash when LANGUAGE is not set
+
+ -- Michael Vogt <mvo@debian.org> Thu, 18 Feb 2010 22:07:23 +0100
+
+apt (0.7.26~exp1) experimental; urgency=low
+
+ [ David Kalnischkies ]
+ * [BREAK] add possibility to download and use multiply
+ Translation files, configurable with Acquire::Translation
+ (Closes: #444222, #448216, #550564)
+ * Ignore :qualifiers after package name in build dependencies
+ for now as long we don't understand them (Closes: #558103)
* apt-pkg/contrib/mmap.{cc,h}:
- - extend it to have a growable flag - unused now but maybe…
+ - extend it to have a growable flag - unused now but maybe...
* apt-pkg/pkgcache.h:
- use long instead of short for {Ver,Desc}File size,
patch from Víctor Manuel Jáquez Leal, thanks! (Closes: #538917)
- update with no changes to debian policy 3.8.4
* doc/apt_preferences.5.xml:
- explicitly warn against careless use (Closes: #567669)
+ * debian/rules:
+ - remove creation of empty dir /usr/share/apt
+ * doc/apt-cdrom.8.xml:
+ - fix typo spotted by lintian: proc(c)eed
[ Ivan Masár ]
* Slovak translation update. Closes: #568294
-
- -- David Kalnischkies <kalnischkies@gmail.com> Sat, 13 Feb 2010 01:42:50 +0100
+
+ [ Michael Vogt ]
+ * [BREAK] merged lp:~mvo/apt/history
+ - this writes a /var/log/apt/history tagfile that contains details
+ from the transaction (complements term.log)
+ * methods/http.cc:
+ - add cache-control headers even if no cache is given to allow
+ adding options for intercepting proxies
+ - add Acquire::http::ProxyAutoDetect configuration that
+ can be used to call a external helper to figure out the
+ proxy configuration and return it to apt via stdout
+ (this is a step towards WPAD and zeroconf/avahi support)
+ * abicheck/
+ - add new abitest tester using the ABI Compliance Checker from
+ http://ispras.linuxfoundation.org/index.php/ABI_compliance_checker
+
+ [ Robert Collins ]
+ * Change the package index Info methods to allow apt-cache policy to be
+ useful when using several different archives on the same host.
+ (Closes: #329814, LP: #22354)
+
+ -- Michael Vogt <mvo@debian.org> Thu, 18 Feb 2010 16:11:39 +0100
apt (0.7.25.3) unstable; urgency=low
[ David Kalnischkies ]
* apt-pkg/contrib/macros.h:
- - install the header system.h with a new name to be able to use
+ - install the header system.h with a new name to be able to use
it in other headers (Closes: #567662)
* cmdline/acqprogress.cc:
- Set Mode to Medium so that the correct prefix is used.
- generate sha1 and sha256 checksums for dsc (Closes: #567343)
* cmdline/apt-get.cc:
- don't mark as manually if in download only (Closes: #468180)
-
+
-- Michael Vogt <mvo@debian.org> Mon, 01 Feb 2010 18:41:15 +0100
apt (0.7.25.2) unstable; urgency=low
packages that are not in the extended_states file yet
(closes: #534920)
* ftparchive/writer.{cc,h}:
- - merge crash fix for apt-ftparchive on hurd, thanks to
+ - merge crash fix for apt-ftparchive on hurd, thanks to
Samuel Thibault for the patch (closes: #566664)
[ David Kalnischkies ]
* French manpage translation update
* Russian translation update by Yuri Kozlov
Closes: #564171
-
+
[Chris Leick]
* spot & fix various typos in all manpages
* German manpage translation update
-- Otavio Salvador <otavio@debian.org> Wed, 01 Aug 2007 19:49:51 -0300
--apt (0.7.6) unstable; urgency=low
--
-- * Applied patch from Aurelien Jarno <aurel32@debian.org> to fix wrong
-- directory downloading on non-linux architectures (closes: #435597)
--
-- -- Otavio Salvador <otavio@debian.org> Wed, 01 Aug 2007 19:49:51 -0300
--
apt (0.7.5) unstable; urgency=low
[ Otavio Salvador ]