return true;
}
/*}}}*/
+ // Base256ToNum - Convert a fixed length binary to a number /*{{{*/
+ // ---------------------------------------------------------------------
+ /* This is used in decoding the base-256 encoded fixed length fields in
+ tar files */
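+ /* Illustrative example (Field being the raw 12 byte GNU tar "size" header
+    field): a field whose first byte has the high bit set, e.g. 0x80 followed
+    by eight 0x00 bytes and then 0x01 0x00 0x00, decodes to 65536:
+       unsigned long Size;
+       if (Base256ToNum(Field, Size, 12) == true)
+          ; // Size == 65536
+    A field without the high bit set is rejected so callers can fall back to
+    the usual octal StrToNum() parsing. */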
+ bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len)
+ {
+ if ((Str[0] & 0x80) == 0)
+ return false;
+ else
+ {
+ Res = Str[0] & 0x7F;
+ for(unsigned int i = 1; i < Len; ++i)
+ Res = (Res<<8) + (unsigned char)Str[i]; // cast avoids sign extension of high bytes
+ return true;
+ }
+ }
+ /*}}}*/
// HexDigit - Convert a hex character into an integer /*{{{*/
// ---------------------------------------------------------------------
/* Helper for Hex2Num */
return Buffer + Did;
}
/*}}}*/
+// StripEpoch - Remove the version "epoch" from a version string /*{{{*/
+// ---------------------------------------------------------------------
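+/* A Debian version may carry an "epoch" prefix such as the "1:" in
+   "1:0.8.8ubuntu3"; everything up to and including the first colon is
+   dropped. Illustrative: StripEpoch("1:0.8.8ubuntu3") returns
+   "0.8.8ubuntu3", a version without a colon is returned unchanged. */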
+string StripEpoch(const string &VerStr)
+{
+ size_t i = VerStr.find(":");
+ if (i == string::npos)
+ return VerStr;
+ return VerStr.substr(i+1);
+}
+ /*}}}*/
// tolower_ascii - tolower() function that ignores the locale /*{{{*/
// ---------------------------------------------------------------------
int StringToBool(const string &Text,int Default = -1);
bool ReadMessages(int Fd, vector<string> &List);
bool StrToNum(const char *Str,unsigned long &Res,unsigned Len,unsigned Base = 0);
+ bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len);
bool Hex2Num(const string &Str,unsigned char *Num,unsigned int Length);
bool TokSplitString(char Tok,char *Input,char **List,
unsigned long ListMax);
char *safe_snprintf(char *Buffer,char *End,const char *Format,...) __like_printf(3);
bool CheckDomainList(const string &Host, const string &List);
int tolower_ascii(int const c) __attrib_const __hot;
+string StripEpoch(const string &VerStr);
#define APT_MKSTRCMP(name,func) \
inline int name(const char *A,const char *B) {return func(A,A+strlen(A),B,B+strlen(B));}; \
return _error->Errno("fdopen","Failed to open new FD");
// Feed it the filenames.
- bool Die = false;
if (Version <= 1)
{
for (vector<Item>::iterator I = List.begin(); I != List.end(); I++)
into the pipe. */
fprintf(F,"%s\n",I->File.c_str());
if (ferror(F) != 0)
- {
- Die = true;
break;
- }
}
}
else
- Die = !SendV2Pkgs(F);
+ SendV2Pkgs(F);
fclose(F);
strprintf(dpkg_error, "Sub-process %s exited unexpectedly",Args[0]);
if(dpkg_error.size() > 0)
- _error->Error(dpkg_error.c_str());
+ _error->Error("%s", dpkg_error.c_str());
if(stopOnError)
{
{
while( fgets(buf, sizeof(buf), log) != NULL)
fprintf(report, " %s", buf);
- fclose(log);
+ pclose(log);
}
}
{
while( fgets(buf, sizeof(buf), log) != NULL)
fprintf(report, " %s", buf);
- fclose(log);
+ pclose(log);
}
}
Cnf.Set("Dir::State::lists","lists/");
Cnf.Set("Dir::State::cdroms","cdroms.list");
Cnf.Set("Dir::State::mirrors","mirrors/");
-
+
// Cache
Cnf.Set("Dir::Cache","var/cache/apt/");
Cnf.Set("Dir::Cache::archives","archives/");
// Translation
Cnf.Set("APT::Acquire::Translation", "environment");
+ // Default cdrom mount point
+ Cnf.Set("Acquire::cdrom::mount", "/media/cdrom/");
+
bool Res = true;
// Read an alternate config file
const char *Cfg = getenv("APT_CONFIG");
if (Cfg != 0)
{
- if (FileExists(Cfg) == true)
+ if (RealFileExists(Cfg) == true)
Res &= ReadConfigFile(Cnf,Cfg);
else
- _error->WarningE("FileExists",_("Unable to read %s"),Cfg);
+ _error->WarningE("RealFileExists",_("Unable to read %s"),Cfg);
}
// Read the configuration parts dir
// Read the main config file
string FName = Cnf.FindFile("Dir::Etc::main");
- if (FileExists(FName) == true)
+ if (RealFileExists(FName) == true)
Res &= ReadConfigFile(Cnf,FName);
if (Res == false)
//VersionsList += string(Cache[I].CurVersion) + "\n"; ???
}
}
-
+ else
+ continue;
+
if (I->CurrentVer == 0)
continue;
APT::PackageSet virtualPkgs;
public:
+ std::list<std::pair<pkgCache::VerIterator, std::string> > selectedByRelease;
+
CacheSetHelperAPTGet(std::ostream &out) : APT::CacheSetHelper(true), out(out) {
explicitlyNamed = true;
}
}
virtual void showSelectedVersion(pkgCache::PkgIterator const &Pkg, pkgCache::VerIterator const Ver,
string const &ver, bool const &verIsRel) {
- if (ver != Ver.VerStr())
- ioprintf(out, _("Selected version '%s' (%s) for '%s'\n"),
- Ver.VerStr(), Ver.RelStr().c_str(), Pkg.FullName(true).c_str());
+ if (ver == Ver.VerStr())
+ return;
+ selectedByRelease.push_back(make_pair(Ver, ver));
}
bool showVirtualPackageErrors(pkgCacheFile &Cache) {
}
}
+ bool propergateReleaseCandiateSwitching(std::list<std::pair<pkgCache::VerIterator, std::string> > start, std::ostream &out)
+ {
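+ // First switch the candidate of every version that was selected via a
+ // release request, then let SetCandidateRelease pull dependencies to the
+ // same release where needed and finally report each candidate switch that
+ // was collected in Changed along the way.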
+ for (std::list<std::pair<pkgCache::VerIterator, std::string> >::const_iterator s = start.begin();
+ s != start.end(); ++s)
+ Cache->GetDepCache()->SetCandidateVersion(s->first);
+
+ bool Success = true;
+ std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> > Changed;
+ for (std::list<std::pair<pkgCache::VerIterator, std::string> >::const_iterator s = start.begin();
+ s != start.end(); ++s)
+ {
+ Changed.push_back(std::make_pair(s->first, pkgCache::VerIterator(*Cache)));
+ // We continue here even if it failed so that the ShowBroken output later is more complete
+ Success &= Cache->GetDepCache()->SetCandidateRelease(s->first, s->second, Changed);
+ }
+ for (std::list<std::pair<pkgCache::VerIterator, pkgCache::VerIterator> >::const_iterator c = Changed.begin();
+ c != Changed.end(); ++c)
+ {
+ if (c->second.end() == true)
+ ioprintf(out, _("Selected version '%s' (%s) for '%s'\n"),
+ c->first.VerStr(), c->first.RelStr().c_str(), c->first.ParentPkg().FullName(true).c_str());
+ else if (c->first.ParentPkg()->Group != c->second.ParentPkg()->Group)
+ {
+ pkgCache::VerIterator V = (*Cache)[c->first.ParentPkg()].CandidateVerIter(*Cache);
+ ioprintf(out, _("Selected version '%s' (%s) for '%s' because of '%s'\n"), V.VerStr(),
+ V.RelStr().c_str(), V.ParentPkg().FullName(true).c_str(), c->second.ParentPkg().FullName(true).c_str());
+ }
+ }
+ return Success;
+ }
+
void doAutoInstall() {
for (APT::PackageSet::const_iterator P = doAutoInstallLater.begin();
P != doAutoInstallLater.end(); ++P) {
{
// force a hashsum for compatibility reasons
_config->CndSet("Acquire::ForceHash", "md5sum");
- if (Fetcher.Setup(&Stat, "") == false)
- return false;
}
else if (Fetcher.Setup(&Stat, _config->FindDir("Dir::Cache::Archives")) == false)
return false;
if(Debug)
std::cout << "DoAutomaticRemove()" << std::endl;
- // we don't want to autoremove and we don't want to see it, so why calculating?
- if (doAutoRemove == false && hideAutoRemove == true)
- return true;
-
if (doAutoRemove == true &&
_config->FindB("APT::Get::Remove",true) == false)
{
bool purgePkgs = _config->FindB("APT::Get::Purge", false);
bool smallList = (hideAutoRemove == false &&
- strcasecmp(_config->Find("APT::Get::HideAutoRemove","").c_str(),"small") == 0);
+ strcasecmp(_config->Find("APT::Get::HideAutoRemove","").c_str(),"small") == 0);
string autoremovelist, autoremoveversions;
unsigned long autoRemoveCount = 0;
}
else
{
+ // if the package is a new install and already garbage we don't need to
+ // install it in the first place, so nuke it instead of show it
+ if (Cache[Pkg].Install() == true && Pkg.CurrentVer() == 0)
+ Cache->MarkDelete(Pkg, false);
// only show stuff in the list that is not yet marked for removal
- if(Cache[Pkg].Delete() == false)
+ else if(hideAutoRemove == false && Cache[Pkg].Delete() == false)
{
++autoRemoveCount;
// we don't need to fill the strings if we don't need them
}
}
}
+
+ // Now see if we had destroyed anything (if we had done anything)
+ if (Cache->BrokenCount() != 0)
+ {
+ c1out << _("Hmm, seems like the AutoRemover destroyed something which really\n"
+ "shouldn't happen. Please file a bug report against apt.") << endl;
+ c1out << endl;
+ c1out << _("The following information may help to resolve the situation:") << endl;
+ c1out << endl;
+ ShowBroken(c1out,Cache,false);
+
+ return _error->Error(_("Internal Error, AutoRemover broke stuff"));
+ }
+
// if we don't remove them, we should show them!
if (doAutoRemove == false && (autoremovelist.empty() == false || autoRemoveCount != 0))
{
"%lu packages were automatically installed and are no longer required.\n", autoRemoveCount), autoRemoveCount);
c1out << _("Use 'apt-get autoremove' to remove them.") << std::endl;
}
- // Now see if we had destroyed anything (if we had done anything)
- else if (Cache->BrokenCount() != 0)
- {
- c1out << _("Hmm, seems like the AutoRemover destroyed something which really\n"
- "shouldn't happen. Please file a bug report against apt.") << endl;
- c1out << endl;
- c1out << _("The following information may help to resolve the situation:") << endl;
- c1out << endl;
- ShowBroken(c1out,Cache,false);
-
- return _error->Error(_("Internal Error, AutoRemover broke stuff"));
- }
return true;
}
/*}}}*/
{
if (order[i] == MOD_INSTALL) {
InstallAction = std::for_each(verset[MOD_INSTALL].begin(), verset[MOD_INSTALL].end(), InstallAction);
+ InstallAction.propergateReleaseCandiateSwitching(helper.selectedByRelease, c0out);
InstallAction.doAutoInstall();
}
else if (order[i] == MOD_REMOVE)
pkgCache::PkgIterator I(Cache,Cache.List[J]);
if ((*Cache)[I].Install() == false)
continue;
+ pkgCache::VerIterator Cand = Cache[I].CandidateVerIter(Cache);
+ if (Cand.Pseudo() == true)
+ continue;
- const char **J;
- for (J = CmdL.FileList + 1; *J != 0; J++)
- if (strcmp(*J,I.Name()) == 0)
- break;
-
- if (*J == 0) {
- List += I.FullName(true) + " ";
- VersionsList += string(Cache[I].CandVersion) + "\n";
- }
+ if (verset[MOD_INSTALL].find(Cand) != verset[MOD_INSTALL].end())
+ continue;
+
+ List += I.FullName(true) + " ";
+ VersionsList += string(Cache[I].CandVersion) + "\n";
}
ShowList(c1out,_("The following extra packages will be installed:"),List,VersionsList);
Cleaner.Go(_config->FindDir("Dir::Cache::archives") + "partial/",*Cache);
}
/*}}}*/
+// DoDownload - download a binary /*{{{*/
+// ---------------------------------------------------------------------
+bool DoDownload(CommandLine &CmdL)
+{
+ CacheFile Cache;
+ if (Cache.ReadOnlyOpen() == false)
+ return false;
+
+ APT::CacheSetHelper helper(c0out);
+ APT::VersionSet verset = APT::VersionSet::FromCommandLine(Cache,
+ CmdL.FileList + 1, APT::VersionSet::CANDIDATE, helper);
+ pkgAcquire Fetcher;
+ AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0));
+ Fetcher.Setup(&Stat);
+
+ if (verset.empty() == true)
+ return false;
+
+ pkgRecords Recs(Cache);
+ pkgSourceList *SrcList = Cache.GetSourceList();
+ for (APT::VersionSet::const_iterator Ver = verset.begin();
+ Ver != verset.end();
+ ++Ver)
+ {
+ string descr;
+ // get the right version
+ pkgCache::PkgIterator Pkg = Ver.ParentPkg();
+ pkgCache::VerFileIterator Vf = Ver.FileList();
+ if (Vf.end() == true)
+ return _error->Error("Can not find VerFile");
+ // look up the record only after we know the version actually has a file entry
+ pkgRecords::Parser &rec=Recs.Lookup(Vf);
+ pkgCache::PkgFileIterator F = Vf.File();
+ pkgIndexFile *index;
+ if(SrcList->FindIndex(F, index) == false)
+ return _error->Error("FindIndex failed");
+ string uri = index->ArchiveURI(rec.FileName());
+ strprintf(descr, _("Downloading %s %s"), Pkg.Name(), Ver.VerStr());
+ // get the most appropriate hash
+ HashString hash;
+ if (rec.SHA256Hash() != "")
+ hash = HashString("sha256", rec.SHA256Hash());
+ else if (rec.SHA1Hash() != "")
+ hash = HashString("sha1", rec.SHA1Hash());
+ else if (rec.MD5Hash() != "")
+ hash = HashString("md5", rec.MD5Hash());
+ // get the file
+ new pkgAcqFile(&Fetcher, uri, hash.toStr(), (*Ver)->Size, descr, Pkg.Name(), ".");
+ }
+ bool result = (Fetcher.Run() == pkgAcquire::Continue);
+
+ return result;
+}
+ /*}}}*/
// DoCheck - Perform the check operation /*{{{*/
// ---------------------------------------------------------------------
/* Opening automatically checks the system, this command is mostly used
return true;
}
/*}}}*/
+// GetChangelogPath - return a path pointing to a changelog file or dir /*{{{*/
+// ---------------------------------------------------------------------
+/* This returns a "path" string for the changelog url construction.
+ * Please note that it's not complete, it either needs a "/changelog"
+ * appended (for the packages.debian.org/changelogs site) or a
+ * ".changelog" (for third party sites that store the changelog in the
+ * pool/ next to the deb itself)
+ * Example return: "pool/main/a/apt/apt_0.8.8ubuntu3"
+ */
+string GetChangelogPath(CacheFile &Cache,
+ pkgCache::PkgIterator Pkg,
+ pkgCache::VerIterator Ver)
+{
+ string path;
+
+ pkgRecords Recs(Cache);
+ pkgRecords::Parser &rec=Recs.Lookup(Ver.FileList());
+ string srcpkg = rec.SourcePkg().empty() ? Pkg.Name() : rec.SourcePkg();
+ string ver = Ver.VerStr();
+ // if there is a source version it always wins
+ if (rec.SourceVer() != "")
+ ver = rec.SourceVer();
+ path = flNotFile(rec.FileName());
+ path += srcpkg + "_" + StripEpoch(ver);
+ return path;
+}
+ /*}}}*/
+// GuessThirdPartyChangelogUri - return url /*{{{*/
+// ---------------------------------------------------------------------
+/* Construct a changelog file path for third party sites that do not use
+ * packages.debian.org/changelogs
+ * This simply uses the ArchiveURI() of the source pkg and looks for
+ * a .changelog file there. Example for "medibuntu":
+ * apt-get changelog mplayer-doc:
+ * http://packages.medibuntu.org/pool/non-free/m/mplayer/mplayer_1.0~rc4~try1.dsfg1-1ubuntu1+medibuntu1.changelog
+ */
+bool GuessThirdPartyChangelogUri(CacheFile &Cache,
+ pkgCache::PkgIterator Pkg,
+ pkgCache::VerIterator Ver,
+ string &out_uri)
+{
+ // get the binary deb server path
+ pkgCache::VerFileIterator Vf = Ver.FileList();
+ if (Vf.end() == true)
+ return false;
+ pkgCache::PkgFileIterator F = Vf.File();
+ pkgIndexFile *index;
+ pkgSourceList *SrcList = Cache.GetSourceList();
+ if(SrcList->FindIndex(F, index) == false)
+ return false;
+
+ // get archive uri for the binary deb
+ string path_without_dot_changelog = GetChangelogPath(Cache, Pkg, Ver);
+ out_uri = index->ArchiveURI(path_without_dot_changelog + ".changelog");
+
+ return true;
+}
+ /*}}}*/
+// DownloadChangelog - Download the changelog /*{{{*/
+// ---------------------------------------------------------------------
+bool DownloadChangelog(CacheFile &CacheFile, pkgAcquire &Fetcher,
+ pkgCache::VerIterator Ver, string targetfile)
+/* Download a changelog file for the given package version to
+ * targetfile. This will first try the server from Apt::Changelogs::Server
+ * (http://packages.debian.org/changelogs by default) and if that gives
+ * a 404 tries to get it from the archive directly (see
+ * GuessThirdPartyChangelogUri for details)
+ */
+{
+ string path;
+ string descr;
+ string server;
+ string changelog_uri;
+
+ // data structures we need
+ pkgCache::PkgIterator Pkg = Ver.ParentPkg();
+
+ // make the server root configurable
+ server = _config->Find("Apt::Changelogs::Server",
+ "http://packages.debian.org/changelogs");
+ path = GetChangelogPath(CacheFile, Pkg, Ver);
+ strprintf(changelog_uri, "%s/%s/changelog", server.c_str(), path.c_str());
+ strprintf(descr, _("Changelog for %s (%s)"), Pkg.Name(), changelog_uri.c_str());
+ // queue it
+ new pkgAcqFile(&Fetcher, changelog_uri, "", 0, descr, Pkg.Name(), "ignored", targetfile);
+
+ // try downloading it; if that fails, try the third-party-changelogs location
+ // FIXME: res is "Continue" even if I get a 404?!?
+ int res = Fetcher.Run();
+ if (!FileExists(targetfile))
+ {
+ string third_party_uri;
+ if (GuessThirdPartyChangelogUri(CacheFile, Pkg, Ver, third_party_uri))
+ {
+ strprintf(descr, _("Changelog for %s (%s)"), Pkg.Name(), third_party_uri.c_str());
+ new pkgAcqFile(&Fetcher, third_party_uri, "", 0, descr, Pkg.Name(), "ignored", targetfile);
+ res = Fetcher.Run();
+ }
+ }
+
+ if (FileExists(targetfile))
+ return true;
+
+ // error
+ return _error->Error("changelog download failed");
+}
+ /*}}}*/
+// DisplayFileInPager - Display File with pager /*{{{*/
+void DisplayFileInPager(string filename)
+{
+ pid_t Process = ExecFork();
+ if (Process == 0)
+ {
+ const char *Args[3];
+ Args[0] = "/usr/bin/sensible-pager";
+ Args[1] = filename.c_str();
+ Args[2] = 0;
+ execvp(Args[0],(char **)Args);
+ exit(100);
+ }
+
+ // Wait for the subprocess
+ ExecWait(Process, "sensible-pager", false);
+}
+ /*}}}*/
+// DoChangelog - Get changelog from the command line /*{{{*/
+// ---------------------------------------------------------------------
+bool DoChangelog(CommandLine &CmdL)
+{
+ CacheFile Cache;
+ if (Cache.ReadOnlyOpen() == false)
+ return false;
+
+ APT::CacheSetHelper helper(c0out);
+ APT::VersionSet verset = APT::VersionSet::FromCommandLine(Cache,
+ CmdL.FileList + 1, APT::VersionSet::CANDIDATE, helper);
+ pkgAcquire Fetcher;
+ AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0));
+ Fetcher.Setup(&Stat);
+
+ if (verset.empty() == true)
+ return false;
+ char *tmpdir = mkdtemp(strdup("/tmp/apt-changelog-XXXXXX"));
+ if (tmpdir == NULL) {
+ return _error->Errno("mkdtemp", "mkdtemp failed");
+ }
+
+ for (APT::VersionSet::const_iterator Ver = verset.begin();
+ Ver != verset.end();
+ ++Ver)
+ {
+ string changelogfile = string(tmpdir) + "/changelog"; // place the file inside the temporary directory
+ if (DownloadChangelog(Cache, Fetcher, Ver, changelogfile))
+ DisplayFileInPager(changelogfile);
+ // cleanup temp file
+ unlink(changelogfile.c_str());
+ }
+ // cleanup tmp dir
+ rmdir(tmpdir);
+ free(tmpdir);
+ return true;
+}
+ /*}}}*/
// DoMoo - Never Ask, Never Tell /*{{{*/
// ---------------------------------------------------------------------
/* */
" check - Verify that there are no broken dependencies\n"
" markauto - Mark the given packages as automatically installed\n"
" unmarkauto - Mark the given packages as manually installed\n"
+ " changelog - Download and display the changelog for the given package\n"
+ " download - Download the binary package into the current directory\n"
"\n"
"Options:\n"
" -h This help text.\n"
{"autoclean",&DoAutoClean},
{"check",&DoCheck},
{"source",&DoSource},
+ {"download",&DoDownload},
+ {"changelog",&DoChangelog},
{"moo",&DoMoo},
{"help",&ShowHelp},
{0,0}};
-apt (0.8.11+wheezy) UNRELEASED; urgency=low
++apt (0.8.11) UNRELEASED; urgency=low
+
+ * apt-get install pkg/experimental will now not only switch the
+ candidate of package pkg to the version from the release experimental
+ but also the candidates of all dependencies of pkg if the current
+ candidate can't satisfy a versioned dependency.
+
+ -- David Kalnischkies <kalnischkies@gmail.com> Fri, 03 Dec 2010 14:09:12 +0100
+
apt (0.7.26~exp3) experimental; urgency=low
* apt-ftparchive now reads the standard configuration files in
- apt (0.8.11) unstable; urgency=low
-apt (0.8.11+wheezy) unstable; urgency=low
++apt (0.8.11) UNRELEASED; urgency=low
+
++ [ David Kalnischkies ]
+ * apt-pkg/depcache.cc:
+ - add SetCandidateRelease() to set a candidate version and
+ the candidates of dependencies if needed to a specified
+ release (Closes: #572709)
+ * cmdline/apt-get.cc:
+ - if --print-uris is used don't setup downloader as we don't need
+ progress, lock nor the directories it would create otherwise
+ - show dependencies of essential packages which are going to be removed
+ only if they cause the removal of this essential package (Closes: #601961)
+ - keep not-installed garbage packages uninstalled instead of showing
+ them in the autoremove section and installing them (Closes: #604222)
+ - change pkg/release behavior to use the new SetCandidateRelease
+ so installing packages from experimental or backports is easier
+ - really do not show packages in the extra section if they were
+ requested on the commandline, e.g. with a modifier (Closes: #184730)
+ * debian/control:
+ - add Vcs-Browser now that loggerhead works again (Closes: #511168)
+ - depend on debhelper 7 to raise compat level
+ - depend on dpkg-dev (>= 1.15.8) to have c++ symbol mangling
+ * apt-pkg/contrib/fileutl.cc:
+ - add a RealFileExists method and check that your configuration files
+ are real files to avoid endless loops if not (Closes: #604401)
+ - ignore non-regular files in GetListOfFilesInDir (Closes: #594694)
+ * apt-pkg/contrib/weakptr.h:
+ - include stddef.h to fix compile error (undefined NULL) with gcc-4.6
+ * methods/https.cc:
+ - fix CURLOPT_SSL_VERIFYHOST by really passing 2 to it if enabled
+ * deb/dpkgpm.cc:
+ - fix popen/fclose mismatch reported by cppcheck. Thanks to Petter
+ Reinholdtsen for report and patch! (Closes: #607803)
+ * doc/apt.conf.5.xml:
+ - fix multipl{y,e} spelling error reported by Jakub Wilk (Closes: #607636)
+ * apt-inst/contrib/extracttar.cc:
+ - let apt-utils work with encoded tar headers if uid/gid are large.
+ Thanks to Nobuhiro Hayashi for the patch! (Closes: #330162)
+ * apt-pkg/cacheiterator.h:
+ - do not segfault if cache is not build (Closes: #254770)
+ * doc/apt-get.8.xml:
+ - remove duplicated mentioning of --install-recommends
+ * doc/sources.list.5.xml:
+ - remove obsolete references to non-us (Closes: #594495)
+ * debian/rules:
+ - use -- instead of deprecated -u for dh_gencontrol
+ - remove shlibs.local creation and usage
+ - show differences in the symbol files, but never fail
+ * pre-build.sh:
+ - remove as it is not needed for a working 'bzr bd'
+ * debian/{apt,apt-utils}.symbols:
+ - ship experimental unmangled c++ symbol files
+ * methods/rred.cc:
+ - operate optionally on gzip compressed pdiffs
+ * apt-pkg/acquire-item.cc:
+ - don't uncompress downloaded pdiff files before feeding them to rred
+ - try downloading clearsigned InRelease before trying Release.gpg
+ - change the internal handling of Extensions in pkgAcqIndex
+ - add a special uncompressed compression type to prefer those files
+ - download and use i18n/Index to choose which Translations to download
+ * cmdline/apt-key:
+ - don't set trustdb-name as non-root so 'list' and 'finger'
+ can be used without being root (Closes: #393005, #592107)
+ * apt-pkg/deb/deblistparser.cc:
+ - rewrite LoadReleaseInfo to cope with clearsigned Releasefiles
+ * ftparchive/writer.cc:
+ - add config option to search for more patterns in release command
+ - include Index files by default in the Release file
+ * methods/{gzip,bzip}.cc:
+ - print a good error message if FileSize() is zero
+ * apt-pkg/aptconfiguration.cc:
+ - remove the inbuilt Translation files whitelist
- -- Michael Vogt <mvo@debian.org> Tue, 01 Feb 2011 09:38:48 +0100
+ [ Michael Vogt ]
+ * methods/http.cc:
+ - do not hang if Acquire::http::ProxyAutoDetect can not be
+ executed or returns no data (LP: #654393)
+ * debian/apt.conf.autoremove:
+ - never autoremove the GNU/Hurd kernel (closes: #588423), thanks
+ to Guillem Jover
+ * apt-pkg/cdrom.cc, apt-pkg/init.cc, methods/cdrom.cc:
+ - use /media/cdrom as default mountpoint (closes: #611569)
+
+ [ Martin Pitt ]
+ * test/integration/test-compressed-indexes, test/test-indexes.sh:
+ - Explicitly disable compressed indexes at the start. This ensures that we
+ will actually test uncompressed indexes regardless of the internal
+ default value of Acquire::GzipIndexes.
+
+ -- David Kalnischkies <kalnischkies@gmail.com> Fri, 28 Jan 2011 12:22:25 +0100
apt (0.8.10.3) unstable; urgency=low
for broken dependencies.</para></listitem>
</varlistentry>
+ <varlistentry><term>download</term>
+ <listitem><para><literal>download</literal> will download the given
+ binary package into the current directory.
+ </para></listitem>
+ </varlistentry>
+
<varlistentry><term>clean</term>
<listitem><para><literal>clean</literal> clears out the local repository of retrieved package
files. It removes everything but the lock file from
<listitem><para><literal>autoremove</literal> is used to remove packages that were automatically
installed to satisfy dependencies for some package and that are no longer needed.</para></listitem>
</varlistentry>
+
+ <varlistentry><term>changelog</term>
+ <listitem><para><literal>changelog</literal> downloads a package changelog and displays
+ it through <command>sensible-pager</command>. The server name and base
+ directory are defined in the <literal>APT::Changelogs::Server</literal>
+ variable (e.g. <ulink>http://packages.debian.org/changelogs</ulink> for
+ Debian or <ulink>http://changelogs.ubuntu.com/changelogs</ulink> for
+ Ubuntu).
+ By default it displays the changelog for the version that is
+ installed. However, you can specify the same options as for
+ the <option>install</option> command.
+ </para>
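+ <para>For example, <literal>apt-get changelog apt</literal> downloads
+ and displays the changelog of the <literal>apt</literal> package.</para>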
+ </listitem>
+ </varlistentry>
+
+
+
</variablelist>
</refsect1>
<listitem><para>Compile source packages after downloading them.
Configuration Item: <literal>APT::Get::Compile</literal>.</para></listitem>
</varlistentry>
-
- <varlistentry><term><option>--install-recommends</option></term>
- <listitem><para>Also install recommended packages.</para></listitem>
- </varlistentry>
-
- <varlistentry><term><option>--no-install-recommends</option></term>
- <listitem><para>Do not install recommended packages.</para></listitem>
- </varlistentry>
<varlistentry><term><option>--ignore-hold</option></term>
<listitem><para>Ignore package Holds; This causes <command>apt-get</command> to ignore a hold
*/
quiet "0";
+ quiet::NoUpdate "true"; // never update progress information - included in -q=1
// Options for APT in general
APT
// does a ExecFork)
Keep-Fds {};
+ Changelogs
+ {
+ // server that provides the changelogs; the code will assume
+ // the changelogs are in the pool/ under a srcpkg_ver directory
+ // with the name "changelog"
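+ // e.g. (illustrative) apt 0.8.8ubuntu3 would be fetched from
+ // http://packages.debian.org/changelogs/pool/main/a/apt/apt_0.8.8ubuntu3/changelog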
+ Server "http://packages.debian.org/changelogs";
+ };
+
// control parameters for cron jobs by /etc/cron.daily/apt
Periodic
{
- BackupArchiveInterval "0";
- // - Backup after n-days if archive contents changed.(0=disable)
-
- BackupLevel "3";
- // - Backup level.(0=disable), 1 is invalid.
-
- // APT::Archives::MaxAge "0"; (old, deprecated)
- MaxAge "0"; // (new)
- // - Set maximum allowed age of a cache package file. If a cache
- // package file is older it is deleted (0=disable)
-
- // APT::Archives::MinAge "2"; (old, deprecated)
- MinAge "2"; // (new)
- // - Set minimum age of a package file. If a file is younger it
- // will not be deleted (0=disable). Usefull to prevent races
- // and to keep backups of the packages for emergency.
-
- // APT::Archives::MaxSize "0"; (old, deprecated)
- MaxSize "0"; // (new)
- // - Set maximum size of the cache in MB (0=disable). If the cache
- // is bigger, cached package files are deleted until the size
- // requirement is met (the biggest packages will be deleted
- // first).
-
- Update-Package-Lists "0";
- // - Do "apt-get update" automatically every n-days (0=disable)
- //
- Download-Upgradeable-Packages "0";
- // - Do "apt-get upgrade --download-only" every n-days (0=disable)
- //
- Unattended-Upgrade "0";
- // - Run the "unattended-upgrade" security upgrade script
- // every n-days (0=disabled)
- // Requires the package "unattended-upgrades" and will write
- // a log in /var/log/unattended-upgrades
- //
- AutocleanInterval "0";
- // - Do "apt-get autoclean" every n-days (0=disable)
-
- Verbose "0";
- // - Send report mail to root
- // 0: no report (or null string)
- // 1: progress report (actually any string)
- // 2: + command outputs (remove -qq, remove 2>/dev/null, add -d)
- // 3: + trace on
+ BackupArchiveInterval "0";
+ // - Backup after n-days if archive contents changed.(0=disable)
+
+ BackupLevel "3";
+ // - Backup level.(0=disable), 1 is invalid.
+
+ // APT::Archives::MaxAge "0"; (old, deprecated)
+ MaxAge "0"; // (new)
+ // - Set maximum allowed age of a cache package file. If a cache
+ // package file is older it is deleted (0=disable)
+
+ // APT::Archives::MinAge "2"; (old, deprecated)
+ MinAge "2"; // (new)
+ // - Set minimum age of a package file. If a file is younger it
+ // will not be deleted (0=disable). Useful to prevent races
+ // and to keep backups of the packages for emergency.
+
+ // APT::Archives::MaxSize "0"; (old, deprecated)
+ MaxSize "0"; // (new)
+ // - Set maximum size of the cache in MB (0=disable). If the cache
+ // is bigger, cached package files are deleted until the size
+ // requirement is met (the biggest packages will be deleted
+ // first).
+
+ Update-Package-Lists "0";
+ // - Do "apt-get update" automatically every n-days (0=disable)
+ //
+ Download-Upgradeable-Packages "0";
+ // - Do "apt-get upgrade --download-only" every n-days (0=disable)
+ //
+ Unattended-Upgrade "0";
+ // - Run the "unattended-upgrade" security upgrade script
+ // every n-days (0=disabled)
+ // Requires the package "unattended-upgrades" and will write
+ // a log in /var/log/unattended-upgrades
+ //
+ AutocleanInterval "0";
+ // - Do "apt-get autoclean" every n-days (0=disable)
+
+ Verbose "0";
+ // - Send report mail to root
+ // 0: no report (or null string)
+ // 1: progress report (actually any string)
+ // 2: + command outputs (remove -qq, remove 2>/dev/null, add -d)
+ // 3: + trace on
};
};
lzma "lzma";
gz "gzip";
- Order { "gz"; "lzma"; "bz2"; };
+ Order { "uncompressed"; "gz"; "lzma"; "bz2"; };
};
Languages
if [ $MSGLEVEL -le 2 ]; then
msgmsg() { true; }
msgnmsg() { true; }
+ msgtest() { true; }
+ msgpass() { echo -n " ${CPASS}P${CNORMAL}" >&2; }
+ msgskip() { echo -n " ${CWARNING}S${CNORMAL}" >&2; }
+ msgfail() { echo -n " ${CFAIL}FAIL${CNORMAL}" >&2; }
fi
if [ $MSGLEVEL -le 3 ]; then
msginfo() { true; }
dpkg() {
$(which dpkg) --root=${TMPWORKINGDIRECTORY}/rootdir --force-not-root --force-bad-path --log=${TMPWORKINGDIRECTORY}/rootdir/var/log/dpkg.log $*
}
+ aptitude() {
+ if [ -f ./aptconfig.conf ]; then
+ APT_CONFIG=aptconfig.conf LD_LIBRARY_PATH=${BUILDDIRECTORY} $(which aptitude) $*
+ elif [ -f ../aptconfig.conf ]; then
+ APT_CONFIG=../aptconfig.conf LD_LIBRARY_PATH=${BUILDDIRECTORY} $(which aptitude) $*
+ else
+ LD_LIBRARY_PATH=${BUILDDIRECTORY} $(which aptitude) $*
+ fi
+ }
+
+ addtrap() {
+ CURRENTTRAP="$CURRENTTRAP $1"
+ trap "$CURRENTTRAP" 0 HUP INT QUIT ILL ABRT FPE SEGV PIPE TERM
+ }
setupenvironment() {
TMPWORKINGDIRECTORY=$(mktemp -d)
BUILDDIRECTORY="${TESTDIR}/../../build/bin"
test -x "${BUILDDIRECTORY}/apt-get" || msgdie "You need to build tree first"
local OLDWORKINGDIRECTORY=$(pwd)
- CURRENTTRAP="cd /; rm -rf $TMPWORKINGDIRECTORY; cd $OLDWORKINGDIRECTORY"
- trap "$CURRENTTRAP" 0 HUP INT QUIT ILL ABRT FPE SEGV PIPE TERM
+ addtrap "cd /; rm -rf $TMPWORKINGDIRECTORY; cd $OLDWORKINGDIRECTORY;"
cd $TMPWORKINGDIRECTORY
mkdir rootdir aptarchive keys
cd rootdir
local PACKAGESFILE=$(echo "$(basename $0)" | sed -e 's/^test-/Packages-/' -e 's/^skip-/Packages-/')
if [ -f "${TESTDIR}/${PACKAGESFILE}" ]; then
cp "${TESTDIR}/${PACKAGESFILE}" aptarchive/Packages
- else
- touch aptarchive/Packages
+ fi
+ local SOURCESSFILE=$(echo "$(basename $0)" | sed -e 's/^test-/Sources-/' -e 's/^skip-/Sources-/')
+ if [ -f "${TESTDIR}/${SOURCESSFILE}" ]; then
+ cp "${TESTDIR}/${SOURCESSFILE}" aptarchive/Sources
fi
cp $(find $TESTDIR -name '*.pub' -o -name '*.sec') keys/
ln -s ${TMPWORKINGDIRECTORY}/keys/joesixpack.pub rootdir/etc/apt/trusted.gpg.d/joesixpack.gpg
echo "Dir::state::status \"${TMPWORKINGDIRECTORY}/rootdir/var/lib/dpkg/status\";" >> aptconfig.conf
echo "Debug::NoLocking \"true\";" >> aptconfig.conf
echo "APT::Get::Show-User-Simulation-Note \"false\";" >> aptconfig.conf
+ echo "Dir::Bin::Methods \"${BUILDDIRECTORY}/methods\";" >> aptconfig.conf
echo "Dir::Bin::dpkg \"fakeroot\";" >> aptconfig.conf
+ echo "Dir::Bin::methods \"${BUILDDIRECTORY}/methods\";" >> aptconfig.conf
echo "DPKG::options:: \"dpkg\";" >> aptconfig.conf
echo "DPKG::options:: \"--root=${TMPWORKINGDIRECTORY}/rootdir\";" >> aptconfig.conf
echo "DPKG::options:: \"--force-not-root\";" >> aptconfig.conf
echo "DPKG::options:: \"--force-bad-path\";" >> aptconfig.conf
echo "DPKG::options:: \"--log=${TMPWORKINGDIRECTORY}/rootdir/var/log/dpkg.log\";" >> aptconfig.conf
+ echo 'quiet::NoUpdate "true";' >> aptconfig.conf
export LC_ALL=C
msgdone "info"
}
Build-Depends: debhelper (>= 7)
Standards-Version: 3.9.1
- Package: $NAME
- Architecture: $ARCH" > debian/control
+ Package: $NAME" > debian/control
+ if [ "$ARCH" = 'all' ]; then
+ echo "Architecture: all" >> debian/control
+ else
+ echo "Architecture: any" >> debian/control
+ fi
test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> debian/control
if [ -z "$DESCRIPTION" ]; then
echo "Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
else
DISTSECTION="$(echo "$SECTION" | cut -d'/' -f 1)"
fi
- setupsimplenativepackage "$NAME" "$ARCH" "$VERSION" "$RELEASE" "$DEPENDENCIES" "$DESCRIPTION" "$SECTION"
- buildpackage "incoming/${NAME}-${VERSION}" "$RELEASE" "$DISTSECTION"
- rm -rf "incoming/${NAME}-${VERSION}"
+ local BUILDDIR=${TMPWORKINGDIRECTORY}/incoming/${NAME}-${VERSION}
+
+ msgninfo "Build package ${NAME} in ${VERSION} for ${RELEASE} in ${DISTSECTION}… "
+ mkdir -p $BUILDDIR/debian/source
+ echo "* most suckless software product ever" > ${BUILDDIR}/FEATURES
+ echo "#!/bin/sh
+ echo '$NAME says \"Hello!\"'" > ${BUILDDIR}/${NAME}
+
+ echo "Copyleft by Joe Sixpack $(date +%Y)" > ${BUILDDIR}/debian/copyright
+ echo "$NAME ($VERSION) $RELEASE; urgency=low
+
+ * Initial release
+
+ -- Joe Sixpack <joe@example.org> $(date -R)" > ${BUILDDIR}/debian/changelog
+ echo "Source: $NAME
+ Section: $SECTION
+ Priority: optional
+ Maintainer: Joe Sixpack <joe@example.org>
+ Standards-Version: 3.9.1
+
+ Package: $NAME" > ${BUILDDIR}/debian/control
+ if [ "$ARCH" = 'all' ]; then
+ echo "Architecture: all" >> ${BUILDDIR}/debian/control
+ else
+ echo "Architecture: any" >> ${BUILDDIR}/debian/control
+ fi
+ test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> ${BUILDDIR}/debian/control
+ if [ -z "$DESCRIPTION" ]; then
+ echo "Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
+ If you find such a package installed on your system,
+ YOU did something horribly wrong! They are autogenerated
+ and used only by testcases for APT and serve no other purpose…" >> ${BUILDDIR}/debian/control
+ else
+ echo "Description: $DESCRIPTION" >> ${BUILDIR}/debian/control
+ fi
+ echo '3.0 (native)' > ${BUILDDIR}/debian/source/format
+ local SRCS="$( (cd ${BUILDDIR}/..; dpkg-source -b ${NAME}-${VERSION} 2>&1) | grep '^dpkg-source: info: building' | grep -o '[a-z0-9._+~-]*$')"
+
+ mkdir -p ${BUILDDIR}/debian/tmp/DEBIAN ${BUILDDIR}/debian/tmp/usr/share/doc/${NAME} ${BUILDDIR}/debian/tmp/usr/bin
+ cp ${BUILDDIR}/debian/copyright ${BUILDDIR}/debian/changelog ${BUILDDIR}/FEATURES ${BUILDDIR}/debian/tmp/usr/share/doc/${NAME}
+ cp ${BUILDDIR}/${NAME} ${BUILDDIR}/debian/tmp/usr/bin
+ (cd ${BUILDDIR}; dpkg-gencontrol -DArchitecture=$ARCH)
+ (cd ${BUILDDIR}/debian/tmp; md5sum $(find usr/ -type f) > DEBIAN/md5sums)
+
+ dpkg-deb --build ${BUILDDIR}/debian/tmp ${BUILDDIR}/.. > /dev/null
+ echo "pool/${NAME}_${VERSION}_${ARCH}.deb" >> ${BUILDDIR}/../${RELEASE}.${DISTSECTION}.pkglist
+ for SRC in $SRCS; do
+ echo "pool/${SRC}" >> ${BUILDDIR}/../${RELEASE}.${DISTSECTION}.srclist
+ done
+ rm -rf "${BUILDDIR}"
+ msgdone "info"
}
buildpackage() {
Packages::Compress ". gzip bzip2 lzma";
Sources::Compress ". gzip bzip2 lzma";
Contents::Compress ". gzip bzip2 lzma";
+ Translation::Compress ". gzip bzip2 lzma";
+ LongDescription "false";
};
TreeDefault {
Directory "pool/";
done
}
+ insertpackage() {
+ local RELEASE="$1"
+ local NAME="$2"
+ local ARCH="$3"
+ local VERSION="$4"
+ local DEPENDENCIES="$5"
+ local ARCHS="$ARCH"
+ if [ "$ARCHS" = "all" ]; then
+ ARCHS="$(aptconfig dump | grep APT::Architecture | cut -d'"' -f 2 | sed '/^$/ d' | sort | uniq | tr '\n' ' ')"
+ fi
+ for BUILDARCH in $ARCHS; do
+ local PPATH="aptarchive/dists/${RELEASE}/main/binary-${BUILDARCH}"
+ mkdir -p $PPATH aptarchive/dists/${RELEASE}/main/source
+ touch aptarchive/dists/${RELEASE}/main/source/Sources
+ local FILE="${PPATH}/Packages"
+ echo "Package: $NAME
+ Priority: optional
+ Section: other
+ Installed-Size: 42
+ Maintainer: Joe Sixpack <joe@example.org>
+ Architecture: $ARCH
+ Version: $VERSION
+ Filename: pool/main/${NAME}/${NAME}_${VERSION}_${ARCH}.deb" >> $FILE
+ test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> $FILE
+ echo "Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
+ If you find such a package installed on your system,
+ YOU did something horribly wrong! They are autogenerated
+ and used only by testcases for APT and serve no other purpose…
+ " >> $FILE
+ done
+ }
+
buildaptarchivefromincoming() {
msginfo "Build APT archive for ${CCMD}$(basename $0)${CINFO} based on incoming packages…"
cd aptarchive
[ -e dists ] || buildaptftparchivedirectorystructure
msgninfo "\tGenerate Packages, Sources and Contents files… "
aptftparchive -qq generate ftparchive.conf
- msgdone "info"
- msgninfo "\tGenerate Release files… "
- for dir in $(find ./dists -mindepth 1 -maxdepth 1 -type d); do
- aptftparchive -qq release $dir -o APT::FTPArchive::Release::Codename="$(echo "$dir" | cut -d'/' -f 3)" | sed -e '/0 Release$/ d' > $dir/Release # remove the self reference
- done
cd - > /dev/null
msgdone "info"
+ generatereleasefiles
}
buildaptarchivefromfiles() {
msginfo "Build APT archive for ${CCMD}$(basename $0)${CINFO} based on prebuild files…"
- cd aptarchive
- if [ -f Packages ]; then
- msgninfo "\tPackages file… "
- cat Packages | gzip > Packages.gz
- cat Packages | bzip2 > Packages.bz2
- cat Packages | lzma > Packages.lzma
+ find aptarchive -name 'Packages' -o -name 'Sources' | while read line; do
+ msgninfo "\t${line} file… "
+ cat ${line} | gzip > ${line}.gz
+ cat ${line} | bzip2 > ${line}.bz2
+ cat ${line} | lzma > ${line}.lzma
msgdone "info"
+ done
+ generatereleasefiles
+ }
+
+ generatereleasefiles() {
+ msgninfo "\tGenerate Release files… "
+ local DATE="${1:-now}"
+ if [ -e aptarchive/dists ]; then
+ for dir in $(find ./aptarchive/dists -mindepth 3 -maxdepth 3 -type d -name 'i18n'); do
+ aptftparchive -qq release $dir -o APT::FTPArchive::Release::Patterns::='Translation-*' > $dir/Index
+ done
+ for dir in $(find ./aptarchive/dists -mindepth 1 -maxdepth 1 -type d); do
+ local CODENAME="$(echo "$dir" | cut -d'/' -f 4)"
+ aptftparchive -qq release $dir -o APT::FTPArchive::Release::Suite="${CODENAME}" -o APT::FTPArchive::Release::Codename="${CODENAME}" | sed -e '/0 Release$/ d' > $dir/Release # remove the self reference
+ if [ "$CODENAME" = "experimental" -o "$CODENAME" = "experimental2" ]; then
+ sed -i '/^Date: / a\
+ NotAutomatic: yes' $dir/Release
+ fi
+ done
+ else
+ aptftparchive -qq release ./aptarchive | sed -e '/0 Release$/ d' > aptarchive/Release # remove the self reference
fi
- if [ -f Sources ]; then
- msgninfo "\tSources file… "
- cat Sources | gzip > Sources.gz
- cat Sources | bzip2 > Sources.bz2
- cat Sources | lzma > Sources.lzma
- msgdone "info"
+ if [ "$DATE" != "now" ]; then
+ for release in $(find ./aptarchive -name 'Release'); do
+ touch -d "$1" $release
+ done
fi
- msgninfo "\tRelease file… "
- aptftparchive -qq release . | sed -e '/0 Release$/ d' > Release # remove the self reference
msgdone "info"
- cd ..
}
setupdistsaptarchive() {
done
for RELEASE in $(find aptarchive/ -name Release); do
gpg --yes --no-default-keyring $SECKEYS $PUBKEYS --default-key "$SIGNER" -abs -o ${RELEASE}.gpg ${RELEASE}
+ gpg --yes --no-default-keyring $SECKEYS $PUBKEYS --default-key "$SIGNER" --clearsign -o "$(echo "${RELEASE}" | sed 's#/Release$#/InRelease#')" $RELEASE
done
msgdone "info"
}
changetowebserver() {
if which weborf > /dev/null; then
weborf -xb aptarchive/ 2>&1 > /dev/null &
- CURRENTTRAP="kill $(ps | grep weborf | sed -e 's#^[ ]*##' | cut -d' ' -f 1); $CURRENTTRAP"
- trap "$CURRENTTRAP" 0 HUP INT QUIT ILL ABRT FPE SEGV PIPE TERM
+ addtrap "kill $!;"
local APTARCHIVE="file://$(readlink -f ./aptarchive)"
for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do
sed -i $LIST -e "s#$APTARCHIVE#http://localhost:8080/#"
testequal() {
local COMPAREFILE=$(mktemp)
+ addtrap "rm $COMPAREFILE;"
echo "$1" > $COMPAREFILE
shift
msgtest "Test for equality of" "$*"
$* 2>&1 | checkdiff $COMPAREFILE - && msgpass || msgfail
- rm $COMPAREFILE
}
testequalor2() {
local COMPAREFILE1=$(mktemp)
local COMPAREFILE2=$(mktemp)
local COMPAREAGAINST=$(mktemp)
+ addtrap "rm $COMPAREFILE1 $COMPAREFILE2 $COMPAREAGAINST;"
echo "$1" > $COMPAREFILE1
echo "$2" > $COMPAREFILE2
shift 2
( echo "\n${CINFO}Diff against OR 1${CNORMAL}" "$(checkdiff $COMPAREFILE1 $COMPAREAGAINST)" \
"\n${CINFO}Diff against OR 2${CNORMAL}" "$(checkdiff $COMPAREFILE2 $COMPAREAGAINST)" &&
msgfail )
- rm $COMPAREFILE1 $COMPAREFILE2 $COMPAREAGAINST
}
testshowvirtual() {
VIRTUAL="${VIRTUAL}
N: No packages found"
local COMPAREFILE=$(mktemp)
+ addtrap "rm $COMPAREFILE;"
local ARCH=$(dpkg-architecture -qDEB_HOST_ARCH_CPU)
eval `apt-config shell ARCH APT::Architecture`
echo "$VIRTUAL" | sed -e "s/:$ARCH//" -e 's/:all//' > $COMPAREFILE
aptcache show -q=0 $PACKAGE 2>&1 | checkdiff $COMPAREFILE - && msgpass || msgfail
- rm $COMPAREFILE
}
testnopackage() {