// ---------------------------------------------------------------------
/* Stash status and the file size. Note that setting Complete means
   sub-phases of the acquire process such as decompression are operating */
-void pkgAcquire::Item::Start(string /*Message*/,unsigned long Size)
+void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size)
{
Status = StatFetching;
if (FileSize == 0 && Complete == false)
// Acquire::Item::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcquire::Item::Done(string Message,unsigned long Size,string Hash,
+void pkgAcquire::Item::Done(string Message,unsigned long long Size,string Hash,
pkgAcquire::MethodConfig *Cnf)
{
// We just downloaded something..
}
}
/*}}}*/
-void pkgAcqSubIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+void pkgAcqSubIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
Dequeue();
}
/*}}}*/
-void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
// remove all patches until the next matching patch is found
// this requires the Index file to be ordered
for(vector<DiffInfo>::iterator I=available_patches.begin();
- available_patches.size() > 0 &&
+ available_patches.empty() == false &&
I != available_patches.end() &&
- (*I).sha1 != local_sha1;
- I++)
+ I->sha1 != local_sha1;
+ ++I)
{
available_patches.erase(I);
}
// error checking and falling back if no patch was found
- if(available_patches.size() == 0)
- {
+ if(available_patches.empty() == true)
+ {
Failed("", NULL);
return false;
}
return true;
}
/*}}}*/
-void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /*{{{*/
+void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/
pkgAcquire::MethodConfig *Cnf)
{
if(Debug)
chmod(FinalFile.c_str(),0644);
// see if there is more to download
- if(available_patches.size() > 0) {
+ if(available_patches.empty() == false) {
new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
ExpectedHash, ServerSha1, available_patches);
return Finish();
if (CompressionExtension.empty() == false)
CompressionExtension.erase(CompressionExtension.end()-1);
+ // only verify non-optional targets, see acquire-item.h for a FIXME
+ // to make this more flexible
+ if (Target->IsOptional())
+ Verify = false;
+ else
+ Verify = true;
+
Init(Target->URI, Target->Description, Target->ShortDesc);
}
/*}}}*/
to the uncompressed version of the file. If this is so the file
is copied into the partial directory. In all other cases the file
is decompressed with a gzip uri. */
-void pkgAcqIndex::Done(string Message,unsigned long Size,string Hash,
+void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,Hash,Cfg);
/* Verify the index file for correctness (all indexes must
* have a Package field) (LP: #346386) (Closes: #627642) */
+ if (Verify == true)
{
FileFd fd(DestFile, FileFd::ReadOnly);
pkgTagSection sec;
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
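The Verify check in pkgAcqIndex::Done above only scans the downloaded file for a leading "Package" field. A minimal sketch of such a check with APT's tag-file parser (wording and structure here are an illustrative assumption, not the patch's actual code; it needs <apt-pkg/fileutl.h> and <apt-pkg/tagfile.h>):

   // sketch: does the fetched index parse and start with a Package stanza?
   FileFd fd(DestFile, FileFd::ReadOnly);
   pkgTagFile tag(&fd);
   pkgTagSection sec;
   bool const looksValid = tag.Step(sec) == true && sec.Exists("Package") == true;
   // if looksValid is false the item would be marked as failed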
-void pkgAcqMetaSig::Done(string Message,unsigned long Size,string MD5,
+void pkgAcqMetaSig::Done(string Message,unsigned long long Size,string MD5,
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,MD5,Cfg);
return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
}
/*}}}*/
-void pkgAcqMetaIndex::Done(string Message,unsigned long Size,string Hash, /*{{{*/
+void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,string Hash, /*{{{*/
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,Hash,Cfg);
if (SigFile == "")
{
// There was no signature file, so we are finished. Download
- // the indexes without verification.
- QueueIndexes(false);
+ // the indexes and do only hashsum verification
+ MetaIndexParser->Load(DestFile);
+ QueueIndexes(true);
}
else
{
#endif
for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin();
Target != IndexTargets->end();
- Target++)
+ ++Target)
{
HashString ExpectedIndexHash;
if (verify)
{
std::cerr << "Queueing: " << (*Target)->URI << std::endl;
std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl;
+ std::cerr << "For: " << Record->MetaKeyFilename << std::endl;
}
if (ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false)
{
// check if we have one trusted source for the package. if so, switch
// to "TrustedOnly" mode
- for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; i++)
+ for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
{
pkgIndexFile *Index;
if (Sources->FindIndex(i.File(),Index) == false)
bool pkgAcqArchive::QueueNext()
{
string const ForceHash = _config->Find("Acquire::ForceHash");
- for (; Vf.end() == false; Vf++)
+ for (; Vf.end() == false; ++Vf)
{
// Ignore not source sources
if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0)
string PkgFile = Parse.FileName();
if (ForceHash.empty() == false)
{
+ if(stringcasecmp(ForceHash, "sha512") == 0)
+ ExpectedHash = HashString("SHA512", Parse.SHA512Hash());
if(stringcasecmp(ForceHash, "sha256") == 0)
ExpectedHash = HashString("SHA256", Parse.SHA256Hash());
else if (stringcasecmp(ForceHash, "sha1") == 0)
else
{
string Hash;
- if ((Hash = Parse.SHA256Hash()).empty() == false)
+ if ((Hash = Parse.SHA512Hash()).empty() == false)
+ ExpectedHash = HashString("SHA512", Hash);
+ else if ((Hash = Parse.SHA256Hash()).empty() == false)
ExpectedHash = HashString("SHA256", Hash);
else if ((Hash = Parse.SHA1Hash()).empty() == false)
ExpectedHash = HashString("SHA1", Hash);
if (stat(FinalFile.c_str(),&Buf) == 0)
{
// Make sure the size matches
- if ((unsigned)Buf.st_size == Version->Size)
+ if ((unsigned long long)Buf.st_size == Version->Size)
{
Complete = true;
Local = true;
if (stat(FinalFile.c_str(),&Buf) == 0)
{
// Make sure the size matches
- if ((unsigned)Buf.st_size == Version->Size)
+ if ((unsigned long long)Buf.st_size == Version->Size)
{
Complete = true;
Local = true;
if (stat(DestFile.c_str(),&Buf) == 0)
{
// Hmm, the partial file is too big, erase it
- if ((unsigned)Buf.st_size > Version->Size)
+ if ((unsigned long long)Buf.st_size > Version->Size)
unlink(DestFile.c_str());
else
PartialSize = Buf.st_size;
Desc.ShortDesc = Version.ParentPkg().Name();
QueueURI(Desc);
- Vf++;
+ ++Vf;
return true;
}
return false;
// AcqArchive::Done - Finished fetching /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqArchive::Done(string Message,unsigned long Size,string CalcHash,
+void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
pkgAcquire::MethodConfig *Cfg)
{
Item::Done(Message,Size,CalcHash,Cfg);
StringToBool(LookupTag(Message,"Transient-Failure"),false) == true)
{
// Vf = Version.FileList();
- while (Vf.end() == false) Vf++;
+ while (Vf.end() == false) ++Vf;
StoreFilename = string();
Item::Failed(Message,Cnf);
return;
// ---------------------------------------------------------------------
/* The file is added to the queue */
pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash,
- unsigned long Size,string Dsc,string ShortDesc,
+ unsigned long long Size,string Dsc,string ShortDesc,
const string &DestDir, const string &DestFilename,
bool IsIndexFile) :
Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile)
if (stat(DestFile.c_str(),&Buf) == 0)
{
// Hmm, the partial file is too big, erase it
- if ((unsigned)Buf.st_size > Size)
+ if ((unsigned long long)Buf.st_size > Size)
unlink(DestFile.c_str());
else
PartialSize = Buf.st_size;
// AcqFile::Done - Item downloaded OK /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgAcqFile::Done(string Message,unsigned long Size,string CalcHash,
+void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash,
pkgAcquire::MethodConfig *Cnf)
{
Item::Done(Message,Size,CalcHash,Cnf);
return "";
}
/*}}}*/
-bool IndexTarget::IsOptional() const {
- if (strncmp(ShortDesc.c_str(), "Translation", 11) != 0)
- return false;
- return true;
-}
-bool IndexTarget::IsSubIndex() const {
- if (ShortDesc != "TranslationIndex")
- return false;
- return true;
-}
void pkgAcqMethod::Fail(string Err,bool Transient)
{
// Strip out junk from the error messages
- for (string::iterator I = Err.begin(); I != Err.end(); I++)
+ for (string::iterator I = Err.begin(); I != Err.end(); ++I)
{
if (*I == '\r')
*I = ' ';
std::cout << "SHA1-Hash: " << Res.SHA1Sum << "\n";
if (Res.SHA256Sum.empty() == false)
std::cout << "SHA256-Hash: " << Res.SHA256Sum << "\n";
+ if (Res.SHA512Sum.empty() == false)
+ std::cout << "SHA512-Hash: " << Res.SHA512Sum << "\n";
if (UsedMirror.empty() == false)
std::cout << "UsedMirror: " << UsedMirror << "\n";
if (Res.GPGVOutput.empty() == false)
std::cout << "Alt-SHA1-Hash: " << Alt->SHA1Sum << "\n";
if (Alt->SHA256Sum.empty() == false)
std::cout << "Alt-SHA256-Hash: " << Alt->SHA256Sum << "\n";
-
+ if (Alt->SHA512Sum.empty() == false)
+ std::cout << "Alt-SHA512-Hash: " << Alt->SHA512Sum << "\n";
+
if (Alt->IMSHit == true)
std::cout << "Alt-IMS-Hit: true\n";
}
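With the extra fields above, the answer a method writes back over APT's worker protocol for a finished fetch now also carries the SHA512 digest; an illustrative message (all values made up) could look like:

   201 URI Done
   URI: http://archive.example.org/dists/stable/Release
   Filename: /var/lib/apt/lists/partial/example_Release
   Size: 1234
   SHA256-Hash: <hex digest>
   SHA512-Hash: <hex digest>

with a blank line terminating the message.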
to keep the pipeline synchronized. */
void pkgAcqMethod::Redirect(const string &NewURI)
{
- std::cout << "103 Redirect\nURI: ";
- if (Queue != 0)
- std::cout << Queue->Uri << "\n";
- else
- std::cout << "<UNKNOWN>\n";
- std::cout << "New-URI: " << NewURI << "\n"
+ std::cout << "103 Redirect\nURI: " << Queue->Uri << "\n"
+ << "New-URI: " << NewURI << "\n"
<< "\n" << std::flush;
// Change the URI for the request.
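The text written here follows the same worker protocol; an illustrative redirect message (URIs made up) would read:

   103 Redirect
   URI: http://archive.example.org/pool/p/pkg/pkg_1.0_amd64.deb
   New-URI: http://mirror.example.org/pool/p/pkg/pkg_1.0_amd64.deb

followed by the blank line emitted by the flushed statement above.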
MD5Sum = Hash.MD5.Result();
SHA1Sum = Hash.SHA1.Result();
SHA256Sum = Hash.SHA256.Result();
+ SHA512Sum = Hash.SHA512.Result();
}
/*}}}*/
// Acquire::pkgAcquire - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* We grab some runtime state from the configuration space */
-pkgAcquire::pkgAcquire() : Queues(0), Workers(0), Configs(0), Log(NULL), ToFetch(0),
+pkgAcquire::pkgAcquire() : LockFD(-1), Queues(0), Workers(0), Configs(0), Log(NULL), ToFetch(0),
Debug(_config->FindB("Debug::pkgAcquire",false)),
- Running(false), LockFD(-1)
+ Running(false)
{
string const Mode = _config->Find("Acquire::Queue-Mode","host");
if (strcasecmp(Mode.c_str(),"host") == 0)
if (strcasecmp(Mode.c_str(),"access") == 0)
QueueMode = QueueAccess;
}
-pkgAcquire::pkgAcquire(pkgAcquireStatus *Progress) : Queues(0), Workers(0),
+pkgAcquire::pkgAcquire(pkgAcquireStatus *Progress) : LockFD(-1), Queues(0), Workers(0),
Configs(0), Log(Progress), ToFetch(0),
Debug(_config->FindB("Debug::pkgAcquire",false)),
- Running(false), LockFD(-1)
+ Running(false)
{
string const Mode = _config->Find("Acquire::Queue-Mode","host");
if (strcasecmp(Mode.c_str(),"host") == 0)
/* */
void pkgAcquire::Shutdown()
{
- while (Items.size() != 0)
+ while (Items.empty() == false)
{
if (Items[0]->Status == Item::StatFetching)
Items[0]->Status = Item::StatError;
I = Items.begin();
}
else
- I++;
+ ++I;
}
}
/*}}}*/
I->Shutdown(false);
// Shut down the items
- for (ItemIterator I = Items.begin(); I != Items.end(); I++)
+ for (ItemIterator I = Items.begin(); I != Items.end(); ++I)
(*I)->Finished();
if (_error->PendingError())
// Look in the get list
ItemCIterator I = Items.begin();
- for (; I != Items.end(); I++)
+ for (; I != Items.end(); ++I)
if (flNotDir((*I)->DestFile) == Dir->d_name)
break;
unsigned long long pkgAcquire::TotalNeeded()
{
unsigned long long Total = 0;
- for (ItemCIterator I = ItemsBegin(); I != ItemsEnd(); I++)
+ for (ItemCIterator I = ItemsBegin(); I != ItemsEnd(); ++I)
Total += (*I)->FileSize;
return Total;
}
unsigned long long pkgAcquire::FetchNeeded()
{
unsigned long long Total = 0;
- for (ItemCIterator I = ItemsBegin(); I != ItemsEnd(); I++)
+ for (ItemCIterator I = ItemsBegin(); I != ItemsEnd(); ++I)
if ((*I)->Local == false)
Total += (*I)->FileSize;
return Total;
unsigned long long pkgAcquire::PartialPresent()
{
unsigned long long Total = 0;
- for (ItemCIterator I = ItemsBegin(); I != ItemsEnd(); I++)
+ for (ItemCIterator I = ItemsBegin(); I != ItemsEnd(); ++I)
if ((*I)->Local == false)
Total += (*I)->PartialSize;
return Total;
unsigned int Unknown = 0;
unsigned int Count = 0;
for (pkgAcquire::ItemCIterator I = Owner->ItemsBegin(); I != Owner->ItemsEnd();
- I++, Count++)
+ ++I, ++Count)
{
TotalItems++;
if ((*I)->Status == pkgAcquire::Item::StatDone)
- CurrentItems++;
+ ++CurrentItems;
// Totally ignore local items
if ((*I)->Local == true)
if ((*I)->Complete == true)
CurrentBytes += (*I)->FileSize;
if ((*I)->FileSize == 0 && (*I)->Complete == false)
- Unknown++;
+ ++Unknown;
}
// Compute the current completion
- unsigned long ResumeSize = 0;
+ unsigned long long ResumeSize = 0;
for (pkgAcquire::Worker *I = Owner->WorkersBegin(); I != 0;
I = Owner->WorkerStep(I))
if (I->CurrentItem != 0 && I->CurrentItem->Owner->Complete == false)
else
CurrentCPS = ((CurrentBytes - ResumeSize) - LastBytes)/Delta;
LastBytes = CurrentBytes - ResumeSize;
- ElapsedTime = (unsigned long)Delta;
+ ElapsedTime = (unsigned long long)Delta;
Time = NewTime;
}
char msg[200];
long i = CurrentItems < TotalItems ? CurrentItems + 1 : CurrentItems;
- unsigned long ETA =
- (unsigned long)((TotalBytes - CurrentBytes) / CurrentCPS);
+ unsigned long long const ETA = (TotalBytes - CurrentBytes) / CurrentCPS;
// only show the ETA if it makes sense
if (ETA > 0 && ETA < 172800 /* two days */ )
else
CurrentCPS = FetchedBytes/Delta;
LastBytes = CurrentBytes;
- ElapsedTime = (unsigned int)Delta;
+ ElapsedTime = (unsigned long long)Delta;
}
/*}}}*/
// AcquireStatus::Fetched - Called when a byte set has been fetched /*{{{*/
// ---------------------------------------------------------------------
/* This is used to get accurate final transfer rate reporting. */
-void pkgAcquireStatus::Fetched(unsigned long Size,unsigned long Resume)
+void pkgAcquireStatus::Fetched(unsigned long long Size,unsigned long long Resume)
{
FetchedBytes += Size - Resume;
}
#include <apt-pkg/version.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/acquire-item.h>
-
+#include <apt-pkg/edsp.h>
+
#include <apti18n.h>
#include <sys/types.h>
#include <cstdlib>
#include <algorithm>
#include <iostream>
+
+#include <stdio.h>
/*}}}*/
using namespace std;
Sim.MarkInstall(Pkg,false);
// Look for broken conflicts+predepends.
- for (PkgIterator I = Sim.PkgBegin(); I.end() == false; I++)
+ for (PkgIterator I = Sim.PkgBegin(); I.end() == false; ++I)
{
if (Sim[I].InstallVer == 0)
continue;
Sim.Update();
// Print out each package and the failed dependencies
- for (pkgCache::DepIterator D = Sim[Pkg].InstVerIter(Sim).DependsList(); D.end() == false; D++)
+ for (pkgCache::DepIterator D = Sim[Pkg].InstVerIter(Sim).DependsList(); D.end() == false; ++D)
{
if (Sim.IsImportantDep(D) == false ||
(Sim[D] & pkgDepCache::DepInstall) != 0)
void pkgSimulate::ShortBreaks()
{
cout << " [";
- for (PkgIterator I = Sim.PkgBegin(); I.end() == false; I++)
+ for (PkgIterator I = Sim.PkgBegin(); I.end() == false; ++I)
{
if (Sim[I].InstBroken() == true)
{
{
pkgDepCache::ActionGroup group(Cache);
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if (I->VersionList == 0)
continue;
pkgDepCache::ActionGroup group(Cache);
// Auto upgrade all broken packages
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
if (Cache[I].NowBroken() == true)
Cache.MarkInstall(I, true, 0, false);
/* Fix packages that are in a NeedArchive state but don't have a
downloadable install version */
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if (I.State() != pkgCache::PkgIterator::NeedsUnpack ||
Cache[I].Delete() == true)
*/
bool pkgDistUpgrade(pkgDepCache &Cache)
{
+ std::string const solver = _config->Find("APT::Solver", "internal");
+ if (solver != "internal") {
+ OpTextProgress Prog(*_config);
+ return EDSP::ResolveExternal(solver.c_str(), Cache, false, true, false, &Prog);
+ }
+
pkgDepCache::ActionGroup group(Cache);
/* Upgrade all installed packages first without autoinst to help the resolver
/* Auto upgrade all installed packages, this provides the basis
for the installation */
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
if (I->CurrentVer != 0)
Cache.MarkInstall(I, true, 0, false);
/* Now, auto upgrade all essential packages - this ensures that
the essential packages are present and working */
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
Cache.MarkInstall(I, true, 0, false);
/* We do it again over all previously installed packages to force
conflict resolution on them all. */
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
if (I->CurrentVer != 0)
Cache.MarkInstall(I, false, 0, false);
// Hold back held packages.
if (_config->FindB("APT::Ignore-Hold",false) == false)
{
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if (I->SelectedState == pkgCache::State::Hold)
{
to install packages not marked for install */
bool pkgAllUpgrade(pkgDepCache &Cache)
{
+ std::string const solver = _config->Find("APT::Solver", "internal");
+ if (solver != "internal") {
+ OpTextProgress Prog(*_config);
+ return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, &Prog);
+ }
+
pkgDepCache::ActionGroup group(Cache);
pkgProblemResolver Fix(&Cache);
return false;
// Upgrade all installed packages
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if (Cache[I].Install() == true)
Fix.Protect(I);
do
{
Change = false;
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
// Not interesting
if (Cache[I].Upgrade() == false || Cache[I].NewInstall() == true)
Change = true;
}
}
- Count++;
+ ++Count;
}
while (Change == true && Count < 10);
<< " AddEssential => " << AddEssential << endl;
// Generate the base scores for a package based on its properties
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if (Cache[I].InstallVer == 0)
continue;
}
   // Now that we have the base scores we go and propagate dependencies
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if (Cache[I].InstallVer == 0)
continue;
- for (pkgCache::DepIterator D = Cache[I].InstVerIter(Cache).DependsList(); D.end() == false; D++)
+ for (pkgCache::DepIterator D = Cache[I].InstVerIter(Cache).DependsList(); D.end() == false; ++D)
{
if (D->Type == pkgCache::Dep::Depends ||
D->Type == pkgCache::Dep::PreDepends)
/* Now we cause 1 level of dependency inheritance, that is we add the
score of the packages that depend on the target Package. This
fortifies high scoring packages */
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if (Cache[I].InstallVer == 0)
continue;
- for (pkgCache::DepIterator D = I.RevDependsList(); D.end() == false; D++)
+ for (pkgCache::DepIterator D = I.RevDependsList(); D.end() == false; ++D)
{
// Only do it for the install version
if ((pkgCache::Version *)D.ParentVer() != Cache[D.ParentPkg()].InstallVer ||
   /* Now we propagate along provides. This makes the packages that
provide important packages extremely important */
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
- for (pkgCache::PrvIterator P = I.ProvidesList(); P.end() == false; P++)
+ for (pkgCache::PrvIterator P = I.ProvidesList(); P.end() == false; ++P)
{
// Only do it once per package
if ((pkgCache::Version *)P.OwnerVer() != Cache[P.OwnerPkg()].InstallVer)
/* Protected things are pushed really high up. This number should put them
ahead of everything */
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if ((Flags[I->ID] & Protected) != 0)
Scores[I->ID] += AddProtected;
if (Start == End)
break;
- Start++;
+ ++Start;
}
if (Fail == true)
break;
return true;
}
/*}}}*/
-// ProblemResolver::Resolve - Run the resolution pass /*{{{*/
+// ProblemResolver::Resolve - calls a resolver to fix the situation /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgProblemResolver::Resolve(bool BrokenFix)
+{
+ std::string const solver = _config->Find("APT::Solver", "internal");
+ if (solver != "internal") {
+ OpTextProgress Prog(*_config);
+ return EDSP::ResolveExternal(solver.c_str(), Cache, false, false, false, &Prog);
+ }
+ return ResolveInternal(BrokenFix);
+}
+ /*}}}*/
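A hypothetical caller-side sketch (solver name and error text are assumptions, not part of the patch) showing how the dispatch above is driven purely by configuration:

   // pick an external EDSP solver instead of the built-in resolver
   _config->Set("APT::Solver", "aspcud");      // "internal" keeps the old code path
   pkgProblemResolver Fix(&Cache);
   if (Fix.Resolve(true) == false)             // now forwarded to EDSP::ResolveExternal
      _error->Error("The external solver could not correct the broken packages");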
+// ProblemResolver::ResolveInternal - Run the resolution pass /*{{{*/
// ---------------------------------------------------------------------
/* This routine works by calculating a score for each package. The score
is derived by considering the package's priority and all reverse
The BrokenFix flag enables a mode where the algorithm tries to
   upgrade packages to avoid problems. */
-bool pkgProblemResolver::Resolve(bool BrokenFix)
+bool pkgProblemResolver::ResolveInternal(bool const BrokenFix)
{
pkgDepCache::ActionGroup group(Cache);
- unsigned long Size = Cache.Head().PackageCount;
-
// Record which packages are marked for install
bool Again = false;
do
{
Again = false;
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if (Cache[I].Install() == true)
Flags[I->ID] |= PreInstalled;
clog << "Starting" << endl;
MakeScores();
-
+
+ unsigned long const Size = Cache.Head().PackageCount;
+
/* We have to order the packages so that the broken fixing pass
operates from highest score to lowest. This prevents problems when
high score packages cause the removal of lower score packages that
would cause the removal of even lower score packages. */
SPtrArray<pkgCache::Package *> PList = new pkgCache::Package *[Size];
pkgCache::Package **PEnd = PList;
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
*PEnd++ = I;
This = this;
qsort(PList,PEnd - PList,sizeof(*PList),&ScoreSort);
}
else
{
- Start++;
+ ++Start;
// We only worry about critical deps.
if (Start.IsCritical() != true)
continue;
{
// See if this is the result of a hold
pkgCache::PkgIterator I = Cache.PkgBegin();
- for (;I.end() != true; I++)
+ for (;I.end() != true; ++I)
{
if (Cache[I].InstBroken() == false)
continue;
// set the auto-flags (mvo: I'm not sure if we _really_ need this)
pkgCache::PkgIterator I = Cache.PkgBegin();
- for (;I.end() != true; I++) {
+ for (;I.end() != true; ++I) {
if (Cache[I].NewInstall() && !(Flags[I->ID] & PreInstalled)) {
if(_config->FindI("Debug::pkgAutoRemove",false)) {
std::clog << "Resolve installed new pkg: " << I.FullName(false)
in that it does not install or remove any packages. It is assumed that the
system was non-broken previously. */
bool pkgProblemResolver::ResolveByKeep()
+{
+ std::string const solver = _config->Find("APT::Solver", "internal");
+ if (solver != "internal") {
+ OpTextProgress Prog(*_config);
+ return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, &Prog);
+ }
+ return ResolveByKeepInternal();
+}
+ /*}}}*/
+// ProblemResolver::ResolveByKeepInternal - Resolve problems using keep /*{{{*/
+// ---------------------------------------------------------------------
+/* This is the work horse of the soft upgrade routine. It is very gental
+ in that it does not install or remove any packages. It is assumed that the
+ system was non-broken previously. */
+bool pkgProblemResolver::ResolveByKeepInternal()
{
pkgDepCache::ActionGroup group(Cache);
would cause the removal of even lower score packages. */
pkgCache::Package **PList = new pkgCache::Package *[Size];
pkgCache::Package **PEnd = PList;
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
*PEnd++ = I;
This = this;
qsort(PList,PEnd - PList,sizeof(*PList),&ScoreSort);
if (Start == End)
break;
- Start++;
+ ++Start;
}
if (InstOrNewPolicyBroken(I) == false)
{
pkgDepCache::ActionGroup group(Cache);
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if ((Flags[I->ID] & Protected) == Protected)
{
bool Failed = false;
bool TransientNetworkFailure = false;
for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin();
- I != Fetcher.ItemsEnd(); I++)
+ I != Fetcher.ItemsEnd(); ++I)
{
if ((*I)->Status == pkgAcquire::Item::StatDone)
continue;
sort(List.begin(),List.end());
// Collect similar entries
- for (vector<string>::iterator I = List.begin(); I != List.end(); I++)
+ for (vector<string>::iterator I = List.begin(); I != List.end(); ++I)
{
// Find a space..
string::size_type Space = (*I).find(' ');
string Word1 = string(*I,Space,SSpace-Space);
string Prefix = string(*I,0,Space);
- for (vector<string>::iterator J = List.begin(); J != I; J++)
+ for (vector<string>::iterator J = List.begin(); J != I; ++J)
{
// Find a space..
string::size_type Space2 = (*J).find(' ');
that were the same. */
bool pkgCdrom::WriteSourceList(string Name,vector<string> &List,bool Source)
{
- if (List.size() == 0)
+ if (List.empty() == true)
return true;
string File = _config->FindFile("Dir::Etc::sourcelist");
if (First == true)
{
- for (vector<string>::iterator I = List.begin(); I != List.end(); I++)
+ for (vector<string>::iterator I = List.begin(); I != List.end(); ++I)
{
string::size_type Space = (*I).find(' ');
if (Space == string::npos)
// Just in case the file was empty
if (First == true)
{
- for (vector<string>::iterator I = List.begin(); I != List.end(); I++)
+ for (vector<string>::iterator I = List.begin(); I != List.end(); ++I)
{
string::size_type Space = (*I).find(' ');
if (Space == string::npos)
if (_config->FindB("Debug::aptcdrom",false) == true)
{
cout << "I found (binary):" << endl;
- for (vector<string>::iterator I = List.begin(); I != List.end(); I++)
+ for (vector<string>::iterator I = List.begin(); I != List.end(); ++I)
cout << *I << endl;
cout << "I found (source):" << endl;
- for (vector<string>::iterator I = SourceList.begin(); I != SourceList.end(); I++)
+ for (vector<string>::iterator I = SourceList.begin(); I != SourceList.end(); ++I)
cout << *I << endl;
cout << "I found (Signatures):" << endl;
- for (vector<string>::iterator I = SigList.begin(); I != SigList.end(); I++)
+ for (vector<string>::iterator I = SigList.begin(); I != SigList.end(); ++I)
cout << *I << endl;
}
log->Update(msg.str(), STEP_SCAN);
}
- if (List.size() == 0 && SourceList.size() == 0)
+ if (List.empty() == true && SourceList.empty() == true)
{
if (_config->FindB("APT::CDROM::NoMount",false) == false)
UnmountCdrom(CDROM);
{
// Escape special characters
string::iterator J = Name.begin();
- for (; J != Name.end(); J++)
+ for (; J != Name.end(); ++J)
if (*J == '"' || *J == ']' || *J == '[')
*J = '_';
// Escape special characters
string::iterator J = Name.begin();
- for (; J != Name.end(); J++)
+ for (; J != Name.end(); ++J)
if (*J == '"' || *J == ']' || *J == '[')
*J = '_';
if(log != NULL)
log->Update(_("Source list entries for this disc are:\n"));
- for (vector<string>::iterator I = List.begin(); I != List.end(); I++)
+ for (vector<string>::iterator I = List.begin(); I != List.end(); ++I)
{
string::size_type Space = (*I).find(' ');
if (Space == string::npos)
}
}
- for (vector<string>::iterator I = SourceList.begin(); I != SourceList.end(); I++)
+ for (vector<string>::iterator I = SourceList.begin(); I != SourceList.end(); ++I)
{
string::size_type Space = (*I).find(' ');
if (Space == string::npos)
libudev_handle = h;
udev_new = (udev* (*)(void)) dlsym(h, "udev_new");
udev_enumerate_add_match_property = (int (*)(udev_enumerate*, const char*, const char*))dlsym(h, "udev_enumerate_add_match_property");
-#if 0 // FIXME: uncomment on next ABI break
udev_enumerate_add_match_sysattr = (int (*)(udev_enumerate*, const char*, const char*))dlsym(h, "udev_enumerate_add_match_sysattr");
-#endif
udev_enumerate_scan_devices = (int (*)(udev_enumerate*))dlsym(h, "udev_enumerate_scan_devices");
udev_enumerate_get_list_entry = (udev_list_entry* (*)(udev_enumerate*))dlsym(h, "udev_enumerate_get_list_entry");
udev_device_new_from_syspath = (udev_device* (*)(udev*, const char*))dlsym(h, "udev_device_new_from_syspath");
return true;
}
/*}}}*/
-
/*{{{*/
-// compatiblity only with the old API/ABI, can be removed on the next
-// ABI break
+// convenience interface, this will just call ScanForRemovable
vector<CdromDevice>
pkgUdevCdromDevices::Scan()
{
if (CdromOnly)
udev_enumerate_add_match_property(enumerate, "ID_CDROM", "1");
else {
-#if 1 // FIXME: remove the next two lines on the next ABI break
- int (*udev_enumerate_add_match_sysattr)(struct udev_enumerate *udev_enumerate, const char *property, const char *value);
- udev_enumerate_add_match_sysattr = (int (*)(udev_enumerate*, const char*, const char*))dlsym(libudev_handle, "udev_enumerate_add_match_sysattr");
-#endif
udev_enumerate_add_match_sysattr(enumerate, "removable", "1");
}
}
if (ArchitectureAll() == true)
- switch (Ver->MultiArch)
- {
- case pkgCache::Version::Foreign: Ver->MultiArch = pkgCache::Version::AllForeign; break;
- case pkgCache::Version::Allowed: Ver->MultiArch = pkgCache::Version::AllAllowed; break;
- default: Ver->MultiArch = pkgCache::Version::All;
- }
+ Ver->MultiArch |= pkgCache::Version::All;
// Archive Size
Ver->Size = Section.FindULL("Size");
std::vector<string> const lang = APT::Configuration::getLanguages(true);
for (std::vector<string>::const_iterator l = lang.begin();
- l != lang.end(); l++)
+ l != lang.end(); ++l)
if (Section.FindS(string("Description-").append(*l).c_str()).empty() == false)
return *l;
if (MultiArchEnabled == false)
return true;
- else if (Ver->MultiArch == pkgCache::Version::Allowed || Ver->MultiArch == pkgCache::Version::AllAllowed)
+ else if ((Ver->MultiArch & pkgCache::Version::Allowed) == pkgCache::Version::Allowed)
{
string const Package = string(Ver.ParentPkg().Name()).append(":").append("any");
return NewProvidesAllArch(Ver, Package, Ver.VerStr());
}
- else if (Ver->MultiArch == pkgCache::Version::Foreign || Ver->MultiArch == pkgCache::Version::AllForeign)
+ else if ((Ver->MultiArch & pkgCache::Version::Foreign) == pkgCache::Version::Foreign)
return NewProvidesAllArch(Ver, Ver.ParentPkg().Name(), Ver.VerStr());
return true;
return URI + "dists/" + Dist + "/" + TranslationIndexURISuffix(Type, Section);
}
-debReleaseIndex::debReleaseIndex(string const &URI, string const &Dist) {
- this->URI = URI;
- this->Dist = Dist;
- this->Indexes = NULL;
- this->Type = "deb";
+debReleaseIndex::debReleaseIndex(string const &URI, string const &Dist) :
+ metaIndex(URI, Dist, "deb"), Trusted(CHECK_TRUST)
+{}
+
+debReleaseIndex::debReleaseIndex(string const &URI, string const &Dist, bool const Trusted) :
+ metaIndex(URI, Dist, "deb") {
+ SetTrusted(Trusted);
}
debReleaseIndex::~debReleaseIndex() {
for (std::set<std::string>::const_iterator s = sections.begin();
s != sections.end(); ++s) {
for (std::vector<std::string>::const_iterator l = lang.begin();
- l != lang.end(); l++) {
+ l != lang.end(); ++l) {
if (*l == "none") continue;
IndexTarget * Target = new OptionalIndexTarget();
Target->ShortDesc = "Translation-" + *l;
// special case for --print-uris
if (GetAll) {
vector <struct IndexTarget *> *targets = ComputeIndexTargets();
- for (vector <struct IndexTarget*>::const_iterator Target = targets->begin(); Target != targets->end(); Target++) {
+ for (vector <struct IndexTarget*>::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) {
new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description,
(*Target)->ShortDesc, HashString());
}
return true;
}
+void debReleaseIndex::SetTrusted(bool const Trusted)
+{
+ if (Trusted == true)
+ this->Trusted = ALWAYS_TRUSTED;
+ else
+ this->Trusted = NEVER_TRUSTED;
+}
+
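For context, the Trusted tri-state set here is normally driven from a sources.list option; an illustrative (assumed) entry would be:

   deb [ trusted=yes ] http://incoming.example.org/debian unstable main

which the list parser further below turns into the "trusted" entry of the Options map and forwards via SetTrusted().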
bool debReleaseIndex::IsTrusted() const
{
+ if (Trusted == ALWAYS_TRUSTED)
+ return true;
+ else if (Trusted == NEVER_TRUSTED)
+ return false;
+
+
if(_config->FindB("APT::Authentication::TrustCDROM", false))
if(URI.substr(0,strlen("cdrom:")) == "cdrom:")
return true;
if (src != ArchEntries.end()) {
vector<debSectionEntry const*> const SectionEntries = src->second;
for (vector<debSectionEntry const*>::const_iterator I = SectionEntries.begin();
- I != SectionEntries.end(); I++)
+ I != SectionEntries.end(); ++I)
Indexes->push_back(new debSourcesIndex (URI, Dist, (*I)->Section, IsTrusted()));
}
if (a->first == "source")
continue;
for (vector<debSectionEntry const*>::const_iterator I = a->second.begin();
- I != a->second.end(); I++) {
+ I != a->second.end(); ++I) {
Indexes->push_back(new debPackagesIndex (URI, Dist, (*I)->Section, IsTrusted(), a->first));
sections[(*I)->Section].insert(lang.begin(), lang.end());
}
for (map<string, set<string> >::const_iterator s = sections.begin();
s != sections.end(); ++s)
for (set<string>::const_iterator l = s->second.begin();
- l != s->second.end(); l++) {
+ l != s->second.end(); ++l) {
if (*l == "none") continue;
Indexes->push_back(new debTranslationsIndex(URI,Dist,s->first,(*l).c_str()));
}
vector<string> const Archs =
(arch != Options.end()) ? VectorizeString(arch->second, ',') :
APT::Configuration::getArchitectures();
+ map<string, string>::const_iterator const trusted = Options.find("trusted");
for (vector<metaIndex *>::const_iterator I = List.begin();
- I != List.end(); I++)
+ I != List.end(); ++I)
{
// We only worry about debian entries here
if (strcmp((*I)->GetType(), "deb") != 0)
continue;
debReleaseIndex *Deb = (debReleaseIndex *) (*I);
+ if (trusted != Options.end())
+ Deb->SetTrusted(StringToBool(trusted->second, false));
+
      /* This check ensures that there will be only one Release file
queued for all the Packages files and Sources files it
corresponds to. */
return true;
}
}
+
// No currently created Release file indexes this entry, so we create a new one.
- // XXX determine whether this release is trusted or not
- debReleaseIndex *Deb = new debReleaseIndex(URI, Dist);
+ debReleaseIndex *Deb;
+ if (trusted != Options.end())
+ Deb = new debReleaseIndex(URI, Dist, StringToBool(trusted->second, false));
+ else
+ Deb = new debReleaseIndex(URI, Dist);
+
if (IsSrc == true)
Deb->PushSectionEntry ("source", new debReleaseIndex::debSectionEntry(Section, IsSrc));
else
return Section.FindS("SHA1");
}
/*}}}*/
-// RecordParser::SHA1Hash - Return the archive hash /*{{{*/
+// RecordParser::SHA256Hash - Return the archive hash /*{{{*/
// ---------------------------------------------------------------------
/* */
string debRecordParser::SHA256Hash()
return Section.FindS("SHA256");
}
/*}}}*/
+// RecordParser::SHA512Hash - Return the archive hash /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string debRecordParser::SHA512Hash()
+{
+ return Section.FindS("SHA512");
+}
+ /*}}}*/
// RecordParser::Maintainer - Return the maintainer email /*{{{*/
// ---------------------------------------------------------------------
/* */
return Section.FindS("Maintainer");
}
/*}}}*/
+// RecordParser::RecordField - Return the value of an arbitrary field	/*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string debRecordParser::RecordField(const char *fieldName)
+{
+ return Section.FindS(fieldName);
+}
+
+ /*}}}*/
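A possible use of the new accessor (field name chosen only for illustration, and assuming the matching virtual in pkgRecords::Parser, which this excerpt does not show):

   pkgRecords Recs(Cache);
   pkgRecords::Parser &Parse = Recs.Lookup(Ver.FileList());
   std::string const homepage = Parse.RecordField("Homepage");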
// RecordParser::ShortDesc - Return a 1 line description /*{{{*/
// ---------------------------------------------------------------------
/* */
{
vector<string> const lang = APT::Configuration::getLanguages();
for (vector<string>::const_iterator l = lang.begin();
- orig.empty() && l != lang.end(); l++)
+ orig.empty() && l != lang.end(); ++l)
orig = Section.FindS(string("Description-").append(*l).c_str());
}
using namespace std;
+class pkgDPkgPMPrivate
+{
+public:
+ pkgDPkgPMPrivate() : dpkgbuf_pos(0), term_out(NULL), history_out(NULL)
+ {
+ }
+ bool stdin_is_dev_null;
+ // the buffer we use for the dpkg status-fd reading
+ char dpkgbuf[1024];
+ int dpkgbuf_pos;
+ FILE *term_out;
+ FILE *history_out;
+ string dpkg_error;
+};
+
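The new private class follows the d-pointer idiom: the public header only needs a forward declaration and an opaque pointer, so members such as the dpkg status buffer can change later without breaking the ABI. Roughly (a sketch of the assumed header side, not part of this excerpt):

   class pkgDPkgPMPrivate;                    // forward declaration only
   class pkgDPkgPM : public pkgPackageManager
   {
      private:
      pkgDPkgPMPrivate *d;                    // opaque implementation pointer
      // ... unchanged public interface ...
   };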
namespace
{
// Maps the dpkg "processing" info to human readable names. Entry 0
// ---------------------------------------------------------------------
/* */
pkgDPkgPM::pkgDPkgPM(pkgDepCache *Cache)
- : pkgPackageManager(Cache), dpkgbuf_pos(0),
- term_out(NULL), history_out(NULL), PackagesDone(0), PackagesTotal(0)
+ : pkgPackageManager(Cache), PackagesDone(0), PackagesTotal(0)
{
+ d = new pkgDPkgPMPrivate();
}
/*}}}*/
// DPkgPM::pkgDPkgPM - Destructor /*{{{*/
/* */
pkgDPkgPM::~pkgDPkgPM()
{
+ delete d;
}
/*}}}*/
// DPkgPM::Install - Install a package /*{{{*/
fprintf(F,"\n");
// Write out the package actions in order.
- for (vector<Item>::iterator I = List.begin(); I != List.end(); I++)
+ for (vector<Item>::iterator I = List.begin(); I != List.end(); ++I)
{
if(I->Pkg.end() == true)
continue;
// Feed it the filenames.
if (Version <= 1)
{
- for (vector<Item>::iterator I = List.begin(); I != List.end(); I++)
+ for (vector<Item>::iterator I = List.begin(); I != List.end(); ++I)
{
// Only deal with packages to be installed from .deb
if (I->Op != Item::Install)
if (len)
write(master, input_buf, len);
else
- stdin_is_dev_null = true;
+ d->stdin_is_dev_null = true;
}
/*}}}*/
// DPkgPM::DoTerminalPty - Read the terminal pty and write log /*{{{*/
if(len <= 0)
return;
write(1, term_buf, len);
- if(term_out)
- fwrite(term_buf, len, sizeof(char), term_out);
+ if(d->term_out)
+ fwrite(term_buf, len, sizeof(char), d->term_out);
}
/*}}}*/
// DPkgPM::ProcessDpkgStatusBuf /*{{{*/
char *p, *q;
int len;
- len=read(statusfd, &dpkgbuf[dpkgbuf_pos], sizeof(dpkgbuf)-dpkgbuf_pos);
- dpkgbuf_pos += len;
+ len=read(statusfd, &d->dpkgbuf[d->dpkgbuf_pos], sizeof(d->dpkgbuf)-d->dpkgbuf_pos);
+ d->dpkgbuf_pos += len;
if(len <= 0)
return;
// process line by line if we have a buffer
- p = q = dpkgbuf;
- while((q=(char*)memchr(p, '\n', dpkgbuf+dpkgbuf_pos-p)) != NULL)
+ p = q = d->dpkgbuf;
+ while((q=(char*)memchr(p, '\n', d->dpkgbuf+d->dpkgbuf_pos-p)) != NULL)
{
*q = 0;
ProcessDpkgStatusLine(OutStatusFd, p);
}
// now move the unprocessed bits (after the final \n that is now a 0x0)
- // to the start and update dpkgbuf_pos
- p = (char*)memrchr(dpkgbuf, 0, dpkgbuf_pos);
+ // to the start and update d->dpkgbuf_pos
+ p = (char*)memrchr(d->dpkgbuf, 0, d->dpkgbuf_pos);
if(p == NULL)
return;
p++;
// move the unprocessed tail to the start and update pos
- memmove(dpkgbuf, p, p-dpkgbuf);
- dpkgbuf_pos = dpkgbuf+dpkgbuf_pos-p;
+ memmove(d->dpkgbuf, p, p-d->dpkgbuf);
+ d->dpkgbuf_pos = d->dpkgbuf+d->dpkgbuf_pos-p;
}
/*}}}*/
// DPkgPM::WriteHistoryTag /*{{{*/
   // poor man's rstrip(", ")
if (value[length-2] == ',' && value[length-1] == ' ')
value.erase(length - 2, 2);
- fprintf(history_out, "%s: %s\n", tag.c_str(), value.c_str());
+ fprintf(d->history_out, "%s: %s\n", tag.c_str(), value.c_str());
} /*}}}*/
// DPkgPM::OpenLog /*{{{*/
bool pkgDPkgPM::OpenLog()
_config->Find("Dir::Log::Terminal"));
if (!logfile_name.empty())
{
- term_out = fopen(logfile_name.c_str(),"a");
- if (term_out == NULL)
+ d->term_out = fopen(logfile_name.c_str(),"a");
+ if (d->term_out == NULL)
return _error->WarningE("OpenLog", _("Could not open file '%s'"), logfile_name.c_str());
- setvbuf(term_out, NULL, _IONBF, 0);
- SetCloseExec(fileno(term_out), true);
+ setvbuf(d->term_out, NULL, _IONBF, 0);
+ SetCloseExec(fileno(d->term_out), true);
struct passwd *pw;
struct group *gr;
pw = getpwnam("root");
if (pw != NULL && gr != NULL)
chown(logfile_name.c_str(), pw->pw_uid, gr->gr_gid);
chmod(logfile_name.c_str(), 0644);
- fprintf(term_out, "\nLog started: %s\n", timestr);
+ fprintf(d->term_out, "\nLog started: %s\n", timestr);
}
// write your history
_config->Find("Dir::Log::History"));
if (!history_name.empty())
{
- history_out = fopen(history_name.c_str(),"a");
- if (history_out == NULL)
+ d->history_out = fopen(history_name.c_str(),"a");
+ if (d->history_out == NULL)
return _error->WarningE("OpenLog", _("Could not open file '%s'"), history_name.c_str());
chmod(history_name.c_str(), 0644);
- fprintf(history_out, "\nStart-Date: %s\n", timestr);
+ fprintf(d->history_out, "\nStart-Date: %s\n", timestr);
string remove, purge, install, reinstall, upgrade, downgrade;
- for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
enum { CANDIDATE, CANDIDATE_AUTO, CURRENT_CANDIDATE, CURRENT } infostring;
string *line = NULL;
WriteHistoryTag("Downgrade",downgrade);
WriteHistoryTag("Remove",remove);
WriteHistoryTag("Purge",purge);
- fflush(history_out);
+ fflush(d->history_out);
}
return true;
struct tm *tmp = localtime(&t);
strftime(timestr, sizeof(timestr), "%F %T", tmp);
- if(term_out)
+ if(d->term_out)
{
- fprintf(term_out, "Log ended: ");
- fprintf(term_out, "%s", timestr);
- fprintf(term_out, "\n");
- fclose(term_out);
+ fprintf(d->term_out, "Log ended: ");
+ fprintf(d->term_out, "%s", timestr);
+ fprintf(d->term_out, "\n");
+ fclose(d->term_out);
}
- term_out = NULL;
+ d->term_out = NULL;
- if(history_out)
+ if(d->history_out)
{
if (disappearedPkgs.empty() == false)
{
}
WriteHistoryTag("Disappeared", disappear);
}
- if (dpkg_error.empty() == false)
- fprintf(history_out, "Error: %s\n", dpkg_error.c_str());
- fprintf(history_out, "End-Date: %s\n", timestr);
- fclose(history_out);
+ if (d->dpkg_error.empty() == false)
+ fprintf(d->history_out, "Error: %s\n", d->dpkg_error.c_str());
+ fprintf(d->history_out, "End-Date: %s\n", timestr);
+ fclose(d->history_out);
}
- history_out = NULL;
+ d->history_out = NULL;
return true;
}
// that will be [installed|configured|removed|purged] and add
// them to the PackageOps map (the dpkg states it goes through)
// and the PackageOpsTranslations (human readable strings)
- for (vector<Item>::const_iterator I = List.begin(); I != List.end();I++)
+ for (vector<Item>::const_iterator I = List.begin(); I != List.end(); ++I)
{
if((*I).Pkg.end() == true)
continue;
string const name = (*I).Pkg.Name();
PackageOpsDone[name] = 0;
- for(int i=0; (DpkgStatesOpMap[(*I).Op][i]).state != NULL; i++)
+ for(int i=0; (DpkgStatesOpMap[(*I).Op][i]).state != NULL; ++i)
{
PackageOps[name].push_back(DpkgStatesOpMap[(*I).Op][i]);
PackagesTotal++;
}
}
- stdin_is_dev_null = false;
+ d->stdin_is_dev_null = false;
// create log
OpenLog();
// Do all actions with the same Op in one run
vector<Item>::const_iterator J = I;
if (TriggersPending == true)
- for (; J != List.end(); J++)
+ for (; J != List.end(); ++J)
{
if (J->Op == I->Op)
continue;
break;
}
else
- for (; J != List.end() && J->Op == I->Op; J++)
+ for (; J != List.end() && J->Op == I->Op; ++J)
/* nothing */;
// Generate the argument list
// Write in the file or package names
if (I->Op == Item::Install)
{
- for (;I != J && Size < MaxArgBytes; I++)
+ for (;I != J && Size < MaxArgBytes; ++I)
{
if (I->File[0] != '/')
return _error->Error("Internal Error, Pathname to install is not absolute '%s'",I->File.c_str());
{
string const nativeArch = _config->Find("APT::Architecture");
unsigned long const oldSize = I->Op == Item::Configure ? Size : 0;
- for (;I != J && Size < MaxArgBytes; I++)
+ for (;I != J && Size < MaxArgBytes; ++I)
{
if((*I).Pkg.end() == true)
continue;
const char *s = _("Can not write log, openpty() "
"failed (/dev/pts not mounted?)\n");
fprintf(stderr, "%s",s);
- if(term_out)
- fprintf(term_out, "%s",s);
+ if(d->term_out)
+ fprintf(d->term_out, "%s",s);
master = slave = -1;
} else {
struct termios rtt;
// wait for input or output here
FD_ZERO(&rfds);
- if (master >= 0 && !stdin_is_dev_null)
+ if (master >= 0 && !d->stdin_is_dev_null)
FD_SET(0, &rfds);
FD_SET(_dpkgin, &rfds);
if(master >= 0)
RunScripts("DPkg::Post-Invoke");
if (WIFSIGNALED(Status) != 0 && WTERMSIG(Status) == SIGSEGV)
- strprintf(dpkg_error, "Sub-process %s received a segmentation fault.",Args[0]);
+ strprintf(d->dpkg_error, "Sub-process %s received a segmentation fault.",Args[0]);
else if (WIFEXITED(Status) != 0)
- strprintf(dpkg_error, "Sub-process %s returned an error code (%u)",Args[0],WEXITSTATUS(Status));
+ strprintf(d->dpkg_error, "Sub-process %s returned an error code (%u)",Args[0],WEXITSTATUS(Status));
else
- strprintf(dpkg_error, "Sub-process %s exited unexpectedly",Args[0]);
+ strprintf(d->dpkg_error, "Sub-process %s exited unexpectedly",Args[0]);
- if(dpkg_error.size() > 0)
- _error->Error("%s", dpkg_error.c_str());
+ if(d->dpkg_error.size() > 0)
+ _error->Error("%s", d->dpkg_error.c_str());
if(stopOnError)
{
fprintf(report, "ErrorMessage:\n %s\n", errormsg);
// ensure that the log is flushed
- if(term_out)
- fflush(term_out);
+ if(d->term_out)
+ fflush(d->term_out);
// attach terminal log it if we have it
string logfile_name = _config->FindFile("Dir::Log::Terminal");
// log the ordering
const char *ops_str[] = {"Install", "Configure","Remove","Purge"};
fprintf(report, "AptOrdering:\n");
- for (vector<Item>::iterator I = List.begin(); I != List.end(); I++)
+ for (vector<Item>::iterator I = List.begin(); I != List.end(); ++I)
fprintf(report, " %s: %s\n", (*I).Pkg.Name(), ops_str[(*I).Op]);
// attach dmesg log (to learn about segfaults)
/* Set the current state of everything. In this state all of the
packages are kept exactly as is. See AllUpgrade */
int Done = 0;
- for (PkgIterator I = PkgBegin(); I.end() != true; I++,Done++)
+ for (PkgIterator I = PkgBegin(); I.end() != true; ++I, ++Done)
{
if (Prog != 0 && Done%20 == 0)
Prog->Progress(Done);
// then write the ones we have not seen yet
std::ostringstream ostr;
- for(pkgCache::PkgIterator pkg=Cache->PkgBegin(); !pkg.end(); pkg++) {
+ for(pkgCache::PkgIterator pkg=Cache->PkgBegin(); !pkg.end(); ++pkg) {
StateCache const &P = PkgState[pkg->ID];
if(P.Flags & Flag::Auto) {
if (pkgs_seen.find(pkg.FullName()) != pkgs_seen.end()) {
// Check the providing packages
PrvIterator P = Dep.TargetPkg().ProvidesList();
PkgIterator Pkg = Dep.ParentPkg();
- for (; P.end() != true; P++)
+ for (; P.end() != true; ++P)
{
/* Provides may never be applied against the same package (or group)
	 if it is a conflict. See the comment above. */
/*}}}*/
// DepCache::AddSizes - Add the packages sizes to the counters /*{{{*/
// ---------------------------------------------------------------------
-/* Call with Mult = -1 to preform the inverse opration
- The Mult increases the complexity of the calulations here and is unused -
- or do we really have a usecase for removing the size of a package two
- times? So let us replace it with a simple bool and be done with it… */
-__deprecated void pkgDepCache::AddSizes(const PkgIterator &Pkg,signed long Mult)
-{
- StateCache &P = PkgState[Pkg->ID];
-
- if (Pkg->VersionList == 0)
- return;
-
- if (Pkg.State() == pkgCache::PkgIterator::NeedsConfigure &&
- P.Keep() == true)
- return;
-
- // Compute the size data
- if (P.NewInstall() == true)
- {
- iUsrSize += (signed long long)(Mult*P.InstVerIter(*this)->InstalledSize);
- iDownloadSize += (signed long long)(Mult*P.InstVerIter(*this)->Size);
- return;
- }
-
- // Upgrading
- if (Pkg->CurrentVer != 0 &&
- (P.InstallVer != (Version *)Pkg.CurrentVer() ||
- (P.iFlags & ReInstall) == ReInstall) && P.InstallVer != 0)
- {
- iUsrSize += (signed long long)(Mult*((signed long long)P.InstVerIter(*this)->InstalledSize -
- (signed long long)Pkg.CurrentVer()->InstalledSize));
- iDownloadSize += (signed long long)(Mult*P.InstVerIter(*this)->Size);
- return;
- }
-
- // Reinstall
- if (Pkg.State() == pkgCache::PkgIterator::NeedsUnpack &&
- P.Delete() == false)
- {
- iDownloadSize += (signed long long)(Mult*P.InstVerIter(*this)->Size);
- return;
- }
-
- // Removing
- if (Pkg->CurrentVer != 0 && P.InstallVer == 0)
- {
- iUsrSize -= (signed long long)(Mult*Pkg.CurrentVer()->InstalledSize);
- return;
- }
-}
- /*}}}*/
-// DepCache::AddSizes - Add the packages sizes to the counters /*{{{*/
-// ---------------------------------------------------------------------
/* Call with Inverse = true to perform the inverse operation */
-void pkgDepCache::AddSizes(const PkgIterator &Pkg, bool const &Inverse)
+void pkgDepCache::AddSizes(const PkgIterator &Pkg, bool const Inverse)
{
StateCache &P = PkgState[Pkg->ID];
   called Remove/Add itself. Remember, dependencies can be circular so
while processing a dep for Pkg it is possible that Add/Remove
will be called on Pkg */
-void pkgDepCache::AddStates(const PkgIterator &Pkg,int Add)
+void pkgDepCache::AddStates(const PkgIterator &Pkg, bool const Invert)
{
+ signed char const Add = (Invert == false) ? 1 : -1;
StateCache &State = PkgState[Pkg->ID];
// The Package is broken (either minimal dep or policy dep)
{
unsigned char Group = 0;
- for (DepIterator D = V.DependsList(); D.end() != true; D++)
+ for (DepIterator D = V.DependsList(); D.end() != true; ++D)
{
// Build the dependency state.
unsigned char &State = DepState[D->ID];
// Compute a single dependency element (glob or)
DepIterator Start = D;
unsigned char State = 0;
- for (bool LastOR = true; D.end() == false && LastOR == true; D++)
+ for (bool LastOR = true; D.end() == false && LastOR == true; ++D)
{
State |= DepState[D->ID];
LastOR = (D->CompareOp & Dep::Or) == Dep::Or;
// Perform the depends pass
int Done = 0;
- for (PkgIterator I = PkgBegin(); I.end() != true; I++,Done++)
+ for (PkgIterator I = PkgBegin(); I.end() != true; ++I, ++Done)
{
if (Prog != 0 && Done%20 == 0)
Prog->Progress(Done);
- for (VerIterator V = I.VersionList(); V.end() != true; V++)
+ for (VerIterator V = I.VersionList(); V.end() != true; ++V)
{
unsigned char Group = 0;
- for (DepIterator D = V.DependsList(); D.end() != true; D++)
+ for (DepIterator D = V.DependsList(); D.end() != true; ++D)
{
// Build the dependency state.
unsigned char &State = DepState[D->ID];
void pkgDepCache::Update(DepIterator D)
{
// Update the reverse deps
- for (;D.end() != true; D++)
+ for (;D.end() != true; ++D)
{
unsigned char &State = DepState[D->ID];
State = DependencyState(D);
// Update the provides map for the current ver
if (Pkg->CurrentVer != 0)
for (PrvIterator P = Pkg.CurrentVer().ProvidesList();
- P.end() != true; P++)
+ P.end() != true; ++P)
Update(P.ParentPkg().RevDependsList());
// Update the provides map for the candidate ver
if (PkgState[Pkg->ID].CandidateVer != 0)
for (PrvIterator P = PkgState[Pkg->ID].CandidateVerIter(*this).ProvidesList();
- P.end() != true; P++)
+ P.end() != true; ++P)
Update(P.ParentPkg().RevDependsList());
}
/*}}}*/
// DepCache::MarkKeep - Put the package in the keep state /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::MarkKeep(PkgIterator const &Pkg, bool Soft, bool FromUser,
+bool pkgDepCache::MarkKeep(PkgIterator const &Pkg, bool Soft, bool FromUser,
unsigned long Depth)
{
if (IsModeChangeOk(ModeKeep, Pkg, Depth, FromUser) == false)
- return;
+ return false;
/* Reject an attempt to keep a non-source broken installed package, those
must be upgraded */
if (Pkg.State() == PkgIterator::NeedsUnpack &&
Pkg.CurrentVer().Downloadable() == false)
- return;
+ return false;
/* We changed the soft state all the time so the UI is a bit nicer
to use */
// Check that it is not already kept
if (P.Mode == ModeKeep)
- return;
+ return true;
if (Soft == true)
P.iFlags |= AutoKept;
P.InstallVer = Pkg.CurrentVer();
AddStates(Pkg);
-
Update(Pkg);
-
AddSizes(Pkg);
+
+ return true;
}
/*}}}*/
// DepCache::MarkDelete - Put the package in the delete state /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::MarkDelete(PkgIterator const &Pkg, bool rPurge,
+bool pkgDepCache::MarkDelete(PkgIterator const &Pkg, bool rPurge,
unsigned long Depth, bool FromUser)
{
if (IsModeChangeOk(ModeDelete, Pkg, Depth, FromUser) == false)
- return;
+ return false;
StateCache &P = PkgState[Pkg->ID];
// Check that it is not already marked for delete
if ((P.Mode == ModeDelete || P.InstallVer == 0) &&
(Pkg.Purge() == true || rPurge == false))
- return;
+ return true;
// check if we are allowed to remove the package
if (IsDeleteOk(Pkg,rPurge,Depth,FromUser) == false)
- return;
+ return false;
P.iFlags &= ~(AutoKept | Purge);
if (rPurge == true)
Update(Pkg);
AddSizes(Pkg);
+ return true;
}
/*}}}*/
// DepCache::IsDeleteOk - check if it is ok to remove this package /*{{{*/
// DepCache::MarkInstall - Put the package in the install state /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
+bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
unsigned long Depth, bool FromUser,
bool ForceImportantDeps)
{
if (IsModeChangeOk(ModeInstall, Pkg, Depth, FromUser) == false)
- return;
+ return false;
StateCache &P = PkgState[Pkg->ID];
   // See if there is even any possible installation candidate
if (P.CandidateVer == 0)
- return;
+ return false;
/* Check that it is not already marked for install and that it can be
installed */
P.CandidateVer == (Version *)Pkg.CurrentVer()))
{
if (P.CandidateVer == (Version *)Pkg.CurrentVer() && P.InstallVer == 0)
- MarkKeep(Pkg, false, FromUser, Depth+1);
- return;
+ return MarkKeep(Pkg, false, FromUser, Depth+1);
+ return true;
}
// check if we are allowed to install the package
if (IsInstallOk(Pkg,AutoInst,Depth,FromUser) == false)
- return;
+ return false;
ActionGroup group(*this);
P.iFlags &= ~AutoKept;
Update(Pkg);
AddSizes(Pkg);
- if (AutoInst == false)
- return;
+ if (AutoInst == false || _config->Find("APT::Solver", "internal") != "internal")
+ return true;
if (DebugMarker == true)
std::clog << OutputInDepth(Depth) << "MarkInstall " << Pkg << " FU=" << FromUser << std::endl;
DepIterator Start = Dep;
bool Result = true;
unsigned Ors = 0;
- for (bool LastOR = true; Dep.end() == false && LastOR == true; Dep++,Ors++)
+ for (bool LastOR = true; Dep.end() == false && LastOR == true; ++Dep, ++Ors)
{
LastOR = (Dep->CompareOp & Dep::Or) == Dep::Or;
continue;
// if the dependency was critical, we can't install it, so remove it again
MarkDelete(Pkg,false,Depth + 1, false);
- return;
+ return false;
}
/* Check if any ImportantDep() (but not Critical) were added
continue;
if (PkgState[Pkg->ID].CandidateVer != *I &&
- Start->Type == Dep::DpkgBreaks)
- MarkInstall(Pkg,true,Depth + 1, false, ForceImportantDeps);
- else
- MarkDelete(Pkg,false,Depth + 1, false);
+ Start->Type == Dep::DpkgBreaks &&
+ MarkInstall(Pkg,true,Depth + 1, false, ForceImportantDeps) == true)
+ continue;
+ else if (MarkDelete(Pkg,false,Depth + 1, false) == false)
+ break;
}
continue;
}
}
+
+ return Dep.end() == true;
}
/*}}}*/
// DepCache::IsInstallOk - check if it is ok to install this package /*{{{*/
// DepCache::SetCandidateVersion - Change the candidate version /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::SetCandidateVersion(VerIterator TargetVer, bool const &Pseudo)
+void pkgDepCache::SetCandidateVersion(VerIterator TargetVer)
{
pkgCache::PkgIterator Pkg = TargetVer.ParentPkg();
StateCache &P = PkgState[Pkg->ID];
unless they are already installed */
VerIterator Last(*(pkgCache *)this,0);
- for (VerIterator I = Pkg.VersionList(); I.end() == false; I++)
+ for (VerIterator I = Pkg.VersionList(); I.end() == false; ++I)
{
if (Pkg.CurrentVer() == I)
return I;
- for (VerFileIterator J = I.FileList(); J.end() == false; J++)
+ for (VerFileIterator J = I.FileList(); J.end() == false; ++J)
{
if ((J.File()->Flags & Flag::NotSource) != 0)
continue;
return true;
else if(Dep->Type == pkgCache::Dep::Recommends)
{
- if ( _config->FindB("APT::Install-Recommends", false))
+ if (InstallRecommends)
return true;
// we suport a special mode to only install-recommends for certain
// sections
return true;
}
else if(Dep->Type == pkgCache::Dep::Suggests)
- return _config->FindB("APT::Install-Suggests", false);
+ return InstallSuggests;
return false;
}
/*}}}*/
+// Policy::GetPriority - Get the priority of the package pin /*{{{*/
+signed short pkgDepCache::Policy::GetPriority(pkgCache::PkgIterator const &Pkg)
+{ return 0; };
+signed short pkgDepCache::Policy::GetPriority(pkgCache::PkgFileIterator const &File)
+{ return 0; };
+ /*}}}*/
pkgDepCache::InRootSetFunc *pkgDepCache::GetRootSetFunc() /*{{{*/
{
DefaultRootSetFunc *f = new DefaultRootSetFunc;
// pkgDepCache::MarkRequired - the main mark algorithm /*{{{*/
bool pkgDepCache::MarkRequired(InRootSetFunc &userFunc)
{
+ if (_config->Find("APT::Solver", "internal") != "internal")
+ return true;
+
bool follow_recommends;
bool follow_suggests;
bool debug_autoremove = _config->FindB("Debug::pkgAutoRemove",false);
inline bool Delete() const {return Mode == ModeDelete;};
inline bool Purge() const {return Delete() == true && (iFlags & pkgDepCache::Purge) == pkgDepCache::Purge; };
inline bool Keep() const {return Mode == ModeKeep;};
+ inline bool Protect() const {return (iFlags & Protected) == Protected;};
inline bool Upgrade() const {return Status > 0 && Mode == ModeInstall;};
inline bool Upgradable() const {return Status >= 1;};
inline bool Downgrade() const {return Status < 0 && Mode == ModeInstall;};
class Policy
{
public:
-
+ Policy() {
+ InstallRecommends = _config->FindB("APT::Install-Recommends", false);
+ InstallSuggests = _config->FindB("APT::Install-Suggests", false);
+ }
+
virtual VerIterator GetCandidateVer(PkgIterator const &Pkg);
virtual bool IsImportantDep(DepIterator const &Dep);
-
+ virtual signed short GetPriority(PkgIterator const &Pkg);
+ virtual signed short GetPriority(PkgFileIterator const &File);
+
virtual ~Policy() {};
+
+ private:
+ bool InstallRecommends;
+ bool InstallSuggests;
};
private:
void Update(PkgIterator const &P);
// Count manipulators
- void AddSizes(const PkgIterator &Pkg, bool const &Invert = false);
+ void AddSizes(const PkgIterator &Pkg, bool const Invert = false);
inline void RemoveSizes(const PkgIterator &Pkg) {AddSizes(Pkg, true);};
- void AddSizes(const PkgIterator &Pkg,signed long Mult) __deprecated;
- void AddStates(const PkgIterator &Pkg,int Add = 1);
- inline void RemoveStates(const PkgIterator &Pkg) {AddStates(Pkg,-1);};
+ void AddStates(const PkgIterator &Pkg, bool const Invert = false);
+ inline void RemoveStates(const PkgIterator &Pkg) {AddStates(Pkg,true);};
public:
/** \name State Manipulators
*/
// @{
- void MarkKeep(PkgIterator const &Pkg, bool Soft = false,
+ bool MarkKeep(PkgIterator const &Pkg, bool Soft = false,
bool FromUser = true, unsigned long Depth = 0);
- bool MarkDelete(PkgIterator const &Pkg, bool Purge = false,
- void MarkDelete(PkgIterator const &Pkg, bool MarkPurge = false,
++ bool MarkDelete(PkgIterator const &Pkg, bool MarkPurge = false,
unsigned long Depth = 0, bool FromUser = true);
- void MarkInstall(PkgIterator const &Pkg,bool AutoInst = true,
+ bool MarkInstall(PkgIterator const &Pkg,bool AutoInst = true,
unsigned long Depth = 0, bool FromUser = true,
bool ForceImportantDeps = false);
void MarkProtected(PkgIterator const &Pkg) { PkgState[Pkg->ID].iFlags |= Protected; };
void SetReInstall(PkgIterator const &Pkg,bool To);
- // FIXME: Remove the unused boolean parameter on abi break
- void SetCandidateVersion(VerIterator TargetVer, bool const &Pseudo = true);
+ void SetCandidateVersion(VerIterator TargetVer);
bool SetCandidateRelease(pkgCache::VerIterator TargetVer,
std::string const &TargetRel);
/** Set the candidate version for dependencies too if needed.
* \param Depth recursive depth of this Marker call
* \param FromUser was the remove requested by the user?
*/
- virtual bool IsDeleteOk(const PkgIterator &Pkg,bool Purge = false,
+ virtual bool IsDeleteOk(const PkgIterator &Pkg,bool MarkPurge = false,
unsigned long Depth = 0, bool FromUser = true);
// read persistent states
virtual ~pkgDepCache();
private:
- // Helper for Update(OpProgress) to remove pseudoinstalled arch all packages
- // FIXME: they are private so shouldn't affect abi, but just in case…
- __deprecated bool RemovePseudoInstalledPkg(PkgIterator &Pkg, std::set<unsigned long> &recheck) { return true; };
- __deprecated bool ReInstallPseudoForGroup(unsigned long const &Grp, std::set<unsigned long> &recheck) { return true; };
- __deprecated bool ReInstallPseudoForGroup(pkgCache::PkgIterator const &P, std::set<unsigned long> &recheck) { return true; };
-
-
bool IsModeChangeOk(ModeList const mode, PkgIterator const &Pkg,
unsigned long const Depth, bool const FromUser);
};
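With MarkKeep, MarkDelete and MarkInstall now returning bool, callers can report a failed marking instead of silently continuing. A minimal caller-side sketch (hypothetical helper, not part of this patch), assuming an initialised pkgDepCache and a valid package iterator:

#include <apt-pkg/depcache.h>
#include <apt-pkg/error.h>

static bool TryInstall(pkgDepCache &Cache, pkgCache::PkgIterator const &Pkg)
{
   // the new bool return tells us whether the marking actually succeeded
   if (Cache.MarkInstall(Pkg, /*AutoInst*/ true, /*Depth*/ 0,
                         /*FromUser*/ true, /*ForceImportantDeps*/ false) == false)
      return _error->Error("Marking %s for installation failed", Pkg.FullName(true).c_str());
   return true;
}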
if (ordering == false)
return _error->Error("Internal ordering error");
- for (pkgOrderList::iterator I = List->begin(); I != List->end(); I++)
+ for (pkgOrderList::iterator I = List->begin(); I != List->end(); ++I)
{
PkgIterator Pkg(Cache,*I);
FileNames[Pkg->ID] = string();
List->SetFileList(FileNames);
bool Bad = false;
- for (PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
if (List->IsMissing(I) == false)
continue;
D = I.CurrentVer().DependsList();
}
- for ( /* nothing */ ; D.end() == false; D++)
+ for ( /* nothing */ ; D.end() == false; ++D)
if (D->Type == pkgCache::Dep::Depends || D->Type == pkgCache::Dep::PreDepends)
{
if(!List->IsFlag(D.TargetPkg(), pkgOrderList::Immediate))
static bool const NoImmConfigure = !_config->FindB("APT::Immediate-Configure",true);
// Generate the list of affected packages and sort it
- for (PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
+ for (PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I)
{
// Ignore no-version packages
if (I->VersionList == 0)
bool pkgPackageManager::CheckRConflicts(PkgIterator Pkg,DepIterator D,
const char *Ver)
{
- for (;D.end() == false; D++)
+ for (;D.end() == false; ++D)
{
if (D->Type != pkgCache::Dep::Conflicts &&
D->Type != pkgCache::Dep::Obsoletes)
pkgOrderList OList(&Cache);
// Populate the order list
- for (pkgOrderList::iterator I = List->begin(); I != List->end(); I++)
+ for (pkgOrderList::iterator I = List->begin(); I != List->end(); ++I)
if (List->IsFlag(pkgCache::PkgIterator(Cache,*I),
pkgOrderList::UnPacked) == true)
OList.push_back(*I);
bool const ConfigurePkgs = (conf == "all");
// Perform the configuring
- for (pkgOrderList::iterator I = OList.begin(); I != OList.end(); I++)
+ for (pkgOrderList::iterator I = OList.begin(); I != OList.end(); ++I)
{
PkgIterator Pkg(Cache,*I);
return false;
// Perform the configuring
- for (pkgOrderList::iterator I = OList.begin(); I != OList.end(); I++)
+ for (pkgOrderList::iterator I = OList.begin(); I != OList.end(); ++I)
{
PkgIterator Pkg(Cache,*I);
List->Flag(Pkg,pkgOrderList::Configured,pkgOrderList::States);
}
- if (Cache[Pkg].InstVerIter(Cache)->MultiArch == pkgCache::Version::Same)
+ if ((Cache[Pkg].InstVerIter(Cache)->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same)
for (PkgIterator P = Pkg.Group().PackageList();
P.end() == false; P = Pkg.Group().NextPkg(P))
{
{
if (D->Type != pkgCache::Dep::Depends && D->Type != pkgCache::Dep::PreDepends)
{
- D++;
+ ++D;
continue;
}
// Grok or groups
Bad = true;
- for (bool LastOR = true; D.end() == false && LastOR == true; D++)
+ for (bool LastOR = true; D.end() == false && LastOR == true; ++D)
{
LastOR = (D->CompareOp & pkgCache::Dep::Or) == pkgCache::Dep::Or;
continue;
SPtrArray<Version *> VList = D.AllTargets();
- for (Version **I = VList; *I != 0 && Bad == true; I++)
+ for (Version **I = VList; *I != 0 && Bad == true; ++I)
{
VerIterator Ver(Cache,*I);
PkgIterator Pkg = Ver.ParentPkg();
if (Pkg->CurrentVer != 0)
{
for (DepIterator D = Pkg.RevDependsList(); D.end() == false &&
- IsEssential == false; D++)
+ IsEssential == false; ++D)
if (D->Type == pkgCache::Dep::Depends || D->Type == pkgCache::Dep::PreDepends)
if ((D.ParentPkg()->Flags & pkgCache::Flag::Essential) != 0)
IsEssential = true;
return _error->Error("Couldn't configure pre-depend %s for %s, "
"probably a dependency cycle.",
End.TargetPkg().Name(),Pkg.Name());
- Start++;
+ ++Start;
}
else
break;
return false;
for (PrvIterator P = instVer.ProvidesList();
- P.end() == false; P++)
+ P.end() == false; ++P)
CheckRConflicts(Pkg,P.ParentPkg().RevDependsList(),P.ProvideVersion());
List->Flag(Pkg,pkgOrderList::UnPacked,pkgOrderList::States);
- if (instVer->MultiArch == pkgCache::Version::Same)
+ if ((instVer->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same)
for (PkgIterator P = Pkg.Group().PackageList();
P.end() == false; P = Pkg.Group().NextPkg(P))
{
clog << "Done ordering" << endl;
bool DoneSomething = false;
- for (pkgOrderList::iterator I = List->begin(); I != List->end(); I++)
+ for (pkgOrderList::iterator I = List->begin(); I != List->end(); ++I)
{
PkgIterator Pkg(Cache,*I);
return Failed;
// Sanity check
- for (pkgOrderList::iterator I = List->begin(); I != List->end(); I++)
+ for (pkgOrderList::iterator I = List->begin(); I != List->end(); ++I)
{
if (List->IsFlag(*I,pkgOrderList::Configured) == false)
{
memset(PkgHashTable,0,sizeof(PkgHashTable));
memset(GrpHashTable,0,sizeof(GrpHashTable));
memset(Pools,0,sizeof(Pools));
+
+ CacheFileSize = 0;
}
/*}}}*/
// Cache::Header::CheckSizes - Check if the two headers have same *sz /*{{{*/
HeaderP->CheckSizes(DefHeader) == false)
return _error->Error(_("The package cache file is an incompatible version"));
+ if (Map.Size() < HeaderP->CacheFileSize)
+ return _error->Error(_("The package cache file is corrupted, it is too small"));
+
// Locate our VS..
if (HeaderP->VerSysName == 0 ||
(VS = pkgVersioningSystem::GetVS(StrP + HeaderP->VerSysName)) == 0)
unsigned long pkgCache::sHash(const string &Str) const
{
unsigned long Hash = 0;
- for (string::const_iterator I = Str.begin(); I != Str.end(); I++)
+ for (string::const_iterator I = Str.begin(); I != Str.end(); ++I)
Hash = 5*Hash + tolower_ascii(*I);
return Hash % _count(HeaderP->PkgHashTable);
}
unsigned long pkgCache::sHash(const char *Str) const
{
unsigned long Hash = 0;
- for (const char *I = Str; *I != 0; I++)
+ for (const char *I = Str; *I != 0; ++I)
Hash = 5*Hash + tolower_ascii(*I);
return Hash % _count(HeaderP->PkgHashTable);
}
virtual package libc-dev which is provided by libc5-dev and libc6-dev
we must ignore libc5-dev when considering the provides list. */
PrvIterator PStart = Result.ProvidesList();
- for (; PStart.end() != true && PStart.OwnerPkg() == ParentPkg(); PStart++);
+ for (; PStart.end() != true && PStart.OwnerPkg() == ParentPkg(); ++PStart);
// Nothing but indirect self provides
if (PStart.end() == true)
// Check for single packages in the provides list
PrvIterator P = PStart;
- for (; P.end() != true; P++)
+ for (; P.end() != true; ++P)
{
// Skip over self provides
if (P.OwnerPkg() == ParentPkg())
PkgIterator DPkg = TargetPkg();
// Walk along the actual package providing versions
- for (VerIterator I = DPkg.VersionList(); I.end() == false; I++)
+ for (VerIterator I = DPkg.VersionList(); I.end() == false; ++I)
{
if (Owner->VS->CheckDep(I.VerStr(),S->CompareOp,TargetVer()) == false)
continue;
}
// Follow all provides
- for (PrvIterator I = DPkg.ProvidesList(); I.end() == false; I++)
+ for (PrvIterator I = DPkg.ProvidesList(); I.end() == false; ++I)
{
if (Owner->VS->CheckDep(I.ProvideVersion(),S->CompareOp,TargetVer()) == false)
continue;
/* Start at A and look for B. If B is found then A > B otherwise
B was before A so A < B */
VerIterator I = *this;
- for (;I.end() == false; I++)
+ for (;I.end() == false; ++I)
if (I == B)
return 1;
return -1;
bool pkgCache::VerIterator::Downloadable() const
{
VerFileIterator Files = FileList();
- for (; Files.end() == false; Files++)
+ for (; Files.end() == false; ++Files)
if ((Files.File()->Flags & pkgCache::Flag::NotSource) != pkgCache::Flag::NotSource)
return true;
return false;
bool pkgCache::VerIterator::Automatic() const
{
VerFileIterator Files = FileList();
- for (; Files.end() == false; Files++)
+ for (; Files.end() == false; ++Files)
// Do not check ButAutomaticUpgrades here as it is kind of automatic…
if ((Files.File()->Flags & pkgCache::Flag::NotAutomatic) != pkgCache::Flag::NotAutomatic)
return true;
return false;
}
/*}}}*/
-// VerIterator::Pseudo - deprecated no-op method /*{{{*/
-bool pkgCache::VerIterator::Pseudo() const { return false; }
- /*}}}*/
// VerIterator::NewestFile - Return the newest file version relation /*{{{*/
// ---------------------------------------------------------------------
/* This looks at the version numbers associated with all of the sources
{
VerFileIterator Files = FileList();
VerFileIterator Highest = Files;
- for (; Files.end() == false; Files++)
+ for (; Files.end() == false; ++Files)
{
if (Owner->VS->CmpReleaseVer(Files.File().Version(),Highest.File().Version()) > 0)
Highest = Files;
{
bool First = true;
string Res;
- for (pkgCache::VerFileIterator I = this->FileList(); I.end() == false; I++)
+ for (pkgCache::VerFileIterator I = this->FileList(); I.end() == false; ++I)
{
// Do not print 'not source' entries
pkgCache::PkgFileIterator File = I.File();
// See if we have already printed this out..
bool Seen = false;
- for (pkgCache::VerFileIterator J = this->FileList(); I != J; J++)
+ for (pkgCache::VerFileIterator J = this->FileList(); I != J; ++J)
{
pkgCache::PkgFileIterator File2 = J.File();
if (File2->Label == 0 || File->Label == 0)
{
std::vector<string> const lang = APT::Configuration::getLanguages();
for (std::vector<string>::const_iterator l = lang.begin();
- l != lang.end(); l++)
+ l != lang.end(); ++l)
{
pkgCache::DescIterator Desc = DescriptionList();
for (; Desc.end() == false; ++Desc)
return;
Cache.HeaderP->Dirty = false;
+ Cache.HeaderP->CacheFileSize = Map.Size();
Map.Sync(0,sizeof(pkgCache::Header));
}
/*}}}*/
// don't add a new description if we have one for the given
// md5 && language
- for ( ; Desc.end() == false; Desc++)
+ for ( ; Desc.end() == false; ++Desc)
if (MD5SumValue(Desc.md5()) == CurMd5 &&
Desc.LanguageCode() == List.DescriptionLanguage())
duplicate=true;
for (Desc = Ver.DescriptionList();
Desc.end() == false;
- LastDesc = &Desc->NextDesc, Desc++)
+ LastDesc = &Desc->NextDesc, ++Desc)
{
if (MD5SumValue(Desc.md5()) == CurMd5)
{
unsigned long Hash = List.VersionHash();
pkgCache::VerIterator Ver = Pkg.VersionList();
Dynamic<pkgCache::VerIterator> DynVer(Ver);
- for (; Ver.end() == false; Ver++)
+ for (; Ver.end() == false; ++Ver)
{
if (Ver->Hash == Hash && Version.c_str() == Ver.VerStr())
{
// Link it to the end of the list
map_ptrloc *Last = &Ver->FileList;
- for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; V++)
+ for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V)
Last = &V->NextFile;
VF->NextFile = *Last;
*Last = VF.Index();
// Link it to the end of the list
map_ptrloc *Last = &Desc->FileList;
- for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; D++)
+ for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D)
Last = &D->NextFile;
DF->NextFile = *Last;
// Create Conflicts in between the group
pkgCache::GrpIterator G = GetCache().GrpBegin();
Dynamic<pkgCache::GrpIterator> DynG(G);
- for (; G.end() != true; G++)
+ for (; G.end() != true; ++G)
{
string const PkgName = G.Name();
pkgCache::PkgIterator P = G.PackageList();
Dynamic<pkgCache::PkgIterator> DynallPkg(allPkg);
pkgCache::VerIterator V = P.VersionList();
Dynamic<pkgCache::VerIterator> DynV(V);
- for (; V.end() != true; V++)
+ for (; V.end() != true; ++V)
{
// copy P.Arch() into a string here as a cache remap
// in NewDepends() later may alter the pointer location
if (OldDepLast == NULL)
{
OldDepLast = &Ver->DependsList;
- for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; D++)
+ for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D)
OldDepLast = &D->NextDepends;
} else if (oldMap != Map.Data())
OldDepLast += (map_ptrloc*) Map.Data() - (map_ptrloc*) oldMap;
return false;
}
- if (List.GetLastModifiedTime() < GetModificationTime(CacheFile))
+ if (List.GetLastModifiedTime() > GetModificationTime(CacheFile))
{
if (Debug == true)
std::clog << "sources.list is newer than the cache" << std::endl;
verify the IMS data and check that it is on the disk too.. */
SPtrArray<bool> Visited = new bool[Cache.HeaderP->PackageFileCount];
memset(Visited,0,sizeof(*Visited)*Cache.HeaderP->PackageFileCount);
- for (; Start != End; Start++)
+ for (; Start != End; ++Start)
{
if (Debug == true)
std::clog << "Checking PkgFile " << (*Start)->Describe() << ": ";
static unsigned long ComputeSize(FileIterator Start,FileIterator End)
{
unsigned long TotalSize = 0;
- for (; Start != End; Start++)
+ for (; Start != End; ++Start)
{
if ((*Start)->HasPackages() == false)
continue;
FileIterator Start, FileIterator End)
{
FileIterator I;
- for (I = Start; I != End; I++)
+ for (I = Start; I != End; ++I)
{
if ((*I)->HasPackages() == false)
continue;
Progress->Done();
TotalSize = ComputeSize(Start, End);
CurrentSize = 0;
- for (I = Start; I != End; I++)
+ for (I = Start; I != End; ++I)
{
unsigned long Size = (*I)->Size();
if (Progress != NULL)
vector<pkgIndexFile *> Files;
for (vector<metaIndex *>::const_iterator i = List.begin();
i != List.end();
- i++)
+ ++i)
{
vector <pkgIndexFile *> *Indexes = (*i)->GetIndexFiles();
for (vector<pkgIndexFile *>::const_iterator j = Indexes->begin();
j != Indexes->end();
- j++)
+ ++j)
Files.push_back (*j);
}
bool pkgPolicy::InitDefaults()
{
// Initialize the priorities based on the status of the package file
- for (pkgCache::PkgFileIterator I = Cache->FileBegin(); I != Cache->FileEnd(); I++)
+ for (pkgCache::PkgFileIterator I = Cache->FileBegin(); I != Cache->FileEnd(); ++I)
{
PFPriority[I->ID] = 500;
if ((I->Flags & pkgCache::Flag::NotSource) == pkgCache::Flag::NotSource)
signed Cur = 989;
StatusOverride = false;
for (vector<Pin>::const_iterator I = Defaults.begin(); I != Defaults.end();
- I++, Cur--)
+ ++I, --Cur)
{
pkgVersionMatch Match(I->Data,I->Type);
- for (pkgCache::PkgFileIterator F = Cache->FileBegin(); F != Cache->FileEnd(); F++)
+ for (pkgCache::PkgFileIterator F = Cache->FileBegin(); F != Cache->FileEnd(); ++F)
{
if (Match.FileMatch(F) == true && Fixed[F->ID] == false)
{
}
if (_config->FindB("Debug::pkgPolicy",false) == true)
- for (pkgCache::PkgFileIterator F = Cache->FileBegin(); F != Cache->FileEnd(); F++)
+ for (pkgCache::PkgFileIterator F = Cache->FileBegin(); F != Cache->FileEnd(); ++F)
std::clog << "Prio of " << F.FileName() << ' ' << PFPriority[F->ID] << std::endl;
return true;
tracks the default when the default is taken away, and a permanent
pin that stays at that setting.
*/
- for (pkgCache::VerIterator Ver = Pkg.VersionList(); Ver.end() == false; Ver++)
+ for (pkgCache::VerIterator Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
{
/* Lets see if this version is the installed version */
bool instVer = (Pkg.CurrentVer() == Ver);
- for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; VF++)
+ for (pkgCache::VerFileIterator VF = Ver.FileList(); VF.end() == false; ++VF)
{
/* If this is the status file, and the current version is not the
version in the status file (ie it is not installed, or somesuch)
}
return 0;
+}
+signed short pkgPolicy::GetPriority(pkgCache::PkgFileIterator const &File)
+{
+ return PFPriority[File->ID];
}
/*}}}*/
// PreferenceSection class - Overriding the default TrimRecord method /*{{{*/
vector<string> const List = GetListOfFilesInDir(Dir, "pref", true, true);
// Read the files
- for (vector<string>::const_iterator I = List.begin(); I != List.end(); I++)
+ for (vector<string>::const_iterator I = List.begin(); I != List.end(); ++I)
if (ReadPinFile(Plcy, *I) == false)
return false;
return true;
/* */
pkgSourceList::~pkgSourceList()
{
- for (const_iterator I = SrcList.begin(); I != SrcList.end(); I++)
+ for (const_iterator I = SrcList.begin(); I != SrcList.end(); ++I)
delete *I;
}
/*}}}*/
/* */
void pkgSourceList::Reset()
{
- for (const_iterator I = SrcList.begin(); I != SrcList.end(); I++)
+ for (const_iterator I = SrcList.begin(); I != SrcList.end(); ++I)
delete *I;
SrcList.erase(SrcList.begin(),SrcList.end());
}
bool pkgSourceList::FindIndex(pkgCache::PkgFileIterator File,
pkgIndexFile *&Found) const
{
- for (const_iterator I = SrcList.begin(); I != SrcList.end(); I++)
+ for (const_iterator I = SrcList.begin(); I != SrcList.end(); ++I)
{
vector<pkgIndexFile *> *Indexes = (*I)->GetIndexFiles();
for (vector<pkgIndexFile *>::const_iterator J = Indexes->begin();
- J != Indexes->end(); J++)
+ J != Indexes->end(); ++J)
{
if ((*J)->FindInCache(*File.Cache()) == File)
{
/* */
bool pkgSourceList::GetIndexes(pkgAcquire *Owner, bool GetAll) const
{
- for (const_iterator I = SrcList.begin(); I != SrcList.end(); I++)
+ for (const_iterator I = SrcList.begin(); I != SrcList.end(); ++I)
if ((*I)->GetIndexes(Owner,GetAll) == false)
return false;
return true;
vector<string> const List = GetListOfFilesInDir(Dir, "list", true);
// Read the files
- for (vector<string>::const_iterator I = List.begin(); I != List.end(); I++)
+ for (vector<string>::const_iterator I = List.begin(); I != List.end(); ++I)
if (ReadAppend(*I) == false)
return false;
return true;
/* */
time_t pkgSourceList::GetLastModifiedTime()
{
- // go over the parts
+ vector<string> List;
+
string Main = _config->FindFile("Dir::Etc::sourcelist");
string Parts = _config->FindDir("Dir::Etc::sourceparts");
- vector<string> const List = GetListOfFilesInDir(Parts, "list", true);
+
+ // go over the parts
+ if (DirectoryExists(Parts) == true)
+ List = GetListOfFilesInDir(Parts, "list", true);
// calculate the time
time_t mtime_sources = GetModificationTime(Main);
- for (vector<string>::const_iterator I = List.begin(); I != List.end(); I++)
+ for (vector<string>::const_iterator I = List.begin(); I != List.end(); ++I)
mtime_sources = std::max(mtime_sources, GetModificationTime(*I));
return mtime_sources;
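pkgcachegen.cc combines this helper with GetModificationTime() to rebuild the cache whenever any sources.list fragment is newer than the cache file (see the comparison further up). A minimal sketch of that freshness test, with the cache path passed in as an illustrative assumption:

#include <apt-pkg/sourcelist.h>
#include <apt-pkg/fileutl.h>
#include <string>

static bool CacheIsStale(pkgSourceList &List, std::string const &CacheFile)
{
   // any sources.list (or fragment) newer than the cache file => regenerate
   return List.GetLastModifiedTime() > GetModificationTime(CacheFile);
}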
#include <apt-pkg/fileutl.h>
#include <stdio.h>
-
+
class pkgTagSection
{
const char *Section;
-
// We have a limit of 256 tags per section.
unsigned int Indexes[256];
unsigned int AlphaIndexes[0x100];
-
unsigned int TagCount;
+ // dpointer placeholder (for later in case we need it)
+ void *d;
/* This very simple hash function for the last 8 letters gives
very good performance on the debian package files */
return Res & 0xFF;
}
-
protected:
const char *Stop;
Stop = this->Stop;
};
- pkgTagSection() : Section(0), Stop(0) {};
+ pkgTagSection() : Section(0), TagCount(0), Stop(0) {};
+ virtual ~pkgTagSection() {};
};
+class pkgTagFilePrivate;
class pkgTagFile
{
- FileFd &Fd;
- char *Buffer;
- char *Start;
- char *End;
- bool Done;
- unsigned long iOffset;
- unsigned long Size;
+ pkgTagFilePrivate *d;
bool Fill();
bool Resize();
public:
bool Step(pkgTagSection &Section);
- inline unsigned long Offset() {return iOffset;};
+ unsigned long Offset();
bool Jump(pkgTagSection &Tag,unsigned long Offset);
pkgTagFile(FileFd *F,unsigned long Size = 32*1024);
- ~pkgTagFile();
+ virtual ~pkgTagFile();
};
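The d-pointer change keeps the public pkgTagFile interface intact, so existing client code keeps compiling. A minimal sketch of the usual Step()/FindS() loop (the status file path is only an example):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/tagfile.h>
#include <apt-pkg/error.h>
#include <iostream>

int main()
{
   FileFd Fd("/var/lib/dpkg/status", FileFd::ReadOnly);
   pkgTagFile Tags(&Fd);
   pkgTagSection Section;
   // walk all stanzas and print the Package field of each
   while (Tags.Step(Section) == true)
      std::cout << Section.FindS("Package") << std::endl;
   return _error->PendingError() == true ? 1 : 0;
}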
/* This is the list of things to rewrite. The rewriter
c1out << _("Y") << endl;
return true;
}
+ else if (_config->FindB("APT::Get::Assume-No",false) == true)
+ {
+ c1out << _("N") << endl;
+ return false;
+ }
char response[1024] = "";
cin.getline(response, sizeof(response));
if (Start == End)
break;
- Start++;
+ ++Start;
}
}
}
continue;
// Print out any essential package dependents that are to be removed
- for (pkgCache::DepIterator D = I.CurrentVer().DependsList(); D.end() == false; D++)
+ for (pkgCache::DepIterator D = I.CurrentVer().DependsList(); D.end() == false; ++D)
{
// Skip everything but depends
if (D->Type != pkgCache::Dep::PreDepends &&
unsigned long Downgrade = 0;
unsigned long Install = 0;
unsigned long ReInstall = 0;
- for (pkgCache::PkgIterator I = Dep.PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Dep.PkgBegin(); I.end() == false; ++I)
{
if (Dep[I].NewInstall() == true)
Install++;
}
// if we found no candidate which provides this package, show non-candidates
if (provider == 0)
- for (I = Pkg.ProvidesList(); I.end() == false; I++)
+ for (I = Pkg.ProvidesList(); I.end() == false; ++I)
out << " " << I.OwnerPkg().FullName(true) << " " << I.OwnerVer().VerStr()
<< _(" [Not candidate version]") << endl;
else
SPtrArray<bool> Seen = new bool[Cache.GetPkgCache()->Head().PackageCount];
memset(Seen,0,Cache.GetPkgCache()->Head().PackageCount*sizeof(*Seen));
for (pkgCache::DepIterator Dep = Pkg.RevDependsList();
- Dep.end() == false; Dep++) {
+ Dep.end() == false; ++Dep) {
if (Dep->Type != pkgCache::Dep::Replaces)
continue;
if (Seen[Dep.ParentPkg()->ID] == true)
struct TryToRemove {
pkgCacheFile* Cache;
pkgProblemResolver* Fix;
- bool FixBroken;
bool PurgePkgs;
- unsigned long AutoMarkChanged;
TryToRemove(pkgCacheFile &Cache, pkgProblemResolver *PM) : Cache(&Cache), Fix(PM),
PurgePkgs(_config->FindB("APT::Get::Purge", false)) {};
List = new pkgCache::Package *[Cache->Head().PackageCount];
memset(List,0,sizeof(*List)*Cache->Head().PackageCount);
pkgCache::PkgIterator I = Cache->PkgBegin();
- for (;I.end() != true; I++)
+ for (;I.end() != true; ++I)
List[I->ID] = I;
SortCache = *this;
if ((DCache->PolicyBrokenCount() > 0))
{
// upgrade all policy-broken packages with ForceImportantDeps=True
- for (pkgCache::PkgIterator I = Cache->PkgBegin(); !I.end(); I++)
+ for (pkgCache::PkgIterator I = Cache->PkgBegin(); !I.end(); ++I)
if ((*DCache)[I].NowPolicyBroken() == true)
DCache->MarkInstall(I,true,0, false, true);
}
if (_config->FindB("APT::Get::Purge",false) == true)
{
pkgCache::PkgIterator I = Cache->PkgBegin();
- for (; I.end() == false; I++)
+ for (; I.end() == false; ++I)
{
if (I.Purge() == false && Cache[I].Mode == pkgDepCache::ModeDelete)
Cache->MarkDelete(I,true);
if (_config->FindB("APT::Get::Print-URIs") == true)
{
pkgAcquire::UriIterator I = Fetcher.UriBegin();
- for (; I != Fetcher.UriEnd(); I++)
+ for (; I != Fetcher.UriEnd(); ++I)
cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
I->Owner->FileSize << ' ' << I->Owner->HashSum() << endl;
return true;
{
if ((*I)->Local == true)
{
- I++;
+ ++I;
continue;
}
// Print out errors
bool Failed = false;
- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); I++)
+ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I)
{
if ((*I)->Status == pkgAcquire::Item::StatDone &&
(*I)->Complete == true)
// we have a default release, try to locate the pkg. we do it like
// this because GetCandidateVer() will not "downgrade", that means
// "apt-get source -t stable apt" won't work on a unstable system
- for (pkgCache::VerIterator Ver = Pkg.VersionList();; Ver++)
+ for (pkgCache::VerIterator Ver = Pkg.VersionList();; ++Ver)
{
// try first only exact matches, later fuzzy matches
if (Ver.end() == true)
continue;
for (pkgCache::VerFileIterator VF = Ver.FileList();
- VF.end() == false; VF++)
+ VF.end() == false; ++VF)
{
/* If this is the status file, and the current version is not the
version in the status file (ie it is not installed, or somesuch)
return false;
pkgAcquire::UriIterator I = Fetcher.UriBegin();
- for (; I != Fetcher.UriEnd(); I++)
+ for (; I != Fetcher.UriEnd(); ++I)
cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
I->Owner->FileSize << ' ' << I->Owner->HashSum() << endl;
return true;
{
// Call the scored problem resolver
Fix->InstallProtect();
- if (Fix->Resolve(true) == false)
- _error->Discard();
+ Fix->Resolve(true);
delete Fix;
}
c1out << _("The following information may help to resolve the situation:") << endl;
c1out << endl;
ShowBroken(c1out,Cache,false);
- return _error->Error(_("Broken packages"));
- }
+ if (_error->PendingError() == true)
+ return false;
+ else
+ return _error->Error(_("Broken packages"));
+ }
}
if (!DoAutomaticRemove(Cache))
return false;
if(Start.TargetPkg().ProvidesList() != 0)
{
pkgCache::PrvIterator I = Start.TargetPkg().ProvidesList();
- for (; I.end() == false; I++)
+ for (; I.end() == false; ++I)
{
pkgCache::PkgIterator Pkg = I.OwnerPkg();
if (Cache[Pkg].CandidateVerIter(Cache) == I.OwnerVer() &&
if (Start >= End)
break;
- Start++;
+ ++Start;
}
if(foundInstalledInOrGroup == false)
// Install everything with the install flag set
pkgCache::PkgIterator I = Cache->PkgBegin();
- for (;I.end() != true; I++)
+ for (;I.end() != true; ++I)
{
/* Install the package only if it is a new install, the autoupgrader
will deal with the rest */
/* Now install their deps too, if we do this above then order of
the status file is significant for | groups */
- for (I = Cache->PkgBegin();I.end() != true; I++)
+ for (I = Cache->PkgBegin();I.end() != true; ++I)
{
/* Install the package only if it is a new install, the autoupgrader
will deal with the rest */
}
// Apply erasures now, they override everything else.
- for (I = Cache->PkgBegin();I.end() != true; I++)
+ for (I = Cache->PkgBegin();I.end() != true; ++I)
{
// Remove packages
if (I->SelectedState == pkgCache::State::DeInstall ||
// Hold back held packages.
if (_config->FindB("APT::Ignore-Hold",false) == false)
{
- for (pkgCache::PkgIterator I = Cache->PkgBegin(); I.end() == false; I++)
+ for (pkgCache::PkgIterator I = Cache->PkgBegin(); I.end() == false; ++I)
{
if (I->SelectedState == pkgCache::State::Hold)
{
strprintf(descr, _("Downloading %s %s"), Pkg.Name(), Ver.VerStr());
// get the most appropriate hash
HashString hash;
+ if (rec.SHA512Hash() != "")
+ hash = HashString("sha512", rec.SHA512Hash());
- if (rec.SHA256Hash() != "")
+ else if (rec.SHA256Hash() != "")
hash = HashString("sha256", rec.SHA256Hash());
else if (rec.SHA1Hash() != "")
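The HashString built above carries the hash type together with the digest, so the fetcher can later verify the downloaded file with whichever algorithm was recorded. A minimal sketch of that behaviour (digest and path are placeholders):

#include <apt-pkg/hashes.h>
#include <iostream>

int main()
{
   HashString hash("sha256", "placeholder-digest");
   std::cout << hash.toStr() << std::endl;        // "<type>:<digest>" form
   if (hash.VerifyFile("/tmp/example.deb") == false)
      std::cout << "hash mismatch or file missing" << std::endl;
   return 0;
}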
if (_config->FindB("APT::Get::Print-URIs") == true)
{
pkgAcquire::UriIterator I = Fetcher.UriBegin();
- for (; I != Fetcher.UriEnd(); I++)
+ for (; I != Fetcher.UriEnd(); ++I)
cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
I->Owner->FileSize << ' ' << I->Owner->HashSum() << endl;
return true;
// Load them into the fetcher
for (vector<pkgSrcRecords::File>::const_iterator I = Lst.begin();
- I != Lst.end(); I++)
+ I != Lst.end(); ++I)
{
// Try to guess what sort of file it is we are getting.
if (I->Type == "dsc")
if (_config->FindB("APT::Get::Print-URIs") == true)
{
pkgAcquire::UriIterator I = Fetcher.UriBegin();
- for (; I != Fetcher.UriEnd(); I++)
+ for (; I != Fetcher.UriEnd(); ++I)
cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
I->Owner->FileSize << ' ' << I->Owner->HashSum() << endl;
delete[] Dsc;
// Print error messages
bool Failed = false;
- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); I++)
+ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I)
{
if ((*I)->Status == pkgAcquire::Item::StatDone &&
(*I)->Complete == true)
if (Process == 0)
{
bool const fixBroken = _config->FindB("APT::Get::Fix-Broken", false);
- for (unsigned I = 0; I != J; I++)
+ for (unsigned I = 0; I != J; ++I)
{
string Dir = Dsc[I].Package + '-' + Cache->VS().UpstreamVersion(Dsc[I].Version.c_str());
BuildDeps.push_back(rec);
}
- if (BuildDeps.size() == 0)
+ if (BuildDeps.empty() == true)
{
ioprintf(c1out,_("%s has no build depends.\n"),Src.c_str());
continue;
vector <pkgSrcRecords::Parser::BuildDepRec>::iterator D;
pkgProblemResolver Fix(Cache);
bool skipAlternatives = false; // skip remaining alternatives in an or group
- for (D = BuildDeps.begin(); D != BuildDeps.end(); D++)
+ for (D = BuildDeps.begin(); D != BuildDeps.end(); ++D)
{
bool hasAlternatives = (((*D).Op & pkgCache::Dep::Or) == pkgCache::Dep::Or);
* installed
*/
pkgCache::PrvIterator Prv = Pkg.ProvidesList();
- for (; Prv.end() != true; Prv++)
+ for (; Prv.end() != true; ++Prv)
{
if (_config->FindB("Debug::BuildDeps",false) == true)
cout << " Checking provider " << Prv.OwnerPkg().FullName() << endl;
{'s',"dry-run","APT::Get::Simulate",0},
{'s',"no-act","APT::Get::Simulate",0},
{'y',"yes","APT::Get::Assume-Yes",0},
- {'y',"assume-yes","APT::Get::Assume-Yes",0},
+ {'y',"assume-yes","APT::Get::Assume-Yes",0},
+ {0,"assume-no","APT::Get::Assume-No",0},
{'f',"fix-broken","APT::Get::Fix-Broken",0},
{'u',"show-upgraded","APT::Get::Show-Upgraded",0},
{'m',"ignore-missing","APT::Get::Fix-Missing",0},
{0,"install-recommends","APT::Install-Recommends",CommandLine::Boolean},
{0,"install-suggests","APT::Install-Suggests",CommandLine::Boolean},
{0,"fix-policy","APT::Get::Fix-Policy-Broken",0},
+ {0,"solver","APT::Solver",CommandLine::HasArg},
{'c',"config-file",0,CommandLine::ConfigFile},
{'o',"option",0,CommandLine::ArbItem},
{0,0,0,0}};
+apt (0.8.16~exp5) experimental; urgency=low
+
++ * merged the latest debian-sid fixes
+ * apt-pkg/makefile:
+ - install sha256.h compat header
+ * apt-pkg/pkgcachegen.{cc,h}:
+ - use ref-to-ptr semantic in NewDepends() to ensure that the
+ libapt does not segfault if the cache is remapped in between
+ (LP: #812862)
+ - fix crash when P.Arch() was used but the cache got remapped
+ * apt-pkg/acquire-item.{cc,h}:
+ - do not check for a "Package" tag in optional index targets
+ like the translations index
+
+ -- Michael Vogt <mvo@debian.org> Fri, 05 Aug 2011 10:57:08 +0200
+
+apt (0.8.16~exp4) experimental; urgency=low
+
+ [ Julian Andres Klode ]
+ * apt-pkg/pkgcache.h:
+ - [ABI break] Add pkgCache::Header::CacheFileSize, storing the cache size
+ * apt-pkg/pkgcachegen.cc:
+ - Write the file size to the cache
+ * apt-pkg/pkgcache.cc:
+ - Check that cache is at least CacheFileSize bytes large (LP: #16467)
+
+ [ Michael Vogt ]
+ * merged latest fixes from debian-sid
+ * apt-pkg/cdrom.{cc,h}:
+ - cleanup old ABI break avoidance hacks
+ * [ABI break] apt-pkg/acquire-item.{cc,h}:
+ - cleanup around OptionalIndexTarget and SubIndexTarget
+ * [ABI break] merged patch from Jonathan Thomas to have a new
+ RecordField() function in the pkgRecorder parser. Many thanks
+ Thomas
+ * [ABI break] merge patch from Jonathan Thomas to speed up the
+ depcache by caching the install-recommends and install-suggests
+ values
+ * apt-pkg/contrib/fileutl.{cc,h}:
+ - add GetModificationTime() helper
+ * apt-pkg/pkgcachegen.cc:
+ - regenerate the cache if the sources.list changes to ensure
+ that changes in the ordering there will be honored by apt
+ * apt-pkg/sourcelist.{cc,h}:
+ - add pkgSourceList::GetLastModifiedTime() helper
+
+ -- Michael Vogt <mvo@debian.org> Thu, 28 Jul 2011 16:57:08 +0200
+
+apt (0.8.16~exp3) experimental; urgency=low
+
+ [ David Kalnischkies ]
+ * apt-pkg/pkgcache.h:
+ - readd All{Foreign,Allowed} as suggested by Julian to
+ remain strictly API compatible
+ * apt-pkg/acquire*.{cc,h}:
+ - try even harder to support really big files in the fetcher by
+ converting (hopefully) everything to 'long long' (Closes: #632271)
+ * ftparchive/writer.cc:
+ - generate all checksums in one run over the file for Release
+ * cmdline/apt-get.cc:
+ - add an --assume-no option for testing to say 'no' to everything
+ * apt-pkg/deb/debmetaindex.cc:
+ - add trusted=yes option to mark unsigned (local) repository as trusted
+ based on a patch from Ansgar Burchardt, thanks a lot! (Closes: #596498)
+
+ [ Michael Vogt ]
+ * merge fixes from the debian/unstable upload
+ * merge lp:~mvo/apt/sha512-template to get fixes for the
+ sha1/md5 verification (closes: #632520)
+
+ -- Michael Vogt <mvo@debian.org> Fri, 15 Jul 2011 09:56:17 +0200
+
+apt (0.8.16~exp2) experimental; urgency=low
+
+ [ David Kalnischkies ]
+ * [ABI-Break] Implement EDSP in libapt-pkg so that all front-ends which
+ use the internal resolver can now also be used with external
+ ones, as the usage is hidden behind the old API
+ * provide two edsp solvers in apt-utils:
+ - 'dump' to quickly output a complete scenario and
+ - 'apt' to use the internal as an external resolver
+ * apt-pkg/pkgcache.h:
+ - clean up mess with the "all" handling in MultiArch to
+ fix LP: #733741 cleanly for everyone now
+ * apt-pkg/depcache.cc:
+ - use a boolean instead of an int for Add/Remove in AddStates
+ similar to how it works with AddSizes
+ - let the Mark methods return if their marking was successful
+ - if a Breaks can't be upgraded, remove it. If it or a Conflict
+ can't be removed the installation of the breaker fails.
+ * cmdline/apt-get.cc:
+ - do not discard the error messages from the resolver and instead
+ only show the general 'Broken packages' message if nothing else
+
+ [ Stefano Zacchiroli ]
+ * doc/external-dependency-solver-protocol.txt:
+ - describe EDSP and the configuration interface around it
+
+ [ Michael Vogt ]
+ * [ABI-Break] merge lp:~mvo/apt/sha512-template to add support for sha512
+ * [ABI-Break] merge lp:~mvo/apt/dpointer to support easier extending
+ without breaking the ABI
+ * increase ABI version and update package names
+
+ -- Michael Vogt <mvo@debian.org> Wed, 29 Jun 2011 13:57:28 +0200
+
+apt (0.8.16~exp1) experimental; urgency=low
+
+ * merged with the debian/unstable upload
+
+ -- Michael Vogt <mvo@debian.org> Wed, 29 Jun 2011 12:40:31 +0200
+
+ apt (0.8.15.6) unstable; urgency=low
+
+ [ Michael Vogt ]
+ * apt-pkg/contrib/fileutl.{cc,h}:
+ - add GetModificationTime() helper
+ * apt-pkg/pkgcachegen.cc:
+ - regenerate the cache if the sources.list changes to ensure
+ that changes in the ordering there will be honored by apt
+ * apt-pkg/sourcelist.{cc,h}:
+ - add pkgSourceList::GetLastModifiedTime() helper
+ * apt-pkg/pkgcachegen.{cc,h}:
+ - use ref-to-ptr semantic in NewDepends() to ensure that the
+ libapt does not segfault if the cache is remapped in between
+ (LP: #812862)
+ - fix crash when P.Arch() was used but the cache got remapped
+ * test/integration/test-hashsum-verification:
+ - add regression test for hashsum verification
+ * apt-pkg/acquire-item.cc:
+ - if no Release.gpg file is found, still load the hashes for
+ verification (closes: #636314) and add test
+
+ [ David Kalnischkies ]
+ * lots of cppcheck fixes
+
+ -- Michael Vogt <mvo@debian.org> Mon, 15 Aug 2011 09:20:35 +0200
+
apt (0.8.15.5) unstable; urgency=low
[ David Kalnischkies ]
-- Michael Vogt <mvo@debian.org> Tue, 28 Jun 2011 18:00:48 +0200
+apt (0.8.15~exp3) experimental; urgency=low
+
+ * debian/control:
+ - add Breaks: apt (<< 0.8.15~exp3) for libapt-pkg4.10 and
+ libapt-inst1.2 (thanks to Jonathan Nieder, closes: #630214)
+ - use depends for the ${shlibs:Depends} to make the breaks work
+
+ -- Michael Vogt <mvo@debian.org> Fri, 17 Jun 2011 21:51:41 +0200
+
+apt (0.8.15~exp2) experimental; urgency=low
+
+ * debian/control:
+ - fix incorrect Replaces (closes: #630204) for libapt-inst1.2
+
+ -- Michael Vogt <mvo@debian.org> Wed, 15 Jun 2011 16:51:14 +0200
+
+apt (0.8.15~exp1) experimental; urgency=low
+
+ [ Julian Andres Klode ]
+ * apt-pkg/depcache.cc:
+ - Really release action groups only once (Closes: #622744)
+ - Make purge work again for config-files (LP: #244598) (Closes: #150831)
+ * apt-pkg/acquire-item.cc:
+ - Reject files known to be invalid (LP: #346386) (Closes: #627642)
+ * debian/apt.cron.daily:
+ - Check power after wait, patch by manuel-soto (LP: #705269)
+ * debian/control:
+ - Move ${shlibs:Depends} to Pre-Depends, as we do not want APT
+ unpacked if a library is too old and thus break upgrades
+ * doc/apt-key.8.xml:
+ - Document apt-key net-update (LP: #192810)
+
+ [ Christian Perrier ]
+ * Galician translation update (Miguel Anxo Bouzada). Closes: #626505
+ * Italian translation update (Milo Casagrande). Closes: #627834
+ * German documentation translation update (Chris Leick). Closes: #629949
+
+ [ David Kalnischkies ]
+ * fix a bunch of cppcheck warnings/errors based on a patch by
+ Niels Thykier, thanks! (Closes: #622805)
+ * apt-pkg/depcache.cc:
+ - really include 'rc' packages in the delete count by fixing a
+ typo which exists since 1999 in the source… (LP: #761175)
+ - if critical or-group can't be satisfied, exit directly.
+ * apt-pkg/acquire-method.cc:
+ - write directly to stdout instead of creating the message in
+ memory first before writing to avoid hitting limits
+ - fix order of CurrentURI and UsedMirror in Status() and Log()
+ * apt-pkg/orderlist.cc:
+ - let VisitRProvides report if the calls were successful
+ * apt-pkg/deb/dpkgpm.cc:
+ - replace obsolete usleep with nanosleep
+ * debian/apt{,-utils}.symbols:
+ - update both experimental symbol-files to reflect 0.8.14 state
+ * debian/rules:
+ - remove unused embedded jquery by doxygen from libapt-pkg-doc
+ * cmdline/apt-mark.cc:
+ - reimplement apt-mark in c++
+ - provide a 'showmanual' command (Closes: #582791)
+ - provide a 'dpkg --set-selections' wrapper to set/release holds
+ * cmdline/apt-get.cc:
+ - deprecate mostly undocumented 'markauto' in favor of 'apt-mark'
+ * cmdline/apt-cache.cc:
+ - deprecate mostly undocumented 'showauto' in favor of 'apt-mark'
+ * apt-pkg/pkgcache.cc:
+ - really ignore :arch in FindPkg() in non-multiarch environment
+ * doc/po/de.po:
+ - undo the translation of the command 'dump' in manpage of apt-config
+ as reported by Burghard Grossmann on debian-l10n-german, thanks!
+ * apt-pkg/deb/debmetaindex.cc:
+ - do not download TranslationIndex if no Translation-* will be
+ downloaded later on anyway (Closes: #624218)
+ * test/versions.lst:
+ - disable obscure version number tests with versions dpkg doesn't
+ allow any more as they don't start with a number
+ * apt-pkg/acquire-worker.cc:
+ - print filename in the unmatching size warning (Closes: #623137)
+ * apt-pkg/acquire-item.cc:
+ - apply fix for poorly worded 'locate file' error message from
+ Ben Finney, thanks! (Closes: #623171)
+ * methods/http.cc:
+ - add config option to ignore a closed stdin to be able to easily
+ use the method as a simple standalone downloader
+ - Location header in redirects should be absolute URI, but some
+ servers just send an absolute path so still deal with it properly
+ - dequote URL taken from Location in redirects as we will otherwise
+ quote an already quoted string in the request later (Closes: #602412)
+ * apt-pkg/contrib/netrc.cc:
+ - replace non-posix gnu-extension strdupa with strdup
+ * apt-pkg/packagemanager.cc:
+ - ensure for Multi-Arch:same packages that they are unpacked in
+ lock step even in immediate configuration (Closes: #618288)
+
+ [ Michael Vogt ]
+ * methods/mirror.cc:
+ - ignore lines starting with "#" in the mirror file
+ - ignore non http urls in the mirrors
+ - append the dist (e.g. sid, wheezy) as a query string when
+ asking for a suitable mirror
+ * debian/control:
+ - add libapt-pkg4.10 and libapt-inst1.2 library packages
+
+ -- Michael Vogt <mvo@debian.org> Fri, 10 Jun 2011 15:32:07 +0200
+
+apt (0.8.14.2) UNRELEASED; urgency=low
+
+ [ Julian Andres Klode ]
+ * apt-pkg/depcache.cc:
+ - Really release action groups only once (Closes: #622744)
+ - Make purge work again for config-files (LP: #244598) (Closes: #150831)
+ * debian/apt.cron.daily:
+ - Check power after wait, patch by manuel-soto (LP: #705269)
+ * debian/control:
+ - Move ${shlibs:Depends} to Pre-Depends, as we do not want APT
+ unpacked if a library is too old and thus break upgrades
+ * doc/apt-key.8.xml:
+ - Document apt-key net-update (LP: #192810)
+
+ [ Christian Perrier ]
+ * Galician translation update (Miguel Anxo Bouzada). Closes: #626505
+
+ [ David Kalnischkies ]
+ * fix a bunch of cppcheck warnings/errors based on a patch by
+ Niels Thykier, thanks! (Closes: #622805)
+ * apt-pkg/depcache.cc:
+ - really include 'rc' packages in the delete count by fixing a
+ typo which exists since 1999 in the source… (LP: #761175)
+ - if critical or-group can't be satisfied, exit directly.
+ * apt-pkg/acquire-method.cc:
+ - write directly to stdout instead of creating the message in
+ memory first before writing to avoid hitting limits
+ - fix order of CurrentURI and UsedMirror in Status() and Log()
+ * apt-pkg/orderlist.cc:
+ - let VisitRProvides report if the calls were successful
+ * apt-pkg/deb/dpkgpm.cc:
+ - replace obsolete usleep with nanosleep
+ * debian/apt{,-utils}.symbols:
+ - update both experimental symbol-files to reflect 0.8.14 state
+ * debian/rules:
+ - remove unused embedded jquery by doxygen from libapt-pkg-doc
+ * cmdline/apt-mark.cc:
+ - reimplement apt-mark in c++
+ - provide a 'showmanual' command (Closes: #582791)
+ - provide a 'dpkg --set-selections' wrapper to set/release holds
+ * cmdline/apt-get.cc:
+ - deprecate mostly undocumented 'markauto' in favor of 'apt-mark'
+ * cmdline/apt-cache.cc:
+ - deprecate mostly undocumented 'showauto' in favor of 'apt-mark'
+ * apt-pkg/pkgcache.cc:
+ - really ignore :arch in FindPkg() in non-multiarch environment
+ * doc/po/de.po:
+ - undo the translation of the command 'dump' in manpage of apt-config
+ as reported by Burghard Grossmann on debian-l10n-german, thanks!
+ * apt-pkg/deb/debmetaindex.cc:
+ - do not download TranslationIndex if no Translation-* will be
+ downloaded later on anyway (Closes: #624218)
+ * test/versions.lst:
+ - disable obscure version number tests with versions dpkg doesn't
+ allow any more as they don't start with a number
+ * apt-pkg/acquire-worker.cc:
+ - print filename in the unmatching size warning (Closes: #623137)
+ * apt-pkg/acquire-item.cc:
+ - apply fix for poorly worded 'locate file' error message from
+ Ben Finney, thanks! (Closes: #623171)
+ * methods/http.cc:
+ - add config option to ignore a closed stdin to be able to easily
+ use the method as a simple standalone downloader
+ - Location header in redirects should be absolute URI, but some
+ servers just send an absolute path so still deal with it properly
+ - dequote URL taken from Location in redirects as we will otherwise
+ quote an already quoted string in the request later (Closes: #602412)
+ * apt-pkg/contrib/netrc.cc:
+ - replace non-posix gnu-extension strdupa with strdup
+ * apt-pkg/packagemanager.cc:
+ - ensure for Multi-Arch:same packages that they are unpacked in
+ lock step even in immediate configuration (Closes: #618288)
+
+ -- Michael Vogt <mvo@debian.org> Mon, 16 May 2011 14:57:52 +0200
+
apt (0.8.14.1) unstable; urgency=low
* apt-pkg/acquire-item.cc:
#include <apt-pkg/configuration.h>
#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/md5.h>
-#include <apt-pkg/sha1.h>
-#include <apt-pkg/sha256.h>
+#include <apt-pkg/hashes.h>
#include <apt-pkg/deblistparser.h>
#include <sys/types.h>
DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true);
DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true);
+ DoSHA512 = _config->FindB("APT::FTPArchive::SHA512",true);
}
/*}}}*/
// FTWScanner::Scanner - FTW Scanner /*{{{*/
DoMD5 = _config->FindB("APT::FTPArchive::Packages::MD5",DoMD5);
DoSHA1 = _config->FindB("APT::FTPArchive::Packages::SHA1",DoSHA1);
DoSHA256 = _config->FindB("APT::FTPArchive::Packages::SHA256",DoSHA256);
+ DoSHA512 = _config->FindB("APT::FTPArchive::Packages::SHA512",DoSHA512);
DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false);
DoContents = _config->FindB("APT::FTPArchive::Contents",true);
NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
bool PackagesWriter::DoPackage(string FileName)
{
// Pull all the data we need form the DB
- if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoAlwaysStat)
+ if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoSHA512, DoAlwaysStat)
== false)
{
return false;
SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str());
if (DoSHA256 == true)
SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str());
+ if (DoSHA512 == true)
+ SetTFRewriteData(Changes[End++], "SHA512", Db.SHA512Res.c_str());
SetTFRewriteData(Changes[End++], "Filename", NewFileName.c_str());
SetTFRewriteData(Changes[End++], "Priority", OverItem->Priority.c_str());
SetTFRewriteData(Changes[End++], "Status", 0);
}
for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin();
- I != OverItem->FieldOverride.end(); I++)
+ I != OverItem->FieldOverride.end(); ++I)
SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
SetTFRewriteData(Changes[End++], 0, 0);
MD5Summation MD5;
SHA1Summation SHA1;
SHA256Summation SHA256;
+ SHA512Summation SHA512;
if (DoMD5 == true)
MD5.Add((unsigned char *)Start,BlkEnd - Start);
SHA1.Add((unsigned char *)Start,BlkEnd - Start);
if (DoSHA256 == true)
SHA256.Add((unsigned char *)Start,BlkEnd - Start);
+ if (DoSHA512 == true)
+ SHA512.Add((unsigned char *)Start,BlkEnd - Start);
// Add an extra \n to the end, just in case
*BlkEnd++ = '\n';
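SHA512Summation follows the same Add()/Result() pattern as the other summation classes used here. A minimal sketch (the sha2.h header name is assumed from the sha512 merge noted in the changelog; the input data is arbitrary):

#include <apt-pkg/sha2.h>
#include <iostream>
#include <string>

int main()
{
   SHA512Summation Sum;
   const char *Data = "Hello world";
   Sum.Add((unsigned char *)Data, 11);            // feed data block-wise
   std::cout << std::string(Sum.Result()) << std::endl;
   return 0;
}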
<< strippedName << "\n " << Tags.FindS("Checksums-Sha256");
string const ChecksumsSha256 = ostreamSha256.str();
+ std::ostringstream ostreamSha512;
+ if (Tags.Exists("Checksums-Sha512"))
+ ostreamSha512 << "\n " << string(SHA512.Result()) << " " << St.st_size << " "
+ << strippedName << "\n " << Tags.FindS("Checksums-Sha512");
+ string const ChecksumsSha512 = ostreamSha512.str();
+
// Strip the DirStrip prefix from the FileName and add the PathPrefix
string NewFileName;
if (DirStrip.empty() == false &&
SetTFRewriteData(Changes[End++],"Checksums-Sha1",ChecksumsSha1.c_str());
if (ChecksumsSha256.empty() == false)
SetTFRewriteData(Changes[End++],"Checksums-Sha256",ChecksumsSha256.c_str());
+ if (ChecksumsSha512.empty() == false)
+ SetTFRewriteData(Changes[End++],"Checksums-Sha512",ChecksumsSha512.c_str());
if (Directory != "./")
SetTFRewriteData(Changes[End++],"Directory",Directory.c_str());
SetTFRewriteData(Changes[End++],"Priority",BestPrio.c_str());
SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str());
for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin();
- I != SOverItem->FieldOverride.end(); I++)
+ I != SOverItem->FieldOverride.end(); ++I)
SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str());
SetTFRewriteData(Changes[End++], 0, 0);
CheckSums[NewFileName].size = fd.Size();
+ Hashes hs;
+ hs.AddFD(fd.Fd(), 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
if (DoMD5 == true)
- {
- MD5Summation MD5;
- MD5.AddFD(fd.Fd(), fd.Size());
- CheckSums[NewFileName].MD5 = MD5.Result();
- fd.Seek(0);
- }
+ CheckSums[NewFileName].MD5 = hs.MD5.Result();
if (DoSHA1 == true)
- {
- SHA1Summation SHA1;
- SHA1.AddFD(fd.Fd(), fd.Size());
- CheckSums[NewFileName].SHA1 = SHA1.Result();
- fd.Seek(0);
- }
+ CheckSums[NewFileName].SHA1 = hs.SHA1.Result();
if (DoSHA256 == true)
- {
- SHA256Summation SHA256;
- SHA256.AddFD(fd.Fd(), fd.Size());
- CheckSums[NewFileName].SHA256 = SHA256.Result();
- }
-
+ CheckSums[NewFileName].SHA256 = hs.SHA256.Result();
+ if (DoSHA512 == true)
+ CheckSums[NewFileName].SHA512 = hs.SHA512.Result();
fd.Close();
-
+
return true;
}
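The Hashes helper used above computes all enabled digests in a single pass over the file descriptor, replacing the three separate AddFD()/Seek() rounds that were removed. A minimal sketch (the path is an example; the four booleans select md5/sha1/sha256/sha512 as in the hunk above):

#include <apt-pkg/hashes.h>
#include <apt-pkg/fileutl.h>
#include <iostream>
#include <string>

int main()
{
   FileFd fd("/etc/hostname", FileFd::ReadOnly);
   Hashes hs;
   // one read pass, all four digests
   hs.AddFD(fd.Fd(), fd.Size(), true, true, true, true);
   std::cout << std::string(hs.SHA256.Result()) << std::endl;
   fd.Close();
   return 0;
}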
(*I).first.c_str());
}
}
+
+ fprintf(Output, "SHA512:\n");
+ for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin();
+ I != CheckSums.end();
+ ++I)
+ {
+ fprintf(Output, " %s %32ld %s\n",
+ (*I).second.SHA512.c_str(),
+ (*I).second.size,
+ (*I).first.c_str());
+ }
+
}