if (DebBytes != FetchBytes)
ioprintf(c1out,_("Need to get %sB/%sB of archives.\n"),
SizeToStr(FetchBytes).c_str(),SizeToStr(DebBytes).c_str());
- else
+ else if (DebBytes != 0)
ioprintf(c1out,_("Need to get %sB of archives.\n"),
SizeToStr(DebBytes).c_str());
// Size delta
if (Cache->UsrSize() >= 0)
- ioprintf(c1out,_("After unpacking %sB of additional disk space will be used.\n"),
+ ioprintf(c1out,_("After this operation, %sB of additional disk space will be used.\n"),
SizeToStr(Cache->UsrSize()).c_str());
else
- ioprintf(c1out,_("After unpacking %sB disk space will be freed.\n"),
+ ioprintf(c1out,_("After this operation, %sB disk space will be freed.\n"),
SizeToStr(-1*Cache->UsrSize()).c_str());
if (_error->PendingError() == true)
return _error->Error(_("Unable to lock the list directory"));
}
- // Create the download object
+ // Create the progress
AcqTextStatus Stat(ScreenWidth,_config->FindI("quiet",0));
- pkgAcquire Fetcher(&Stat);
-
-
+
// Just print out the uris an exit if the --print-uris flag was used
if (_config->FindB("APT::Get::Print-URIs") == true)
{
+ // get a fetcher
+ pkgAcquire Fetcher(&Stat);
+
// Populate it with the source selection and get all Indexes
// (GetAll=true)
if (List.GetIndexes(&Fetcher,true) == false)
return false;
}
- // Populate it with the source selection
- if (List.GetIndexes(&Fetcher) == false)
- return false;
-
- // Run it
- if (Fetcher.Run() == pkgAcquire::Failed)
- return false;
-
- bool Failed = false;
- bool TransientNetworkFailure = false;
- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); I++)
- {
- if ((*I)->Status == pkgAcquire::Item::StatDone)
- continue;
-
- (*I)->Finished();
-
- fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(),
- (*I)->ErrorText.c_str());
-
- if ((*I)->Status == pkgAcquire::Item::StatTransientNetworkError)
- {
- TransientNetworkFailure = true;
- continue;
- }
-
- Failed = true;
- }
-
- // Clean out any old list files
- if (!TransientNetworkFailure &&
- _config->FindB("APT::Get::List-Cleanup",true) == true)
- {
- if (Fetcher.Clean(_config->FindDir("Dir::State::lists")) == false ||
- Fetcher.Clean(_config->FindDir("Dir::State::lists") + "partial/") == false)
- return false;
- }
-
- // Prepare the cache.
+ // do the work
CacheFile Cache;
+ bool res = ListUpdate(Stat, List);
+
+ // Rebuild the cache.
if (Cache.BuildCaches() == false)
return false;
- if (TransientNetworkFailure == true)
- _error->Warning(_("Some index files failed to download, they have been ignored, or old ones used instead."));
- else if (Failed == true)
- return _error->Error(_("Some index files failed to download, they have been ignored, or old ones used instead."));
-
- return true;
+ return res;
}
/*}}}*/
buf[end-start] = 0x0;
if (regexec(&Pattern,buf,0,0,0) != 0)
continue;
- res &= TryToInstall(Pkg,Cache,Fix,Remove,true,ExpectedInst);
+ res &= TryToInstall(Pkg,Cache,Fix,Remove,false,ExpectedInst);
found = true;
}
if (Last == 0)
return _error->Error(_("Unable to find a source package for %s"),Src.c_str());
+ string srec = Last->AsStr();
+ string::size_type pos = srec.find("\nVcs-");
+ while (pos != string::npos)
+ {
+ pos += strlen("\nVcs-");
+ string vcs = srec.substr(pos,srec.find(":",pos)-pos);
+ if(vcs == "Browser")
+ {
+ pos = srec.find("\nVcs-", pos);
+ continue;
+ }
+ pos += vcs.length()+2;
+ string::size_type epos = srec.find("\n", pos);
+ string uri = srec.substr(pos,epos-pos).c_str();
+ ioprintf(c1out, _("NOTICE: '%s' packaging is maintained in "
+ "the '%s' version control system at:\n"
+ "%s\n"),
+ Src.c_str(), vcs.c_str(), uri.c_str());
+ if(vcs == "Bzr")
+ ioprintf(c1out,_("Please use:\n"
+ "bzr get %s\n"
+ "to retrieve the latest (possible unreleased) "
+ "updates to the package.\n"),
+ uri.c_str());
+ break;
+ }
+
// Back track
vector<pkgSrcRecords::File> Lst;
if (Last->Files(Lst) == false)
" -d Download only - do NOT install or unpack archives\n"
" -s No-act. Perform ordering simulation\n"
" -y Assume Yes to all queries and do not prompt\n"
- " -f Attempt to continue if the integrity check fails\n"
+ " -f Attempt to correct a system with broken dependencies in place\n"
" -m Attempt to continue if archives are unlocatable\n"
" -u Show a list of upgraded packages as well\n"
" -b Build the source package after fetching it\n"