##################################################################### */
/*}}}*/
// Include Files /*{{{*/
-#include "apt-ftparchive.h"
-
+#include <config.h>
+
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/cmndline.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/init.h>
-#include <config.h>
-#include <apti18n.h>
#include <algorithm>
#include <climits>
#include <sys/time.h>
#include <regex.h>
+#include "apt-ftparchive.h"
#include "contents.h"
#include "multicompress.h"
-#include "writer.h"
+#include "writer.h"
+
+#include <apti18n.h>
/*}}}*/
using namespace std;
Packages.Output = 0; // Just in case
// Finish compressing
- unsigned long Size;
+ unsigned long long Size;
if (Comp.Finalize(Size) == false)
{
c0out << endl;
SrcDone = true;
// Create a package writer object.
- SourcesWriter Sources(flCombine(OverrideDir,BinOverride),
+ SourcesWriter Sources(_config->Find("APT::FTPArchive::DB"),
+ flCombine(OverrideDir,BinOverride),
flCombine(OverrideDir,SrcOverride),
flCombine(OverrideDir,SrcExtraOverride));
if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
Sources.Output = 0; // Just in case
// Finish compressing
- unsigned long Size;
+ unsigned long long Size;
if (Comp.Finalize(Size) == false)
{
c0out << endl;
if (_error->PendingError() == true)
return false;
- unsigned long Size = Head.Size();
+ unsigned long long Size = Head.Size();
unsigned char Buf[4096];
while (Size != 0)
{
- unsigned long ToRead = Size;
+ unsigned long long ToRead = Size;
if (Size > sizeof(Buf))
ToRead = sizeof(Buf);
files associated with this contents file into one great big honking
memory structure, then dump the sorted version */
c0out << ' ' << this->Contents << ":" << flush;
- for (vector<PackageMap>::iterator I = Begin; I != End; I++)
+ for (vector<PackageMap>::iterator I = Begin; I != End; ++I)
{
if (I->Contents != this->Contents)
continue;
Contents.Finish();
// Finish compressing
- unsigned long Size;
+ unsigned long long Size;
if (Comp.Finalize(Size) == false || _error->PendingError() == true)
{
c0out << endl;
string DIPrfx = Setup.Find("TreeDefault::InternalPrefix",
"$(DIST)/$(SECTION)/");
string DContents = Setup.Find("TreeDefault::Contents",
- "$(DIST)/Contents-$(ARCH)");
+ "$(DIST)/$(SECTION)/Contents-$(ARCH)");
string DContentsH = Setup.Find("TreeDefault::Contents::Header","");
string DBCache = Setup.Find("TreeDefault::BinCacheDB",
"packages-$(ARCH).db");
/* */
bool ShowHelp(CommandLine &CmdL)
{
- ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,VERSION,
+ ioprintf(cout,_("%s %s for %s compiled on %s %s\n"),PACKAGE,PACKAGE_VERSION,
COMMON_ARCH,__DATE__,__TIME__);
if (_config->FindB("version") == true)
return true;
SOverride.c_str());
// Create a package writer object.
- SourcesWriter Sources(Override,SOverride);
+ SourcesWriter Sources(_config->Find("APT::FTPArchive::DB"),Override,SOverride);
if (_error->PendingError() == true)
return false;
// Generate packages
if (CmdL.FileSize() <= 2)
{
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
if (I->GenPackages(Setup,Stats) == false)
_error->DumpErrors();
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
if (I->GenSources(Setup,SrcStats) == false)
_error->DumpErrors();
}
// Make a choice list out of the package list..
RxChoiceList *List = new RxChoiceList[2*PkgList.size()+1];
RxChoiceList *End = List;
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
{
End->UserData = &(*I);
End->Str = I->BaseDir.c_str();
}
// close the Translation master files
- for (vector<PackageMap>::reverse_iterator I = PkgList.rbegin(); I != PkgList.rend(); I++)
+ for (vector<PackageMap>::reverse_iterator I = PkgList.rbegin(); I != PkgList.rend(); ++I)
if (I->TransWriter != NULL && I->TransWriter->DecreaseRefCounter() == 0)
delete I->TransWriter;
// Sort the contents file list by date
string ArchiveDir = Setup.FindDir("Dir::ArchiveDir");
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
{
struct stat A;
if (MultiCompress::GetStat(flCombine(ArchiveDir,I->Contents),
hashes of the .debs this means they have not changed either so the
contents must be up to date. */
unsigned long MaxContentsChange = Setup.FindI("Default::MaxContentsChange",UINT_MAX)*1024;
- for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); I++)
+ for (vector<PackageMap>::iterator I = PkgList.begin(); I != PkgList.end(); ++I)
{
- // This record is not relevent
+ // This record is not relevant
if (I->ContentsDone == true ||
I->Contents.empty() == true)
continue;
_error->DumpErrors();
string CacheDB = I->BinCacheDB;
- for (; I != PkgList.end() && I->BinCacheDB == CacheDB; I++);
+ for (; I != PkgList.end() && I->BinCacheDB == CacheDB; ++I);
}
return true;
CommandLine::Args Args[] = {
{'h',"help","help",0},
{0,"md5","APT::FTPArchive::MD5",0},
+ {0,"sha1","APT::FTPArchive::SHA1",0},
+ {0,"sha256","APT::FTPArchive::SHA256",0},
{'v',"version","version",0},
{'d',"db","APT::FTPArchive::DB",CommandLine::HasArg},
{'s',"source-override","APT::FTPArchive::SourceOverride",CommandLine::HasArg},