Commit 653ef26c70dc9c0e2cbfdd4e79117876bb63e87d broke the camel's back
insofar as everything works in terms of our internal use of copy:/, but
external use is completely destroyed. This is somewhat the reverse of
what happened in "parallel" in the sid branch, where external use was
mostly fine, but internal and external use exploded once the
GzipIndexes option was involved.

We fix this now by rewriting our internal use so that copy:/ only does
what the name suggests: copy files, not uncompress them on-the-fly. We
then teach copy and the uncompressors how to deal with /dev/null and
use it as the destination file in case we don't want to store the
uncompressed files on disk.

Closes: 799158
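
Both uses can be exercised via the integration-test framework modified
below; a rough sketch, assuming the framework's usual rootdir layout
and helpers (testsuccess, aptget), with illustrative file and suite
names:

    # external use: a copy:// source must be fetchable again
    echo "deb copy://${TMPWORKINGDIRECTORY}/aptarchive unstable main" > rootdir/etc/apt/sources.list.d/copy-test.list
    testsuccess aptget update

    # internal use: with Acquire::GzipIndexes the lists stay compressed on
    # disk; the uncompressor then only streams to /dev/null for hash checks
    echo 'Acquire::GzipIndexes "true";' > rootdir/etc/apt/apt.conf.d/02compressindex
    testsuccess aptget update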
// Methods like e.g. "file:" will give us a (compressed) FileName that is
// not the "DestFile" we set, in this case we uncompress from the local file
if (FileName != DestFile && RealFileExists(DestFile) == false)
+ {
+ if (Target.KeepCompressed == true)
+ {
+ // but if we don't keep the uncompressed file, copy the compressed file into place first
+ Stage = STAGE_DOWNLOAD;
+ Desc.URI = "copy:" + FileName;
+ QueueURI(Desc);
+ SetActiveSubprocess("copy");
+ return;
+ }
+ }
else
EraseFileName = FileName;
- // If we want compressed indexes, just copy in place for hash verification
- if (Target.KeepCompressed == true)
- {
- DestFile = GetPartialFileNameFromURI(Target.URI + '.' + CurrentCompressionExtension);
- EraseFileName = "";
- Stage = STAGE_DECOMPRESS_AND_VERIFY;
- Desc.URI = "copy:" + FileName;
- QueueURI(Desc);
- SetActiveSubprocess("copy");
- return;
- }
-
// get the binary name for your used compression type
string decompProg;
if(CurrentCompressionExtension == "uncompressed")
+ if (Target.KeepCompressed == true)
+ {
+ DestFile = "/dev/null";
+ EraseFileName.clear();
+ }
+ else
+ DestFile += ".decomp";
+
// queue uri for the next stage
Stage = STAGE_DECOMPRESS_AND_VERIFY;
Desc.URI = decompProg + ":" + FileName;
QueueURI(Desc);
SetActiveSubprocess(decompProg);
HashStringList const &,
pkgAcquire::MethodConfig const * const)
{
+ if (Target.KeepCompressed == true && DestFile == "/dev/null")
+ DestFile = GetPartialFileNameFromURI(Target.URI + '.' + CurrentCompressionExtension);
+
// Done, queue for rename on transaction finished
TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
return;
Multi-Arch: same
Pre-Depends: ${misc:Pre-Depends}
Depends: ${shlibs:Depends}, ${misc:Depends}
-Breaks: apt (<< 1.1~exp4), libapt-inst1.5 (<< 0.9.9~)
+Breaks: apt (<< 1.1~exp14), libapt-inst1.5 (<< 0.9.9~)
Recommends: apt (>= ${binary:Version})
Section: libs
Description: package management runtime library
void CopyMethod::CalculateHashes(FetchItem const * const Itm, FetchResult &Res)
{
Hashes Hash(Itm->ExpectedHashes);
- FileFd Fd(Res.Filename, FileFd::ReadOnly, FileFd::Extension);
+ FileFd Fd(Res.Filename, FileFd::ReadOnly);
Hash.AddFD(Fd);
Res.TakeHashes(Hash);
}
URIStart(Res);
// just calc the hashes if the source and destination are identical
- if (File == Itm->DestFile)
+ if (File == Itm->DestFile || Itm->DestFile == "/dev/null")
{
CalculateHashes(Itm, Res);
URIDone(Res);
return _error->Error("Extraction of file %s requires unknown compressor %s", Path.c_str(), Prog);
// Open the source and destination files
if (_config->FindB("Method::Compress", false) == false)
{
From.Open(Path, FileFd::ReadOnly, *compressor);
if(From.FileSize() == 0)
return _error->Error(_("Empty files can't be valid archives"));
- To.Open(Itm->DestFile, FileFd::WriteAtomic);
From.Open(Path, FileFd::ReadOnly);
- To.Open(Itm->DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Empty, *compressor);
+ if (From.IsOpen() == false || From.Failed() == true)
+ return false;
+
+ FileFd To;
+ if (Itm->DestFile != "/dev/null")
+ {
+ if (_config->FindB("Method::Compress", false) == false)
+ To.Open(Itm->DestFile, FileFd::WriteAtomic);
+ else
+ To.Open(Itm->DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Empty, *compressor);
+
+ if (To.IsOpen() == false || To.Failed() == true)
+ return false;
+ To.EraseOnFailure();
+ }
- if (From.IsOpen() == false || From.Failed() == true ||
- To.IsOpen() == false || To.Failed() == true)
- return false;
// Read data from source, generate checksums and write
Hashes Hash(Itm->ExpectedHashes);
bool Failed = false;
while (1)
{
unsigned char Buffer[4*1024];
if (!From.Read(Buffer,sizeof(Buffer),&Count))
{
+ if (To.IsOpen())
+ To.OpFail();
return false;
}
if (Count == 0)
break;
- if (To.Write(Buffer,Count) == false)
+ if (To.IsOpen() && To.Write(Buffer,Count) == false)
- Res.Size = To.FileSize();
To.Close();
if (Failed == true)
return false;
// Transfer the modification times
- struct stat Buf;
- if (stat(Path.c_str(),&Buf) != 0)
- return _error->Errno("stat",_("Failed to stat"));
-
- struct timeval times[2];
- times[0].tv_sec = Buf.st_atime;
- Res.LastModified = times[1].tv_sec = Buf.st_mtime;
- times[0].tv_usec = times[1].tv_usec = 0;
- if (utimes(Itm->DestFile.c_str(), times) != 0)
- return _error->Errno("utimes",_("Failed to set modification time"));
+ if (Itm->DestFile != "/dev/null")
+ {
+ struct stat Buf;
+ if (stat(Path.c_str(),&Buf) != 0)
+ return _error->Errno("stat",_("Failed to stat"));
+
+ struct timeval times[2];
+ times[0].tv_sec = Buf.st_atime;
+ Res.LastModified = times[1].tv_sec = Buf.st_mtime;
+ times[0].tv_usec = times[1].tv_usec = 0;
+ if (utimes(Itm->DestFile.c_str(), times) != 0)
+ return _error->Errno("utimes",_("Failed to set modification time"));
+ }
// Return a Done response
Res.TakeHashes(Hash);
rewritesourceslist() {
local APTARCHIVE="file://$(readlink -f "${TMPWORKINGDIRECTORY}/aptarchive" | sed 's# #%20#g')"
+ local APTARCHIVE2="copy://$(readlink -f "${TMPWORKINGDIRECTORY}/aptarchive" | sed 's# #%20#g')"
for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do
- sed -i $LIST -e "s#$APTARCHIVE#${1}#" -e "s#http://localhost:${APTHTTPPORT}/#${1}#" -e "s#https://localhost:${APTHTTPSPORT}/#${1}#"
+ sed -i $LIST -e "s#$APTARCHIVE#${1}#" -e "s#$APTARCHIVE2#${1}#" \
+ -e "s#http://localhost:${APTHTTPPORT}/#${1}#" \
+ -e "s#https://localhost:${APTHTTPSPORT}/#${1}#"
test $(echo "$GOODSHOWSRC" | grep -e '^Package: testpkg' -e '^Format: 3.0 (native)' -e '^Files:' -e '^Checksums-Sha256:' | wc -l) -eq 4 || msgdie 'showsrc is broken'
testsuccessequal "$GOODSHOWSRC" aptcache showsrc testpkg
GOODPOLICY="$(aptcache policy testpkg)"
-test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 file:/' | wc -l) -eq 4 || msgdie 'policy is broken'
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 file:/' | wc -l) -eq 4 || msgdie 'file policy is broken'
testsuccessequal "$GOODPOLICY" aptcache policy testpkg
for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'file' $COMPRESSOR; done
+rewritesourceslist "copy://${TMPWORKINGDIRECTORY}/aptarchive"
rm -rf rootdir/var/lib/apt/lists
testsuccess aptget update
GOODPOLICY="$(aptcache policy testpkg)"
-test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 http://' | wc -l) -eq 4 || msgdie 'policy is broken'
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 copy:/' | wc -l) -eq 4 || msgdie 'copy policy is broken'
testsuccessequal "$GOODPOLICY" aptcache policy testpkg
+for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'copy' $COMPRESSOR; done
+changetowebserver
+rm -rf rootdir/var/lib/apt/lists
+testsuccess aptget update
+GOODPOLICY="$(aptcache policy testpkg)"
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 http://' | wc -l) -eq 4 || msgdie 'http policy is broken'
+testsuccessequal "$GOODPOLICY" aptcache policy testpkg
for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'http' $COMPRESSOR; done
+changetohttpswebserver
+rm -rf rootdir/var/lib/apt/lists
+testsuccess aptget update
+GOODPOLICY="$(aptcache policy testpkg)"
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 https://' | wc -l) -eq 4 || msgdie 'https policy is broken'
+testsuccessequal "$GOODPOLICY" aptcache policy testpkg
+for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'https' $COMPRESSOR; done
+
changetocdrom 'Debian APT Testdisk 0.8.15'
rm -rf rootdir/var/lib/apt/lists
testsuccess aptcdrom add </dev/null
GOODPOLICY="$(aptcache policy testpkg)"
-test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 cdrom://' | wc -l) -eq 4 || msgdie 'policy is broken'
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 cdrom://' | wc -l) -eq 4 || msgdie 'cdrom policy is broken'
testsuccessequal "$GOODPOLICY" aptcache policy testpkg
for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'cdrom' $COMPRESSOR; done