author    David Kalnischkies <david@kalnischkies.de>    2015-10-11 13:58:23 +0200
committer David Kalnischkies <david@kalnischkies.de>    2015-11-04 18:04:00 +0100
commit    af9e40c9bfb353b8aea1e2621b3b5a8c1c1db4bd (patch)
tree      45c52718410a77b27ac7ae6745b52d3ec03e9fda
parent    3685f84d8c0abfbddd12e034561e5f3fe8cbf2eb (diff)
download  apt-af9e40c9bfb353b8aea1e2621b3b5a8c1c1db4bd.tar.gz
unbreak the copy-method claiming hashsum mismatch since ~exp9
Commit 653ef26c70dc9c0e2cbfdd4e79117876bb63e87d broke the camel's back: everything works in terms of our internal use of copy:/, but external use is completely destroyed. This is kinda the reverse of what happened in "parallel" in the sid branch, where external use was mostly fine, but both internal and external use exploded with the GzipIndexes option.

We fix this now by rewriting our internal use so that copy:/ only does what the name suggests: copy files, without uncompressing them on-the-fly. We then teach copy and the uncompressors how to deal with /dev/null and use it as the destination file in case we don't want to store the uncompressed files on disk.

Closes: 799158
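A minimal sketch of the external use case this restores, assuming a local mirror under /srv/mirror (hypothetical path); the copy:// source entry and the Acquire::GzipIndexes option mirror what the updated test exercises:

    # hypothetical sources.list entry driving the copy method directly
    echo 'deb copy:///srv/mirror unstable main' > /etc/apt/sources.list.d/local.list
    # keep the fetched indexes compressed on disk
    apt-get -o Acquire::GzipIndexes=true update

Before this commit such an update failed with the copy method claiming a hashsum mismatch; with copy:/ reduced to plain copying and /dev/null accepted as a decompression target, both internal and external use verify against the correct hashes again.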
-rw-r--r--  apt-pkg/acquire-item.cc                    35
-rw-r--r--  debian/control                              2
-rw-r--r--  methods/copy.cc                             4
-rw-r--r--  methods/gzip.cc                            54
-rw-r--r--  test/integration/framework                  5
-rwxr-xr-x  test/integration/test-compressed-indexes   25
6 files changed, 81 insertions, 44 deletions
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 53f7f3295..98739f7a6 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -2619,7 +2619,18 @@ void pkgAcqIndex::StageDownloadDone(string const &Message, HashStringList const
// Methods like e.g. "file:" will give us a (compressed) FileName that is
// not the "DestFile" we set, in this case we uncompress from the local file
if (FileName != DestFile && RealFileExists(DestFile) == false)
+ {
Local = true;
+ if (Target.KeepCompressed == true)
+ {
+ // but if we don't keep the uncompress we copy the compressed file first
+ Stage = STAGE_DOWNLOAD;
+ Desc.URI = "copy:" + FileName;
+ QueueURI(Desc);
+ SetActiveSubprocess("copy");
+ return;
+ }
+ }
else
EraseFileName = FileName;
@@ -2633,18 +2644,6 @@ void pkgAcqIndex::StageDownloadDone(string const &Message, HashStringList const
return;
}
- // If we want compressed indexes, just copy in place for hash verification
- if (Target.KeepCompressed == true)
- {
- DestFile = GetPartialFileNameFromURI(Target.URI + '.' + CurrentCompressionExtension);
- EraseFileName = "";
- Stage = STAGE_DECOMPRESS_AND_VERIFY;
- Desc.URI = "copy:" + FileName;
- QueueURI(Desc);
- SetActiveSubprocess("copy");
- return;
- }
-
// get the binary name for your used compression type
string decompProg;
if(CurrentCompressionExtension == "uncompressed")
@@ -2657,9 +2656,16 @@ void pkgAcqIndex::StageDownloadDone(string const &Message, HashStringList const
return;
}
+ if (Target.KeepCompressed == true)
+ {
+ DestFile = "/dev/null";
+ EraseFileName.clear();
+ }
+ else
+ DestFile += ".decomp";
+
// queue uri for the next stage
Stage = STAGE_DECOMPRESS_AND_VERIFY;
- DestFile += ".decomp";
Desc.URI = decompProg + ":" + FileName;
QueueURI(Desc);
SetActiveSubprocess(decompProg);
@@ -2670,6 +2676,9 @@ void pkgAcqIndex::StageDecompressDone(string const &,
HashStringList const &,
pkgAcquire::MethodConfig const * const)
{
+ if (Target.KeepCompressed == true && DestFile == "/dev/null")
+ DestFile = GetPartialFileNameFromURI(Target.URI + '.' + CurrentCompressionExtension);
+
// Done, queue for rename on transaction finished
TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
return;
diff --git a/debian/control b/debian/control
index df94c150f..e74afa109 100644
--- a/debian/control
+++ b/debian/control
@@ -43,7 +43,7 @@ Architecture: any
Multi-Arch: same
Pre-Depends: ${misc:Pre-Depends}
Depends: ${shlibs:Depends}, ${misc:Depends}
-Breaks: apt (<< 1.1~exp4), libapt-inst1.5 (<< 0.9.9~)
+Breaks: apt (<< 1.1~exp14), libapt-inst1.5 (<< 0.9.9~)
Recommends: apt (>= ${binary:Version})
Section: libs
Description: package management runtime library
diff --git a/methods/copy.cc b/methods/copy.cc
index 0c9f322e6..373ad3604 100644
--- a/methods/copy.cc
+++ b/methods/copy.cc
@@ -38,7 +38,7 @@ class CopyMethod : public pkgAcqMethod
void CopyMethod::CalculateHashes(FetchItem const * const Itm, FetchResult &Res)
{
Hashes Hash(Itm->ExpectedHashes);
- FileFd Fd(Res.Filename, FileFd::ReadOnly, FileFd::Extension);
+ FileFd Fd(Res.Filename, FileFd::ReadOnly);
Hash.AddFD(Fd);
Res.TakeHashes(Hash);
}
@@ -65,7 +65,7 @@ bool CopyMethod::Fetch(FetchItem *Itm)
URIStart(Res);
// just calc the hashes if the source and destination are identical
- if (File == Itm->DestFile)
+ if (File == Itm->DestFile || Itm->DestFile == "/dev/null")
{
CalculateHashes(Itm, Res);
URIDone(Res);
diff --git a/methods/gzip.cc b/methods/gzip.cc
index 637aae124..2429069e5 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -71,28 +71,36 @@ bool GzipMethod::Fetch(FetchItem *Itm)
return _error->Error("Extraction of file %s requires unknown compressor %s", Path.c_str(), Prog);
// Open the source and destination files
- FileFd From, To;
+ FileFd From;
if (_config->FindB("Method::Compress", false) == false)
{
From.Open(Path, FileFd::ReadOnly, *compressor);
if(From.FileSize() == 0)
return _error->Error(_("Empty files can't be valid archives"));
- To.Open(Itm->DestFile, FileFd::WriteAtomic);
}
else
- {
From.Open(Path, FileFd::ReadOnly);
- To.Open(Itm->DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Empty, *compressor);
+ if (From.IsOpen() == false || From.Failed() == true)
+ return false;
+
+ FileFd To;
+ if (Itm->DestFile != "/dev/null")
+ {
+ if (_config->FindB("Method::Compress", false) == false)
+ To.Open(Itm->DestFile, FileFd::WriteAtomic);
+ else
+ To.Open(Itm->DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Empty, *compressor);
+
+ if (To.IsOpen() == false || To.Failed() == true)
+ return false;
+ To.EraseOnFailure();
}
- To.EraseOnFailure();
- if (From.IsOpen() == false || From.Failed() == true ||
- To.IsOpen() == false || To.Failed() == true)
- return false;
// Read data from source, generate checksums and write
Hashes Hash(Itm->ExpectedHashes);
bool Failed = false;
+ Res.Size = 0;
while (1)
{
unsigned char Buffer[4*1024];
@@ -100,14 +108,16 @@ bool GzipMethod::Fetch(FetchItem *Itm)
if (!From.Read(Buffer,sizeof(Buffer),&Count))
{
- To.OpFail();
+ if (To.IsOpen())
+ To.OpFail();
return false;
}
if (Count == 0)
break;
+ Res.Size += Count;
Hash.Add(Buffer,Count);
- if (To.Write(Buffer,Count) == false)
+ if (To.IsOpen() && To.Write(Buffer,Count) == false)
{
Failed = true;
break;
@@ -115,23 +125,25 @@ bool GzipMethod::Fetch(FetchItem *Itm)
}
From.Close();
- Res.Size = To.FileSize();
To.Close();
if (Failed == true)
return false;
// Transfer the modification times
- struct stat Buf;
- if (stat(Path.c_str(),&Buf) != 0)
- return _error->Errno("stat",_("Failed to stat"));
-
- struct timeval times[2];
- times[0].tv_sec = Buf.st_atime;
- Res.LastModified = times[1].tv_sec = Buf.st_mtime;
- times[0].tv_usec = times[1].tv_usec = 0;
- if (utimes(Itm->DestFile.c_str(), times) != 0)
- return _error->Errno("utimes",_("Failed to set modification time"));
+ if (Itm->DestFile != "/dev/null")
+ {
+ struct stat Buf;
+ if (stat(Path.c_str(),&Buf) != 0)
+ return _error->Errno("stat",_("Failed to stat"));
+
+ struct timeval times[2];
+ times[0].tv_sec = Buf.st_atime;
+ Res.LastModified = times[1].tv_sec = Buf.st_mtime;
+ times[0].tv_usec = times[1].tv_usec = 0;
+ if (utimes(Itm->DestFile.c_str(), times) != 0)
+ return _error->Errno("utimes",_("Failed to set modification time"));
+ }
// Return a Done response
Res.TakeHashes(Hash);
diff --git a/test/integration/framework b/test/integration/framework
index 6e3977eee..a9acd83a9 100644
--- a/test/integration/framework
+++ b/test/integration/framework
@@ -1157,8 +1157,11 @@ webserverconfig() {
rewritesourceslist() {
local APTARCHIVE="file://$(readlink -f "${TMPWORKINGDIRECTORY}/aptarchive" | sed 's# #%20#g')"
+ local APTARCHIVE2="copy://$(readlink -f "${TMPWORKINGDIRECTORY}/aptarchive" | sed 's# #%20#g')"
for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do
- sed -i $LIST -e "s#$APTARCHIVE#${1}#" -e "s#http://localhost:${APTHTTPPORT}/#${1}#" -e "s#https://localhost:${APTHTTPSPORT}/#${1}#"
+ sed -i $LIST -e "s#$APTARCHIVE#${1}#" -e "s#$APTARCHIVE2#${1}#" \
+ -e "s#http://localhost:${APTHTTPPORT}/#${1}#" \
+ -e "s#https://localhost:${APTHTTPSPORT}/#${1}#"
done
}
diff --git a/test/integration/test-compressed-indexes b/test/integration/test-compressed-indexes
index b5d6b8ae4..d163a98ef 100755
--- a/test/integration/test-compressed-indexes
+++ b/test/integration/test-compressed-indexes
@@ -140,25 +140,38 @@ GOODSHOWSRC="$(aptcache showsrc testpkg)
test $(echo "$GOODSHOWSRC" | grep -e '^Package: testpkg' -e '^Format: 3.0 (native)' -e '^Files:' -e '^Checksums-Sha256:' | wc -l) -eq 4 || msgdie 'showsrc is broken'
testsuccessequal "$GOODSHOWSRC" aptcache showsrc testpkg
GOODPOLICY="$(aptcache policy testpkg)"
-test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 file:/' | wc -l) -eq 4 || msgdie 'policy is broken'
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 file:/' | wc -l) -eq 4 || msgdie 'file policy is broken'
testsuccessequal "$GOODPOLICY" aptcache policy testpkg
-
for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'file' $COMPRESSOR; done
-changetowebserver
+rewritesourceslist "copy://${TMPWORKINGDIRECTORY}/aptarchive"
rm -rf rootdir/var/lib/apt/lists
testsuccess aptget update
GOODPOLICY="$(aptcache policy testpkg)"
-test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 http://' | wc -l) -eq 4 || msgdie 'policy is broken'
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 copy:/' | wc -l) -eq 4 || msgdie 'copy policy is broken'
testsuccessequal "$GOODPOLICY" aptcache policy testpkg
+for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'copy' $COMPRESSOR; done
+changetowebserver
+rm -rf rootdir/var/lib/apt/lists
+testsuccess aptget update
+GOODPOLICY="$(aptcache policy testpkg)"
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 http://' | wc -l) -eq 4 || msgdie 'http policy is broken'
+testsuccessequal "$GOODPOLICY" aptcache policy testpkg
for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'http' $COMPRESSOR; done
+changetohttpswebserver
+rm -rf rootdir/var/lib/apt/lists
+testsuccess aptget update
+GOODPOLICY="$(aptcache policy testpkg)"
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 https://' | wc -l) -eq 4 || msgdie 'https policy is broken'
+testsuccessequal "$GOODPOLICY" aptcache policy testpkg
+for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'https' $COMPRESSOR; done
+
changetocdrom 'Debian APT Testdisk 0.8.15'
rm -rf rootdir/var/lib/apt/lists
testsuccess aptcdrom add </dev/null
GOODPOLICY="$(aptcache policy testpkg)"
-test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 cdrom://' | wc -l) -eq 4 || msgdie 'policy is broken'
+test $(echo "$GOODPOLICY" | grep -e '^testpkg:' -e '^ Candidate:' -e '^ Installed: (none)' -e '500 cdrom://' | wc -l) -eq 4 || msgdie 'cdrom policy is broken'
testsuccessequal "$GOODPOLICY" aptcache policy testpkg
-
for COMPRESSOR in 'gzip' 'bzip2' 'lzma' 'xz'; do testovermethod 'cdrom' $COMPRESSOR; done