author	David Kalnischkies <david@kalnischkies.de>	2015-10-11 13:58:23 +0200
committer	David Kalnischkies <david@kalnischkies.de>	2015-11-04 18:04:00 +0100
commit	af9e40c9bfb353b8aea1e2621b3b5a8c1c1db4bd (patch)
tree	45c52718410a77b27ac7ae6745b52d3ec03e9fda /methods/gzip.cc
parent	3685f84d8c0abfbddd12e034561e5f3fe8cbf2eb (diff)
unbreak the copy-method claiming hashsum mismatch since ~exp9
Commit 653ef26c70dc9c0e2cbfdd4e79117876bb63e87d broke the camel's back insofar as everything works in terms of our internal use of copy:/, but external use is completely broken. This is roughly the reverse of what happened in parallel on the sid branch, where external use was mostly fine, but internal and external use exploded on the GzipIndexes option.

We fix this now by rewriting our internal use so that copy:/ only does what the name suggests: copy files, not uncompress them on-the-fly. We then teach copy and the uncompressors how to deal with /dev/null and use it as the destination file in case we don't want to store the uncompressed files on disk.

Closes: 799158
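The shape of the fix is easiest to see outside the diff: open the destination only when it is not /dev/null, guard every write on the destination actually being open, and count the size while reading instead of asking the output file for it afterwards. Below is a minimal standalone sketch of that pattern using plain C++ streams; FileFd, Hashes and Res are replaced by stand-ins, and the function name hash_and_maybe_copy is made up for illustration. This is not apt's actual code.

#include <fstream>
#include <iostream>
#include <string>

// Sketch only: mirrors the control flow of the patched
// GzipMethod::Fetch, with std::ifstream/std::ofstream
// standing in for apt's FileFd.
static bool hash_and_maybe_copy(std::string const &src, std::string const &dst)
{
   std::ifstream from(src, std::ios::binary);
   if (!from.is_open())
      return false;

   // Open a destination only if the caller wants the data kept;
   // /dev/null means "hash it, then throw it away".
   std::ofstream to;
   if (dst != "/dev/null")
   {
      to.open(dst, std::ios::binary | std::ios::trunc);
      if (!to.is_open())
         return false;
   }

   char buffer[4 * 1024];
   unsigned long long size = 0;  // plays the role of Res.Size
   while (true)
   {
      from.read(buffer, sizeof(buffer));
      std::streamsize const count = from.gcount();
      if (count == 0)
         break;
      size += count;             // counted here, not via the output file
      // real code feeds the buffer into Hashes here
      if (to.is_open() && !to.write(buffer, count))
         return false;
   }
   std::cout << size << " bytes processed\n";
   return true;
}

int main(int argc, char **argv)
{
   // hypothetical invocation: ./sketch ./Packages.gz /dev/null
   if (argc != 3)
      return 2;
   return hash_and_maybe_copy(argv[1], argv[2]) ? 0 : 1;
}

Invoked with /dev/null as the second argument the sketch reads and counts but stores nothing; with a real path it behaves like a plain copy.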
Diffstat (limited to 'methods/gzip.cc')
-rw-r--r--	methods/gzip.cc	54
1 file changed, 33 insertions(+), 21 deletions(-)
diff --git a/methods/gzip.cc b/methods/gzip.cc
index 637aae124..2429069e5 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -71,28 +71,36 @@ bool GzipMethod::Fetch(FetchItem *Itm)
       return _error->Error("Extraction of file %s requires unknown compressor %s", Path.c_str(), Prog);
 
    // Open the source and destination files
-   FileFd From, To;
+   FileFd From;
    if (_config->FindB("Method::Compress", false) == false)
    {
       From.Open(Path, FileFd::ReadOnly, *compressor);
       if(From.FileSize() == 0)
         return _error->Error(_("Empty files can't be valid archives"));
-      To.Open(Itm->DestFile, FileFd::WriteAtomic);
    }
    else
-   {
       From.Open(Path, FileFd::ReadOnly);
-      To.Open(Itm->DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Empty, *compressor);
+   if (From.IsOpen() == false || From.Failed() == true)
+      return false;
+
+   FileFd To;
+   if (Itm->DestFile != "/dev/null")
+   {
+      if (_config->FindB("Method::Compress", false) == false)
+         To.Open(Itm->DestFile, FileFd::WriteAtomic);
+      else
+         To.Open(Itm->DestFile, FileFd::WriteOnly | FileFd::Create | FileFd::Empty, *compressor);
+
+      if (To.IsOpen() == false || To.Failed() == true)
+         return false;
+      To.EraseOnFailure();
    }
-   To.EraseOnFailure();
-   if (From.IsOpen() == false || From.Failed() == true ||
-       To.IsOpen() == false || To.Failed() == true)
-      return false;
 
    // Read data from source, generate checksums and write
    Hashes Hash(Itm->ExpectedHashes);
    bool Failed = false;
+   Res.Size = 0;
    while (1)
    {
       unsigned char Buffer[4*1024];
@@ -100,14 +108,16 @@ bool GzipMethod::Fetch(FetchItem *Itm)
       unsigned long long Count = 0;
       if (!From.Read(Buffer,sizeof(Buffer),&Count))
       {
-         To.OpFail();
+         if (To.IsOpen())
+            To.OpFail();
          return false;
       }
       if (Count == 0)
         break;
+      Res.Size += Count;
       Hash.Add(Buffer,Count);
-      if (To.Write(Buffer,Count) == false)
+      if (To.IsOpen() && To.Write(Buffer,Count) == false)
       {
         Failed = true;
         break;
       }
@@ -115,23 +125,25 @@ bool GzipMethod::Fetch(FetchItem *Itm)
    }
    From.Close();
-   Res.Size = To.FileSize();
    To.Close();
 
    if (Failed == true)
      return false;
 
    // Transfer the modification times
-   struct stat Buf;
-   if (stat(Path.c_str(),&Buf) != 0)
-      return _error->Errno("stat",_("Failed to stat"));
-
-   struct timeval times[2];
-   times[0].tv_sec = Buf.st_atime;
-   Res.LastModified = times[1].tv_sec = Buf.st_mtime;
-   times[0].tv_usec = times[1].tv_usec = 0;
-   if (utimes(Itm->DestFile.c_str(), times) != 0)
-      return _error->Errno("utimes",_("Failed to set modification time"));
+   if (Itm->DestFile != "/dev/null")
+   {
+      struct stat Buf;
+      if (stat(Path.c_str(),&Buf) != 0)
+         return _error->Errno("stat",_("Failed to stat"));
+
+      struct timeval times[2];
+      times[0].tv_sec = Buf.st_atime;
+      Res.LastModified = times[1].tv_sec = Buf.st_mtime;
+      times[0].tv_usec = times[1].tv_usec = 0;
+      if (utimes(Itm->DestFile.c_str(), times) != 0)
+         return _error->Errno("utimes",_("Failed to set modification time"));
+   }
 
    // Return a Done response
    Res.TakeHashes(Hash);
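Two bookkeeping consequences of the /dev/null destination are visible in the hunks above: Res.Size can no longer be read back via To.FileSize() after the loop, since there may be no output file at all, so it is accumulated next to the hash while reading; and the stat/utimes timestamp transfer is skipped entirely, as setting modification times on /dev/null would be meaningless.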