author    David Kalnischkies <david@kalnischkies.de>    2014-11-29 17:59:52 +0100
committer David Kalnischkies <david@kalnischkies.de>    2014-12-22 14:23:39 +0100
commit    92e8c1ff287ab829de825e00cdf94744e699ff97 (patch)
tree      01a66b8ce8616bdc71cbc150f04e2f7e524d06b9 /cmdline
parent    988d4f441eb2c7ac64cec339dbc02daa47fe84a4 (diff)
dispose http(s) 416 error page as non-content
Real webservers (like apache) actually send an error page with a 416 response, but our client didn't expect it, leaving the page on the socket to be parsed as the response for the next request (http) or as file content (https), which isn't what we want at all… The symptom is a "Bad header line", as html usually doesn't parse that well into an http header.

This manifests itself e.g. if we have a complete file (or larger) in partial/ which isn't discarded by If-Range because the server doesn't support it (or the file is just newer, think: mirror rotation).

It is a sort-of regression of 78c72d0ce22e00b194251445aae306df357d5c1a, which removed the filesize - 1 trick, but that had its own problems…

To properly test this, our webserver gains the ability to reply with transfer-encoding: chunked, as most real webservers will use it to send dynamically generated error pages.

(The tests and their binary helpers had to be slightly modified to apply, but the patch that fixes the issue itself is unchanged.)

Closes: 768797
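The gist of the fix can be sketched outside of APT's acquire code: a reply whose status is not 200/206 may still carry a body, and that body has to be consumed and thrown away instead of being stored as file content or left behind to garble the next response. A minimal sketch, assuming a simplified Response record (StatusCode, ContentLength and Chunked are illustrative names, not APT's actual ServerState fields):

    #include <iostream>

    struct Response
    {
       unsigned int StatusCode;          // e.g. 200, 206, 416
       unsigned long long ContentLength; // bytes announced via Content-Length, 0 if unknown
       bool Chunked;                     // Transfer-Encoding: chunked (draining then reads chunk by chunk)
    };

    // Only a 200 or 206 body is (partial) file data worth keeping; anything
    // else, including a 416 with an attached error page, still has to be
    // read off the connection, but it is disposed of as non-content.
    static bool BodyIsContent(Response const &Res)
    {
       return Res.StatusCode == 200 || Res.StatusCode == 206;
    }

    int main()
    {
       Response const R = { 416, 312, false };
       if (BodyIsContent(R))
          std::cout << "store the body as file content" << std::endl;
       else
          std::cout << "drain and discard " << R.ContentLength
                    << " bytes so the next response parses cleanly" << std::endl;
       return 0;
    }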
Diffstat (limited to 'cmdline')
-rw-r--r--   cmdline/apt-helper.cc | 35
1 file changed, 23 insertions, 12 deletions
diff --git a/cmdline/apt-helper.cc b/cmdline/apt-helper.cc
index dd43ea1bc..63f70983c 100644
--- a/cmdline/apt-helper.cc
+++ b/cmdline/apt-helper.cc
@@ -48,23 +48,34 @@ static bool DoDownloadFile(CommandLine &CmdL)
if (CmdL.FileSize() <= 2)
return _error->Error(_("Must specify at least one pair url/filename"));
-
pkgAcquire Fetcher;
AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0));
Fetcher.Setup(&Stat);
- std::string download_uri = CmdL.FileList[1];
- std::string targetfile = CmdL.FileList[2];
- std::string hash;
- if (CmdL.FileSize() > 3)
- hash = CmdL.FileList[3];
- // we use download_uri as descr and targetfile as short-descr
- new pkgAcqFile(&Fetcher, download_uri, hash, 0, download_uri, targetfile,
- "dest-dir-ignored", targetfile);
- Fetcher.Run();
+
+ size_t fileind = 0;
+ std::vector<std::string> targetfiles;
+ while (fileind + 2 <= CmdL.FileSize())
+ {
+ std::string download_uri = CmdL.FileList[fileind + 1];
+ std::string targetfile = CmdL.FileList[fileind + 2];
+ std::string hash;
+ if (CmdL.FileSize() > fileind + 3)
+ hash = CmdL.FileList[fileind + 3];
+ // we use download_uri as descr and targetfile as short-descr
+ new pkgAcqFile(&Fetcher, download_uri, hash, 0, download_uri, targetfile,
+ "dest-dir-ignored", targetfile);
+ targetfiles.push_back(targetfile);
+ fileind += 3;
+ }
+
bool Failed = false;
- if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true ||
- FileExists(targetfile) == false)
+ if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true)
return _error->Error(_("Download Failed"));
+ if (targetfiles.empty() == false)
+ for (std::vector<std::string>::const_iterator f = targetfiles.begin(); f != targetfiles.end(); ++f)
+ if (FileExists(*f) == false)
+ return _error->Error(_("Download Failed"));
+
return true;
}
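The apt-helper hunk above is presumably one of the "binary helper" adjustments the commit message mentions: download-file now loops over the arguments in url/filename(/hash) groups, queues a pkgAcqFile for each, runs the fetcher once, and only then checks that every requested target exists, instead of handling exactly one pair, so the testcases can fetch several files in a single invocation.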