author | David Kalnischkies <david@kalnischkies.de> | 2015-04-11 10:23:52 +0200
---|---|---
committer | David Kalnischkies <david@kalnischkies.de> | 2015-04-19 01:13:09 +0200
commit | 34faa8f7ae2526f46cd1f84bb6962ad06d841e5e (patch) |
tree | 244f91967a9e5bb44677589a7245298f2ecefca7 |
parent | b55ec4203f7a99d380903911f8839aba2a65e27e (diff) |
calculate hashes while downloading in https
We do this in HTTP already to give the CPU some exercise while the disk
is heavily spinning (or flashing?) to store the data, avoiding the need
to reread the entire file later on just to calculate the hashes. That
reread happens outside the eyes of progress reporting, so you could end
up with a bunch of https workers 'stuck' at 100% while they were
actually busy calculating hashes.
This is a bummer for everyone using apt as a connection speedtest, as
the https method now appears slower (it isn't really; it just no longer
reports being done too early).
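
To make the mechanism concrete, here is a minimal standalone sketch of hashing inside a download write callback, in the spirit of this commit but not taken from it: every chunk handed to the callback is written to disk and fed to the digest at the same time, so the finished file never needs a second read pass. It uses libcurl together with OpenSSL's EVP digest API instead of APT's internal Hashes class; the URL and output file name are made-up placeholders.

```cpp
// Hedged sketch only: per-chunk hashing during a download, the general idea
// this commit brings to the https method. Uses libcurl + OpenSSL's EVP API
// rather than APT's Hashes class; URL and file name are placeholders.
#include <curl/curl.h>
#include <openssl/evp.h>
#include <cstdio>

struct DownloadState
{
   FILE *out;       // target file on disk
   EVP_MD_CTX *md;  // running SHA256 state, updated chunk by chunk
};

// libcurl hands us each received chunk; we store it and hash it in one go,
// so no second pass over the finished file is needed afterwards.
static size_t write_cb(char *ptr, size_t size, size_t nmemb, void *userdata)
{
   DownloadState *s = static_cast<DownloadState *>(userdata);
   size_t const bytes = size * nmemb;
   if (fwrite(ptr, 1, bytes, s->out) != bytes)
      return 0;                               // short write: abort the transfer
   if (EVP_DigestUpdate(s->md, ptr, bytes) != 1)
      return 0;                               // hashing failure: abort as well
   return bytes;                              // chunk consumed, keep downloading
}

int main()
{
   DownloadState s;
   s.out = fopen("download.bin", "wb");
   s.md = EVP_MD_CTX_new();
   if (s.out == nullptr || s.md == nullptr)
      return 1;
   EVP_DigestInit_ex(s.md, EVP_sha256(), nullptr);

   curl_global_init(CURL_GLOBAL_DEFAULT);
   CURL *curl = curl_easy_init();
   curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/testfile.big");
   curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_cb);
   curl_easy_setopt(curl, CURLOPT_WRITEDATA, &s);
   CURLcode const rc = curl_easy_perform(curl);
   curl_easy_cleanup(curl);
   curl_global_cleanup();
   fclose(s.out);

   // the digest is ready the moment the transfer ends - no reread of download.bin
   unsigned char digest[EVP_MAX_MD_SIZE];
   unsigned int len = 0;
   EVP_DigestFinal_ex(s.md, digest, &len);
   EVP_MD_CTX_free(s.md);
   for (unsigned int i = 0; i < len; ++i)
      printf("%02x", digest[i]);
   printf("\n");
   return rc == CURLE_OK ? 0 : 1;
}
```

The commit itself plugs the same per-chunk update into the https write_data() callback via Server->GetHashes()->Add(), as the diff below shows.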
Mode | File | Lines changed
---|---|---
-rw-r--r-- | methods/http.cc | 8
-rw-r--r-- | methods/http.h | 2
-rw-r--r-- | methods/https.cc | 38
-rw-r--r-- | methods/https.h | 6
-rw-r--r-- | methods/server.cc | 8
-rw-r--r-- | methods/server.h | 3
-rwxr-xr-x | test/integration/test-apt-download-progress | 6

7 files changed, 47 insertions, 24 deletions
diff --git a/methods/http.cc b/methods/http.cc
index e4773b0e2..af3d5ccb6 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -484,16 +484,14 @@ APT_PURE bool HttpServerState::IsOpen()                 /*{{{*/
    return (ServerFd != -1);
 }
                                                                         /*}}}*/
-bool HttpServerState::InitHashes(FileFd &File, HashStringList const &ExpectedHashes)/*{{{*/
+bool HttpServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/
 {
    delete In.Hash;
    In.Hash = new Hashes(ExpectedHashes);
-
-   // Set the expected size and read file for the hashes
-   File.Truncate(StartPos);
-   return In.Hash->AddFD(File, StartPos);
+   return true;
 }
                                                                         /*}}}*/
+
 APT_PURE Hashes * HttpServerState::GetHashes()                          /*{{{*/
 {
    return In.Hash;
diff --git a/methods/http.h b/methods/http.h
index 6dc872659..e73871931 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -111,7 +111,7 @@ struct HttpServerState: public ServerState
    virtual bool Open();
    virtual bool IsOpen();
    virtual bool Close();
-   virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes);
+   virtual bool InitHashes(HashStringList const &ExpectedHashes);
    virtual Hashes * GetHashes();
    virtual bool Die(FileFd &File);
    virtual bool Flush(FileFd * const File);
diff --git a/methods/https.cc b/methods/https.cc
index 81903b239..c6b75d9ad 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -72,18 +72,18 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
       else
          me->https->Server->StartPos = 0;
 
-      me->https->File->Truncate(me->https->Server->StartPos);
-      me->https->File->Seek(me->https->Server->StartPos);
-
       me->Res->LastModified = me->https->Server->Date;
       me->Res->Size = me->https->Server->Size;
       me->Res->ResumePoint = me->https->Server->StartPos;
 
       // we expect valid data, so tell our caller we get the file now
-      if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 &&
-            me->https->Server->JunkSize == 0 &&
-            me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
-         me->https->URIStart(*me->Res);
+      if (me->https->Server->Result >= 200 && me->https->Server->Result < 300)
+      {
+         if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
+            me->https->URIStart(*me->Res);
+         if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false)
+            return 0;
+      }
    }
    else if (me->https->Server->HeaderLine(line) == false)
       return 0;
@@ -116,16 +116,31 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
       }
    }
 
+   if (me->Server->GetHashes()->Add((unsigned char const * const)buffer, buffer_size) == false)
+      return 0;
+
    return buffer_size;
 }
 
 // HttpsServerState::HttpsServerState - Constructor                     /*{{{*/
-HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner)
+HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner), Hash(NULL)
 {
    TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
    Reset();
 }
                                                                         /*}}}*/
+bool HttpsServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/
+{
+   delete Hash;
+   Hash = new Hashes(ExpectedHashes);
+   return true;
+}
+                                                                        /*}}}*/
+APT_PURE Hashes * HttpsServerState::GetHashes()                         /*{{{*/
+{
+   return Hash;
+}
+                                                                        /*}}}*/
 
 void HttpsMethod::SetupProxy()                                          /*{{{*/
 {
@@ -365,6 +380,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // go for it - if the file exists, append on it
    File = new FileFd(Itm->DestFile, FileFd::WriteAny);
    Server = CreateServerState(Itm->Uri);
+   if (Server->InitHashes(Itm->ExpectedHashes) == false)
+      return false;
 
    // keep apt updated
    Res.Filename = Itm->DestFile;
@@ -443,10 +460,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
       Res.LastModified = resultStat.st_mtime;
 
       // take hashes
-      Hashes Hash(Itm->ExpectedHashes);
-      FileFd Fd(Res.Filename, FileFd::ReadOnly);
-      Hash.AddFD(Fd);
-      Res.TakeHashes(Hash);
+      Res.TakeHashes(*(Server->GetHashes()));
 
       // keep apt updated
       URIDone(Res);
diff --git a/methods/https.h b/methods/https.h
index dc0ff3322..6e32e8d3d 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -29,6 +29,8 @@ class FileFd;
 
 class HttpsServerState : public ServerState
 {
+   Hashes * Hash;
+
    protected:
    virtual bool ReadHeaderLines(std::string &/*Data*/) { return false; }
    virtual bool LoadNextResponse(bool const /*ToFile*/, FileFd * const /*File*/) { return false; }
@@ -42,8 +44,8 @@ class HttpsServerState : public ServerState
    virtual bool Open() { return false; }
    virtual bool IsOpen() { return false; }
    virtual bool Close() { return false; }
-   virtual bool InitHashes(FileFd &/*File*/, HashStringList const &/*ExpectedHashes*/) { return false; }
-   virtual Hashes * GetHashes() { return NULL; }
+   virtual bool InitHashes(HashStringList const &ExpectedHashes);
+   virtual Hashes * GetHashes();
    virtual bool Die(FileFd &/*File*/) { return false; }
    virtual bool Flush(FileFd * const /*File*/) { return false; }
    virtual bool Go(bool /*ToFile*/, FileFd * const /*File*/) { return false; }
diff --git a/methods/server.cc b/methods/server.cc
index e403f1071..2116926b0 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -235,6 +235,12 @@ ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOu
    Reset();
 }
                                                                         /*}}}*/
+bool ServerState::AddPartialFileToHashes(FileFd &File)                  /*{{{*/
+{
+   File.Truncate(StartPos);
+   return GetHashes()->AddFD(File, StartPos);
+}
+                                                                        /*}}}*/
 
 bool ServerMethod::Configuration(string Message)                        /*{{{*/
 {
@@ -357,7 +363,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
       FailFd = File->Fd();
       FailTime = Server->Date;
 
-      if (Server->InitHashes(*File, Queue->ExpectedHashes) == false)
+      if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false)
       {
          _error->Errno("read",_("Problem hashing file"));
         return ERROR_NOT_FROM_SERVER;
diff --git a/methods/server.h b/methods/server.h
index 45622dd34..1b1f754a3 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -72,6 +72,7 @@ struct ServerState
    };
    /** \brief Get the headers before the data */
    RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri);
+   bool AddPartialFileToHashes(FileFd &File);
 
    bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
    virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0;
@@ -85,7 +86,7 @@ struct ServerState
    virtual bool Open() = 0;
    virtual bool IsOpen() = 0;
    virtual bool Close() = 0;
-   virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes) = 0;
+   virtual bool InitHashes(HashStringList const &ExpectedHashes) = 0;
    virtual Hashes * GetHashes() = 0;
    virtual bool Die(FileFd &File) = 0;
    virtual bool Flush(FileFd * const File) = 0;
diff --git a/test/integration/test-apt-download-progress b/test/integration/test-apt-download-progress
index b2c9effe6..07c5e09c5 100755
--- a/test/integration/test-apt-download-progress
+++ b/test/integration/test-apt-download-progress
@@ -26,14 +26,16 @@ assertprogress() {
 TESTFILE=testfile.big
 testsuccess dd if=/dev/zero of=./aptarchive/$TESTFILE bs=800k count=1
 
+OPT='-o APT::Status-Fd=3 -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 -o Debug::Acquire::https=1'
+
 msgtest 'download progress works via' 'http'
 exec 3> apt-progress.log
-testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE -o APT::Status-Fd=3 -o Acquire::http::Dl-Limit=800
+testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE $OPT -o Acquire::http::Dl-Limit=800
 assertprogress apt-progress.log
 
 msgtest 'download progress works via' 'https'
 exec 3> apt-progress.log
-testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE -o APT::Status-Fd=3 -o Acquire::https::Dl-Limit=800
+testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE $OPT -o Acquire::https::Dl-Limit=800
 assertprogress apt-progress.log
 
 # cleanup