From dcd5856b11c685ca6d4629212d2978ce196ea65c Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Tue, 26 Aug 2014 19:08:37 -0700 Subject: Pass ExpectedSize to tthe backend method This ensures that we can stop downloading if the server send too much data by accident (or by a malicious attempt) --- apt-pkg/acquire-method.cc | 2 ++ apt-pkg/acquire-method.h | 1 + apt-pkg/acquire-worker.cc | 4 ++++ methods/http.cc | 10 +++++++++- methods/http.h | 4 +++- methods/server.cc | 5 +++++ methods/server.h | 4 +++- 7 files changed, 27 insertions(+), 3 deletions(-) diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc index e4a937d1d..9fc176747 100644 --- a/apt-pkg/acquire-method.cc +++ b/apt-pkg/acquire-method.cc @@ -360,6 +360,8 @@ int pkgAcqMethod::Run(bool Single) if (hash.empty() == false) Tmp->ExpectedHashes.push_back(HashString(*t, hash)); } + char *End; + Tmp->ExpectedSize = strtoll(LookupTag(Message, "Expected-Size", "0").c_str(), &End, 10); Tmp->Next = 0; // Append it to the list diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h index cbf79f860..8a680335e 100644 --- a/apt-pkg/acquire-method.h +++ b/apt-pkg/acquire-method.h @@ -48,6 +48,7 @@ class pkgAcqMethod bool IndexFile; bool FailIgnore; HashStringList ExpectedHashes; + unsigned long long ExpectedSize; }; struct FetchResult diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc index 54be8e99f..8bd1618f4 100644 --- a/apt-pkg/acquire-worker.cc +++ b/apt-pkg/acquire-worker.cc @@ -526,6 +526,9 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item) if (OutFd == -1) return false; + string ExpectedSize; + strprintf(ExpectedSize, "%llu", Item->Owner->FileSize); + string Message = "600 URI Acquire\n"; Message.reserve(300); Message += "URI: " + Item->URI; @@ -533,6 +536,7 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item) HashStringList const hsl = Item->Owner->HashSums(); for (HashStringList::const_iterator hs = hsl.begin(); hs != hsl.end(); ++hs) Message += "\nExpected-" + hs->HashType() + ": " + hs->HashValue(); + Message += "\nExpected-Size: " + ExpectedSize; Message += Item->Owner->Custom600Headers(); Message += "\n\n"; diff --git a/methods/http.cc b/methods/http.cc index c734d3799..916fa464f 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -63,7 +63,8 @@ const unsigned int CircleBuf::BW_HZ=10; // CircleBuf::CircleBuf - Circular input buffer /*{{{*/ // --------------------------------------------------------------------- /* */ -CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0) +CircleBuf::CircleBuf(unsigned long long Size) + : Size(Size), Hash(0), TotalWriten(0) { Buf = new unsigned char[Size]; Reset(); @@ -79,6 +80,7 @@ void CircleBuf::Reset() InP = 0; OutP = 0; StrPos = 0; + TotalWriten = 0; MaxGet = (unsigned long long)-1; OutQueue = string(); if (Hash != 0) @@ -216,6 +218,8 @@ bool CircleBuf::Write(int Fd) return false; } + + TotalWriten += Res; if (Hash != 0) Hash->Add(Buf + (OutP%Size),Res); @@ -649,6 +653,10 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File) return _error->Errno("write",_("Error writing to output file")); } + if (ExpectedSize > 0 && In.TotalWriten > ExpectedSize) + return _error->Error("Writing more data than expected (%llu > %llu)", + In.TotalWriten, ExpectedSize); + // Handle commands from APT if (FD_ISSET(STDIN_FILENO,&rfds)) { diff --git a/methods/http.h b/methods/http.h index 5406ce4a7..c98fe8e5f 100644 --- a/methods/http.h +++ b/methods/http.h @@ -63,6 +63,8 @@ class CircleBuf public: Hashes *Hash; + // total 
amount of data that got written so far + unsigned long long TotalWriten; // Read data in bool Read(int Fd); @@ -81,8 +83,8 @@ class CircleBuf bool ReadSpace() const {return Size - (InP - OutP) > 0;}; bool WriteSpace() const {return InP - OutP > 0;}; - // Dump everything void Reset(); + // Dump everything void Stats(); CircleBuf(unsigned long long Size); diff --git a/methods/server.cc b/methods/server.cc index c91d3b218..1b6511c59 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -531,6 +531,11 @@ int ServerMethod::Loop() // Run the data bool Result = true; + + // ensure we don't fetch too much + if (Queue->ExpectedSize > 0) + Server->ExpectedSize = Queue->ExpectedSize; + if (Server->HaveContent) Result = Server->RunData(File); diff --git a/methods/server.h b/methods/server.h index 5299b3954..0d7333140 100644 --- a/methods/server.h +++ b/methods/server.h @@ -49,6 +49,8 @@ struct ServerState URI Proxy; unsigned long TimeOut; + unsigned long long ExpectedSize; + protected: ServerMethod *Owner; @@ -73,7 +75,7 @@ struct ServerState bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; - State = Header; Persistent = false; Pipeline = true;}; + State = Header; Persistent = false; Pipeline = true; ExpectedSize = 0;}; virtual bool WriteResponse(std::string const &Data) = 0; /** \brief Transfer the data from the socket */ -- cgit v1.2.3 From ffd2dd93a640b47663ebdccc4fda00b426b3db71 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Tue, 26 Aug 2014 19:20:04 -0700 Subject: make https honor ExpectedSize as well --- methods/https.cc | 6 ++++++ methods/https.h | 3 ++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/methods/https.cc b/methods/https.cc index e0348ab58..cacd8a6bc 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -81,6 +81,12 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) if(me->File->Write(buffer, size*nmemb) != true) return false; + me->TotalWritten += size*nmemb; + if(me->TotalWritten > me->Queue->ExpectedSize) + return _error->Error("Writing more data than expected (%llu > %llu)", + me->TotalWritten, me->Queue->ExpectedSize); + + return size*nmemb; } diff --git a/methods/https.h b/methods/https.h index faac8a3cd..2335559fb 100644 --- a/methods/https.h +++ b/methods/https.h @@ -66,11 +66,12 @@ class HttpsMethod : public pkgAcqMethod CURL *curl; FetchResult Res; HttpsServerState *Server; + unsigned long long TotalWritten; public: FileFd *File; - HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), File(NULL) + HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), TotalWritten(0), File(NULL) { File = 0; curl = curl_easy_init(); -- cgit v1.2.3 From 62acbba8d1e8c9f395ccd1068e89bf14a93d4aa8 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Tue, 7 Oct 2014 08:16:51 +0200 Subject: methods/https.cc: use File->Tell() here too --- methods/https.cc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/methods/https.cc b/methods/https.cc index eec858417..f8e84a2ff 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -82,8 +82,7 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) if(me->File->Write(buffer, size*nmemb) != true) return false; - me->TotalWritten += size*nmemb; - if(me->TotalWritten > me->Queue->ExpectedSize) + if(me->Queue->ExpectedSize > 0 && me->File->Tell() > 
me->Queue->ExpectedSize) return _error->Error("Writing more data than expected (%llu > %llu)", me->TotalWritten, me->Queue->ExpectedSize); -- cgit v1.2.3 From 5b33fab8c9a8c26cbc8ce3bf5e1573279d7884b3 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Tue, 7 Oct 2014 08:43:46 +0200 Subject: add ftp expected size check --- methods/ftp.cc | 10 +++++++--- methods/ftp.h | 2 +- methods/https.cc | 1 - 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/methods/ftp.cc b/methods/ftp.cc index ac76295f0..75ace1c5a 100644 --- a/methods/ftp.cc +++ b/methods/ftp.cc @@ -849,7 +849,7 @@ bool FTPConn::Finalize() /* This opens a data connection, sends REST and RETR and then transfers the file over. */ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume, - Hashes &Hash,bool &Missing) + Hashes &Hash,bool &Missing, unsigned long long ExpectedSize) { Missing = false; if (CreateDataFd() == false) @@ -922,7 +922,11 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume, { Close(); return false; - } + } + + if (ExpectedSize > 0 && To.Tell() > ExpectedSize) + return _error->Error("Writing more data than expected (%llu > %llu)", + To.Tell(), ExpectedSize); } // All done @@ -1063,7 +1067,7 @@ bool FtpMethod::Fetch(FetchItem *Itm) FailFd = Fd.Fd(); bool Missing; - if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing) == false) + if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->ExpectedSize) == false) { Fd.Close(); diff --git a/methods/ftp.h b/methods/ftp.h index dd92f0086..416a91980 100644 --- a/methods/ftp.h +++ b/methods/ftp.h @@ -62,7 +62,7 @@ class FTPConn bool Size(const char *Path,unsigned long long &Size); bool ModTime(const char *Path, time_t &Time); bool Get(const char *Path,FileFd &To,unsigned long long Resume, - Hashes &MD5,bool &Missing); + Hashes &MD5,bool &Missing, unsigned long long ExpectedSize); FTPConn(URI Srv); ~FTPConn(); diff --git a/methods/https.cc b/methods/https.cc index f8e84a2ff..a4794e705 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -86,7 +86,6 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) return _error->Error("Writing more data than expected (%llu > %llu)", me->TotalWritten, me->Queue->ExpectedSize); - return size*nmemb; } -- cgit v1.2.3 From c86bc8515a3a195aa244a1743476b102d72c9a2a Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Tue, 7 Oct 2014 13:17:16 +0200 Subject: fix test-cve-2013-1051-InRelease-parsing (fails now in the method) --- test/integration/test-cve-2013-1051-InRelease-parsing | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/test-cve-2013-1051-InRelease-parsing b/test/integration/test-cve-2013-1051-InRelease-parsing index 41b27f691..8f9803991 100755 --- a/test/integration/test-cve-2013-1051-InRelease-parsing +++ b/test/integration/test-cve-2013-1051-InRelease-parsing @@ -42,7 +42,7 @@ touch -d '+1hour' aptarchive/dists/stable/InRelease # ensure the update fails # useful for debugging to add "-o Debug::pkgAcquire::auth=true" msgtest 'apt-get update for should fail with the modified' 'InRelease' -aptget update 2>&1 | grep -q 'Hash Sum mismatch' > /dev/null && msgpass || msgfail +aptget update 2>&1 | grep -E -q '(Writing more data than expected|Hash Sum mismatch)' > /dev/null && msgpass || msgfail # ensure there is no package testequal 'Reading package lists... 
-- cgit v1.2.3 From c48eea97b93920062ea26001081d4fdf7eb967e3 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Tue, 7 Oct 2014 17:47:30 +0200 Subject: make expected-size a maximum-size check as this is what we want at this point --- apt-pkg/acquire-method.cc | 2 +- apt-pkg/acquire-method.h | 5 ++++- apt-pkg/acquire-worker.cc | 10 ++++++---- methods/ftp.cc | 8 ++++---- methods/ftp.h | 2 +- methods/http.cc | 4 ++-- methods/https.cc | 4 ++-- methods/server.cc | 4 ++-- methods/server.h | 4 ++-- test/integration/test-apt-update-expected-size | 7 +++++++ 10 files changed, 31 insertions(+), 19 deletions(-) diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc index 330854e75..9c0558223 100644 --- a/apt-pkg/acquire-method.cc +++ b/apt-pkg/acquire-method.cc @@ -373,7 +373,7 @@ int pkgAcqMethod::Run(bool Single) Tmp->ExpectedHashes.push_back(HashString(*t, hash)); } char *End; - Tmp->ExpectedSize = strtoll(LookupTag(Message, "Expected-Size", "0").c_str(), &End, 10); + Tmp->MaximumSize = strtoll(LookupTag(Message, "Maximum-Size", "0").c_str(), &End, 10); Tmp->Next = 0; // Append it to the list diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h index 2e4e8281a..675c4f844 100644 --- a/apt-pkg/acquire-method.h +++ b/apt-pkg/acquire-method.h @@ -48,7 +48,10 @@ class pkgAcqMethod bool IndexFile; bool FailIgnore; HashStringList ExpectedHashes; - unsigned long long ExpectedSize; + // a maximum size we will download, this can be the exact filesize + // for when we know it or a arbitrary limit when we don't know the + // filesize (like a InRelease file) + unsigned long long MaximumSize; }; struct FetchResult diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc index 8bd1618f4..ffa84eb68 100644 --- a/apt-pkg/acquire-worker.cc +++ b/apt-pkg/acquire-worker.cc @@ -526,9 +526,6 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item) if (OutFd == -1) return false; - string ExpectedSize; - strprintf(ExpectedSize, "%llu", Item->Owner->FileSize); - string Message = "600 URI Acquire\n"; Message.reserve(300); Message += "URI: " + Item->URI; @@ -536,7 +533,12 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item) HashStringList const hsl = Item->Owner->HashSums(); for (HashStringList::const_iterator hs = hsl.begin(); hs != hsl.end(); ++hs) Message += "\nExpected-" + hs->HashType() + ": " + hs->HashValue(); - Message += "\nExpected-Size: " + ExpectedSize; + if(Item->Owner->FileSize > 0) + { + string MaximumSize; + strprintf(MaximumSize, "%llu", Item->Owner->FileSize); + Message += "\nMaximum-Size: " + MaximumSize; + } Message += Item->Owner->Custom600Headers(); Message += "\n\n"; diff --git a/methods/ftp.cc b/methods/ftp.cc index 75ace1c5a..bc84dda7d 100644 --- a/methods/ftp.cc +++ b/methods/ftp.cc @@ -849,7 +849,7 @@ bool FTPConn::Finalize() /* This opens a data connection, sends REST and RETR and then transfers the file over. 
*/ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume, - Hashes &Hash,bool &Missing, unsigned long long ExpectedSize) + Hashes &Hash,bool &Missing, unsigned long long MaximumSize) { Missing = false; if (CreateDataFd() == false) @@ -924,9 +924,9 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume, return false; } - if (ExpectedSize > 0 && To.Tell() > ExpectedSize) + if (MaximumSize > 0 && To.Tell() > MaximumSize) return _error->Error("Writing more data than expected (%llu > %llu)", - To.Tell(), ExpectedSize); + To.Tell(), MaximumSize); } // All done @@ -1067,7 +1067,7 @@ bool FtpMethod::Fetch(FetchItem *Itm) FailFd = Fd.Fd(); bool Missing; - if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->ExpectedSize) == false) + if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize) == false) { Fd.Close(); diff --git a/methods/ftp.h b/methods/ftp.h index 416a91980..a31ebc999 100644 --- a/methods/ftp.h +++ b/methods/ftp.h @@ -62,7 +62,7 @@ class FTPConn bool Size(const char *Path,unsigned long long &Size); bool ModTime(const char *Path, time_t &Time); bool Get(const char *Path,FileFd &To,unsigned long long Resume, - Hashes &MD5,bool &Missing, unsigned long long ExpectedSize); + Hashes &MD5,bool &Missing, unsigned long long MaximumSize); FTPConn(URI Srv); ~FTPConn(); diff --git a/methods/http.cc b/methods/http.cc index b076e59cc..f8faa0cf8 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -655,10 +655,10 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File) return _error->Errno("write",_("Error writing to output file")); } - if (ExpectedSize > 0 && File && File->Tell() > ExpectedSize) + if (MaximumSize > 0 && File && File->Tell() > MaximumSize) { return _error->Error("Writing more data than expected (%llu > %llu)", - File->Tell(), ExpectedSize); + File->Tell(), MaximumSize); } // Handle commands from APT diff --git a/methods/https.cc b/methods/https.cc index a4794e705..787e4a507 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -82,9 +82,9 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) if(me->File->Write(buffer, size*nmemb) != true) return false; - if(me->Queue->ExpectedSize > 0 && me->File->Tell() > me->Queue->ExpectedSize) + if(me->Queue->MaximumSize > 0 && me->File->Tell() > me->Queue->MaximumSize) return _error->Error("Writing more data than expected (%llu > %llu)", - me->TotalWritten, me->Queue->ExpectedSize); + me->TotalWritten, me->Queue->MaximumSize); return size*nmemb; } diff --git a/methods/server.cc b/methods/server.cc index 223737901..82f9b4750 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -534,8 +534,8 @@ int ServerMethod::Loop() bool Result = true; // ensure we don't fetch too much - if (Queue->ExpectedSize > 0) - Server->ExpectedSize = Queue->ExpectedSize; + if (Queue->MaximumSize > 0) + Server->MaximumSize = Queue->MaximumSize; if (Server->HaveContent) Result = Server->RunData(File); diff --git a/methods/server.h b/methods/server.h index 0134a9538..3093e00c9 100644 --- a/methods/server.h +++ b/methods/server.h @@ -49,7 +49,7 @@ struct ServerState URI Proxy; unsigned long TimeOut; - unsigned long long ExpectedSize; + unsigned long long MaximumSize; protected: ServerMethod *Owner; @@ -75,7 +75,7 @@ struct ServerState bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; 
- State = Header; Persistent = false; Pipeline = true; ExpectedSize = 0;}; + State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;}; virtual bool WriteResponse(std::string const &Data) = 0; /** \brief Transfer the data from the socket */ diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size index 72812336d..c1eecc08a 100755 --- a/test/integration/test-apt-update-expected-size +++ b/test/integration/test-apt-update-expected-size @@ -15,6 +15,13 @@ changetowebserver # normal update works fine testsuccess aptget update +# make InRelease really big +mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good +dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 +touch -d '+1hour' aptarchive/dists/unstable/InRelease +aptget update -o acquire::MaxReleaseFileSize=$((1*1000*1000)) + + # append junk at the end of the Packages.gz/Packages SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" echo "1234567890" >> aptarchive/dists/unstable/main/binary-i386/Packages.gz -- cgit v1.2.3 From 27e6c17a18216e2a02de39a6d1722b83ac823d42 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Tue, 7 Oct 2014 20:40:37 +0200 Subject: Add new Acquire::MaxReleaseFileSize=10*1000*1000 option This option controls the maximum size of Release/Release.gpg/InRelease files. The rational is that we do not know the size of these files in advance and we want to protect against a denial of service attack where someone sends us endless amounts of data until the disk is full (we do know the size all other files (Packages/Sources/debs)). --- apt-pkg/acquire-item.cc | 53 +++++++++++++------------- apt-pkg/acquire-item.h | 4 +- test/integration/test-apt-update-expected-size | 13 +++++-- 3 files changed, 39 insertions(+), 31 deletions(-) diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index 5d0a00055..1dcbde223 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -1690,14 +1690,8 @@ pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/ // --------------------------------------------------------------------- string pkgAcqMetaSig::Custom600Headers() const { - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI); - - struct stat Buf; - if (stat(FinalFile.c_str(),&Buf) != 0) - return "\nIndex-File: true"; - - return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + std::string Header = GetCustom600Headers(RealURI); + return Header; } /*}}}*/ // pkgAcqMetaSig::Done - The signature was downloaded/verified /*{{{*/ @@ -1842,14 +1836,7 @@ void pkgAcqMetaIndex::Init(std::string URIDesc, std::string ShortDesc) // --------------------------------------------------------------------- string pkgAcqMetaIndex::Custom600Headers() const { - string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI); - - struct stat Buf; - if (stat(Final.c_str(),&Buf) != 0) - return "\nIndex-File: true"; - - return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + return GetCustom600Headers(RealURI); } /*}}}*/ void pkgAcqMetaIndex::Done(string Message,unsigned long long Size, /*{{{*/ @@ -1910,6 +1897,26 @@ bool pkgAcqMetaBase::CheckAuthDone(string Message, const string &RealURI) /*{{{* return true; } /*}}}*/ +// pkgAcqMetaBase::GetCustom600Headers - Get header for AcqMetaBase /*{{{*/ +// --------------------------------------------------------------------- +string pkgAcqMetaBase::GetCustom600Headers(const string 
&RealURI) const +{ + std::string Header = "\nIndex-File: true"; + std::string MaximumSize; + strprintf(MaximumSize, "\nMaximum-Size: %i", + _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000)); + Header += MaximumSize; + + string FinalFile = _config->FindDir("Dir::State::lists"); + FinalFile += URItoFileName(RealURI); + + struct stat Buf; + if (stat(FinalFile.c_str(),&Buf) == 0) + Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + + return Header; +} + /*}}}*/ // pkgAcqMetaBase::QueueForSignatureVerify /*{{{*/ void pkgAcqMetaBase::QueueForSignatureVerify(const std::string &MetaIndexFile, const std::string &MetaIndexFileSignature) @@ -2187,17 +2194,9 @@ pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/ // --------------------------------------------------------------------- string pkgAcqMetaClearSig::Custom600Headers() const { - string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI); - - struct stat Buf; - if (stat(Final.c_str(),&Buf) != 0) - { - if (stat(Final.c_str(),&Buf) != 0) - return "\nIndex-File: true\nFail-Ignore: true\n"; - } - - return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + string Header = GetCustom600Headers(RealURI); + Header += "\nFail-Ignore: true"; + return Header; } /*}}}*/ // pkgAcqMetaClearSig::Done - We got a file /*{{{*/ diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 0e7212fc5..68d5a01ce 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -390,7 +390,6 @@ class pkgAcqMetaBase : public pkgAcquire::Item */ void QueueIndexes(bool verify); - /** \brief Called when a file is finished being retrieved. * * If the file was not downloaded to DestFile, a copy process is @@ -407,6 +406,9 @@ class pkgAcqMetaBase : public pkgAcquire::Item void QueueForSignatureVerify(const std::string &MetaIndexFile, const std::string &MetaIndexFileSignature); + /** \brief get the custom600 header for all pkgAcqMeta */ + std::string GetCustom600Headers(const std::string &RealURI) const; + /** \brief Called when authentication succeeded. 
* * Sanity-checks the authenticated file, queues up the individual diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size index c1eecc08a..f8ec24dcc 100755 --- a/test/integration/test-apt-update-expected-size +++ b/test/integration/test-apt-update-expected-size @@ -17,10 +17,17 @@ testsuccess aptget update # make InRelease really big mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good -dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 +dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null touch -d '+1hour' aptarchive/dists/unstable/InRelease -aptget update -o acquire::MaxReleaseFileSize=$((1*1000*1000)) - +aptget update -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 > output.log +msgtest 'Check that the max write warning is triggered' +if grep -q "Writing more data than expected" output.log; then + msgpass +else + cat output.log + msgfail +fi +mv aptarchive/dists/unstable/InRelease.good aptarchive/dists/unstable/InRelease # append junk at the end of the Packages.gz/Packages SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" -- cgit v1.2.3 From ee27950632c149bb14c9c490e92147579ba4fc2a Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Tue, 7 Oct 2014 22:36:09 +0200 Subject: Send "Fail-Reason: MaximumSizeExceeded" from the method Communicate the fail reason from the methods to the parent and Rename() failed files. --- apt-pkg/acquire-item.cc | 6 +++++- methods/ftp.cc | 8 ++++++-- methods/ftp.h | 3 ++- methods/http.cc | 1 + methods/https.cc | 4 +++- test/integration/test-apt-update-expected-size | 5 ++++- 6 files changed, 21 insertions(+), 6 deletions(-) diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index 1dcbde223..f630129b9 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -148,8 +148,12 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) else Status = StatIdle; - // report mirror failure back to LP if we actually use a mirror + // check fail reason string FailReason = LookupTag(Message, "FailReason"); + if(FailReason == "MaximumSizeExceeded") + Rename(DestFile, DestFile+".FAILED"); + + // report mirror failure back to LP if we actually use a mirror if(FailReason.size() != 0) ReportMirrorFailure(FailReason); else diff --git a/methods/ftp.cc b/methods/ftp.cc index bc84dda7d..5b739ea06 100644 --- a/methods/ftp.cc +++ b/methods/ftp.cc @@ -849,7 +849,8 @@ bool FTPConn::Finalize() /* This opens a data connection, sends REST and RETR and then transfers the file over. 
*/ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume, - Hashes &Hash,bool &Missing, unsigned long long MaximumSize) + Hashes &Hash,bool &Missing, unsigned long long MaximumSize, + pkgAcqMethod *Owner) { Missing = false; if (CreateDataFd() == false) @@ -925,8 +926,11 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume, } if (MaximumSize > 0 && To.Tell() > MaximumSize) + { + Owner->SetFailReason("MaximumSizeExceeded"); return _error->Error("Writing more data than expected (%llu > %llu)", To.Tell(), MaximumSize); + } } // All done @@ -1067,7 +1071,7 @@ bool FtpMethod::Fetch(FetchItem *Itm) FailFd = Fd.Fd(); bool Missing; - if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize) == false) + if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize,this) == false) { Fd.Close(); diff --git a/methods/ftp.h b/methods/ftp.h index a31ebc999..2efd28ec6 100644 --- a/methods/ftp.h +++ b/methods/ftp.h @@ -62,7 +62,8 @@ class FTPConn bool Size(const char *Path,unsigned long long &Size); bool ModTime(const char *Path, time_t &Time); bool Get(const char *Path,FileFd &To,unsigned long long Resume, - Hashes &MD5,bool &Missing, unsigned long long MaximumSize); + Hashes &MD5,bool &Missing, unsigned long long MaximumSize, + pkgAcqMethod *Owner); FTPConn(URI Srv); ~FTPConn(); diff --git a/methods/http.cc b/methods/http.cc index f8faa0cf8..c00b439b7 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -657,6 +657,7 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File) if (MaximumSize > 0 && File && File->Tell() > MaximumSize) { + Owner->SetFailReason("MaximumSizeExceeded"); return _error->Error("Writing more data than expected (%llu > %llu)", File->Tell(), MaximumSize); } diff --git a/methods/https.cc b/methods/https.cc index 787e4a507..16d564b34 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -83,9 +83,11 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) return false; if(me->Queue->MaximumSize > 0 && me->File->Tell() > me->Queue->MaximumSize) + { + me->SetFailReason("MaximumSizeExceeded"); return _error->Error("Writing more data than expected (%llu > %llu)", me->TotalWritten, me->Queue->MaximumSize); - + } return size*nmemb; } diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size index f8ec24dcc..58920f544 100755 --- a/test/integration/test-apt-update-expected-size +++ b/test/integration/test-apt-update-expected-size @@ -19,7 +19,7 @@ testsuccess aptget update mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null touch -d '+1hour' aptarchive/dists/unstable/InRelease -aptget update -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 > output.log +aptget update -o Apt::Get::List-Cleanup=0 -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 > output.log msgtest 'Check that the max write warning is triggered' if grep -q "Writing more data than expected" output.log; then msgpass @@ -27,8 +27,11 @@ else cat output.log msgfail fi +# ensure the failed InRelease file got renamed +testsuccess ls rootdir/var/lib/apt/lists/partial/*InRelease.FAILED mv aptarchive/dists/unstable/InRelease.good aptarchive/dists/unstable/InRelease + # append junk at the end of the Packages.gz/Packages SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" echo "1234567890" >> 
aptarchive/dists/unstable/main/binary-i386/Packages.gz -- cgit v1.2.3 From f2b47ba290f3a178c584da83f007cf0f720baabb Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Wed, 8 Oct 2014 08:32:42 +0200 Subject: Fix http pipeline messup detection The Maximum-Size protection breaks the http pipeline reorder code because it relies on that the object got fetched entirely so that it can compare the hash of the downloaded data. So instead of stopping when the Maximum-Size of the expected item is reached we only stop when the maximum size of the biggest item in the queue is reached. This way the pipeline reoder code keeps working. --- methods/server.cc | 16 ++++++++++++++-- methods/server.h | 4 ++++ 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/methods/server.cc b/methods/server.cc index 82f9b4750..cef809738 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -534,8 +534,10 @@ int ServerMethod::Loop() bool Result = true; // ensure we don't fetch too much - if (Queue->MaximumSize > 0) - Server->MaximumSize = Queue->MaximumSize; + // we could do "Server->MaximumSize = Queue->MaximumSize" here + // but that would break the clever pipeline messup detection + // so instead we use the size of the biggest item in the queue + Server->MaximumSize = FindMaximumObjectSizeInQueue(); if (Server->HaveContent) Result = Server->RunData(File); @@ -706,5 +708,15 @@ int ServerMethod::Loop() } return 0; +} + /*}}}*/ + /*{{{*/ +unsigned long long +ServerMethod::FindMaximumObjectSizeInQueue() const +{ + unsigned long long MaxSizeInQueue = 0; + for (FetchItem *I = Queue->Next; I != 0 && I != QueueBack; I = I->Next) + MaxSizeInQueue = std::max(MaxSizeInQueue, I->MaximumSize); + return MaxSizeInQueue; } /*}}}*/ diff --git a/methods/server.h b/methods/server.h index 3093e00c9..7d5198478 100644 --- a/methods/server.h +++ b/methods/server.h @@ -106,6 +106,10 @@ class ServerMethod : public pkgAcqMethod unsigned long PipelineDepth; bool AllowRedirect; + // Find the biggest item in the fetch queue for the checking of the maximum + // size + unsigned long long FindMaximumObjectSizeInQueue() const APT_PURE; + public: bool Debug; -- cgit v1.2.3
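
Taken together, the patches above thread one idea through every transport (http, https, ftp): the acquire worker tells the download method how much data an item may legitimately contain, and the method aborts the transfer as soon as more bytes than that have been written, instead of trusting the server to stop. What follows is a minimal standalone sketch of that write-side guard, not APT's actual classes: the names SizeGuardedWriter and Written, and the 16-byte limit in main(), are invented for illustration, while the error text mirrors the "Writing more data than expected" message used in the diffs.

// size_guard.cc - standalone sketch of the maximum-size download guard
// implemented by the patches above. Names are illustrative; APT's real
// checks live in methods/http.cc, methods/https.cc and methods/ftp.cc.
#include <cstdio>
#include <cstring>

class SizeGuardedWriter
{
   FILE *Out;                      // destination of the downloaded data
   unsigned long long Written;     // bytes written so far (File->Tell() in the patches)
   unsigned long long MaximumSize; // 0 means "no limit announced"

 public:
   SizeGuardedWriter(FILE *Out, unsigned long long MaximumSize)
      : Out(Out), Written(0), MaximumSize(MaximumSize) {}

   // Write one chunk; return false once more data than expected arrives so
   // the caller can abort the transfer and mark the download as failed.
   bool Write(const void *Buffer, size_t Size)
   {
      if (fwrite(Buffer, 1, Size, Out) != Size)
         return false;
      Written += Size;
      if (MaximumSize > 0 && Written > MaximumSize)
      {
         fprintf(stderr, "Writing more data than expected (%llu > %llu)\n",
                 Written, MaximumSize);
         return false;
      }
      return true;
   }
};

int main()
{
   // Pretend the parent announced "Maximum-Size: 16" for this item.
   SizeGuardedWriter Writer(stdout, 16);
   const char Chunk[] = "0123456789";
   // The first chunk fits; the second pushes the total to 20 bytes and is rejected.
   bool Ok = Writer.Write(Chunk, strlen(Chunk)) && Writer.Write(Chunk, strlen(Chunk));
   return Ok ? 0 : 1;
}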
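
The limit itself travels between the parent process and the method inside the "600 URI Acquire" message: the worker emits a "Maximum-Size:" line only when the item's file size is known (with Release/InRelease files instead capped by the Acquire::MaxReleaseFileSize option), and the method parses it back, treating an absent field as "no limit". Below is a condensed, self-contained round trip of that field under those assumptions. It deliberately avoids APT's helpers (strprintf, LookupTag) in favour of plain standard-library calls, and the URI and the 53428-byte size are made up for the example.

// maximum_size_field.cc - condensed round trip of the Maximum-Size field in
// the "600 URI Acquire" message, mirroring acquire-worker.cc (sender) and
// acquire-method.cc (receiver) from the diffs above, but using only the
// standard library. The URI and sizes are invented for the example.
#include <cstdlib>
#include <iostream>
#include <sstream>
#include <string>

// Worker side: announce a limit only when the expected file size is known.
static std::string BuildAcquireMessage(unsigned long long FileSize)
{
   std::ostringstream Message;
   Message << "600 URI Acquire\n"
           << "URI: http://example.org/dists/unstable/InRelease\n";
   if (FileSize > 0)
      Message << "Maximum-Size: " << FileSize << "\n";
   Message << "\n";
   return Message.str();
}

// Method side: a missing field is read as 0, i.e. "no limit".
static unsigned long long ParseMaximumSize(const std::string &Message)
{
   const std::string Tag = "Maximum-Size: ";
   std::string::size_type Pos = Message.find(Tag);
   if (Pos == std::string::npos)
      return 0;
   return strtoull(Message.c_str() + Pos + Tag.size(), NULL, 10);
}

int main()
{
   std::cout << ParseMaximumSize(BuildAcquireMessage(53428)) << "\n"; // prints 53428
   std::cout << ParseMaximumSize(BuildAcquireMessage(0)) << "\n";     // prints 0 (no limit)
   return 0;
}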