-rw-r--r--  apt-pkg/acquire-method.cc                              |  2
-rw-r--r--  apt-pkg/acquire-method.h                               |  4
-rw-r--r--  apt-pkg/acquire-worker.cc                              |  6
-rw-r--r--  methods/ftp.cc                                         | 10
-rw-r--r--  methods/ftp.h                                          |  2
-rw-r--r--  methods/http.cc                                        | 12
-rw-r--r--  methods/http.h                                         |  4
-rw-r--r--  methods/https.cc                                       |  4
-rw-r--r--  methods/https.h                                        |  5
-rw-r--r--  methods/server.cc                                      |  8
-rw-r--r--  methods/server.h                                       |  4
-rwxr-xr-x  test/integration/test-apt-update-expected-size         | 34
-rwxr-xr-x  test/integration/test-cve-2013-1051-InRelease-parsing  |  2
13 files changed, 87 insertions, 10 deletions
diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc
index 82f2fb3ce..9c0558223 100644
--- a/apt-pkg/acquire-method.cc
+++ b/apt-pkg/acquire-method.cc
@@ -372,6 +372,8 @@ int pkgAcqMethod::Run(bool Single)
               if (hash.empty() == false)
                  Tmp->ExpectedHashes.push_back(HashString(*t, hash));
            }
+           char *End;
+           Tmp->MaximumSize = strtoll(LookupTag(Message, "Maximum-Size", "0").c_str(), &End, 10);
            Tmp->Next = 0;
 
            // Append it to the list
diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h
index cdeecc9a7..675c4f844 100644
--- a/apt-pkg/acquire-method.h
+++ b/apt-pkg/acquire-method.h
@@ -48,6 +48,10 @@ class pkgAcqMethod
       bool IndexFile;
       bool FailIgnore;
       HashStringList ExpectedHashes;
+      // a maximum size we will download, this can be the exact filesize
+      // for when we know it or a arbitrary limit when we don't know the
+      // filesize (like a InRelease file)
+      unsigned long long MaximumSize;
    };
 
    struct FetchResult
diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc
index 4a357bdab..64df3c80f 100644
--- a/apt-pkg/acquire-worker.cc
+++ b/apt-pkg/acquire-worker.cc
@@ -534,6 +534,12 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item)
    HashStringList const hsl = Item->Owner->HashSums();
    for (HashStringList::const_iterator hs = hsl.begin(); hs != hsl.end(); ++hs)
       Message += "\nExpected-" + hs->HashType() + ": " + hs->HashValue();
+   if(Item->Owner->FileSize > 0)
+   {
+      string MaximumSize;
+      strprintf(MaximumSize, "%llu", Item->Owner->FileSize);
+      Message += "\nMaximum-Size: " + MaximumSize;
+   }
    Message += Item->Owner->Custom600Headers();
    Message += "\n\n";
 
diff --git a/methods/ftp.cc b/methods/ftp.cc
index ac76295f0..bc84dda7d 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -849,7 +849,7 @@ bool FTPConn::Finalize()
 /* This opens a data connection, sends REST and RETR and then transfers
    the file over. */
 bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
-                  Hashes &Hash,bool &Missing)
+                  Hashes &Hash,bool &Missing, unsigned long long MaximumSize)
 {
    Missing = false;
    if (CreateDataFd() == false)
@@ -922,7 +922,11 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
       {
          Close();
         return false;
-      }
+      }
+
+      if (MaximumSize > 0 && To.Tell() > MaximumSize)
+         return _error->Error("Writing more data than expected (%llu > %llu)",
+                              To.Tell(), MaximumSize);
    }
 
    // All done
@@ -1063,7 +1067,7 @@ bool FtpMethod::Fetch(FetchItem *Itm)
       FailFd = Fd.Fd();
 
       bool Missing;
-      if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing) == false)
+      if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize) == false)
       {
          Fd.Close();
diff --git a/methods/ftp.h b/methods/ftp.h
index dd92f0086..a31ebc999 100644
--- a/methods/ftp.h
+++ b/methods/ftp.h
@@ -62,7 +62,7 @@ class FTPConn
    bool Size(const char *Path,unsigned long long &Size);
    bool ModTime(const char *Path, time_t &Time);
    bool Get(const char *Path,FileFd &To,unsigned long long Resume,
-            Hashes &MD5,bool &Missing);
+            Hashes &MD5,bool &Missing, unsigned long long MaximumSize);
 
    FTPConn(URI Srv);
    ~FTPConn();
diff --git a/methods/http.cc b/methods/http.cc
index f2a4a4db6..f8faa0cf8 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -64,7 +64,8 @@ const unsigned int CircleBuf::BW_HZ=10;
 // CircleBuf::CircleBuf - Circular input buffer			/*{{{*/
 // ---------------------------------------------------------------------
 /* */
-CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0)
+CircleBuf::CircleBuf(unsigned long long Size)
+   : Size(Size), Hash(0), TotalWriten(0)
 {
    Buf = new unsigned char[Size];
    Reset();
@@ -80,6 +81,7 @@ void CircleBuf::Reset()
    InP = 0;
    OutP = 0;
    StrPos = 0;
+   TotalWriten = 0;
    MaxGet = (unsigned long long)-1;
    OutQueue = string();
    if (Hash != 0)
@@ -217,6 +219,8 @@ bool CircleBuf::Write(int Fd)
         return false;
      }
+
+      TotalWriten += Res;
 
      if (Hash != 0)
         Hash->Add(Buf + (OutP%Size),Res);
@@ -651,6 +655,12 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File)
            return _error->Errno("write",_("Error writing to output file"));
      }
+
+      if (MaximumSize > 0 && File && File->Tell() > MaximumSize)
+      {
+         return _error->Error("Writing more data than expected (%llu > %llu)",
+                              File->Tell(), MaximumSize);
+      }
 
      // Handle commands from APT
      if (FD_ISSET(STDIN_FILENO,&rfds))
      {
diff --git a/methods/http.h b/methods/http.h
index 1df9fa07d..40a88a7be 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -63,6 +63,8 @@ class CircleBuf
    public:
 
    Hashes *Hash;
+   // total amount of data that got written so far
+   unsigned long long TotalWriten;
 
   // Read data in
   bool Read(int Fd);
@@ -81,8 +83,8 @@ class CircleBuf
   bool ReadSpace() const {return Size - (InP - OutP) > 0;};
   bool WriteSpace() const {return InP - OutP > 0;};
 
-  // Dump everything
   void Reset();
+  // Dump everything
   void Stats();
 
   CircleBuf(unsigned long long Size);
diff --git a/methods/https.cc b/methods/https.cc
index a74d2a38b..787e4a507 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -82,6 +82,10 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
    if(me->File->Write(buffer, size*nmemb) != true)
       return false;
 
+   if(me->Queue->MaximumSize > 0 && me->File->Tell() > me->Queue->MaximumSize)
+      return _error->Error("Writing more data than expected (%llu > %llu)",
+                           me->TotalWritten, me->Queue->MaximumSize);
+
    return size*nmemb;
 }
diff --git a/methods/https.h b/methods/https.h
index 45d1f7f63..0387cb9b5 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -66,11 +66,12 @@ class HttpsMethod : public pkgAcqMethod
    CURL *curl;
    FetchResult Res;
    HttpsServerState *Server;
+   unsigned long long TotalWritten;
 
    public:
    FileFd *File;
-
-   HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), Server(NULL), File(NULL)
+
+   HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), Server(NULL), TotalWritten(0), File(NULL)
    {
       curl = curl_easy_init();
    };
diff --git a/methods/server.cc b/methods/server.cc
index 4a961f454..82f9b4750 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -532,6 +532,11 @@ int ServerMethod::Loop()
       // Run the data
       bool Result = true;
+
+      // ensure we don't fetch too much
+      if (Queue->MaximumSize > 0)
+         Server->MaximumSize = Queue->MaximumSize;
+
      if (Server->HaveContent)
         Result = Server->RunData(File);
 
@@ -605,7 +610,10 @@ int ServerMethod::Loop()
                  QueueBack = Queue;
               }
               else
+              {
+                 Server->Close();
                  Fail(true);
+              }
            }
            break;
         }
diff --git a/methods/server.h b/methods/server.h
index aa692ea93..3093e00c9 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -49,6 +49,8 @@ struct ServerState
    URI Proxy;
    unsigned long TimeOut;
 
+   unsigned long long MaximumSize;
+
    protected:
    ServerMethod *Owner;
@@ -73,7 +75,7 @@ struct ServerState
   bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
   virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; StartPos = 0;
                         Encoding = Closes; time(&Date); HaveContent = false;
-                        State = Header; Persistent = false; Pipeline = true;};
+                        State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;};
   virtual bool WriteResponse(std::string const &Data) = 0;
 
   /** \brief Transfer the data from the socket */
diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size
new file mode 100755
index 000000000..c1eecc08a
--- /dev/null
+++ b/test/integration/test-apt-update-expected-size
@@ -0,0 +1,34 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+insertpackage 'unstable' 'apt' 'all' '1.0'
+
+setupaptarchive --no-update
+changetowebserver
+
+# normal update works fine
+testsuccess aptget update
+
+# make InRelease really big
+mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good
+dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2
+touch -d '+1hour' aptarchive/dists/unstable/InRelease
+aptget update -o acquire::MaxReleaseFileSize=$((1*1000*1000))
+
+
+# append junk at the end of the Packages.gz/Packages
+SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
+echo "1234567890" >> aptarchive/dists/unstable/main/binary-i386/Packages.gz
+echo "1234567890" >> aptarchive/dists/unstable/main/binary-i386/Packages
+NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
+rm -f rootdir/var/lib/apt/lists/localhost*
+testequal "W: Failed to fetch http://localhost:8080/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) [IP: ::1 8080]
+
+E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+
diff --git a/test/integration/test-cve-2013-1051-InRelease-parsing b/test/integration/test-cve-2013-1051-InRelease-parsing
index 41b27f691..8f9803991 100755
--- a/test/integration/test-cve-2013-1051-InRelease-parsing
+++ b/test/integration/test-cve-2013-1051-InRelease-parsing
@@ -42,7 +42,7 @@ touch -d '+1hour' aptarchive/dists/stable/InRelease
 # ensure the update fails
 # useful for debugging to add "-o Debug::pkgAcquire::auth=true"
 msgtest 'apt-get update for should fail with the modified' 'InRelease'
-aptget update 2>&1 | grep -q 'Hash Sum mismatch' > /dev/null && msgpass || msgfail
+aptget update 2>&1 | grep -E -q '(Writing more data than expected|Hash Sum mismatch)' > /dev/null && msgpass || msgfail
 
 # ensure there is no package
 testequal 'Reading package lists...
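
The pattern the diff above introduces is the same in every transport method: the acquire worker announces the expected size as a new "Maximum-Size: <bytes>" tag in the 600 URI Acquire message, pkgAcqMethod::Run() parses it with LookupTag()/strtoll(), and each method aborts the transfer once it has written more data than announced. Below is a minimal standalone sketch of that guard, not APT code; the DownloadGuard type and all of its names are invented for illustration only.

#include <cstdio>

// Sketch of the size guard each method applies (illustrative, not part of APT).
struct DownloadGuard
{
   unsigned long long MaximumSize;   // 0 = unlimited, mirroring ServerState::Reset()
   unsigned long long TotalWritten;  // bytes written so far (cf. CircleBuf::TotalWriten)

   // Reject the transfer once more data arrives than was announced
   // via the "Maximum-Size: <bytes>" tag in the 600 URI Acquire message.
   bool Write(const char *Data, unsigned long long Len)
   {
      (void)Data;                    // a real method would persist the data here
      TotalWritten += Len;
      if (MaximumSize > 0 && TotalWritten > MaximumSize)
      {
         std::fprintf(stderr, "Writing more data than expected (%llu > %llu)\n",
                      TotalWritten, MaximumSize);
         return false;
      }
      return true;
   }
};

int main()
{
   DownloadGuard g{10, 0};                    // pretend the worker announced 10 bytes
   bool ok = g.Write("0123456789ABCDEF", 16); // 16 bytes exceed the limit -> rejected
   return ok ? 1 : 0;
}

A MaximumSize of 0 keeps the old unbounded behaviour, which is why ServerState::Reset() initialises it to 0 and the worker only emits the tag when Item->Owner->FileSize is known.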