author     Michael Vogt <mvo@ubuntu.com>  2014-10-08 08:37:01 +0200
committer  Michael Vogt <mvo@ubuntu.com>  2014-10-08 08:37:01 +0200
commit     0c158e03ceb509a08d3d4ded79c3151237f669dd
tree       dc7a5cf8f15a7907d00beb75b5fa4d246ec3d36a
parent     a943fbf21ca0b9b58b8f1492fc518679b1d7c749
parent     f2b47ba290f3a178c584da83f007cf0f720baabb
Merge remote-tracking branch 'mvo/feature/expected-size' into debian/experimental
-rw-r--r--  apt-pkg/acquire-item.cc  59
-rw-r--r--  apt-pkg/acquire-item.h  4
-rw-r--r--  apt-pkg/acquire-method.cc  2
-rw-r--r--  apt-pkg/acquire-method.h  4
-rw-r--r--  apt-pkg/acquire-worker.cc  6
-rw-r--r--  methods/ftp.cc  14
-rw-r--r--  methods/ftp.h  3
-rw-r--r--  methods/http.cc  13
-rw-r--r--  methods/http.h  4
-rw-r--r--  methods/https.cc  6
-rw-r--r--  methods/https.h  5
-rw-r--r--  methods/server.cc  20
-rw-r--r--  methods/server.h  8
-rwxr-xr-x  test/integration/test-apt-update-expected-size  44
-rwxr-xr-x  test/integration/test-cve-2013-1051-InRelease-parsing  2
15 files changed, 155 insertions, 39 deletions
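
In outline, the branch lets the acquire layer hand a Maximum-Size value down to the ftp/http/https transport methods, each of which now aborts a transfer once it has written more data than that bound and reports the fail reason MaximumSizeExceeded. The following self-contained sketch illustrates that guard; Transfer and CheckMaximumSize are invented names for illustration only, not apt APIs.

    #include <cstdio>

    // Illustrative stand-ins for the method-side state; not apt types.
    struct Transfer
    {
       unsigned long long Written;      // bytes written so far (File->Tell() in the patch)
       unsigned long long MaximumSize;  // 0 means "no limit", as in the patch
    };

    // Mirrors the guard the ftp/http/https methods gain: once more data than
    // MaximumSize has been written, the transfer is treated as failed.
    static bool CheckMaximumSize(const Transfer &T)
    {
       if (T.MaximumSize > 0 && T.Written > T.MaximumSize)
       {
          std::fprintf(stderr, "Writing more data than expected (%llu > %llu)\n",
                       T.Written, T.MaximumSize);
          return false;   // the real methods additionally set FailReason to "MaximumSizeExceeded"
       }
       return true;
    }

    int main()
    {
       Transfer T = { 2 * 1000 * 1000ULL, 1 * 1000 * 1000ULL };
       return CheckMaximumSize(T) ? 0 : 1;   // exits 1 here: 2 MB written against a 1 MB limit
    }
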
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 5d0a00055..f630129b9 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -148,8 +148,12 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
else
Status = StatIdle;
- // report mirror failure back to LP if we actually use a mirror
+ // check fail reason
string FailReason = LookupTag(Message, "FailReason");
+ if(FailReason == "MaximumSizeExceeded")
+ Rename(DestFile, DestFile+".FAILED");
+
+ // report mirror failure back to LP if we actually use a mirror
if(FailReason.size() != 0)
ReportMirrorFailure(FailReason);
else
@@ -1690,14 +1694,8 @@ pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/
// ---------------------------------------------------------------------
string pkgAcqMetaSig::Custom600Headers() const
{
- string FinalFile = _config->FindDir("Dir::State::lists");
- FinalFile += URItoFileName(RealURI);
-
- struct stat Buf;
- if (stat(FinalFile.c_str(),&Buf) != 0)
- return "\nIndex-File: true";
-
- return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ std::string Header = GetCustom600Headers(RealURI);
+ return Header;
}
/*}}}*/
// pkgAcqMetaSig::Done - The signature was downloaded/verified /*{{{*/
@@ -1842,14 +1840,7 @@ void pkgAcqMetaIndex::Init(std::string URIDesc, std::string ShortDesc)
// ---------------------------------------------------------------------
string pkgAcqMetaIndex::Custom600Headers() const
{
- string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(RealURI);
-
- struct stat Buf;
- if (stat(Final.c_str(),&Buf) != 0)
- return "\nIndex-File: true";
-
- return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ return GetCustom600Headers(RealURI);
}
/*}}}*/
void pkgAcqMetaIndex::Done(string Message,unsigned long long Size, /*{{{*/
@@ -1910,6 +1901,26 @@ bool pkgAcqMetaBase::CheckAuthDone(string Message, const string &RealURI) /*{{{*
return true;
}
/*}}}*/
+// pkgAcqMetaBase::GetCustom600Headers - Get header for AcqMetaBase /*{{{*/
+// ---------------------------------------------------------------------
+string pkgAcqMetaBase::GetCustom600Headers(const string &RealURI) const
+{
+ std::string Header = "\nIndex-File: true";
+ std::string MaximumSize;
+ strprintf(MaximumSize, "\nMaximum-Size: %i",
+ _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000));
+ Header += MaximumSize;
+
+ string FinalFile = _config->FindDir("Dir::State::lists");
+ FinalFile += URItoFileName(RealURI);
+
+ struct stat Buf;
+ if (stat(FinalFile.c_str(),&Buf) == 0)
+ Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+
+ return Header;
+}
+ /*}}}*/
// pkgAcqMetaBase::QueueForSignatureVerify /*{{{*/
void pkgAcqMetaBase::QueueForSignatureVerify(const std::string &MetaIndexFile,
const std::string &MetaIndexFileSignature)
@@ -2187,17 +2198,9 @@ pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/
// ---------------------------------------------------------------------
string pkgAcqMetaClearSig::Custom600Headers() const
{
- string Final = _config->FindDir("Dir::State::lists");
- Final += URItoFileName(RealURI);
-
- struct stat Buf;
- if (stat(Final.c_str(),&Buf) != 0)
- {
- if (stat(Final.c_str(),&Buf) != 0)
- return "\nIndex-File: true\nFail-Ignore: true\n";
- }
-
- return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime);
+ string Header = GetCustom600Headers(RealURI);
+ Header += "\nFail-Ignore: true";
+ return Header;
}
/*}}}*/
// pkgAcqMetaClearSig::Done - We got a file /*{{{*/
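
The three nearly identical Custom600Headers() bodies above collapse into pkgAcqMetaBase::GetCustom600Headers(), which additionally advertises a Maximum-Size taken from Acquire::MaxReleaseFileSize (falling back to 10*1000*1000 bytes). A rough standalone sketch of the header block it produces follows; the stat()/TimeRFC1123 lookup is replaced by a plain string argument, and the date shown is made up.

    #include <cstdio>
    #include <string>

    // Sketch of the header block built by GetCustom600Headers(); the
    // Last-Modified value is passed in instead of being read from the
    // on-disk list file.
    static std::string BuildCustom600Headers(unsigned long long MaxReleaseFileSize,
                                             const std::string &LastModified)
    {
       std::string Header = "\nIndex-File: true";
       char Buf[64];
       std::snprintf(Buf, sizeof(Buf), "\nMaximum-Size: %llu", MaxReleaseFileSize);
       Header += Buf;
       if (LastModified.empty() == false)
          Header += "\nLast-Modified: " + LastModified;
       return Header;
    }

    int main()
    {
       // 10*1000*1000 is the fallback the patch uses when
       // Acquire::MaxReleaseFileSize is not configured.
       std::printf("%s\n", BuildCustom600Headers(10 * 1000 * 1000,
                   "Wed, 08 Oct 2014 06:37:01 GMT").c_str());
       return 0;
    }
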
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index 0e7212fc5..68d5a01ce 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -390,7 +390,6 @@ class pkgAcqMetaBase : public pkgAcquire::Item
*/
void QueueIndexes(bool verify);
-
/** \brief Called when a file is finished being retrieved.
*
* If the file was not downloaded to DestFile, a copy process is
@@ -407,6 +406,9 @@ class pkgAcqMetaBase : public pkgAcquire::Item
void QueueForSignatureVerify(const std::string &MetaIndexFile,
const std::string &MetaIndexFileSignature);
+ /** \brief get the custom600 header for all pkgAcqMeta */
+ std::string GetCustom600Headers(const std::string &RealURI) const;
+
/** \brief Called when authentication succeeded.
*
* Sanity-checks the authenticated file, queues up the individual
diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc
index 8533e319f..cfa93e95c 100644
--- a/apt-pkg/acquire-method.cc
+++ b/apt-pkg/acquire-method.cc
@@ -375,6 +375,8 @@ int pkgAcqMethod::Run(bool Single)
if (hash.empty() == false)
Tmp->ExpectedHashes.push_back(HashString(*t, hash));
}
+ char *End;
+ Tmp->MaximumSize = strtoll(LookupTag(Message, "Maximum-Size", "0").c_str(), &End, 10);
Tmp->Next = 0;
// Append it to the list
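
On the method side the limit arrives as a Maximum-Size tag in the 600 URI Acquire message and is parsed with strtoll, with the default "0" (meaning no limit) used when the tag is absent. A minimal sketch of that parsing step; LookupTag itself is not reproduced, its result (or default) is passed in directly.

    #include <cassert>
    #include <cstdlib>
    #include <string>

    // Imitates the patch's strtoll call on the Maximum-Size tag; in apt the
    // string comes from LookupTag(Message, "Maximum-Size", "0").
    static unsigned long long ParseMaximumSize(const std::string &TagValue)
    {
       char *End = 0;
       return std::strtoll(TagValue.c_str(), &End, 10);
    }

    int main()
    {
       assert(ParseMaximumSize("2097152") == 2097152ULL);  // explicit limit
       assert(ParseMaximumSize("0") == 0ULL);              // default: no limit
       return 0;
    }
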
diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h
index cdeecc9a7..675c4f844 100644
--- a/apt-pkg/acquire-method.h
+++ b/apt-pkg/acquire-method.h
@@ -48,6 +48,10 @@ class pkgAcqMethod
bool IndexFile;
bool FailIgnore;
HashStringList ExpectedHashes;
+ // a maximum size we will download; this can be the exact filesize
+ // when we know it, or an arbitrary limit when we don't know the
+ // filesize (like an InRelease file)
+ unsigned long long MaximumSize;
};
struct FetchResult
diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc
index 4a357bdab..64df3c80f 100644
--- a/apt-pkg/acquire-worker.cc
+++ b/apt-pkg/acquire-worker.cc
@@ -534,6 +534,12 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item)
HashStringList const hsl = Item->Owner->HashSums();
for (HashStringList::const_iterator hs = hsl.begin(); hs != hsl.end(); ++hs)
Message += "\nExpected-" + hs->HashType() + ": " + hs->HashValue();
+ if(Item->Owner->FileSize > 0)
+ {
+ string MaximumSize;
+ strprintf(MaximumSize, "%llu", Item->Owner->FileSize);
+ Message += "\nMaximum-Size: " + MaximumSize;
+ }
Message += Item->Owner->Custom600Headers();
Message += "\n\n";
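
The worker only emits the new field when it actually knows the item's file size, so index files with a recorded size get an exact bound while release files rely on the Maximum-Size sent via Custom600Headers(). Below is a sketch of the resulting 600 URI Acquire message; the URI, file name and size are made-up examples, and only the fields relevant here are shown.

    #include <cstdio>
    #include <string>

    // Builds a simplified 600 URI Acquire message including the optional
    // Maximum-Size field added by the patch.
    static std::string ComposeAcquireMessage(const std::string &URI,
                                             const std::string &DestFile,
                                             unsigned long long FileSize)
    {
       std::string Message = "600 URI Acquire\nURI: " + URI + "\nFilename: " + DestFile;
       if (FileSize > 0)   // only sent when the exact size is known
       {
          char Buf[64];
          std::snprintf(Buf, sizeof(Buf), "\nMaximum-Size: %llu", FileSize);
          Message += Buf;
       }
       return Message + "\n\n";
    }

    int main()
    {
       std::printf("%s", ComposeAcquireMessage(
          "http://deb.example.org/dists/unstable/main/binary-i386/Packages",
          "partial/deb.example.org_dists_unstable_main_binary-i386_Packages",
          1397184ULL).c_str());
       return 0;
    }
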
diff --git a/methods/ftp.cc b/methods/ftp.cc
index ac76295f0..5b739ea06 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -849,7 +849,8 @@ bool FTPConn::Finalize()
/* This opens a data connection, sends REST and RETR and then
transfers the file over. */
bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &Hash,bool &Missing)
+ Hashes &Hash,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner)
{
Missing = false;
if (CreateDataFd() == false)
@@ -922,7 +923,14 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
{
Close();
return false;
- }
+ }
+
+ if (MaximumSize > 0 && To.Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ To.Tell(), MaximumSize);
+ }
}
// All done
@@ -1063,7 +1071,7 @@ bool FtpMethod::Fetch(FetchItem *Itm)
FailFd = Fd.Fd();
bool Missing;
- if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing) == false)
+ if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize,this) == false)
{
Fd.Close();
diff --git a/methods/ftp.h b/methods/ftp.h
index dd92f0086..2efd28ec6 100644
--- a/methods/ftp.h
+++ b/methods/ftp.h
@@ -62,7 +62,8 @@ class FTPConn
bool Size(const char *Path,unsigned long long &Size);
bool ModTime(const char *Path, time_t &Time);
bool Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &MD5,bool &Missing);
+ Hashes &MD5,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner);
FTPConn(URI Srv);
~FTPConn();
diff --git a/methods/http.cc b/methods/http.cc
index f2a4a4db6..c00b439b7 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -64,7 +64,8 @@ const unsigned int CircleBuf::BW_HZ=10;
// CircleBuf::CircleBuf - Circular input buffer /*{{{*/
// ---------------------------------------------------------------------
/* */
-CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0)
+CircleBuf::CircleBuf(unsigned long long Size)
+ : Size(Size), Hash(0), TotalWriten(0)
{
Buf = new unsigned char[Size];
Reset();
@@ -80,6 +81,7 @@ void CircleBuf::Reset()
InP = 0;
OutP = 0;
StrPos = 0;
+ TotalWriten = 0;
MaxGet = (unsigned long long)-1;
OutQueue = string();
if (Hash != 0)
@@ -217,6 +219,8 @@ bool CircleBuf::Write(int Fd)
return false;
}
+
+ TotalWriten += Res;
if (Hash != 0)
Hash->Add(Buf + (OutP%Size),Res);
@@ -651,6 +655,13 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File)
return _error->Errno("write",_("Error writing to output file"));
}
+ if (MaximumSize > 0 && File && File->Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ File->Tell(), MaximumSize);
+ }
+
// Handle commands from APT
if (FD_ISSET(STDIN_FILENO,&rfds))
{
diff --git a/methods/http.h b/methods/http.h
index 1df9fa07d..40a88a7be 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -63,6 +63,8 @@ class CircleBuf
public:
Hashes *Hash;
+ // total amount of data that got written so far
+ unsigned long long TotalWriten;
// Read data in
bool Read(int Fd);
@@ -81,8 +83,8 @@ class CircleBuf
bool ReadSpace() const {return Size - (InP - OutP) > 0;};
bool WriteSpace() const {return InP - OutP > 0;};
- // Dump everything
void Reset();
+ // Dump everything
void Stats();
CircleBuf(unsigned long long Size);
diff --git a/methods/https.cc b/methods/https.cc
index a74d2a38b..16d564b34 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -82,6 +82,12 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
if(me->File->Write(buffer, size*nmemb) != true)
return false;
+ if(me->Queue->MaximumSize > 0 && me->File->Tell() > me->Queue->MaximumSize)
+ {
+ me->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ me->TotalWritten, me->Queue->MaximumSize);
+ }
return size*nmemb;
}
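
For the https method the abort happens inside the libcurl write callback: returning a value other than size*nmemb (here 0, via the false/_error->Error() path) makes libcurl stop the transfer. The self-contained imitation below calls the callback directly instead of going through curl_easy_perform(), and DownloadState is an invented stand-in for the method's state.

    #include <cstdio>
    #include <cstring>

    // Invented state shared with the callback; WrittenSoFar plays the role
    // of File->Tell() in the patch.
    struct DownloadState
    {
       unsigned long long WrittenSoFar;
       unsigned long long MaximumSize;
    };

    // Same shape as a libcurl CURLOPT_WRITEFUNCTION callback: returning a
    // value different from size*nmemb tells libcurl to abort the transfer.
    static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp)
    {
       DownloadState *me = static_cast<DownloadState *>(userp);
       (void)buffer;                      // a real callback would write the data out here
       me->WrittenSoFar += size * nmemb;
       if (me->MaximumSize > 0 && me->WrittenSoFar > me->MaximumSize)
       {
          std::fprintf(stderr, "Writing more data than expected (%llu > %llu)\n",
                       me->WrittenSoFar, me->MaximumSize);
          return 0;                       // != size*nmemb, so the transfer is aborted
       }
       return size * nmemb;
    }

    int main()
    {
       DownloadState St = { 0, 16 };      // tiny 16-byte limit for the demonstration
       char Chunk[8];
       std::memset(Chunk, 'x', sizeof(Chunk));
       for (int i = 0; i < 3; ++i)        // the third 8-byte chunk crosses the limit
          if (write_data(Chunk, 1, sizeof(Chunk), &St) != sizeof(Chunk))
             return 1;
       return 0;
    }
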
diff --git a/methods/https.h b/methods/https.h
index 45d1f7f63..0387cb9b5 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -66,11 +66,12 @@ class HttpsMethod : public pkgAcqMethod
CURL *curl;
FetchResult Res;
HttpsServerState *Server;
+ unsigned long long TotalWritten;
public:
FileFd *File;
-
- HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), Server(NULL), File(NULL)
+
+ HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), Server(NULL), TotalWritten(0), File(NULL)
{
curl = curl_easy_init();
};
diff --git a/methods/server.cc b/methods/server.cc
index 4a961f454..cef809738 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -532,6 +532,13 @@ int ServerMethod::Loop()
// Run the data
bool Result = true;
+
+ // ensure we don't fetch too much
+ // we could do "Server->MaximumSize = Queue->MaximumSize" here
+ // but that would break the clever pipeline mess-up detection
+ // so instead we use the size of the biggest item in the queue
+ Server->MaximumSize = FindMaximumObjectSizeInQueue();
+
if (Server->HaveContent)
Result = Server->RunData(File);
@@ -605,7 +612,10 @@ int ServerMethod::Loop()
QueueBack = Queue;
}
else
+ {
+ Server->Close();
Fail(true);
+ }
}
break;
}
@@ -700,3 +710,13 @@ int ServerMethod::Loop()
return 0;
}
/*}}}*/
+ /*{{{*/
+unsigned long long
+ServerMethod::FindMaximumObjectSizeInQueue() const
+{
+ unsigned long long MaxSizeInQueue = 0;
+ for (FetchItem *I = Queue->Next; I != 0 && I != QueueBack; I = I->Next)
+ MaxSizeInQueue = std::max(MaxSizeInQueue, I->MaximumSize);
+ return MaxSizeInQueue;
+}
+ /*}}}*/
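
Because pipelining keeps several requests in flight on one connection, the server state cannot simply adopt the front item's MaximumSize; the patch instead uses the largest limit of any item currently queued. A toy version of FindMaximumObjectSizeInQueue(), with the fetch-queue walk replaced by a plain vector:

    #include <algorithm>
    #include <cassert>
    #include <vector>

    // Toy stand-in for ServerMethod::FindMaximumObjectSizeInQueue(): take the
    // largest per-item limit so no pipelined response is cut off too early.
    static unsigned long long FindMaximumObjectSize(const std::vector<unsigned long long> &QueuedLimits)
    {
       unsigned long long Max = 0;
       for (std::vector<unsigned long long>::const_iterator I = QueuedLimits.begin();
            I != QueuedLimits.end(); ++I)
          Max = std::max(Max, *I);
       return Max;
    }

    int main()
    {
       std::vector<unsigned long long> Limits;
       Limits.push_back(10 * 1000 * 1000);  // e.g. an InRelease capped by Acquire::MaxReleaseFileSize
       Limits.push_back(1397184);           // e.g. a Packages file with a known exact size
       assert(FindMaximumObjectSize(Limits) == 10 * 1000 * 1000ULL);
       return 0;
    }
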
diff --git a/methods/server.h b/methods/server.h
index aa692ea93..7d5198478 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -49,6 +49,8 @@ struct ServerState
URI Proxy;
unsigned long TimeOut;
+ unsigned long long MaximumSize;
+
protected:
ServerMethod *Owner;
@@ -73,7 +75,7 @@ struct ServerState
bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0;
StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
- State = Header; Persistent = false; Pipeline = true;};
+ State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;};
virtual bool WriteResponse(std::string const &Data) = 0;
/** \brief Transfer the data from the socket */
@@ -104,6 +106,10 @@ class ServerMethod : public pkgAcqMethod
unsigned long PipelineDepth;
bool AllowRedirect;
+ // Find the biggest item in the fetch queue for checking against the
+ // maximum size
+ unsigned long long FindMaximumObjectSizeInQueue() const APT_PURE;
+
public:
bool Debug;
diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size
new file mode 100755
index 000000000..58920f544
--- /dev/null
+++ b/test/integration/test-apt-update-expected-size
@@ -0,0 +1,44 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+insertpackage 'unstable' 'apt' 'all' '1.0'
+
+setupaptarchive --no-update
+changetowebserver
+
+# normal update works fine
+testsuccess aptget update
+
+# make InRelease really big
+mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good
+dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null
+touch -d '+1hour' aptarchive/dists/unstable/InRelease
+aptget update -o Apt::Get::List-Cleanup=0 -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 > output.log
+msgtest 'Check that the max write warning is triggered'
+if grep -q "Writing more data than expected" output.log; then
+ msgpass
+else
+ cat output.log
+ msgfail
+fi
+# ensure the failed InRelease file got renamed
+testsuccess ls rootdir/var/lib/apt/lists/partial/*InRelease.FAILED
+mv aptarchive/dists/unstable/InRelease.good aptarchive/dists/unstable/InRelease
+
+
+# append junk at the end of the Packages.gz/Packages
+SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
+echo "1234567890" >> aptarchive/dists/unstable/main/binary-i386/Packages.gz
+echo "1234567890" >> aptarchive/dists/unstable/main/binary-i386/Packages
+NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)"
+rm -f rootdir/var/lib/apt/lists/localhost*
+testequal "W: Failed to fetch http://localhost:8080/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) [IP: ::1 8080]
+
+E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq
+
diff --git a/test/integration/test-cve-2013-1051-InRelease-parsing b/test/integration/test-cve-2013-1051-InRelease-parsing
index 41b27f691..8f9803991 100755
--- a/test/integration/test-cve-2013-1051-InRelease-parsing
+++ b/test/integration/test-cve-2013-1051-InRelease-parsing
@@ -42,7 +42,7 @@ touch -d '+1hour' aptarchive/dists/stable/InRelease
# ensure the update fails
# useful for debugging to add "-o Debug::pkgAcquire::auth=true"
msgtest 'apt-get update for should fail with the modified' 'InRelease'
-aptget update 2>&1 | grep -q 'Hash Sum mismatch' > /dev/null && msgpass || msgfail
+aptget update 2>&1 | grep -E -q '(Writing more data than expected|Hash Sum mismatch)' > /dev/null && msgpass || msgfail
# ensure there is no package
testequal 'Reading package lists...