Diffstat (limited to 'methods')
 -rw-r--r--  methods/copy.cc       |  9
 -rw-r--r--  methods/ftp.cc        | 22
 -rw-r--r--  methods/ftp.h         |  3
 -rw-r--r--  methods/gpgv.cc       | 17
 -rw-r--r--  methods/gzip.cc       |  3
 -rw-r--r--  methods/http.cc       | 13
 -rw-r--r--  methods/http.h        |  4
 -rw-r--r--  methods/http_main.cc  |  5
 -rw-r--r--  methods/https.cc      |  8
 -rw-r--r--  methods/https.h       |  4
 -rw-r--r--  methods/server.cc     | 72
 -rw-r--r--  methods/server.h      | 10
 12 files changed, 143 insertions, 27 deletions
diff --git a/methods/copy.cc b/methods/copy.cc
index 40f8f85ec..a23c0316c 100644
--- a/methods/copy.cc
+++ b/methods/copy.cc
@@ -67,6 +67,14 @@ bool CopyMethod::Fetch(FetchItem *Itm)
Res.LastModified = Buf.st_mtime;
Res.IMSHit = false;
URIStart(Res);
+
+ // when the files are identical, just compute the hashes
+ if(File == Itm->DestFile)
+ {
+ CalculateHashes(Res);
+ URIDone(Res);
+ return true;
+ }
// just calc the hashes if the source and destination are identical
if (File == Itm->DestFile)
@@ -116,5 +124,6 @@ int main()
setlocale(LC_ALL, "");
CopyMethod Mth;
+
return Mth.Run();
}
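
The copy method now short-circuits when source and destination are the same
path: it only hashes the file that is already in place and reports the fetch
as done. A minimal sketch of what a CalculateHashes helper along these lines
could look like (the exact signature used in copy.cc is an assumption here;
Hashes, FileFd and FetchResult::TakeHashes are existing apt-pkg interfaces):

   // Sketch only: hash an already-present file and hand the result to APT.
   static void CalculateHashes(pkgAcqMethod::FetchResult &Res, std::string const &Path)
   {
      Hashes Hash;
      FileFd Fd(Path, FileFd::ReadOnly);
      Hash.AddFD(Fd);         // feed the whole file into all configured hashers
      Res.TakeHashes(Hash);   // attach the MD5/SHA* results to the fetch result
   }
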
diff --git a/methods/ftp.cc b/methods/ftp.cc
index 66787a7be..5b739ea06 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -75,9 +75,10 @@ time_t FtpMethod::FailTime = 0;
// FTPConn::FTPConn - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
+FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
DataListenFd(-1), ServerName(Srv),
- ForceExtended(false), TryPassive(true)
+ ForceExtended(false), TryPassive(true),
+ PeerAddrLen(0), ServerAddrLen(0)
{
Debug = _config->FindB("Debug::Acquire::Ftp",false);
PasvAddr = 0;
@@ -848,7 +849,8 @@ bool FTPConn::Finalize()
/* This opens a data connection, sends REST and RETR and then
transfers the file over. */
bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &Hash,bool &Missing)
+ Hashes &Hash,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner)
{
Missing = false;
if (CreateDataFd() == false)
@@ -921,7 +923,14 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
{
Close();
return false;
- }
+ }
+
+ if (MaximumSize > 0 && To.Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ To.Tell(), MaximumSize);
+ }
}
// All done
@@ -1062,7 +1071,7 @@ bool FtpMethod::Fetch(FetchItem *Itm)
FailFd = Fd.Fd();
bool Missing;
- if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing) == false)
+ if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize,this) == false)
{
Fd.Close();
@@ -1131,6 +1140,9 @@ int main(int, const char *argv[])
}
FtpMethod Mth;
+
+ // no more active ftp, sorry
+ Mth.DropPrivsOrDie();
return Mth.Run();
}
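
FTPConn::Get() now takes a MaximumSize cap so a transfer can be aborted as
soon as the server sends more data than the acquire system expects for the
item. A standalone sketch of the same pattern, detached from the FTP state
machine (names are illustrative, not apt code):

   // Illustrative only: copy data but fail once a caller-supplied cap is
   // exceeded, mirroring the "Writing more data than expected" error above.
   static bool CopyWithCap(FileFd &From, FileFd &To, unsigned long long MaximumSize)
   {
      unsigned char Buf[4096];
      unsigned long long Total = 0;
      while (true)
      {
         unsigned long long Read = 0;
         if (From.Read(Buf, sizeof(Buf), &Read) == false)
            return false;
         if (Read == 0)
            return true;                    // EOF: everything fit under the cap
         if (To.Write(Buf, Read) == false)
            return false;
         Total += Read;
         if (MaximumSize > 0 && Total > MaximumSize)
            return _error->Error("Writing more data than expected (%llu > %llu)",
                                 Total, MaximumSize);
      }
   }
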
diff --git a/methods/ftp.h b/methods/ftp.h
index dd92f0086..2efd28ec6 100644
--- a/methods/ftp.h
+++ b/methods/ftp.h
@@ -62,7 +62,8 @@ class FTPConn
bool Size(const char *Path,unsigned long long &Size);
bool ModTime(const char *Path, time_t &Time);
bool Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &MD5,bool &Missing);
+ Hashes &MD5,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner);
FTPConn(URI Srv);
~FTPConn();
diff --git a/methods/gpgv.cc b/methods/gpgv.cc
index ae521a2ed..7e8500c51 100644
--- a/methods/gpgv.cc
+++ b/methods/gpgv.cc
@@ -5,6 +5,7 @@
#include <apt-pkg/error.h>
#include <apt-pkg/gpgv.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/fileutl.h>
#include <ctype.h>
#include <errno.h>
@@ -74,7 +75,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
FILE *pipein = fdopen(fd[0], "r");
- // Loop over the output of gpgv, and check the signatures.
+ // Loop over the output of apt-key (which really is gnupg), and check the signatures.
size_t buffersize = 64;
char *buffer = (char *) malloc(buffersize);
size_t bufferoff = 0;
@@ -159,7 +160,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
waitpid(pid, &status, 0);
if (Debug == true)
{
- std::clog << "gpgv exited\n";
+ ioprintf(std::clog, "gpgv exited with status %i\n", WEXITSTATUS(status));
}
if (WEXITSTATUS(status) == 0)
@@ -171,7 +172,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
else if (WEXITSTATUS(status) == 1)
return _("At least one invalid signature was encountered.");
else if (WEXITSTATUS(status) == 111)
- return _("Could not execute 'gpgv' to verify signature (is gpgv installed?)");
+ return _("Could not execute 'apt-key' to verify signature (is gnupg installed?)");
else if (WEXITSTATUS(status) == 112)
{
// acquire system checks for "NODATA" to generate GPG errors (the others are only warnings)
@@ -181,7 +182,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
return errmsg;
}
else
- return _("Unknown error executing gpgv");
+ return _("Unknown error executing apt-key");
}
bool GPGVMethod::Fetch(FetchItem *Itm)
@@ -199,7 +200,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
Res.Filename = Itm->DestFile;
URIStart(Res);
- // Run gpgv on file, extract contents and get the key ID of the signer
+ // Run apt-key on file, extract contents and get the key ID of the signer
string msg = VerifyGetSigners(Path.c_str(), Itm->DestFile.c_str(),
GoodSigners, BadSigners, WorthlessSigners,
NoPubKeySigners);
@@ -251,7 +252,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
if (_config->FindB("Debug::Acquire::gpgv", false))
{
- std::clog << "gpgv succeeded\n";
+ std::clog << "apt-key succeeded\n";
}
return true;
@@ -261,8 +262,10 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
int main()
{
setlocale(LC_ALL, "");
-
+
GPGVMethod Mth;
+ Mth.DropPrivsOrDie();
+
return Mth.Run();
}
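
Like the other methods touched here, gpgv now calls DropPrivsOrDie() before
doing any work. Conceptually the helper switches the process to the
unprivileged sandbox user and refuses to continue if that is not possible; a
hedged sketch of the idea (the real implementation lives in apt-pkg and the
exit code shown is an assumption):

   // Conceptual sketch, not the actual implementation: give up privileges
   // before touching untrusted data, and bail out if that fails.
   void pkgAcqMethod::DropPrivsOrDie()
   {
      if (DropPrivileges() == false)   // drops to the APT::Sandbox::User account
      {
         Fail(false);                  // tell the acquire system this method failed
         exit(112);                    // assumed non-zero exit code
      }
   }
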
diff --git a/methods/gzip.cc b/methods/gzip.cc
index df3f8828f..7ffcda60f 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -139,5 +139,8 @@ int main(int, char *argv[])
++Prog;
GzipMethod Mth;
+
+ Mth.DropPrivsOrDie();
+
return Mth.Run();
}
diff --git a/methods/http.cc b/methods/http.cc
index f2a4a4db6..c00b439b7 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -64,7 +64,8 @@ const unsigned int CircleBuf::BW_HZ=10;
// CircleBuf::CircleBuf - Circular input buffer /*{{{*/
// ---------------------------------------------------------------------
/* */
-CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0)
+CircleBuf::CircleBuf(unsigned long long Size)
+ : Size(Size), Hash(0), TotalWriten(0)
{
Buf = new unsigned char[Size];
Reset();
@@ -80,6 +81,7 @@ void CircleBuf::Reset()
InP = 0;
OutP = 0;
StrPos = 0;
+ TotalWriten = 0;
MaxGet = (unsigned long long)-1;
OutQueue = string();
if (Hash != 0)
@@ -217,6 +219,8 @@ bool CircleBuf::Write(int Fd)
return false;
}
+
+ TotalWriten += Res;
if (Hash != 0)
Hash->Add(Buf + (OutP%Size),Res);
@@ -651,6 +655,13 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File)
return _error->Errno("write",_("Error writing to output file"));
}
+ if (MaximumSize > 0 && File && File->Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ File->Tell(), MaximumSize);
+ }
+
// Handle commands from APT
if (FD_ISSET(STDIN_FILENO,&rfds))
{
diff --git a/methods/http.h b/methods/http.h
index 1df9fa07d..40a88a7be 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -63,6 +63,8 @@ class CircleBuf
public:
Hashes *Hash;
+ // total amount of data that got written so far
+ unsigned long long TotalWriten;
// Read data in
bool Read(int Fd);
@@ -81,8 +83,8 @@ class CircleBuf
bool ReadSpace() const {return Size - (InP - OutP) > 0;};
bool WriteSpace() const {return InP - OutP > 0;};
- // Dump everything
void Reset();
+ // Dump everything
void Stats();
CircleBuf(unsigned long long Size);
diff --git a/methods/http_main.cc b/methods/http_main.cc
index 3b346a514..f21a5709c 100644
--- a/methods/http_main.cc
+++ b/methods/http_main.cc
@@ -1,5 +1,6 @@
#include <config.h>
-
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/error.h>
#include <signal.h>
#include "http.h"
@@ -13,5 +14,7 @@ int main()
signal(SIGPIPE, SIG_IGN);
HttpMethod Mth;
+
+ Mth.DropPrivsOrDie();
return Mth.Loop();
}
diff --git a/methods/https.cc b/methods/https.cc
index 0499af0c5..16d564b34 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -82,6 +82,12 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
if(me->File->Write(buffer, size*nmemb) != true)
return false;
+ if(me->Queue->MaximumSize > 0 && me->File->Tell() > me->Queue->MaximumSize)
+ {
+ me->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ me->TotalWritten, me->Queue->MaximumSize);
+ }
return size*nmemb;
}
@@ -446,6 +452,8 @@ int main()
HttpsMethod Mth;
curl_global_init(CURL_GLOBAL_SSL) ;
+ Mth.DropPrivsOrDie();
+
return Mth.Run();
}
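
In the https method the size check sits inside a libcurl write callback, so
an oversized transfer is stopped by making the callback report a failure: a
return value different from size*nmemb tells curl to abort. A self-contained
sketch of that pattern with plain libcurl (WriteState and its fields are made
up for illustration):

   #include <cstdio>

   struct WriteState { FILE *out; unsigned long long written, maxSize; };

   // Sketch of a CURLOPT_WRITEFUNCTION callback that enforces a byte cap.
   static size_t write_cb(char *data, size_t size, size_t nmemb, void *userp)
   {
      WriteState *s = static_cast<WriteState *>(userp);
      size_t const bytes = size * nmemb;
      if (fwrite(data, 1, bytes, s->out) != bytes)
         return 0;                             // write error: abort the transfer
      s->written += bytes;
      if (s->maxSize > 0 && s->written > s->maxSize)
         return 0;                             // cap exceeded: abort the transfer
      return bytes;
   }

Such a callback would be registered with curl_easy_setopt(curl,
CURLOPT_WRITEFUNCTION, write_cb) and CURLOPT_WRITEDATA pointing at the state.
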
diff --git a/methods/https.h b/methods/https.h
index faac8a3cd..0387cb9b5 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -66,13 +66,13 @@ class HttpsMethod : public pkgAcqMethod
CURL *curl;
FetchResult Res;
HttpsServerState *Server;
+ unsigned long long TotalWritten;
public:
FileFd *File;
- HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), File(NULL)
+ HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), Server(NULL), TotalWritten(0), File(NULL)
{
- File = 0;
curl = curl_easy_init();
};
diff --git a/methods/server.cc b/methods/server.cc
index 92d94e638..cef809738 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -324,10 +324,10 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
failure */
if (Server->Result < 200 || Server->Result >= 300)
{
- char err[255];
- snprintf(err,sizeof(err)-1,"HttpError%i",Server->Result);
+ std::string err;
+ strprintf(err, "HttpError%u", Server->Result);
SetFailReason(err);
- _error->Error("%u %s",Server->Result,Server->Code);
+ _error->Error("%u %s", Server->Result, Server->Code);
if (Server->HaveContent == true)
return ERROR_WITH_CONTENT_PAGE;
return ERROR_UNRECOVERABLE;
@@ -393,9 +393,16 @@ bool ServerMethod::Fetch(FetchItem *)
for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth;
I = I->Next, Depth++)
{
- // If pipelining is disabled, we only queue 1 request
- if (Server->Pipeline == false && Depth >= 0)
- break;
+ if (Depth >= 0)
+ {
+ // If pipelining is disabled, we only queue 1 request
+ if (Server->Pipeline == false)
+ break;
+ // if we have no hashes, do at most one such request
+ // as we can't fix up pipelining misbehaviors otherwise
+ else if (I->ExpectedHashes.usable() == false)
+ break;
+ }
// Make sure we stick with the same server
if (Server->Comp(I->Uri) == false)
@@ -525,6 +532,13 @@ int ServerMethod::Loop()
// Run the data
bool Result = true;
+
+ // ensure we don't fetch too much
+ // we could do "Server->MaximumSize = Queue->MaximumSize" here
+ // but that would break the clever pipeline mess-up detection
+ // so instead we use the size of the biggest item in the queue
+ Server->MaximumSize = FindMaximumObjectSizeInQueue();
+
if (Server->HaveContent)
Result = Server->RunData(File);
@@ -547,7 +561,38 @@ int ServerMethod::Loop()
// Send status to APT
if (Result == true)
{
- Res.TakeHashes(*Server->GetHashes());
+ Hashes * const resultHashes = Server->GetHashes();
+ HashStringList const hashList = resultHashes->GetHashStringList();
+ if (PipelineDepth != 0 && Queue->ExpectedHashes.usable() == true && Queue->ExpectedHashes != hashList)
+ {
+ // we did not get the expected hash… mhhh:
+ // could it be that server/proxy messed up pipelining?
+ FetchItem * BeforeI = Queue;
+ for (FetchItem *I = Queue->Next; I != 0 && I != QueueBack; I = I->Next)
+ {
+ if (I->ExpectedHashes.usable() == true && I->ExpectedHashes == hashList)
+ {
+ // yes, he did! Disable pipelining and rewrite queue
+ if (Server->Pipeline == true)
+ {
+ // FIXME: fake a warning message as we have no proper way of communicating here
+ std::string out;
+ strprintf(out, _("Automatically disabled %s due to incorrect response from server/proxy. (man 5 apt.conf)"), "Acquire::http::PipelineDepth");
+ std::cerr << "W: " << out << std::endl;
+ Server->Pipeline = false;
+ // we keep the PipelineDepth value so that the rest of the queue can be fixed up as well
+ }
+ Rename(Res.Filename, I->DestFile);
+ Res.Filename = I->DestFile;
+ BeforeI->Next = I->Next;
+ I->Next = Queue;
+ Queue = I;
+ break;
+ }
+ BeforeI = I;
+ }
+ }
+ Res.TakeHashes(*resultHashes);
URIDone(Res);
}
else
@@ -567,7 +612,10 @@ int ServerMethod::Loop()
QueueBack = Queue;
}
else
+ {
+ Server->Close();
Fail(true);
+ }
}
break;
}
@@ -662,3 +710,13 @@ int ServerMethod::Loop()
return 0;
}
/*}}}*/
+ /*{{{*/
+unsigned long long
+ServerMethod::FindMaximumObjectSizeInQueue() const
+{
+ unsigned long long MaxSizeInQueue = 0;
+ for (FetchItem *I = Queue->Next; I != 0 && I != QueueBack; I = I->Next)
+ MaxSizeInQueue = std::max(MaxSizeInQueue, I->MaximumSize);
+ return MaxSizeInQueue;
+}
+ /*}}}*/
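
The pipeline fix-up above works by noticing that the hashes of the data just
received match a different item further down the queue, which means the
server or a proxy answered pipelined requests out of order; the matching item
is spliced to the front of the queue so the downloaded file can be attributed
to it. A simplified, standalone illustration of that queue rotation (Item and
its fields are illustrative, not apt types):

   #include <string>

   struct Item { std::string ExpectedHash; Item *Next; };

   // Move the first node whose expected hash matches the received data to the
   // front of a singly linked queue, as Loop() does above for FetchItems.
   static void MoveMatchToFront(Item *&Queue, std::string const &Received)
   {
      Item *Before = Queue;
      for (Item *I = Queue->Next; I != nullptr; Before = I, I = I->Next)
         if (I->ExpectedHash == Received)
         {
            Before->Next = I->Next;   // unlink the matching item ...
            I->Next = Queue;          // ... and make it the new head
            Queue = I;
            break;
         }
   }
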
diff --git a/methods/server.h b/methods/server.h
index f5e68d902..7d5198478 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -49,6 +49,8 @@ struct ServerState
URI Proxy;
unsigned long TimeOut;
+ unsigned long long MaximumSize;
+
protected:
ServerMethod *Owner;
@@ -73,7 +75,7 @@ struct ServerState
bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0;
StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
- State = Header; Persistent = false; Pipeline = true;};
+ State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;};
virtual bool WriteResponse(std::string const &Data) = 0;
/** \brief Transfer the data from the socket */
@@ -104,6 +106,10 @@ class ServerMethod : public pkgAcqMethod
unsigned long PipelineDepth;
bool AllowRedirect;
+ // Find the biggest item in the fetch queue to check
+ // against the maximum size
+ unsigned long long FindMaximumObjectSizeInQueue() const APT_PURE;
+
public:
bool Debug;
@@ -140,7 +146,7 @@ class ServerMethod : public pkgAcqMethod
virtual ServerState * CreateServerState(URI uri) = 0;
virtual void RotateDNS() = 0;
- ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(0), AllowRedirect(false), Debug(false) {};
+ ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(10), AllowRedirect(false), Debug(false) {};
virtual ~ServerMethod() {};
};