Diffstat (limited to 'methods')
 -rw-r--r--  methods/cdrom.cc      |    2
 -rw-r--r--  methods/copy.cc       |   15
 -rw-r--r--  methods/file.cc       |   45
 -rw-r--r--  methods/ftp.cc        |   43
 -rw-r--r--  methods/ftp.h         |    3
 -rw-r--r--  methods/gpgv.cc       |   58
 -rw-r--r--  methods/gzip.cc       |   13
 -rw-r--r--  methods/http.cc       |   35
 -rw-r--r--  methods/http.h        |    6
 -rw-r--r--  methods/http_main.cc  |    4
 -rw-r--r--  methods/https.cc      |  157
 -rw-r--r--  methods/https.h       |   31
 -rw-r--r--  methods/rred.cc       |    6
 -rw-r--r--  methods/rsh.cc        |    2
 -rw-r--r--  methods/server.cc     |  104
 -rw-r--r--  methods/server.h      |   13
 16 files changed, 370 insertions(+), 167 deletions(-)
diff --git a/methods/cdrom.cc b/methods/cdrom.cc
index 74e2ecc6b..10cb29f66 100644
--- a/methods/cdrom.cc
+++ b/methods/cdrom.cc
@@ -266,7 +266,7 @@ bool CDROMMethod::Fetch(FetchItem *Itm)
Res.LastModified = Buf.st_mtime;
Res.Size = Buf.st_size;
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
FileFd Fd(Res.Filename, FileFd::ReadOnly);
Hash.AddFD(Fd);
Res.TakeHashes(Hash);
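
This one-line pattern recurs throughout the commit: each method now seeds its Hashes instance with the item's expected hashes, so only the digest algorithms the acquire system actually asked for get computed. A minimal standalone sketch of the idea, using stand-in types rather than APT's real Hashes/HashStringList:

// Sketch only: stand-ins for APT's HashStringList/Hashes, illustrating why
// the constructor now takes the expected hash list.
#include <string>
#include <vector>

struct HashStringList                       // stand-in: e.g. {"SHA256", "MD5Sum"}
{
   std::vector<std::string> types;
   bool contains(std::string const &t) const
   {
      for (std::vector<std::string>::const_iterator i = types.begin(); i != types.end(); ++i)
         if (*i == t)
            return true;
      return false;
   }
};

struct Hashes
{
   bool doMD5, doSHA256;
   Hashes() : doMD5(true), doSHA256(true) {}        // no expectations: hash with everything
   explicit Hashes(HashStringList const &expected)  // expectations: hash only what is needed
      : doMD5(expected.contains("MD5Sum")), doSHA256(expected.contains("SHA256")) {}
   void Add(unsigned char const *data, unsigned long long size)
   {
      if (doMD5)    { /* feed the MD5 context */ }
      if (doSHA256) { /* feed the SHA256 context */ }
      (void)data; (void)size;
   }
};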
diff --git a/methods/copy.cc b/methods/copy.cc
index 40f8f85ec..a8e289df5 100644
--- a/methods/copy.cc
+++ b/methods/copy.cc
@@ -28,16 +28,16 @@
class CopyMethod : public pkgAcqMethod
{
virtual bool Fetch(FetchItem *Itm);
- void CalculateHashes(FetchResult &Res);
+ void CalculateHashes(FetchItem const * const Itm, FetchResult &Res);
public:
CopyMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig) {};
};
-void CopyMethod::CalculateHashes(FetchResult &Res)
+void CopyMethod::CalculateHashes(FetchItem const * const Itm, FetchResult &Res)
{
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
FileFd::CompressMode CompressMode = FileFd::None;
if (_config->FindB("Acquire::GzipIndexes", false) == true)
CompressMode = FileFd::Extension;
@@ -65,13 +65,13 @@ bool CopyMethod::Fetch(FetchItem *Itm)
Res.Size = Buf.st_size;
Res.Filename = Itm->DestFile;
Res.LastModified = Buf.st_mtime;
- Res.IMSHit = false;
+ Res.IMSHit = false;
URIStart(Res);
-
+
// just calc the hashes if the source and destination are identical
if (File == Itm->DestFile)
{
- CalculateHashes(Res);
+ CalculateHashes(Itm, Res);
URIDone(Res);
return true;
}
@@ -104,7 +104,7 @@ bool CopyMethod::Fetch(FetchItem *Itm)
if (utimes(Res.Filename.c_str(), times) != 0)
return _error->Errno("utimes",_("Failed to set modification time"));
- CalculateHashes(Res);
+ CalculateHashes(Itm, Res);
URIDone(Res);
return true;
@@ -116,5 +116,6 @@ int main()
setlocale(LC_ALL, "");
CopyMethod Mth;
+
return Mth.Run();
}
diff --git a/methods/file.cc b/methods/file.cc
index 12db62203..043ab04b8 100644
--- a/methods/file.cc
+++ b/methods/file.cc
@@ -16,6 +16,7 @@
#include <config.h>
#include <apt-pkg/acquire-method.h>
+#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/error.h>
#include <apt-pkg/hashes.h>
#include <apt-pkg/fileutl.h>
@@ -33,7 +34,7 @@ class FileMethod : public pkgAcqMethod
public:
- FileMethod() : pkgAcqMethod("1.0",SingleInstance | LocalOnly) {};
+ FileMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig | LocalOnly) {};
};
// FileMethod::Fetch - Fetch a file /*{{{*/
@@ -58,31 +59,35 @@ bool FileMethod::Fetch(FetchItem *Itm)
if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0)
Res.IMSHit = true;
}
-
- // See if we can compute a file without a .gz exentsion
- std::string::size_type Pos = File.rfind(".gz");
- if (Pos + 3 == File.length())
+
+ // See if the uncompressed file exists and reuse it
+ std::vector<std::string> extensions = APT::Configuration::getCompressorExtensions();
+ for (std::vector<std::string>::const_iterator ext = extensions.begin(); ext != extensions.end(); ++ext)
{
- File = std::string(File,0,Pos);
- if (stat(File.c_str(),&Buf) == 0)
+ if (APT::String::Endswith(File, *ext) == true)
{
- FetchResult AltRes;
- AltRes.Size = Buf.st_size;
- AltRes.Filename = File;
- AltRes.LastModified = Buf.st_mtime;
- AltRes.IMSHit = false;
- if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0)
- AltRes.IMSHit = true;
-
- URIDone(Res,&AltRes);
- return true;
- }
+ std::string const unfile = File.substr(0, File.length() - ext->length() - 1);
+ if (stat(unfile.c_str(),&Buf) == 0)
+ {
+ FetchResult AltRes;
+ AltRes.Size = Buf.st_size;
+ AltRes.Filename = unfile;
+ AltRes.LastModified = Buf.st_mtime;
+ AltRes.IMSHit = false;
+ if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0)
+ AltRes.IMSHit = true;
+
+ URIDone(Res,&AltRes);
+ return true;
+ }
+ // no break here as we could have situations similar to '.gz' vs '.tar.gz'
+ }
}
-
+
if (Res.Filename.empty() == true)
return _error->Error(_("File not found"));
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
FileFd Fd(Res.Filename, FileFd::ReadOnly);
Hash.AddFD(Fd);
Res.TakeHashes(Hash);
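
The rewritten block generalises the old hard-coded '.gz' check to every configured compressor extension. A self-contained sketch of the suffix handling, with a fixed extension list standing in for APT::Configuration::getCompressorExtensions():

// Sketch: match and strip a known compressor extension from a path.
#include <string>
#include <vector>

static bool Endswith(std::string const &s, std::string const &suffix)
{
   if (suffix.length() > s.length())
      return false;
   return s.compare(s.length() - suffix.length(), suffix.length(), suffix) == 0;
}

static std::string StripCompressorExtension(std::string const &file)
{
   std::vector<std::string> extensions;
   extensions.push_back(".gz");
   extensions.push_back(".bz2");
   extensions.push_back(".xz");
   for (std::vector<std::string>::const_iterator ext = extensions.begin(); ext != extensions.end(); ++ext)
      if (Endswith(file, *ext))
         return file.substr(0, file.length() - ext->length());
   return file;                     // no known compressor extension found
}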
diff --git a/methods/ftp.cc b/methods/ftp.cc
index 66787a7be..92d8573f1 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -75,9 +75,10 @@ time_t FtpMethod::FailTime = 0;
// FTPConn::FTPConn - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
+FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
DataListenFd(-1), ServerName(Srv),
- ForceExtended(false), TryPassive(true)
+ ForceExtended(false), TryPassive(true),
+ PeerAddrLen(0), ServerAddrLen(0)
{
Debug = _config->FindB("Debug::Acquire::Ftp",false);
PasvAddr = 0;
@@ -258,19 +259,21 @@ bool FTPConn::Login()
{
if (Opts->Value.empty() == true)
continue;
-
+
// Substitute the variables into the command
- char SitePort[20];
- if (ServerName.Port != 0)
- sprintf(SitePort,"%u",ServerName.Port);
- else
- strcpy(SitePort,"21");
string Tmp = Opts->Value;
Tmp = SubstVar(Tmp,"$(PROXY_USER)",Proxy.User);
Tmp = SubstVar(Tmp,"$(PROXY_PASS)",Proxy.Password);
Tmp = SubstVar(Tmp,"$(SITE_USER)",User);
Tmp = SubstVar(Tmp,"$(SITE_PASS)",Pass);
- Tmp = SubstVar(Tmp,"$(SITE_PORT)",SitePort);
+ if (ServerName.Port != 0)
+ {
+ std::string SitePort;
+ strprintf(SitePort, "%u", ServerName.Port);
+ Tmp = SubstVar(Tmp,"$(SITE_PORT)", SitePort);
+ }
+ else
+ Tmp = SubstVar(Tmp,"$(SITE_PORT)", "21");
Tmp = SubstVar(Tmp,"$(SITE)",ServerName.Host);
// Send the command
@@ -848,7 +851,8 @@ bool FTPConn::Finalize()
/* This opens a data connection, sends REST and RETR and then
transfers the file over. */
bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &Hash,bool &Missing)
+ Hashes &Hash,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner)
{
Missing = false;
if (CreateDataFd() == false)
@@ -921,7 +925,14 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
{
Close();
return false;
- }
+ }
+
+ if (MaximumSize > 0 && To.Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ To.Tell(), MaximumSize);
+ }
}
// All done
@@ -979,6 +990,10 @@ bool FtpMethod::Configuration(string Message)
return false;
TimeOut = _config->FindI("Acquire::Ftp::Timeout",TimeOut);
+
+ // no more active ftp, sorry
+ DropPrivsOrDie();
+
return true;
}
/*}}}*/
@@ -1049,7 +1064,7 @@ bool FtpMethod::Fetch(FetchItem *Itm)
}
// Open the file
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
{
FileFd Fd(Itm->DestFile,FileFd::WriteAny);
if (_error->PendingError() == true)
@@ -1062,7 +1077,7 @@ bool FtpMethod::Fetch(FetchItem *Itm)
FailFd = Fd.Fd();
bool Missing;
- if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing) == false)
+ if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize,this) == false)
{
Fd.Close();
@@ -1131,6 +1146,6 @@ int main(int, const char *argv[])
}
FtpMethod Mth;
-
+
return Mth.Run();
}
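
The new MaximumSize parameter lets the transfer loop abort as soon as the server sends more data than the acquire system expects. Pulled out of the loop above into a hypothetical helper, the guard reduces to this shape:

// Hypothetical helper showing the shape of the new guard; the real check
// lives inline in FTPConn::Get (and similarly in HttpServerState::Go).
#include <cstdio>

static bool CheckMaximumSize(unsigned long long Written, unsigned long long MaximumSize)
{
   if (MaximumSize > 0 && Written > MaximumSize)
   {
      // APT additionally calls Owner->SetFailReason("MaximumSizeExceeded")
      std::fprintf(stderr, "Writing more data than expected (%llu > %llu)\n",
                   Written, MaximumSize);
      return false;   // the caller closes the connection and fails the item
   }
   return true;
}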
diff --git a/methods/ftp.h b/methods/ftp.h
index dd92f0086..2efd28ec6 100644
--- a/methods/ftp.h
+++ b/methods/ftp.h
@@ -62,7 +62,8 @@ class FTPConn
bool Size(const char *Path,unsigned long long &Size);
bool ModTime(const char *Path, time_t &Time);
bool Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &MD5,bool &Missing);
+ Hashes &MD5,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner);
FTPConn(URI Srv);
~FTPConn();
diff --git a/methods/gpgv.cc b/methods/gpgv.cc
index ae521a2ed..41f138be6 100644
--- a/methods/gpgv.cc
+++ b/methods/gpgv.cc
@@ -5,6 +5,7 @@
#include <apt-pkg/error.h>
#include <apt-pkg/gpgv.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/fileutl.h>
#include <ctype.h>
#include <errno.h>
@@ -43,12 +44,22 @@ class GPGVMethod : public pkgAcqMethod
protected:
virtual bool Fetch(FetchItem *Itm);
-
+ virtual bool Configuration(string Message);
public:
GPGVMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig) {};
};
+bool GPGVMethod::Configuration(string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
+
string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
vector<string> &GoodSigners,
vector<string> &BadSigners,
@@ -74,34 +85,13 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
FILE *pipein = fdopen(fd[0], "r");
- // Loop over the output of gpgv, and check the signatures.
- size_t buffersize = 64;
- char *buffer = (char *) malloc(buffersize);
- size_t bufferoff = 0;
+ // Loop over the output of apt-key (which really is gnupg), and check the signatures.
+ size_t buffersize = 0;
+ char *buffer = NULL;
while (1)
{
- int c;
-
- // Read a line. Sigh.
- while ((c = getc(pipein)) != EOF && c != '\n')
- {
- if (bufferoff == buffersize)
- {
- char* newBuffer = (char *) realloc(buffer, buffersize *= 2);
- if (newBuffer == NULL)
- {
- free(buffer);
- return "Couldn't allocate a buffer big enough for reading";
- }
- buffer = newBuffer;
- }
- *(buffer+bufferoff) = c;
- bufferoff++;
- }
- if (bufferoff == 0 && c == EOF)
- break;
- *(buffer+bufferoff) = '\0';
- bufferoff = 0;
+ if (getline(&buffer, &buffersize, pipein) == -1)
+ break;
if (Debug == true)
std::clog << "Read: " << buffer << std::endl;
@@ -115,7 +105,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
std::clog << "Got BADSIG! " << std::endl;
BadSigners.push_back(string(buffer+sizeof(GNUPGPREFIX)));
}
-
+
if (strncmp(buffer, GNUPGNOPUBKEY, sizeof(GNUPGNOPUBKEY)-1) == 0)
{
if (Debug == true)
@@ -159,7 +149,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
waitpid(pid, &status, 0);
if (Debug == true)
{
- std::clog << "gpgv exited\n";
+ ioprintf(std::clog, "gpgv exited with status %i\n", WEXITSTATUS(status));
}
if (WEXITSTATUS(status) == 0)
@@ -171,7 +161,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
else if (WEXITSTATUS(status) == 1)
return _("At least one invalid signature was encountered.");
else if (WEXITSTATUS(status) == 111)
- return _("Could not execute 'gpgv' to verify signature (is gpgv installed?)");
+ return _("Could not execute 'apt-key' to verify signature (is gnupg installed?)");
else if (WEXITSTATUS(status) == 112)
{
// acquire system checks for "NODATA" to generate GPG errors (the others are only warnings)
@@ -181,7 +171,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
return errmsg;
}
else
- return _("Unknown error executing gpgv");
+ return _("Unknown error executing apt-key");
}
bool GPGVMethod::Fetch(FetchItem *Itm)
@@ -199,7 +189,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
Res.Filename = Itm->DestFile;
URIStart(Res);
- // Run gpgv on file, extract contents and get the key ID of the signer
+ // Run apt-key on file, extract contents and get the key ID of the signer
string msg = VerifyGetSigners(Path.c_str(), Itm->DestFile.c_str(),
GoodSigners, BadSigners, WorthlessSigners,
NoPubKeySigners);
@@ -251,7 +241,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
if (_config->FindB("Debug::Acquire::gpgv", false))
{
- std::clog << "gpgv succeeded\n";
+ std::clog << "apt-key succeeded\n";
}
return true;
@@ -261,7 +251,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
int main()
{
setlocale(LC_ALL, "");
-
+
GPGVMethod Mth;
return Mth.Run();
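
The hand-rolled getc()/realloc line reader is replaced by POSIX.1-2008 getline(3), which manages the buffer itself and returns -1 on EOF or error. A usage sketch:

// Usage sketch of getline(3): it allocates and grows the line buffer on its
// own, replacing ~20 lines of manual getc()/realloc bookkeeping.
#include <stdio.h>
#include <stdlib.h>

int main()
{
   char *buffer = NULL;     // getline allocates and reallocates this for us
   size_t buffersize = 0;   // current allocation size, maintained by getline
   while (getline(&buffer, &buffersize, stdin) != -1)
      fputs(buffer, stdout);   // the line still contains its trailing '\n'
   free(buffer);            // a single free at the end, even after EOF
   return 0;
}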
diff --git a/methods/gzip.cc b/methods/gzip.cc
index df3f8828f..65519633c 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -33,12 +33,22 @@ const char *Prog;
class GzipMethod : public pkgAcqMethod
{
virtual bool Fetch(FetchItem *Itm);
+ virtual bool Configuration(std::string Message);
public:
GzipMethod() : pkgAcqMethod("1.1",SingleInstance | SendConfig) {};
};
+bool GzipMethod::Configuration(std::string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
// GzipMethod::Fetch - Decompress the passed URI /*{{{*/
// ---------------------------------------------------------------------
@@ -81,7 +91,7 @@ bool GzipMethod::Fetch(FetchItem *Itm)
return false;
// Read data from source, generate checksums and write
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
bool Failed = false;
while (1)
{
@@ -139,5 +149,6 @@ int main(int, char *argv[])
++Prog;
GzipMethod Mth;
+
return Mth.Run();
}
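
Several methods gain the same Configuration() override: parse the configuration as before, then call DropPrivsOrDie() so the rest of the method runs unprivileged. The helper itself lives in apt-pkg's fileutl; a rough sketch of what such a helper typically does (not APT's actual implementation):

// Rough sketch only -- not APT's implementation of DropPrivsOrDie(): give up
// root permanently (groups first, uid last) and abort hard on any failure.
#include <stdlib.h>
#include <unistd.h>
#include <grp.h>
#include <pwd.h>

static void DropPrivsOrDieSketch(const char *user /* e.g. "_apt" */)
{
   if (getuid() != 0)
      return;                              // nothing to drop
   struct passwd const * const pw = getpwnam(user);
   if (pw == NULL)
      exit(EXIT_FAILURE);                  // unknown user: refuse to continue
   if (setgroups(1, &pw->pw_gid) != 0 ||   // shed supplementary groups
       setgid(pw->pw_gid) != 0 ||          // drop the group while still root
       setuid(pw->pw_uid) != 0)            // irreversible uid drop, done last
      exit(EXIT_FAILURE);
}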
diff --git a/methods/http.cc b/methods/http.cc
index ad90c9891..ce697a338 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -64,7 +64,8 @@ const unsigned int CircleBuf::BW_HZ=10;
// CircleBuf::CircleBuf - Circular input buffer /*{{{*/
// ---------------------------------------------------------------------
/* */
-CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0)
+CircleBuf::CircleBuf(unsigned long long Size)
+ : Size(Size), Hash(NULL), TotalWriten(0)
{
Buf = new unsigned char[Size];
Reset();
@@ -80,12 +81,13 @@ void CircleBuf::Reset()
InP = 0;
OutP = 0;
StrPos = 0;
+ TotalWriten = 0;
MaxGet = (unsigned long long)-1;
OutQueue = string();
- if (Hash != 0)
+ if (Hash != NULL)
{
delete Hash;
- Hash = new Hashes;
+ Hash = NULL;
}
}
/*}}}*/
@@ -217,8 +219,10 @@ bool CircleBuf::Write(int Fd)
return false;
}
+
+ TotalWriten += Res;
- if (Hash != 0)
+ if (Hash != NULL)
Hash->Add(Buf + (OutP%Size),Res);
OutP += Res;
@@ -438,7 +442,7 @@ bool HttpServerState::RunData(FileFd * const File)
{
/* Closes encoding is used when the server did not specify a size, the
loss of the connection means we are done */
- if (Encoding == Closes)
+ if (Persistent == false)
In.Limit(-1);
else if (JunkSize != 0)
In.Limit(JunkSize);
@@ -480,16 +484,14 @@ APT_PURE bool HttpServerState::IsOpen() /*{{{*/
return (ServerFd != -1);
}
/*}}}*/
-bool HttpServerState::InitHashes(FileFd &File) /*{{{*/
+bool HttpServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/
{
delete In.Hash;
- In.Hash = new Hashes;
-
- // Set the expected size and read file for the hashes
- File.Truncate(StartPos);
- return In.Hash->AddFD(File, StartPos);
+ In.Hash = new Hashes(ExpectedHashes);
+ return true;
}
/*}}}*/
+
APT_PURE Hashes * HttpServerState::GetHashes() /*{{{*/
{
return In.Hash;
@@ -520,7 +522,7 @@ bool HttpServerState::Die(FileFd &File)
// See if this is because the server finished the data stream
if (In.IsLimit() == false && State != HttpServerState::Header &&
- Encoding != HttpServerState::Closes)
+ Persistent == true)
{
Close();
if (LErrno == 0)
@@ -567,7 +569,7 @@ bool HttpServerState::Flush(FileFd * const File)
return true;
}
- if (In.IsLimit() == true || Encoding == ServerState::Closes)
+ if (In.IsLimit() == true || Persistent == false)
return true;
}
return false;
@@ -653,6 +655,13 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File)
return _error->Errno("write",_("Error writing to output file"));
}
+ if (MaximumSize > 0 && File && File->Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ File->Tell(), MaximumSize);
+ }
+
// Handle commands from APT
if (FD_ISSET(STDIN_FILENO,&rfds))
{
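
CircleBuf now tracks how much data has left the buffer, which is what the MaximumSize checks key on. Reduced to its essentials (names simplified from the real class):

// Sketch: drain a ring buffer to a fd while keeping the running total
// (TotalWriten) that the "more data than expected" check relies on.
#include <unistd.h>

struct MiniCircleBuf
{
   unsigned char *Buf;
   unsigned long long Size, InP, OutP, TotalWriten;

   explicit MiniCircleBuf(unsigned long long S)
      : Buf(new unsigned char[S]), Size(S), InP(0), OutP(0), TotalWriten(0) {}
   ~MiniCircleBuf() { delete [] Buf; }

   bool Write(int Fd)
   {
      while (OutP < InP)
      {
         // largest contiguous chunk up to the physical end of the buffer
         unsigned long long Chunk = Size - (OutP % Size);
         if (Chunk > InP - OutP)
            Chunk = InP - OutP;
         ssize_t const Res = write(Fd, Buf + (OutP % Size), Chunk);
         if (Res <= 0)
            return false;     // the real code retries on EINTR/EAGAIN
         TotalWriten += Res;   // feeds the MaximumSize check in the caller
         OutP += Res;
      }
      return true;
   }
};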
diff --git a/methods/http.h b/methods/http.h
index 1df9fa07d..e73871931 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -63,6 +63,8 @@ class CircleBuf
public:
Hashes *Hash;
+ // total amount of data that got written so far
+ unsigned long long TotalWriten;
// Read data in
bool Read(int Fd);
@@ -81,8 +83,8 @@ class CircleBuf
bool ReadSpace() const {return Size - (InP - OutP) > 0;};
bool WriteSpace() const {return InP - OutP > 0;};
- // Dump everything
void Reset();
+ // Dump everything
void Stats();
CircleBuf(unsigned long long Size);
@@ -109,7 +111,7 @@ struct HttpServerState: public ServerState
virtual bool Open();
virtual bool IsOpen();
virtual bool Close();
- virtual bool InitHashes(FileFd &File);
+ virtual bool InitHashes(HashStringList const &ExpectedHashes);
virtual Hashes * GetHashes();
virtual bool Die(FileFd &File);
virtual bool Flush(FileFd * const File);
diff --git a/methods/http_main.cc b/methods/http_main.cc
index 3b346a514..cd52c42e8 100644
--- a/methods/http_main.cc
+++ b/methods/http_main.cc
@@ -1,5 +1,6 @@
#include <config.h>
-
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/error.h>
#include <signal.h>
#include "http.h"
@@ -13,5 +14,6 @@ int main()
signal(SIGPIPE, SIG_IGN);
HttpMethod Mth;
+
return Mth.Loop();
}
diff --git a/methods/https.cc b/methods/https.cc
index c97367323..d2ddf6fcf 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -37,11 +37,19 @@
/*}}}*/
using namespace std;
+struct APT_HIDDEN CURLUserPointer {
+ HttpsMethod * const https;
+ HttpsMethod::FetchResult * const Res;
+ HttpsMethod::FetchItem const * const Itm;
+ CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res,
+ HttpsMethod::FetchItem const * const Itm) : https(https), Res(Res), Itm(Itm) {}
+};
+
size_t
HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
{
size_t len = size * nmemb;
- HttpsMethod *me = (HttpsMethod *)userp;
+ CURLUserPointer *me = (CURLUserPointer *)userp;
std::string line((char*) buffer, len);
for (--len; len > 0; --len)
if (isspace(line[len]) == 0)
@@ -53,25 +61,52 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
if (line.empty() == true)
{
- if (me->Server->Result != 416 && me->Server->StartPos != 0)
+ if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0)
;
- else if (me->Server->Result == 416 && me->Server->TotalFileSize == me->File->FileSize())
+ else if (me->https->Server->Result == 416)
{
- me->Server->Result = 200;
- me->Server->StartPos = me->Server->TotalFileSize;
- // the actual size is not important for https as curl will deal with it
- // by itself and e.g. doesn't bother us with transport-encoding…
- me->Server->JunkSize = std::numeric_limits<unsigned long long>::max();
+ bool partialHit = false;
+ if (me->Itm->ExpectedHashes.usable() == true)
+ {
+ Hashes resultHashes(me->Itm->ExpectedHashes);
+ FileFd file(me->Itm->DestFile, FileFd::ReadOnly);
+ me->https->Server->TotalFileSize = file.FileSize();
+ me->https->Server->Date = file.ModificationTime();
+ resultHashes.AddFD(file);
+ HashStringList const hashList = resultHashes.GetHashStringList();
+ partialHit = (me->Itm->ExpectedHashes == hashList);
+ }
+ else if (me->https->Server->Result == 416 && me->https->Server->TotalFileSize == me->https->File->FileSize())
+ partialHit = true;
+
+ if (partialHit == true)
+ {
+ me->https->Server->Result = 200;
+ me->https->Server->StartPos = me->https->Server->TotalFileSize;
+ // the actual size is not important for https as curl will deal with it
+ // by itself and e.g. doesn't bother us with transport-encoding…
+ me->https->Server->JunkSize = std::numeric_limits<unsigned long long>::max();
+ }
+ else
+ me->https->Server->StartPos = 0;
}
else
- me->Server->StartPos = 0;
+ me->https->Server->StartPos = 0;
- me->File->Truncate(me->Server->StartPos);
- me->File->Seek(me->Server->StartPos);
+ me->Res->LastModified = me->https->Server->Date;
+ me->Res->Size = me->https->Server->TotalFileSize;
+ me->Res->ResumePoint = me->https->Server->StartPos;
- me->Res.Size = me->Server->TotalFileSize;
+ // we expect valid data, so tell our caller we get the file now
+ if (me->https->Server->Result >= 200 && me->https->Server->Result < 300)
+ {
+ if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
+ me->https->URIStart(*me->Res);
+ if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false)
+ return 0;
+ }
}
- else if (me->Server->HeaderLine(line) == false)
+ else if (me->https->Server->HeaderLine(line) == false)
return 0;
return size*nmemb;
@@ -87,27 +122,48 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
if (me->Server->JunkSize != 0)
return buffer_size;
- if (me->ReceivedData == false)
+ if(me->File->Write(buffer, buffer_size) != true)
+ return 0;
+
+ if(me->Queue->MaximumSize > 0)
{
- me->URIStart(me->Res);
- me->ReceivedData = true;
+ unsigned long long const TotalWritten = me->File->Tell();
+ if (TotalWritten > me->Queue->MaximumSize)
+ {
+ me->SetFailReason("MaximumSizeExceeded");
+ _error->Error("Writing more data than expected (%llu > %llu)",
+ TotalWritten, me->Queue->MaximumSize);
+ return 0;
+ }
}
- if(me->File->Write(buffer, buffer_size) != true)
- return false;
+ if (me->Server->GetHashes()->Add((unsigned char const * const)buffer, buffer_size) == false)
+ return 0;
return buffer_size;
}
// HttpsServerState::HttpsServerState - Constructor /*{{{*/
-HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * /*Owner*/) : ServerState(Srv, NULL)
+HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner), Hash(NULL)
{
TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
Reset();
}
/*}}}*/
+bool HttpsServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/
+{
+ delete Hash;
+ Hash = new Hashes(ExpectedHashes);
+ return true;
+}
+ /*}}}*/
+APT_PURE Hashes * HttpsServerState::GetHashes() /*{{{*/
+{
+ return Hash;
+}
+ /*}}}*/
-void HttpsMethod::SetupProxy() /*{{{*/
+void HttpsMethod::SetupProxy() /*{{{*/
{
URI ServerName = Queue->Uri;
@@ -170,11 +226,10 @@ void HttpsMethod::SetupProxy() /*{{{*/
bool HttpsMethod::Fetch(FetchItem *Itm)
{
struct stat SBuf;
- struct curl_slist *headers=NULL;
+ struct curl_slist *headers=NULL;
char curl_errorstr[CURL_ERROR_SIZE];
URI Uri = Itm->Uri;
string remotehost = Uri.Host;
- ReceivedData = false;
// TODO:
// - http::Pipeline-Depth
@@ -186,10 +241,12 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc"));
+ FetchResult Res;
+ CURLUserPointer userp(this, &Res, Itm);
// callbacks
curl_easy_setopt(curl, CURLOPT_URL, static_cast<string>(Uri).c_str());
curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header);
- curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this);
+ curl_easy_setopt(curl, CURLOPT_WRITEHEADER, &userp);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, this);
// options
@@ -302,13 +359,11 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, timeout);
// set redirect options and default to 10 redirects
- bool const AllowRedirect = _config->FindB("Acquire::https::AllowRedirect",
- _config->FindB("Acquire::http::AllowRedirect",true));
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, AllowRedirect);
curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 10);
// debug
- if(_config->FindB("Debug::Acquire::https", false))
+ if (Debug == true)
curl_easy_setopt(curl, CURLOPT_VERBOSE, true);
// error handling
@@ -345,8 +400,9 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// go for it - if the file exists, append on it
File = new FileFd(Itm->DestFile, FileFd::WriteAny);
- Server = new HttpsServerState(Itm->Uri, this);
- Res = FetchResult();
+ Server = CreateServerState(Itm->Uri);
+ if (Server->InitHashes(Itm->ExpectedHashes) == false)
+ return false;
// keep apt updated
Res.Filename = Itm->DestFile;
@@ -366,7 +422,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
if (success != 0)
{
_error->Error("%s", curl_errorstr);
- unlink(File->Name().c_str());
return false;
}
@@ -389,30 +444,29 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
char err[255];
snprintf(err, sizeof(err) - 1, "HttpError%i", Server->Result);
SetFailReason(err);
- _error->Error("%s", err);
+ _error->Error("%i %s", Server->Result, Server->Code);
// unlink, no need keep 401/404 page content in partial/
unlink(File->Name().c_str());
return false;
}
- struct stat resultStat;
- if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
- {
- _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
- return false;
- }
- Res.Size = resultStat.st_size;
-
// invalid range-request
if (Server->Result == 416)
{
unlink(File->Name().c_str());
- Res.Size = 0;
delete File;
Redirect(Itm->Uri);
return true;
}
+ struct stat resultStat;
+ if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
+ {
+ _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
+ return false;
+ }
+ Res.Size = resultStat.st_size;
+
// Timestamp
curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
if (Res.LastModified != -1)
@@ -427,20 +481,35 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
Res.LastModified = resultStat.st_mtime;
// take hashes
- Hashes Hash;
- FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd);
- Res.TakeHashes(Hash);
+ Res.TakeHashes(*(Server->GetHashes()));
// keep apt updated
URIDone(Res);
// cleanup
- Res.Size = 0;
delete File;
return true;
}
+ /*}}}*/
+// HttpsMethod::Configuration - Handle a configuration message /*{{{*/
+bool HttpsMethod::Configuration(string Message)
+{
+ if (ServerMethod::Configuration(Message) == false)
+ return false;
+
+ AllowRedirect = _config->FindB("Acquire::https::AllowRedirect",
+ _config->FindB("Acquire::http::AllowRedirect", true));
+ Debug = _config->FindB("Debug::Acquire::https",false);
+
+ return true;
+}
+ /*}}}*/
+ServerState * HttpsMethod::CreateServerState(URI uri) /*{{{*/
+{
+ return new HttpsServerState(uri, this);
+}
+ /*}}}*/
int main()
{
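
Since parse_header() now needs the method, the FetchResult and the FetchItem at once, the commit introduces CURLUserPointer instead of handing libcurl a bare `this`. A standalone sketch of that pattern (CURLOPT_HEADERDATA is the modern spelling of CURLOPT_WRITEHEADER):

// Sketch: bundle all the state a curl header callback needs into one struct
// and pass a pointer to it as the callback's userdata.
#include <curl/curl.h>

struct HeaderContext
{
   long seen;            // in APT: pointers to the method, result and item
};

static size_t header_cb(char *buffer, size_t size, size_t nitems, void *userp)
{
   HeaderContext *ctx = static_cast<HeaderContext *>(userp);
   ++ctx->seen;          // parse buffer (size * nitems bytes, no NUL) here
   (void)buffer;
   return size * nitems; // returning anything else aborts the transfer
}

int main()
{
   CURL *curl = curl_easy_init();
   if (curl == NULL)
      return 1;
   HeaderContext ctx = { 0 };
   curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/");
   curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, header_cb);
   curl_easy_setopt(curl, CURLOPT_HEADERDATA, &ctx);
   CURLcode const rc = curl_easy_perform(curl);
   curl_easy_cleanup(curl);
   return rc == CURLE_OK ? 0 : 1;
}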
diff --git a/methods/https.h b/methods/https.h
index 411b71440..57fc292ee 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -29,6 +29,8 @@ class FileFd;
class HttpsServerState : public ServerState
{
+ Hashes * Hash;
+
protected:
virtual bool ReadHeaderLines(std::string &/*Data*/) { return false; }
virtual bool LoadNextResponse(bool const /*ToFile*/, FileFd * const /*File*/) { return false; }
@@ -42,8 +44,8 @@ class HttpsServerState : public ServerState
virtual bool Open() { return false; }
virtual bool IsOpen() { return false; }
virtual bool Close() { return false; }
- virtual bool InitHashes(FileFd &/*File*/) { return false; }
- virtual Hashes * GetHashes() { return NULL; }
+ virtual bool InitHashes(HashStringList const &ExpectedHashes);
+ virtual Hashes * GetHashes();
virtual bool Die(FileFd &/*File*/) { return false; }
virtual bool Flush(FileFd * const /*File*/) { return false; }
virtual bool Go(bool /*ToFile*/, FileFd * const /*File*/) { return false; }
@@ -52,28 +54,35 @@ class HttpsServerState : public ServerState
virtual ~HttpsServerState() {Close();};
};
-class HttpsMethod : public pkgAcqMethod
+class HttpsMethod : public ServerMethod
{
// minimum speed in bytes/sec that triggers download timeout handling
static const int DL_MIN_SPEED = 10;
virtual bool Fetch(FetchItem *);
+
static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp);
static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp);
- static int progress_callback(void *clientp, double dltotal, double dlnow,
- double ultotal, double ulnow);
+ static int progress_callback(void *clientp, double dltotal, double dlnow,
+ double ultotal, double ulnow);
void SetupProxy();
CURL *curl;
- FetchResult Res;
- HttpsServerState *Server;
- bool ReceivedData;
+ ServerState *Server;
+
+ // Used by ServerMethod, unused by https
+ virtual void SendReq(FetchItem *) { exit(42); }
+ virtual void RotateDNS() { exit(42); }
public:
FileFd *File;
-
- HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), File(NULL)
+
+ virtual bool Configuration(std::string Message);
+ virtual ServerState * CreateServerState(URI uri);
+ using pkgAcqMethod::FetchResult;
+ using pkgAcqMethod::FetchItem;
+
+ HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL)
{
- File = 0;
curl = curl_easy_init();
};
diff --git a/methods/rred.cc b/methods/rred.cc
index cabb3c456..554ac99b4 100644
--- a/methods/rred.cc
+++ b/methods/rred.cc
@@ -150,11 +150,11 @@ class FileChanges {
std::list<struct Change>::iterator where;
size_t pos; // line number is as far left of iterator as possible
- bool pos_is_okay(void)
+ bool pos_is_okay(void) const
{
#ifdef POSDEBUG
size_t cpos = 0;
- std::list<struct Change>::iterator x;
+ std::list<struct Change>::const_iterator x;
for (x = changes.begin(); x != where; ++x) {
assert(x != changes.end());
cpos += x->offset + x->add_cnt;
@@ -581,7 +581,7 @@ class RredMethod : public pkgAcqMethod {
FILE *inp = fopen(Path.c_str(), "r");
FILE *out = fopen(Itm->DestFile.c_str(), "w");
- Hashes hash;
+ Hashes hash(Itm->ExpectedHashes);
patch.apply_against_file(out, inp, &hash);
diff --git a/methods/rsh.cc b/methods/rsh.cc
index 0e949160b..52349c61c 100644
--- a/methods/rsh.cc
+++ b/methods/rsh.cc
@@ -477,7 +477,7 @@ bool RSHMethod::Fetch(FetchItem *Itm)
}
// Open the file
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
{
FileFd Fd(Itm->DestFile,FileFd::WriteAny);
if (_error->PendingError() == true)
diff --git a/methods/server.cc b/methods/server.cc
index 6c05700a5..f61a6fedb 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -245,10 +245,21 @@ ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOu
Reset();
}
/*}}}*/
+bool ServerState::AddPartialFileToHashes(FileFd &File) /*{{{*/
+{
+ File.Truncate(StartPos);
+ return GetHashes()->AddFD(File, StartPos);
+}
+ /*}}}*/
bool ServerMethod::Configuration(string Message) /*{{{*/
{
- return pkgAcqMethod::Configuration(Message);
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
}
/*}}}*/
@@ -268,7 +279,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
Res.LastModified = Queue->LastModified;
return IMS_HIT;
}
-
+
/* Redirect
*
* Note that it is only OK for us to treat all redirection the same
@@ -313,7 +324,20 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
struct stat SBuf;
if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
{
- if ((unsigned long long)SBuf.st_size == Server->TotalFileSize)
+ bool partialHit = false;
+ if (Queue->ExpectedHashes.usable() == true)
+ {
+ Hashes resultHashes(Queue->ExpectedHashes);
+ FileFd file(Queue->DestFile, FileFd::ReadOnly);
+ Server->TotalFileSize = file.FileSize();
+ Server->Date = file.ModificationTime();
+ resultHashes.AddFD(file);
+ HashStringList const hashList = resultHashes.GetHashStringList();
+ partialHit = (Queue->ExpectedHashes == hashList);
+ }
+ else if ((unsigned long long)SBuf.st_size == Server->TotalFileSize)
+ partialHit = true;
+ if (partialHit == true)
{
// the file is completely downloaded, but was not moved
if (Server->HaveContent == true)
@@ -338,10 +362,10 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
failure */
if (Server->Result < 200 || Server->Result >= 300)
{
- char err[255];
- snprintf(err,sizeof(err)-1,"HttpError%i",Server->Result);
+ std::string err;
+ strprintf(err, "HttpError%u", Server->Result);
SetFailReason(err);
- _error->Error("%u %s",Server->Result,Server->Code);
+ _error->Error("%u %s", Server->Result, Server->Code);
if (Server->HaveContent == true)
return ERROR_WITH_CONTENT_PAGE;
return ERROR_UNRECOVERABLE;
@@ -362,7 +386,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
FailFd = File->Fd();
FailTime = Server->Date;
- if (Server->InitHashes(*File) == false)
+ if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false)
{
_error->Errno("read",_("Problem hashing file"));
return ERROR_NOT_FROM_SERVER;
@@ -407,9 +431,16 @@ bool ServerMethod::Fetch(FetchItem *)
for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth;
I = I->Next, Depth++)
{
- // If pipelining is disabled, we only queue 1 request
- if (Server->Pipeline == false && Depth >= 0)
- break;
+ if (Depth >= 0)
+ {
+ // If pipelining is disabled, we only queue 1 request
+ if (Server->Pipeline == false)
+ break;
+ // if we have no hashes, do at most one such request
+ // as we can't fixup pipelining misbehaviors otherwise
+ else if (I->ExpectedHashes.usable() == false)
+ break;
+ }
// Make sure we stick with the same server
if (Server->Comp(I->Uri) == false)
@@ -539,6 +570,13 @@ int ServerMethod::Loop()
// Run the data
bool Result = true;
+
+ // ensure we don't fetch too much
+ // we could do "Server->MaximumSize = Queue->MaximumSize" here
+ // but that would break the clever pipeline messup detection
+ // so instead we use the size of the biggest item in the queue
+ Server->MaximumSize = FindMaximumObjectSizeInQueue();
+
if (Server->HaveContent)
Result = Server->RunData(File);
@@ -561,7 +599,38 @@ int ServerMethod::Loop()
// Send status to APT
if (Result == true)
{
- Res.TakeHashes(*Server->GetHashes());
+ Hashes * const resultHashes = Server->GetHashes();
+ HashStringList const hashList = resultHashes->GetHashStringList();
+ if (PipelineDepth != 0 && Queue->ExpectedHashes.usable() == true && Queue->ExpectedHashes != hashList)
+ {
+ // we did not get the expected hash… mhhh:
+ // could it be that server/proxy messed up pipelining?
+ FetchItem * BeforeI = Queue;
+ for (FetchItem *I = Queue->Next; I != 0 && I != QueueBack; I = I->Next)
+ {
+ if (I->ExpectedHashes.usable() == true && I->ExpectedHashes == hashList)
+ {
+ // yes, he did! Disable pipelining and rewrite queue
+ if (Server->Pipeline == true)
+ {
+ // FIXME: fake a warning message as we have no proper way of communicating here
+ std::string out;
+ strprintf(out, _("Automatically disabled %s due to incorrect response from server/proxy. (man 5 apt.conf)"), "Acquire::http::PipelineDepth");
+ std::cerr << "W: " << out << std::endl;
+ Server->Pipeline = false;
+ // we keep the PipelineDepth value so that the rest of the queue can be fixed up as well
+ }
+ Rename(Res.Filename, I->DestFile);
+ Res.Filename = I->DestFile;
+ BeforeI->Next = I->Next;
+ I->Next = Queue;
+ Queue = I;
+ break;
+ }
+ BeforeI = I;
+ }
+ }
+ Res.TakeHashes(*resultHashes);
URIDone(Res);
}
else
@@ -581,7 +650,10 @@ int ServerMethod::Loop()
QueueBack = Queue;
}
else
+ {
+ Server->Close();
Fail(true);
+ }
}
break;
}
@@ -676,3 +748,13 @@ int ServerMethod::Loop()
return 0;
}
/*}}}*/
+ /*{{{*/
+unsigned long long
+ServerMethod::FindMaximumObjectSizeInQueue() const
+{
+ unsigned long long MaxSizeInQueue = 0;
+ for (FetchItem *I = Queue; I != 0 && I != QueueBack; I = I->Next)
+ MaxSizeInQueue = std::max(MaxSizeInQueue, I->MaximumSize);
+ return MaxSizeInQueue;
+}
+ /*}}}*/
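
The most intricate addition is the recovery from pipelining-confused servers and proxies in Loop(): if the downloaded data hashes to another queued item's expected hash, the response really answers that item's request, so the file is renamed over and the queue rewritten. The detection reduces to roughly this (simplified types, not APT's FetchItem list):

// Sketch: find the queued item whose expected hash matches what was
// actually downloaded; end() means no pipelining mix-up was detected.
#include <list>
#include <string>

struct Item
{
   std::string expectedHash;  // empty if the item has no usable hashes
   std::string destFile;
};

static std::list<Item>::iterator
FindRealOwner(std::list<Item> &queue, std::string const &gotHash)
{
   for (std::list<Item>::iterator I = queue.begin(); I != queue.end(); ++I)
      if (I->expectedHash.empty() == false && I->expectedHash == gotHash)
         return I;  // caller renames the data to I->destFile, moves I to the
                    // front of the queue and turns pipelining off
   return queue.end();
}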
diff --git a/methods/server.h b/methods/server.h
index 8c14282b6..8d7d33ee6 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -57,6 +57,8 @@ struct ServerState
URI Proxy;
unsigned long TimeOut;
+ unsigned long long MaximumSize;
+
protected:
ServerMethod *Owner;
@@ -77,11 +79,12 @@ struct ServerState
};
/** \brief Get the headers before the data */
RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri);
+ bool AddPartialFileToHashes(FileFd &File);
bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; TotalFileSize = 0; JunkSize = 0;
StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
- State = Header; Persistent = false; Pipeline = true;};
+ State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;};
virtual bool WriteResponse(std::string const &Data) = 0;
/** \brief Transfer the data from the socket */
@@ -90,7 +93,7 @@ struct ServerState
virtual bool Open() = 0;
virtual bool IsOpen() = 0;
virtual bool Close() = 0;
- virtual bool InitHashes(FileFd &File) = 0;
+ virtual bool InitHashes(HashStringList const &ExpectedHashes) = 0;
virtual Hashes * GetHashes() = 0;
virtual bool Die(FileFd &File) = 0;
virtual bool Flush(FileFd * const File) = 0;
@@ -112,6 +115,10 @@ class ServerMethod : public pkgAcqMethod
unsigned long PipelineDepth;
bool AllowRedirect;
+ // Find the biggest item in the fetch queue for checking the maximum
+ // size
+ unsigned long long FindMaximumObjectSizeInQueue() const APT_PURE;
+
public:
bool Debug;
@@ -148,7 +155,7 @@ class ServerMethod : public pkgAcqMethod
virtual ServerState * CreateServerState(URI uri) = 0;
virtual void RotateDNS() = 0;
- ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(0), AllowRedirect(false), Debug(false) {};
+ ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(10), AllowRedirect(false), Debug(false) {};
virtual ~ServerMethod() {};
};