author     Michael Vogt <mvo@debian.org>  2015-08-18 11:54:05 +0200
committer  Michael Vogt <mvo@debian.org>  2015-08-18 11:54:05 +0200
commit     21248c0f00ee71412dbadc6ebf84011cf974346d (patch)
tree       7dc1f5904399482d2128765b5b86d57a4ac5b3e1 /methods
parent     e5f34ad3b043abf033c1626eb8449b75955d6760 (diff)
parent     4fc6b7570c3e97b65c118b58cdf6729fa94c9b03 (diff)
Merge branch 'debian/experimental' into feature/srv-records
Conflicts:
	cmdline/apt-helper.cc
	cmdline/makefile
Diffstat (limited to 'methods')
-rw-r--r--  methods/cdrom.cc     |   2
-rw-r--r--  methods/copy.cc      |  38
-rw-r--r--  methods/file.cc      |  45
-rw-r--r--  methods/ftp.cc       |  43
-rw-r--r--  methods/ftp.h        |   3
-rw-r--r--  methods/gpgv.cc      |  58
-rw-r--r--  methods/gzip.cc      |  13
-rw-r--r--  methods/http.cc      | 103
-rw-r--r--  methods/http.h       |   9
-rw-r--r--  methods/http_main.cc |   4
-rw-r--r--  methods/https.cc     | 178
-rw-r--r--  methods/https.h      |  30
-rw-r--r--  methods/rred.cc      |   6
-rw-r--r--  methods/rsh.cc       |  15
-rw-r--r--  methods/server.cc    | 161
-rw-r--r--  methods/server.h     |  27
16 files changed, 466 insertions(+), 269 deletions(-)
diff --git a/methods/cdrom.cc b/methods/cdrom.cc
index 74e2ecc6b..10cb29f66 100644
--- a/methods/cdrom.cc
+++ b/methods/cdrom.cc
@@ -266,7 +266,7 @@ bool CDROMMethod::Fetch(FetchItem *Itm)
Res.LastModified = Buf.st_mtime;
Res.Size = Buf.st_size;
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
FileFd Fd(Res.Filename, FileFd::ReadOnly);
Hash.AddFD(Fd);
Res.TakeHashes(Hash);
diff --git a/methods/copy.cc b/methods/copy.cc
index d59f032ff..a8e289df5 100644
--- a/methods/copy.cc
+++ b/methods/copy.cc
@@ -16,6 +16,7 @@
#include <apt-pkg/acquire-method.h>
#include <apt-pkg/error.h>
#include <apt-pkg/hashes.h>
+#include <apt-pkg/configuration.h>
#include <string>
#include <sys/stat.h>
@@ -27,19 +28,32 @@
class CopyMethod : public pkgAcqMethod
{
virtual bool Fetch(FetchItem *Itm);
+ void CalculateHashes(FetchItem const * const Itm, FetchResult &Res);
public:
- CopyMethod() : pkgAcqMethod("1.0",SingleInstance) {};
+ CopyMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig) {};
};
+void CopyMethod::CalculateHashes(FetchItem const * const Itm, FetchResult &Res)
+{
+ Hashes Hash(Itm->ExpectedHashes);
+ FileFd::CompressMode CompressMode = FileFd::None;
+ if (_config->FindB("Acquire::GzipIndexes", false) == true)
+ CompressMode = FileFd::Extension;
+
+ FileFd Fd(Res.Filename, FileFd::ReadOnly, CompressMode);
+ Hash.AddFD(Fd);
+ Res.TakeHashes(Hash);
+}
+
// CopyMethod::Fetch - Fetch a file /*{{{*/
// ---------------------------------------------------------------------
/* */
bool CopyMethod::Fetch(FetchItem *Itm)
{
- URI Get = Itm->Uri;
- std::string File = Get.Path;
+ // this ensures that relative paths work in copy
+ std::string File = Itm->Uri.substr(Itm->Uri.find(':')+1);
// Stat the file and send a start message
struct stat Buf;
@@ -51,9 +65,17 @@ bool CopyMethod::Fetch(FetchItem *Itm)
Res.Size = Buf.st_size;
Res.Filename = Itm->DestFile;
Res.LastModified = Buf.st_mtime;
- Res.IMSHit = false;
+ Res.IMSHit = false;
URIStart(Res);
-
+
+ // just calc the hashes if the source and destination are identical
+ if (File == Itm->DestFile)
+ {
+ CalculateHashes(Itm, Res);
+ URIDone(Res);
+ return true;
+ }
+
// See if the file exists
FileFd From(File,FileFd::ReadOnly);
FileFd To(Itm->DestFile,FileFd::WriteAtomic);
@@ -82,10 +104,7 @@ bool CopyMethod::Fetch(FetchItem *Itm)
if (utimes(Res.Filename.c_str(), times) != 0)
return _error->Errno("utimes",_("Failed to set modification time"));
- Hashes Hash;
- FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd);
- Res.TakeHashes(Hash);
+ CalculateHashes(Itm, Res);
URIDone(Res);
return true;
@@ -97,5 +116,6 @@ int main()
setlocale(LC_ALL, "");
CopyMethod Mth;
+
return Mth.Run();
}
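
The switch from URI parsing to a plain scheme strip is what makes relative paths work in copy: everything after the first ':' is kept as-is, where URI normalisation would mangle a relative path. A minimal stand-alone sketch, with plain std::string in place of APT's URI class:

#include <iostream>
#include <string>

// everything after the first ':' is the (possibly relative) path
static std::string PathFromCopyUri(std::string const &Uri)
{
   return Uri.substr(Uri.find(':') + 1);
}

int main()
{
   std::cout << PathFromCopyUri("copy:./rel/Release") << '\n';        // ./rel/Release
   std::cout << PathFromCopyUri("copy:/var/lib/apt/Release") << '\n'; // /var/lib/apt/Release
}
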
diff --git a/methods/file.cc b/methods/file.cc
index 12db62203..043ab04b8 100644
--- a/methods/file.cc
+++ b/methods/file.cc
@@ -16,6 +16,7 @@
#include <config.h>
#include <apt-pkg/acquire-method.h>
+#include <apt-pkg/aptconfiguration.h>
#include <apt-pkg/error.h>
#include <apt-pkg/hashes.h>
#include <apt-pkg/fileutl.h>
@@ -33,7 +34,7 @@ class FileMethod : public pkgAcqMethod
public:
- FileMethod() : pkgAcqMethod("1.0",SingleInstance | LocalOnly) {};
+ FileMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig | LocalOnly) {};
};
// FileMethod::Fetch - Fetch a file /*{{{*/
@@ -58,31 +59,35 @@ bool FileMethod::Fetch(FetchItem *Itm)
if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0)
Res.IMSHit = true;
}
-
- // See if we can compute a file without a .gz exentsion
- std::string::size_type Pos = File.rfind(".gz");
- if (Pos + 3 == File.length())
+
+ // See if the uncompressed file exists and reuse it
+ std::vector<std::string> extensions = APT::Configuration::getCompressorExtensions();
+ for (std::vector<std::string>::const_iterator ext = extensions.begin(); ext != extensions.end(); ++ext)
{
- File = std::string(File,0,Pos);
- if (stat(File.c_str(),&Buf) == 0)
+ if (APT::String::Endswith(File, *ext) == true)
{
- FetchResult AltRes;
- AltRes.Size = Buf.st_size;
- AltRes.Filename = File;
- AltRes.LastModified = Buf.st_mtime;
- AltRes.IMSHit = false;
- if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0)
- AltRes.IMSHit = true;
-
- URIDone(Res,&AltRes);
- return true;
- }
+ std::string const unfile = File.substr(0, File.length() - ext->length() - 1);
+ if (stat(unfile.c_str(),&Buf) == 0)
+ {
+ FetchResult AltRes;
+ AltRes.Size = Buf.st_size;
+ AltRes.Filename = unfile;
+ AltRes.LastModified = Buf.st_mtime;
+ AltRes.IMSHit = false;
+ if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0)
+ AltRes.IMSHit = true;
+
+ URIDone(Res,&AltRes);
+ return true;
+ }
+ // no break here as we could have situations similar to '.gz' vs '.tar.gz' here
+ }
}
-
+
if (Res.Filename.empty() == true)
return _error->Error(_("File not found"));
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
FileFd Fd(Res.Filename, FileFd::ReadOnly);
Hash.AddFD(Fd);
Res.TakeHashes(Hash);
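
The hard-coded ".gz" special case becomes a scan over every configured compressor extension, and the loop deliberately does not break after a match since more than one extension can apply ('.gz' vs '.tar.gz'). A sketch with a fixed extension list standing in for APT::Configuration::getCompressorExtensions():

#include <iostream>
#include <string>
#include <vector>

static bool Endswith(std::string const &s, std::string const &ext)
{
   return s.size() >= ext.size() &&
          s.compare(s.size() - ext.size(), ext.size(), ext) == 0;
}

int main()
{
   // stand-in for the configured compressor extensions
   std::vector<std::string> const extensions = { ".gz", ".bz2", ".xz", ".lzma" };
   std::string const File = "Packages.gz";
   for (std::vector<std::string>::const_iterator ext = extensions.begin();
        ext != extensions.end(); ++ext)
      if (Endswith(File, *ext))
         // candidate name of the uncompressed variant; keep scanning, as
         // another (longer) extension might match too
         std::cout << File.substr(0, File.size() - ext->size()) << '\n';
}
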
diff --git a/methods/ftp.cc b/methods/ftp.cc
index 66787a7be..92d8573f1 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -75,9 +75,10 @@ time_t FtpMethod::FailTime = 0;
// FTPConn::FTPConn - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
+FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
DataListenFd(-1), ServerName(Srv),
- ForceExtended(false), TryPassive(true)
+ ForceExtended(false), TryPassive(true),
+ PeerAddrLen(0), ServerAddrLen(0)
{
Debug = _config->FindB("Debug::Acquire::Ftp",false);
PasvAddr = 0;
@@ -258,19 +259,21 @@ bool FTPConn::Login()
{
if (Opts->Value.empty() == true)
continue;
-
+
// Substitute the variables into the command
- char SitePort[20];
- if (ServerName.Port != 0)
- sprintf(SitePort,"%u",ServerName.Port);
- else
- strcpy(SitePort,"21");
string Tmp = Opts->Value;
Tmp = SubstVar(Tmp,"$(PROXY_USER)",Proxy.User);
Tmp = SubstVar(Tmp,"$(PROXY_PASS)",Proxy.Password);
Tmp = SubstVar(Tmp,"$(SITE_USER)",User);
Tmp = SubstVar(Tmp,"$(SITE_PASS)",Pass);
- Tmp = SubstVar(Tmp,"$(SITE_PORT)",SitePort);
+ if (ServerName.Port != 0)
+ {
+ std::string SitePort;
+ strprintf(SitePort, "%u", ServerName.Port);
+ Tmp = SubstVar(Tmp,"$(SITE_PORT)", SitePort);
+ }
+ else
+ Tmp = SubstVar(Tmp,"$(SITE_PORT)", "21");
Tmp = SubstVar(Tmp,"$(SITE)",ServerName.Host);
// Send the command
@@ -848,7 +851,8 @@ bool FTPConn::Finalize()
/* This opens a data connection, sends REST and RETR and then
transfers the file over. */
bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &Hash,bool &Missing)
+ Hashes &Hash,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner)
{
Missing = false;
if (CreateDataFd() == false)
@@ -921,7 +925,14 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
{
Close();
return false;
- }
+ }
+
+ if (MaximumSize > 0 && To.Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ To.Tell(), MaximumSize);
+ }
}
// All done
@@ -979,6 +990,10 @@ bool FtpMethod::Configuration(string Message)
return false;
TimeOut = _config->FindI("Acquire::Ftp::Timeout",TimeOut);
+
+ // no more active ftp, sorry
+ DropPrivsOrDie();
+
return true;
}
/*}}}*/
@@ -1049,7 +1064,7 @@ bool FtpMethod::Fetch(FetchItem *Itm)
}
// Open the file
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
{
FileFd Fd(Itm->DestFile,FileFd::WriteAny);
if (_error->PendingError() == true)
@@ -1062,7 +1077,7 @@ bool FtpMethod::Fetch(FetchItem *Itm)
FailFd = Fd.Fd();
bool Missing;
- if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing) == false)
+ if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize,this) == false)
{
Fd.Close();
@@ -1131,6 +1146,6 @@ int main(int, const char *argv[])
}
FtpMethod Mth;
-
+
return Mth.Run();
}
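
The same MaximumSize guard now appears in ftp, http and https: the transfer is aborted as soon as more bytes were written than the acquire system said to expect, with "MaximumSizeExceeded" as the failure reason. The check reduced to a stand-alone helper, with a plain byte counter standing in for FileFd::Tell():

#include <cstdio>

// returns false once more data was written than expected; 0 means unlimited
static bool CheckMaximumSize(unsigned long long Written, unsigned long long MaximumSize)
{
   if (MaximumSize > 0 && Written > MaximumSize)
   {
      std::fprintf(stderr, "Writing more data than expected (%llu > %llu)\n",
                   Written, MaximumSize);
      return false; // caller sets the fail reason "MaximumSizeExceeded"
   }
   return true;
}

int main()
{
   return CheckMaximumSize(4096, 1024) ? 0 : 1; // exceeds the limit: exits 1
}
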
diff --git a/methods/ftp.h b/methods/ftp.h
index dd92f0086..2efd28ec6 100644
--- a/methods/ftp.h
+++ b/methods/ftp.h
@@ -62,7 +62,8 @@ class FTPConn
bool Size(const char *Path,unsigned long long &Size);
bool ModTime(const char *Path, time_t &Time);
bool Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &MD5,bool &Missing);
+ Hashes &MD5,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner);
FTPConn(URI Srv);
~FTPConn();
diff --git a/methods/gpgv.cc b/methods/gpgv.cc
index ae521a2ed..41f138be6 100644
--- a/methods/gpgv.cc
+++ b/methods/gpgv.cc
@@ -5,6 +5,7 @@
#include <apt-pkg/error.h>
#include <apt-pkg/gpgv.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/fileutl.h>
#include <ctype.h>
#include <errno.h>
@@ -43,12 +44,22 @@ class GPGVMethod : public pkgAcqMethod
protected:
virtual bool Fetch(FetchItem *Itm);
-
+ virtual bool Configuration(string Message);
public:
GPGVMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig) {};
};
+bool GPGVMethod::Configuration(string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
+
string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
vector<string> &GoodSigners,
vector<string> &BadSigners,
@@ -74,34 +85,13 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
FILE *pipein = fdopen(fd[0], "r");
- // Loop over the output of gpgv, and check the signatures.
- size_t buffersize = 64;
- char *buffer = (char *) malloc(buffersize);
- size_t bufferoff = 0;
+ // Loop over the output of apt-key (which really is gnupg), and check the signatures.
+ size_t buffersize = 0;
+ char *buffer = NULL;
while (1)
{
- int c;
-
- // Read a line. Sigh.
- while ((c = getc(pipein)) != EOF && c != '\n')
- {
- if (bufferoff == buffersize)
- {
- char* newBuffer = (char *) realloc(buffer, buffersize *= 2);
- if (newBuffer == NULL)
- {
- free(buffer);
- return "Couldn't allocate a buffer big enough for reading";
- }
- buffer = newBuffer;
- }
- *(buffer+bufferoff) = c;
- bufferoff++;
- }
- if (bufferoff == 0 && c == EOF)
- break;
- *(buffer+bufferoff) = '\0';
- bufferoff = 0;
+ if (getline(&buffer, &buffersize, pipein) == -1)
+ break;
if (Debug == true)
std::clog << "Read: " << buffer << std::endl;
@@ -115,7 +105,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
std::clog << "Got BADSIG! " << std::endl;
BadSigners.push_back(string(buffer+sizeof(GNUPGPREFIX)));
}
-
+
if (strncmp(buffer, GNUPGNOPUBKEY, sizeof(GNUPGNOPUBKEY)-1) == 0)
{
if (Debug == true)
@@ -159,7 +149,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
waitpid(pid, &status, 0);
if (Debug == true)
{
- std::clog << "gpgv exited\n";
+ ioprintf(std::clog, "gpgv exited with status %i\n", WEXITSTATUS(status));
}
if (WEXITSTATUS(status) == 0)
@@ -171,7 +161,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
else if (WEXITSTATUS(status) == 1)
return _("At least one invalid signature was encountered.");
else if (WEXITSTATUS(status) == 111)
- return _("Could not execute 'gpgv' to verify signature (is gpgv installed?)");
+ return _("Could not execute 'apt-key' to verify signature (is gnupg installed?)");
else if (WEXITSTATUS(status) == 112)
{
// acquire system checks for "NODATA" to generate GPG errors (the others are only warnings)
@@ -181,7 +171,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
return errmsg;
}
else
- return _("Unknown error executing gpgv");
+ return _("Unknown error executing apt-key");
}
bool GPGVMethod::Fetch(FetchItem *Itm)
@@ -199,7 +189,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
Res.Filename = Itm->DestFile;
URIStart(Res);
- // Run gpgv on file, extract contents and get the key ID of the signer
+ // Run apt-key on file, extract contents and get the key ID of the signer
string msg = VerifyGetSigners(Path.c_str(), Itm->DestFile.c_str(),
GoodSigners, BadSigners, WorthlessSigners,
NoPubKeySigners);
@@ -251,7 +241,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
if (_config->FindB("Debug::Acquire::gpgv", false))
{
- std::clog << "gpgv succeeded\n";
+ std::clog << "apt-key succeeded\n";
}
return true;
@@ -261,7 +251,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
int main()
{
setlocale(LC_ALL, "");
-
+
GPGVMethod Mth;
return Mth.Run();
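
Replacing the manual getc()/realloc() line reader with POSIX getline(3) removes both the growth logic and its allocation-failure handling: getline grows the buffer itself and returns -1 on EOF. The same idiom stand-alone (reading stdin instead of the gpgv pipe; POSIX.1-2008 systems):

#include <cstdio>
#include <cstdlib>

int main()
{
   size_t buffersize = 0;
   char *buffer = NULL;
   while (getline(&buffer, &buffersize, stdin) != -1)
      std::printf("Read: %s", buffer);
   std::free(buffer); // getline allocated it; one free at the end suffices
   return 0;
}
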
diff --git a/methods/gzip.cc b/methods/gzip.cc
index df3f8828f..65519633c 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -33,12 +33,22 @@ const char *Prog;
class GzipMethod : public pkgAcqMethod
{
virtual bool Fetch(FetchItem *Itm);
+ virtual bool Configuration(std::string Message);
public:
GzipMethod() : pkgAcqMethod("1.1",SingleInstance | SendConfig) {};
};
+bool GzipMethod::Configuration(std::string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
// GzipMethod::Fetch - Decompress the passed URI /*{{{*/
// ---------------------------------------------------------------------
@@ -81,7 +91,7 @@ bool GzipMethod::Fetch(FetchItem *Itm)
return false;
// Read data from source, generate checksums and write
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
bool Failed = false;
while (1)
{
@@ -139,5 +149,6 @@ int main(int, char *argv[])
++Prog;
GzipMethod Mth;
+
return Mth.Run();
}
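
Several methods (ftp, gpgv, gzip, and the ServerMethod base below) now call DropPrivsOrDie() once configuration is received, so the actual network and verification work no longer runs as root. An illustrative sketch of what such a helper does, assuming the unprivileged account is named "_apt" (the helper's real implementation lives in libapt, not in this diff):

#include <cstdio>
#include <cstdlib>
#include <grp.h>
#include <pwd.h>
#include <unistd.h>

static void DropPrivsOrDie(const char *user)
{
   if (getuid() != 0)
      return; // nothing to drop
   struct passwd const *pw = getpwnam(user);
   if (pw == NULL ||
       setgroups(1, &pw->pw_gid) != 0 ||
       setgid(pw->pw_gid) != 0 ||
       setuid(pw->pw_uid) != 0)
   {
      std::fprintf(stderr, "Failed to drop privileges to %s\n", user);
      std::exit(100);
   }
}

int main()
{
   DropPrivsOrDie("_apt");
   return 0;
}
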
diff --git a/methods/http.cc b/methods/http.cc
index c734d3799..ce697a338 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -34,6 +34,7 @@
#include <apt-pkg/hashes.h>
#include <apt-pkg/netrc.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/proxy.h>
#include <stddef.h>
#include <stdlib.h>
@@ -63,7 +64,8 @@ const unsigned int CircleBuf::BW_HZ=10;
// CircleBuf::CircleBuf - Circular input buffer /*{{{*/
// ---------------------------------------------------------------------
/* */
-CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0)
+CircleBuf::CircleBuf(unsigned long long Size)
+ : Size(Size), Hash(NULL), TotalWriten(0)
{
Buf = new unsigned char[Size];
Reset();
@@ -79,12 +81,13 @@ void CircleBuf::Reset()
InP = 0;
OutP = 0;
StrPos = 0;
+ TotalWriten = 0;
MaxGet = (unsigned long long)-1;
OutQueue = string();
- if (Hash != 0)
+ if (Hash != NULL)
{
delete Hash;
- Hash = new Hashes;
+ Hash = NULL;
}
}
/*}}}*/
@@ -216,8 +219,10 @@ bool CircleBuf::Write(int Fd)
return false;
}
+
+ TotalWriten += Res;
- if (Hash != 0)
+ if (Hash != NULL)
Hash->Add(Buf + (OutP%Size),Res);
OutP += Res;
@@ -304,6 +309,7 @@ bool HttpServerState::Open()
Persistent = true;
// Determine the proxy setting
+ AutoDetectProxy(ServerName);
string SpecificProxy = _config->Find("Acquire::http::Proxy::" + ServerName.Host);
if (!SpecificProxy.empty())
{
@@ -436,10 +442,12 @@ bool HttpServerState::RunData(FileFd * const File)
{
/* Closes encoding is used when the server did not specify a size, the
loss of the connection means we are done */
- if (Encoding == Closes)
+ if (Persistent == false)
In.Limit(-1);
+ else if (JunkSize != 0)
+ In.Limit(JunkSize);
else
- In.Limit(Size - StartPos);
+ In.Limit(DownloadSize);
// Just transfer the whole block.
do
@@ -476,16 +484,14 @@ APT_PURE bool HttpServerState::IsOpen() /*{{{*/
return (ServerFd != -1);
}
/*}}}*/
-bool HttpServerState::InitHashes(FileFd &File) /*{{{*/
+bool HttpServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/
{
delete In.Hash;
- In.Hash = new Hashes;
-
- // Set the expected size and read file for the hashes
- File.Truncate(StartPos);
- return In.Hash->AddFD(File, StartPos);
+ In.Hash = new Hashes(ExpectedHashes);
+ return true;
}
/*}}}*/
+
APT_PURE Hashes * HttpServerState::GetHashes() /*{{{*/
{
return In.Hash;
@@ -516,7 +522,7 @@ bool HttpServerState::Die(FileFd &File)
// See if this is because the server finished the data stream
if (In.IsLimit() == false && State != HttpServerState::Header &&
- Encoding != HttpServerState::Closes)
+ Persistent == true)
{
Close();
if (LErrno == 0)
@@ -563,7 +569,7 @@ bool HttpServerState::Flush(FileFd * const File)
return true;
}
- if (In.IsLimit() == true || Encoding == ServerState::Closes)
+ if (In.IsLimit() == true || Persistent == false)
return true;
}
return false;
@@ -649,6 +655,13 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File)
return _error->Errno("write",_("Error writing to output file"));
}
+ if (MaximumSize > 0 && File && File->Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ File->Tell(), MaximumSize);
+ }
+
// Handle commands from APT
if (FD_ISSET(STDIN_FILENO,&rfds))
{
@@ -744,7 +757,7 @@ void HttpMethod::SendReq(FetchItem *Itm)
Req << "\r\n";
if (Debug == true)
- cerr << Req << endl;
+ cerr << Req.str() << endl;
Server->WriteResponse(Req.str());
}
@@ -762,66 +775,6 @@ bool HttpMethod::Configuration(string Message)
PipelineDepth);
Debug = _config->FindB("Debug::Acquire::http",false);
- // Get the proxy to use
- AutoDetectProxy();
-
- return true;
-}
- /*}}}*/
-// HttpMethod::AutoDetectProxy - auto detect proxy /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool HttpMethod::AutoDetectProxy()
-{
- // option is "Acquire::http::Proxy-Auto-Detect" but we allow the old
- // name without the dash ("-")
- AutoDetectProxyCmd = _config->Find("Acquire::http::Proxy-Auto-Detect",
- _config->Find("Acquire::http::ProxyAutoDetect"));
-
- if (AutoDetectProxyCmd.empty())
- return true;
-
- if (Debug)
- clog << "Using auto proxy detect command: " << AutoDetectProxyCmd << endl;
-
- int Pipes[2] = {-1,-1};
- if (pipe(Pipes) != 0)
- return _error->Errno("pipe", "Failed to create Pipe");
-
- pid_t Process = ExecFork();
- if (Process == 0)
- {
- close(Pipes[0]);
- dup2(Pipes[1],STDOUT_FILENO);
- SetCloseExec(STDOUT_FILENO,false);
-
- const char *Args[2];
- Args[0] = AutoDetectProxyCmd.c_str();
- Args[1] = 0;
- execv(Args[0],(char **)Args);
- cerr << "Failed to exec method " << Args[0] << endl;
- _exit(100);
- }
- char buf[512];
- int InFd = Pipes[0];
- close(Pipes[1]);
- int res = read(InFd, buf, sizeof(buf)-1);
- ExecWait(Process, "ProxyAutoDetect", true);
-
- if (res < 0)
- return _error->Errno("read", "Failed to read");
- if (res == 0)
- return _error->Warning("ProxyAutoDetect returned no data");
-
- // add trailing \0
- buf[res] = 0;
-
- if (Debug)
- clog << "auto detect command returned: '" << buf << "'" << endl;
-
- if (strstr(buf, "http://") == buf)
- _config->Set("Acquire::http::proxy", _strstrip(buf));
-
return true;
}
/*}}}*/
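
AutoDetectProxy() moves out of the http method into shared code (apt-pkg/proxy.h) and is now invoked per server from both http and https. A simplified stand-in for what the removed code did, using popen() in place of the original pipe/fork/exec sequence: run the configured command and accept its first output line if it looks like an http proxy URL.

#include <cstdio>
#include <iostream>
#include <string>

static std::string RunProxyAutoDetect(std::string const &Cmd)
{
   FILE *p = popen(Cmd.c_str(), "r");
   if (p == NULL)
      return "";
   char buf[512] = {0};
   if (fgets(buf, sizeof(buf), p) == NULL)
      buf[0] = '\0';
   pclose(p);
   std::string proxy(buf);
   while (!proxy.empty() && (proxy.back() == '\n' || proxy.back() == '\r'))
      proxy.pop_back();
   // only accept results that look like an http proxy URL
   if (proxy.compare(0, 7, "http://") != 0)
      return "";
   return proxy;
}

int main()
{
   std::cout << RunProxyAutoDetect("echo http://proxy.example:3128") << '\n';
}
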
diff --git a/methods/http.h b/methods/http.h
index 5406ce4a7..e73871931 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -63,6 +63,8 @@ class CircleBuf
public:
Hashes *Hash;
+ // total amount of data that got written so far
+ unsigned long long TotalWriten;
// Read data in
bool Read(int Fd);
@@ -81,8 +83,8 @@ class CircleBuf
bool ReadSpace() const {return Size - (InP - OutP) > 0;};
bool WriteSpace() const {return InP - OutP > 0;};
- // Dump everything
void Reset();
+ // Dump everything
void Stats();
CircleBuf(unsigned long long Size);
@@ -109,7 +111,7 @@ struct HttpServerState: public ServerState
virtual bool Open();
virtual bool IsOpen();
virtual bool Close();
- virtual bool InitHashes(FileFd &File);
+ virtual bool InitHashes(HashStringList const &ExpectedHashes);
virtual Hashes * GetHashes();
virtual bool Die(FileFd &File);
virtual bool Flush(FileFd * const File);
@@ -124,9 +126,6 @@ class HttpMethod : public ServerMethod
public:
virtual void SendReq(FetchItem *Itm);
- /** \brief Try to AutoDetect the proxy */
- bool AutoDetectProxy();
-
virtual bool Configuration(std::string Message);
virtual ServerState * CreateServerState(URI uri);
diff --git a/methods/http_main.cc b/methods/http_main.cc
index 3b346a514..cd52c42e8 100644
--- a/methods/http_main.cc
+++ b/methods/http_main.cc
@@ -1,5 +1,6 @@
#include <config.h>
-
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/error.h>
#include <signal.h>
#include "http.h"
@@ -13,5 +14,6 @@ int main()
signal(SIGPIPE, SIG_IGN);
HttpMethod Mth;
+
return Mth.Loop();
}
diff --git a/methods/https.cc b/methods/https.cc
index e0348ab58..d2ddf6fcf 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -20,6 +20,7 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/macros.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/proxy.h>
#include <sys/stat.h>
#include <sys/time.h>
@@ -36,11 +37,19 @@
/*}}}*/
using namespace std;
+struct APT_HIDDEN CURLUserPointer {
+ HttpsMethod * const https;
+ HttpsMethod::FetchResult * const Res;
+ HttpsMethod::FetchItem const * const Itm;
+ CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res,
+ HttpsMethod::FetchItem const * const Itm) : https(https), Res(Res), Itm(Itm) {}
+};
+
size_t
HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
{
size_t len = size * nmemb;
- HttpsMethod *me = (HttpsMethod *)userp;
+ CURLUserPointer *me = (CURLUserPointer *)userp;
std::string line((char*) buffer, len);
for (--len; len > 0; --len)
if (isspace(line[len]) == 0)
@@ -52,20 +61,52 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
if (line.empty() == true)
{
- if (me->Server->Result != 416 && me->Server->StartPos != 0)
+ if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0)
;
- else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize())
+ else if (me->https->Server->Result == 416)
{
- me->Server->Result = 200;
- me->Server->StartPos = me->Server->Size;
+ bool partialHit = false;
+ if (me->Itm->ExpectedHashes.usable() == true)
+ {
+ Hashes resultHashes(me->Itm->ExpectedHashes);
+ FileFd file(me->Itm->DestFile, FileFd::ReadOnly);
+ me->https->Server->TotalFileSize = file.FileSize();
+ me->https->Server->Date = file.ModificationTime();
+ resultHashes.AddFD(file);
+ HashStringList const hashList = resultHashes.GetHashStringList();
+ partialHit = (me->Itm->ExpectedHashes == hashList);
+ }
+ else if (me->https->Server->Result == 416 && me->https->Server->TotalFileSize == me->https->File->FileSize())
+ partialHit = true;
+
+ if (partialHit == true)
+ {
+ me->https->Server->Result = 200;
+ me->https->Server->StartPos = me->https->Server->TotalFileSize;
+ // the actual size is not important for https as curl will deal with it
+ // by itself and e.g. doesn't bother us with transport-encoding…
+ me->https->Server->JunkSize = std::numeric_limits<unsigned long long>::max();
+ }
+ else
+ me->https->Server->StartPos = 0;
}
else
- me->Server->StartPos = 0;
+ me->https->Server->StartPos = 0;
- me->File->Truncate(me->Server->StartPos);
- me->File->Seek(me->Server->StartPos);
+ me->Res->LastModified = me->https->Server->Date;
+ me->Res->Size = me->https->Server->TotalFileSize;
+ me->Res->ResumePoint = me->https->Server->StartPos;
+
+ // we expect valid data, so tell our caller we get the file now
+ if (me->https->Server->Result >= 200 && me->https->Server->Result < 300)
+ {
+ if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
+ me->https->URIStart(*me->Res);
+ if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false)
+ return 0;
+ }
}
- else if (me->Server->HeaderLine(line) == false)
+ else if (me->https->Server->HeaderLine(line) == false)
return 0;
return size*nmemb;
@@ -75,38 +116,60 @@ size_t
HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
{
HttpsMethod *me = (HttpsMethod *)userp;
+ size_t buffer_size = size * nmemb;
+ // we don't need to count the junk here, just drop anything we get as
+ // we don't always know how long it would be, e.g. in chunked encoding.
+ if (me->Server->JunkSize != 0)
+ return buffer_size;
- if (me->Res.Size == 0)
- me->URIStart(me->Res);
- if(me->File->Write(buffer, size*nmemb) != true)
- return false;
-
- return size*nmemb;
-}
+ if(me->File->Write(buffer, buffer_size) != true)
+ return 0;
-int
-HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/,
- double /*ultotal*/, double /*ulnow*/)
-{
- HttpsMethod *me = (HttpsMethod *)clientp;
- if(dltotal > 0 && me->Res.Size == 0) {
- me->Res.Size = (unsigned long long)dltotal;
+ if(me->Queue->MaximumSize > 0)
+ {
+ unsigned long long const TotalWritten = me->File->Tell();
+ if (TotalWritten > me->Queue->MaximumSize)
+ {
+ me->SetFailReason("MaximumSizeExceeded");
+ _error->Error("Writing more data than expected (%llu > %llu)",
+ TotalWritten, me->Queue->MaximumSize);
+ return 0;
+ }
}
- return 0;
+
+ if (me->Server->GetHashes()->Add((unsigned char const * const)buffer, buffer_size) == false)
+ return 0;
+
+ return buffer_size;
}
// HttpsServerState::HttpsServerState - Constructor /*{{{*/
-HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * /*Owner*/) : ServerState(Srv, NULL)
+HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner), Hash(NULL)
{
TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
Reset();
}
/*}}}*/
+bool HttpsServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/
+{
+ delete Hash;
+ Hash = new Hashes(ExpectedHashes);
+ return true;
+}
+ /*}}}*/
+APT_PURE Hashes * HttpsServerState::GetHashes() /*{{{*/
+{
+ return Hash;
+}
+ /*}}}*/
-void HttpsMethod::SetupProxy() /*{{{*/
+void HttpsMethod::SetupProxy() /*{{{*/
{
URI ServerName = Queue->Uri;
+ // Determine the proxy setting
+ AutoDetectProxy(ServerName);
+
// Curl should never read proxy settings from the environment, as
// we determine which proxy to use. Do this for consistency among
// methods and prevent an environment variable overriding a
@@ -163,7 +226,7 @@ void HttpsMethod::SetupProxy() /*{{{*/
bool HttpsMethod::Fetch(FetchItem *Itm)
{
struct stat SBuf;
- struct curl_slist *headers=NULL;
+ struct curl_slist *headers=NULL;
char curl_errorstr[CURL_ERROR_SIZE];
URI Uri = Itm->Uri;
string remotehost = Uri.Host;
@@ -178,16 +241,16 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc"));
+ FetchResult Res;
+ CURLUserPointer userp(this, &Res, Itm);
// callbacks
curl_easy_setopt(curl, CURLOPT_URL, static_cast<string>(Uri).c_str());
curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header);
- curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this);
+ curl_easy_setopt(curl, CURLOPT_WRITEHEADER, &userp);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, this);
- curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback);
- curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this);
// options
- curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false);
+ curl_easy_setopt(curl, CURLOPT_NOPROGRESS, true);
curl_easy_setopt(curl, CURLOPT_FILETIME, true);
// only allow curl to handle https, not the other stuff it supports
curl_easy_setopt(curl, CURLOPT_PROTOCOLS, CURLPROTO_HTTPS);
@@ -296,13 +359,11 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, timeout);
// set redirect options and default to 10 redirects
- bool const AllowRedirect = _config->FindB("Acquire::https::AllowRedirect",
- _config->FindB("Acquire::http::AllowRedirect",true));
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, AllowRedirect);
curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 10);
// debug
- if(_config->FindB("Debug::Acquire::https", false))
+ if (Debug == true)
curl_easy_setopt(curl, CURLOPT_VERBOSE, true);
// error handling
@@ -339,7 +400,9 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// go for it - if the file exists, append on it
File = new FileFd(Itm->DestFile, FileFd::WriteAny);
- Server = new HttpsServerState(Itm->Uri, this);
+ Server = CreateServerState(Itm->Uri);
+ if (Server->InitHashes(Itm->ExpectedHashes) == false)
+ return false;
// keep apt updated
Res.Filename = Itm->DestFile;
@@ -359,7 +422,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
if (success != 0)
{
_error->Error("%s", curl_errorstr);
- unlink(File->Name().c_str());
return false;
}
@@ -382,30 +444,29 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
char err[255];
snprintf(err, sizeof(err) - 1, "HttpError%i", Server->Result);
SetFailReason(err);
- _error->Error("%s", err);
+ _error->Error("%i %s", Server->Result, Server->Code);
// unlink, no need keep 401/404 page content in partial/
unlink(File->Name().c_str());
return false;
}
- struct stat resultStat;
- if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
- {
- _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
- return false;
- }
- Res.Size = resultStat.st_size;
-
// invalid range-request
if (Server->Result == 416)
{
unlink(File->Name().c_str());
- Res.Size = 0;
delete File;
Redirect(Itm->Uri);
return true;
}
+ struct stat resultStat;
+ if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
+ {
+ _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
+ return false;
+ }
+ Res.Size = resultStat.st_size;
+
// Timestamp
curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
if (Res.LastModified != -1)
@@ -420,20 +481,35 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
Res.LastModified = resultStat.st_mtime;
// take hashes
- Hashes Hash;
- FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd);
- Res.TakeHashes(Hash);
+ Res.TakeHashes(*(Server->GetHashes()));
// keep apt updated
URIDone(Res);
// cleanup
- Res.Size = 0;
delete File;
return true;
}
+ /*}}}*/
+// HttpsMethod::Configuration - Handle a configuration message /*{{{*/
+bool HttpsMethod::Configuration(string Message)
+{
+ if (ServerMethod::Configuration(Message) == false)
+ return false;
+
+ AllowRedirect = _config->FindB("Acquire::https::AllowRedirect",
+ _config->FindB("Acquire::http::AllowRedirect", true));
+ Debug = _config->FindB("Debug::Acquire::https",false);
+
+ return true;
+}
+ /*}}}*/
+ServerState * HttpsMethod::CreateServerState(URI uri) /*{{{*/
+{
+ return new HttpsServerState(uri, this);
+}
+ /*}}}*/
int main()
{
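
With the progress callback gone, all accounting happens in write_data: junk bytes belonging to an error page are swallowed (curl must see the full count back or it aborts the transfer), payload is written, checked against MaximumSize and fed to the incremental hasher. A reduced sketch of that control flow, with a plain context struct standing in for HttpsMethod:

#include <cstddef>
#include <cstdio>

struct WriteCtx
{
   unsigned long long JunkSize;    // >0 while an error page is being drained
   unsigned long long Written;     // stand-in for File->Tell()
   unsigned long long MaximumSize; // 0 means unlimited
};

static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp)
{
   WriteCtx *ctx = static_cast<WriteCtx *>(userp);
   size_t const buffer_size = size * nmemb;
   if (ctx->JunkSize != 0)
      return buffer_size;          // drop junk, but report it as consumed
   // the real method writes 'buffer' to the file and hashes it here
   ctx->Written += buffer_size;
   if (ctx->MaximumSize > 0 && ctx->Written > ctx->MaximumSize)
      return 0;                    // returning less than buffer_size aborts curl
   return buffer_size;
}

int main()
{
   WriteCtx ctx = { 0, 0, 16 };
   char data[32] = {0};
   std::printf("%zu\n", write_data(data, 1, sizeof(data), &ctx)); // 0: too big
}
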
diff --git a/methods/https.h b/methods/https.h
index faac8a3cd..57fc292ee 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -29,6 +29,8 @@ class FileFd;
class HttpsServerState : public ServerState
{
+ Hashes * Hash;
+
protected:
virtual bool ReadHeaderLines(std::string &/*Data*/) { return false; }
virtual bool LoadNextResponse(bool const /*ToFile*/, FileFd * const /*File*/) { return false; }
@@ -42,8 +44,8 @@ class HttpsServerState : public ServerState
virtual bool Open() { return false; }
virtual bool IsOpen() { return false; }
virtual bool Close() { return false; }
- virtual bool InitHashes(FileFd &/*File*/) { return false; }
- virtual Hashes * GetHashes() { return NULL; }
+ virtual bool InitHashes(HashStringList const &ExpectedHashes);
+ virtual Hashes * GetHashes();
virtual bool Die(FileFd &/*File*/) { return false; }
virtual bool Flush(FileFd * const /*File*/) { return false; }
virtual bool Go(bool /*ToFile*/, FileFd * const /*File*/) { return false; }
@@ -52,27 +54,35 @@ class HttpsServerState : public ServerState
virtual ~HttpsServerState() {Close();};
};
-class HttpsMethod : public pkgAcqMethod
+class HttpsMethod : public ServerMethod
{
// minimum speed in bytes/se that triggers download timeout handling
static const int DL_MIN_SPEED = 10;
virtual bool Fetch(FetchItem *);
+
static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp);
static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp);
- static int progress_callback(void *clientp, double dltotal, double dlnow,
- double ultotal, double ulnow);
+ static int progress_callback(void *clientp, double dltotal, double dlnow,
+ double ultotal, double ulnow);
void SetupProxy();
CURL *curl;
- FetchResult Res;
- HttpsServerState *Server;
+ ServerState *Server;
+
+ // Used by ServerMethods unused by https
+ virtual void SendReq(FetchItem *) { exit(42); }
+ virtual void RotateDNS() { exit(42); }
public:
FileFd *File;
-
- HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), File(NULL)
+
+ virtual bool Configuration(std::string Message);
+ virtual ServerState * CreateServerState(URI uri);
+ using pkgAcqMethod::FetchResult;
+ using pkgAcqMethod::FetchItem;
+
+ HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL)
{
- File = 0;
curl = curl_easy_init();
};
diff --git a/methods/rred.cc b/methods/rred.cc
index cabb3c456..554ac99b4 100644
--- a/methods/rred.cc
+++ b/methods/rred.cc
@@ -150,11 +150,11 @@ class FileChanges {
std::list<struct Change>::iterator where;
size_t pos; // line number is as far left of iterator as possible
- bool pos_is_okay(void)
+ bool pos_is_okay(void) const
{
#ifdef POSDEBUG
size_t cpos = 0;
- std::list<struct Change>::iterator x;
+ std::list<struct Change>::const_iterator x;
for (x = changes.begin(); x != where; ++x) {
assert(x != changes.end());
cpos += x->offset + x->add_cnt;
@@ -581,7 +581,7 @@ class RredMethod : public pkgAcqMethod {
FILE *inp = fopen(Path.c_str(), "r");
FILE *out = fopen(Itm->DestFile.c_str(), "w");
- Hashes hash;
+ Hashes hash(Itm->ExpectedHashes);
patch.apply_against_file(out, inp, &hash);
diff --git a/methods/rsh.cc b/methods/rsh.cc
index bd46d2515..52349c61c 100644
--- a/methods/rsh.cc
+++ b/methods/rsh.cc
@@ -218,17 +218,20 @@ bool RSHConn::WriteMsg(std::string &Text,bool Sync,const char *Fmt,...)
va_list args;
va_start(args,Fmt);
- // sprintf the description
- char S[512];
- vsnprintf(S,sizeof(S) - 4,Fmt,args);
+ // sprintf into a buffer
+ char Tmp[1024];
+ vsnprintf(Tmp,sizeof(Tmp),Fmt,args);
va_end(args);
+ // concat to create the real msg
+ std::string Msg;
if (Sync == true)
- strcat(S," 2> /dev/null || echo\n");
+ Msg = std::string(Tmp) + " 2> /dev/null || echo\n";
else
- strcat(S," 2> /dev/null\n");
+ Msg = std::string(Tmp) + " 2> /dev/null\n";
// Send it off
+ const char *S = Msg.c_str();
unsigned long Len = strlen(S);
unsigned long Start = 0;
while (Len != 0)
@@ -474,7 +477,7 @@ bool RSHMethod::Fetch(FetchItem *Itm)
}
// Open the file
- Hashes Hash;
+ Hashes Hash(Itm->ExpectedHashes);
{
FileFd Fd(Itm->DestFile,FileFd::WriteAny);
if (_error->PendingError() == true)
diff --git a/methods/server.cc b/methods/server.cc
index 5a13f18a7..f61a6fedb 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -44,7 +44,8 @@ time_t ServerMethod::FailTime = 0;
// ---------------------------------------------------------------------
/* Returns 0 if things are OK, 1 if an IO error occurred and 2 if a header
parse error occurred */
-ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File)
+ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File,
+ const std::string &Uri)
{
State = Header;
@@ -53,7 +54,8 @@ ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File)
Major = 0;
Minor = 0;
Result = 0;
- Size = 0;
+ TotalFileSize = 0;
+ JunkSize = 0;
StartPos = 0;
Encoding = Closes;
HaveContent = false;
@@ -66,7 +68,7 @@ ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File)
continue;
if (Owner->Debug == true)
- clog << Data;
+ clog << "Answer for: " << Uri << endl << Data;
for (string::const_iterator I = Data.begin(); I < Data.end(); ++I)
{
@@ -127,7 +129,7 @@ bool ServerState::HeaderLine(string Line)
if (elements == 3)
{
Code[0] = '\0';
- if (Owner->Debug == true)
+ if (Owner != NULL && Owner->Debug == true)
clog << "HTTP server doesn't give Reason-Phrase for " << Result << std::endl;
}
else if (elements != 4)
@@ -162,15 +164,22 @@ bool ServerState::HeaderLine(string Line)
Encoding = Stream;
HaveContent = true;
- // The length is already set from the Content-Range header
- if (StartPos != 0)
- return true;
+ unsigned long long * DownloadSizePtr = &DownloadSize;
+ if (Result == 416)
+ DownloadSizePtr = &JunkSize;
- Size = strtoull(Val.c_str(), NULL, 10);
- if (Size >= std::numeric_limits<unsigned long long>::max())
+ *DownloadSizePtr = strtoull(Val.c_str(), NULL, 10);
+ if (*DownloadSizePtr >= std::numeric_limits<unsigned long long>::max())
return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header"));
- else if (Size == 0)
+ else if (*DownloadSizePtr == 0)
HaveContent = false;
+
+ // On partial content (206) the Content-Length less than the real
+ // size, so do not set it here but leave that to the Content-Range
+ // header instead
+ if(Result != 206 && TotalFileSize == 0)
+ TotalFileSize = DownloadSize;
+
return true;
}
@@ -185,15 +194,15 @@ bool ServerState::HeaderLine(string Line)
HaveContent = true;
// §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416
- if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1)
- {
- StartPos = 1; // ignore Content-Length, it would override Size
- HaveContent = false;
- }
- else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2)
+ if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&TotalFileSize) == 1)
+ ; // we got the expected filesize which is all we wanted
+ else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&TotalFileSize) != 2)
return _error->Error(_("The HTTP server sent an invalid Content-Range header"));
- if ((unsigned long long)StartPos > Size)
+ if ((unsigned long long)StartPos > TotalFileSize)
return _error->Error(_("This HTTP server has broken range support"));
+
+ // figure out what we will download
+ DownloadSize = TotalFileSize - StartPos;
return true;
}
@@ -236,10 +245,21 @@ ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOu
Reset();
}
/*}}}*/
+bool ServerState::AddPartialFileToHashes(FileFd &File) /*{{{*/
+{
+ File.Truncate(StartPos);
+ return GetHashes()->AddFD(File, StartPos);
+}
+ /*}}}*/
bool ServerMethod::Configuration(string Message) /*{{{*/
{
- return pkgAcqMethod::Configuration(Message);
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
}
/*}}}*/
@@ -259,7 +279,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
Res.LastModified = Queue->LastModified;
return IMS_HIT;
}
-
+
/* Redirect
*
* Note that it is only OK for us to treat all redirection the same
@@ -304,12 +324,31 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
struct stat SBuf;
if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
{
- if ((unsigned long long)SBuf.st_size == Server->Size)
+ bool partialHit = false;
+ if (Queue->ExpectedHashes.usable() == true)
+ {
+ Hashes resultHashes(Queue->ExpectedHashes);
+ FileFd file(Queue->DestFile, FileFd::ReadOnly);
+ Server->TotalFileSize = file.FileSize();
+ Server->Date = file.ModificationTime();
+ resultHashes.AddFD(file);
+ HashStringList const hashList = resultHashes.GetHashStringList();
+ partialHit = (Queue->ExpectedHashes == hashList);
+ }
+ else if ((unsigned long long)SBuf.st_size == Server->TotalFileSize)
+ partialHit = true;
+ if (partialHit == true)
{
// the file is completely downloaded, but was not moved
- Server->StartPos = Server->Size;
- Server->Result = 200;
+ if (Server->HaveContent == true)
+ {
+ // Send to error page to dev/null
+ FileFd DevNull("/dev/null",FileFd::WriteExists);
+ Server->RunData(&DevNull);
+ }
Server->HaveContent = false;
+ Server->StartPos = Server->TotalFileSize;
+ Server->Result = 200;
}
else if (unlink(Queue->DestFile.c_str()) == 0)
{
@@ -323,10 +362,10 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
failure */
if (Server->Result < 200 || Server->Result >= 300)
{
- char err[255];
- snprintf(err,sizeof(err)-1,"HttpError%i",Server->Result);
+ std::string err;
+ strprintf(err, "HttpError%u", Server->Result);
SetFailReason(err);
- _error->Error("%u %s",Server->Result,Server->Code);
+ _error->Error("%u %s", Server->Result, Server->Code);
if (Server->HaveContent == true)
return ERROR_WITH_CONTENT_PAGE;
return ERROR_UNRECOVERABLE;
@@ -334,7 +373,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
// This is some sort of 2xx 'data follows' reply
Res.LastModified = Server->Date;
- Res.Size = Server->Size;
+ Res.Size = Server->TotalFileSize;
// Open the file
delete File;
@@ -347,7 +386,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
FailFd = File->Fd();
FailTime = Server->Date;
- if (Server->InitHashes(*File) == false)
+ if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false)
{
_error->Errno("read",_("Problem hashing file"));
return ERROR_NOT_FROM_SERVER;
@@ -392,9 +431,16 @@ bool ServerMethod::Fetch(FetchItem *)
for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth;
I = I->Next, Depth++)
{
- // If pipelining is disabled, we only queue 1 request
- if (Server->Pipeline == false && Depth >= 0)
- break;
+ if (Depth >= 0)
+ {
+ // If pipelining is disabled, we only queue 1 request
+ if (Server->Pipeline == false)
+ break;
+ // if we have no hashes, do at most one such request
+ // as we can't fixup pipeling misbehaviors otherwise
+ else if (I->ExpectedHashes.usable() == false)
+ break;
+ }
// Make sure we stick with the same server
if (Server->Comp(I->Uri) == false)
@@ -478,7 +524,7 @@ int ServerMethod::Loop()
Fetch(0);
// Fetch the next URL header data from the server.
- switch (Server->RunHeaders(File))
+ switch (Server->RunHeaders(File, Queue->Uri))
{
case ServerState::RUN_HEADERS_OK:
break;
@@ -524,6 +570,13 @@ int ServerMethod::Loop()
// Run the data
bool Result = true;
+
+ // ensure we don't fetch too much
+ // we could do "Server->MaximumSize = Queue->MaximumSize" here
+ // but that would break the clever pipeline messup detection
+ // so instead we use the size of the biggest item in the queue
+ Server->MaximumSize = FindMaximumObjectSizeInQueue();
+
if (Server->HaveContent)
Result = Server->RunData(File);
@@ -546,7 +599,38 @@ int ServerMethod::Loop()
// Send status to APT
if (Result == true)
{
- Res.TakeHashes(*Server->GetHashes());
+ Hashes * const resultHashes = Server->GetHashes();
+ HashStringList const hashList = resultHashes->GetHashStringList();
+ if (PipelineDepth != 0 && Queue->ExpectedHashes.usable() == true && Queue->ExpectedHashes != hashList)
+ {
+ // we did not get the expected hash… mhhh:
+ // could it be that server/proxy messed up pipelining?
+ FetchItem * BeforeI = Queue;
+ for (FetchItem *I = Queue->Next; I != 0 && I != QueueBack; I = I->Next)
+ {
+ if (I->ExpectedHashes.usable() == true && I->ExpectedHashes == hashList)
+ {
+ // yes, he did! Disable pipelining and rewrite queue
+ if (Server->Pipeline == true)
+ {
+ // FIXME: fake a warning message as we have no proper way of communicating here
+ std::string out;
+ strprintf(out, _("Automatically disabled %s due to incorrect response from server/proxy. (man 5 apt.conf)"), "Acquire::http::PipelineDepth");
+ std::cerr << "W: " << out << std::endl;
+ Server->Pipeline = false;
+ // we keep the PipelineDepth value so that the rest of the queue can be fixed up as well
+ }
+ Rename(Res.Filename, I->DestFile);
+ Res.Filename = I->DestFile;
+ BeforeI->Next = I->Next;
+ I->Next = Queue;
+ Queue = I;
+ break;
+ }
+ BeforeI = I;
+ }
+ }
+ Res.TakeHashes(*resultHashes);
URIDone(Res);
}
else
@@ -566,7 +650,10 @@ int ServerMethod::Loop()
QueueBack = Queue;
}
else
+ {
+ Server->Close();
Fail(true);
+ }
}
break;
}
@@ -661,3 +748,13 @@ int ServerMethod::Loop()
return 0;
}
/*}}}*/
+ /*{{{*/
+unsigned long long
+ServerMethod::FindMaximumObjectSizeInQueue() const
+{
+ unsigned long long MaxSizeInQueue = 0;
+ for (FetchItem *I = Queue; I != 0 && I != QueueBack; I = I->Next)
+ MaxSizeInQueue = std::max(MaxSizeInQueue, I->MaximumSize);
+ return MaxSizeInQueue;
+}
+ /*}}}*/
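
The most intricate addition is the pipeline mix-up recovery above: if the downloaded data hashes to what a *later* item in the queue expected, the server or a proxy has reordered pipelined responses, so pipelining is disabled and that item is moved to the head of the queue, taking ownership of the already-downloaded file. The queue surgery in isolation, with a simplified item struct in place of FetchItem:

#include <iostream>
#include <string>

struct Item
{
   std::string ExpectedHash;
   std::string DestFile;
   Item *Next;
};

static Item *FixupPipeline(Item *Queue, std::string const &GotHash, bool &Pipeline)
{
   if (Queue->ExpectedHash == GotHash)
      return Queue; // hashes match: nothing to fix
   Item *BeforeI = Queue;
   for (Item *I = Queue->Next; I != nullptr; I = I->Next)
   {
      if (I->ExpectedHash == GotHash)
      {
         Pipeline = false;        // server/proxy reordered responses
         BeforeI->Next = I->Next; // unlink I ...
         I->Next = Queue;         // ... and put it in front
         return I;
      }
      BeforeI = I;
   }
   return Queue; // no match anywhere: a genuine hash error
}

int main()
{
   Item c{"hashC", "c.deb", nullptr}, b{"hashB", "b.deb", &c}, a{"hashA", "a.deb", &b};
   bool Pipeline = true;
   Item *Queue = FixupPipeline(&a, "hashB", Pipeline);
   std::cout << Queue->DestFile << " pipeline=" << Pipeline << '\n'; // b.deb pipeline=0
}
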
diff --git a/methods/server.h b/methods/server.h
index 0f45ab994..8d7d33ee6 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -34,8 +34,16 @@ struct ServerState
char Code[360];
// These are some statistics from the last parsed header lines
- unsigned long long Size;
+
+ // total size of the usable content (aka: the file)
+ unsigned long long TotalFileSize;
+ // size we actually download (can be smaller than Size if we have partial content)
+ unsigned long long DownloadSize;
+ // size of junk content (aka: server error pages)
+ unsigned long long JunkSize;
+ // The start of the data (for partial content)
unsigned long long StartPos;
+
time_t Date;
bool HaveContent;
enum {Chunked,Stream,Closes} Encoding;
@@ -49,6 +57,8 @@ struct ServerState
URI Proxy;
unsigned long TimeOut;
+ unsigned long long MaximumSize;
+
protected:
ServerMethod *Owner;
@@ -68,12 +78,13 @@ struct ServerState
RUN_HEADERS_PARSE_ERROR
};
/** \brief Get the headers before the data */
- RunHeadersResult RunHeaders(FileFd * const File);
+ RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri);
+ bool AddPartialFileToHashes(FileFd &File);
bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
- virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0;
+ virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; TotalFileSize = 0; JunkSize = 0;
StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
- State = Header; Persistent = false; Pipeline = true;};
+ State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;};
virtual bool WriteResponse(std::string const &Data) = 0;
/** \brief Transfer the data from the socket */
@@ -82,7 +93,7 @@ struct ServerState
virtual bool Open() = 0;
virtual bool IsOpen() = 0;
virtual bool Close() = 0;
- virtual bool InitHashes(FileFd &File) = 0;
+ virtual bool InitHashes(HashStringList const &ExpectedHashes) = 0;
virtual Hashes * GetHashes() = 0;
virtual bool Die(FileFd &File) = 0;
virtual bool Flush(FileFd * const File) = 0;
@@ -104,6 +115,10 @@ class ServerMethod : public pkgAcqMethod
unsigned long PipelineDepth;
bool AllowRedirect;
+ // Find the biggest item in the fetch queue for the checking of the maximum
+ // size
+ unsigned long long FindMaximumObjectSizeInQueue() const APT_PURE;
+
public:
bool Debug;
@@ -140,7 +155,7 @@ class ServerMethod : public pkgAcqMethod
virtual ServerState * CreateServerState(URI uri) = 0;
virtual void RotateDNS() = 0;
- ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(0), AllowRedirect(false), Debug(false) {};
+ ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(10), AllowRedirect(false), Debug(false) {};
virtual ~ServerMethod() {};
};
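
Splitting the old single Size into TotalFileSize, DownloadSize and JunkSize is what the header parsing in server.cc relies on: Content-Length names whatever is on the wire (junk for a 416, payload otherwise), while Content-Range names the complete file and the resume offset. How the counters interact, as a stand-alone sketch using the same sscanf/strtoull parsing as the diff:

#include <cstdio>
#include <cstdlib>

int main()
{
   unsigned long long TotalFileSize = 0, DownloadSize = 0, JunkSize = 0, StartPos = 0;
   unsigned int Result = 206; // partial content

   // Content-Length counts the bytes on the wire: junk for a 416, payload otherwise
   const char *ContentLength = "512";
   if (Result == 416)
      JunkSize = strtoull(ContentLength, NULL, 10);
   else
      DownloadSize = strtoull(ContentLength, NULL, 10);

   // Content-Range names the whole file and where this piece starts
   const char *ContentRange = "bytes 1536-2047/2048";
   if (sscanf(ContentRange, "bytes %llu-%*u/%llu", &StartPos, &TotalFileSize) == 2)
      DownloadSize = TotalFileSize - StartPos;

   printf("total=%llu download=%llu junk=%llu start=%llu\n",
          TotalFileSize, DownloadSize, JunkSize, StartPos);
}
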