-rw-r--r--  .travis.yml | 2
-rw-r--r--  BUGS | 9
-rw-r--r--  apt-inst/deb/debfile.cc | 3
-rw-r--r--  apt-pkg/acquire-item.cc | 103
-rw-r--r--  apt-pkg/acquire-item.h | 19
-rw-r--r--  apt-pkg/aptconfiguration.cc | 2
-rw-r--r--  apt-pkg/deb/deblistparser.cc | 6
-rw-r--r--  apt-pkg/depcache.cc | 7
-rw-r--r--  apt-pkg/depcache.h | 2
-rw-r--r--  apt-pkg/tagfile.cc | 2
-rw-r--r--  apt-pkg/upgrade.cc | 2
-rw-r--r--  apt-pkg/upgrade.h | 3
-rw-r--r--  apt-private/makefile | 2
-rw-r--r--  apt-private/private-download.cc | 96
-rw-r--r--  apt-private/private-download.h | 9
-rw-r--r--  apt-private/private-install.cc | 84
-rw-r--r--  apt-private/private-show.cc | 2
-rw-r--r--  apt-private/private-upgrade.cc | 2
-rw-r--r--  cmdline/apt-get.cc | 107
-rw-r--r--  debian/changelog | 19
-rw-r--r--  methods/http.cc | 968
-rw-r--r--  methods/http.h | 137
-rw-r--r--  methods/https.cc | 138
-rw-r--r--  methods/https.h | 29
-rw-r--r--  methods/makefile | 6
-rw-r--r--  methods/server.cc | 665
-rw-r--r--  methods/server.h | 144
-rw-r--r--  test/integration/Packages-bug-723705-tagfile-truncates-fields | 167
-rw-r--r--  test/integration/apt.pem | 49
-rw-r--r--  test/integration/framework | 174
-rwxr-xr-x  test/integration/skip-bug-601016-description-translation (renamed from test/integration/test-bug-601016-description-translation) | 0
-rw-r--r--  test/integration/status-bug-723705-tagfile-truncates-fields | 62
-rwxr-xr-x  test/integration/test-apt-get-autoremove | 20
-rwxr-xr-x  test/integration/test-apt-get-download | 3
-rwxr-xr-x  test/integration/test-bug-254770-segfault-if-cache-not-buildable | 2
-rwxr-xr-x  test/integration/test-bug-596498-trusted-unsigned-repo | 9
-rwxr-xr-x  test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted | 63
-rwxr-xr-x  test/integration/test-bug-633350-do-not-kill-last-char-in-Release | 2
-rwxr-xr-x  test/integration/test-bug-679371-apt-get-autoclean-multiarch | 5
-rwxr-xr-x  test/integration/test-bug-686346-package-missing-architecture | 13
-rwxr-xr-x  test/integration/test-bug-722207-print-uris-even-if-very-quiet | 30
-rwxr-xr-x  test/integration/test-bug-723586-any-stripped-in-single-arch | 54
-rwxr-xr-x  test/integration/test-bug-723705-tagfile-truncates-fields | 33
-rwxr-xr-x  test/integration/test-partial-file-support | 107
-rwxr-xr-x  test/integration/test-releasefile-verification | 59
-rwxr-xr-x  test/integration/test-ubuntu-bug-859188-multiarch-reinstall | 7
-rw-r--r--  test/interactive-helper/aptwebserver.cc | 138
47 files changed, 2362 insertions, 1203 deletions
diff --git a/.travis.yml b/.travis.yml
index 56536837f..2d9194c28 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,5 +1,5 @@
language: cpp
before_install:
- sudo apt-get update -q
- - sudo apt-get install -q dpkg-dev debhelper libdb-dev gettext libcurl4-gnutls-dev zlib1g-dev libbz2-dev xsltproc docbook-xsl docbook-xml po4a autotools-dev autoconf automake doxygen debiandoc-sgml
+ - sudo apt-get install -q --no-install-recommends dpkg-dev debhelper libdb-dev gettext libcurl4-gnutls-dev zlib1g-dev libbz2-dev xsltproc docbook-xsl docbook-xml po4a autotools-dev autoconf automake doxygen debiandoc-sgml stunnel4
script: make && make test && test/integration/run-tests
diff --git a/BUGS b/BUGS
deleted file mode 100644
index a7b6b1114..000000000
--- a/BUGS
+++ /dev/null
@@ -1,9 +0,0 @@
-
-DDTP problems:
---------------
-- apt-get update clean the /var/lib/apt/lists dir
- from all Translation-$index that are not in the current
- enviroment or Translations apt variable
-- there needs to be a list of locales (pt, sv, en) that need
- both language and country code to get the right file
- (is in the code in indexfile::LanguageCode(), just a bit ugly
diff --git a/apt-inst/deb/debfile.cc b/apt-inst/deb/debfile.cc
index ab4037915..79434d8b5 100644
--- a/apt-inst/deb/debfile.cc
+++ b/apt-inst/deb/debfile.cc
@@ -51,8 +51,7 @@ debDebFile::debDebFile(FileFd &File) : File(File), AR(File)
!CheckMember("data.tar.bz2") &&
!CheckMember("data.tar.lzma") &&
!CheckMember("data.tar.xz")) {
- // FIXME: add data.tar.xz here - adding it now would require a Translation round for a very small gain
- _error->Error(_("This is not a valid DEB archive, it has no '%s', '%s' or '%s' member"), "data.tar.gz", "data.tar.bz2", "data.tar.lzma");
+ _error->Error(_("This is not a valid DEB archive, missing '%s' member"), "data.tar");
return;
}
}
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 97b2d1e29..04505b35a 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -143,6 +143,32 @@ void pkgAcquire::Item::Rename(string From,string To)
}
}
/*}}}*/
+bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
+{
+ if(FileExists(DestFile))
+ Rename(DestFile, DestFile + ".FAILED");
+
+ switch (error)
+ {
+ case HashSumMismatch:
+ ErrorText = _("Hash Sum mismatch");
+ Status = StatAuthError;
+ ReportMirrorFailure("HashChecksumFailure");
+ break;
+ case SizeMismatch:
+ ErrorText = _("Size mismatch");
+ Status = StatAuthError;
+ ReportMirrorFailure("SizeFailure");
+ break;
+ case InvalidFormat:
+ ErrorText = _("Invalid file format");
+ Status = StatError;
+ // do not report as usually its not the mirrors fault, but Portal/Proxy
+ break;
+ }
+ return false;
+}
+ /*}}}*/
// Acquire::Item::ReportMirrorFailure /*{{{*/
// ---------------------------------------------------------------------
void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
@@ -595,9 +621,7 @@ void pkgAcqIndexDiffs::Finish(bool allDone)
if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
{
- Status = StatAuthError;
- ErrorText = _("MD5Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
Dequeue();
return;
}
@@ -866,10 +890,7 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
{
- Status = StatAuthError;
- ErrorText = _("Hash Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
- ReportMirrorFailure("HashChecksumFailure");
+ RenameOnError(HashSumMismatch);
return;
}
@@ -878,22 +899,18 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
if (Verify == true)
{
FileFd fd(DestFile, FileFd::ReadOnly);
- pkgTagSection sec;
- pkgTagFile tag(&fd);
-
- // Only test for correctness if the file is not empty (empty is ok)
- if (fd.Size() > 0) {
- if (_error->PendingError() || !tag.Step(sec)) {
- Status = StatError;
- _error->DumpErrors();
- Rename(DestFile,DestFile + ".FAILED");
- return;
- } else if (!sec.Exists("Package")) {
- Status = StatError;
- ErrorText = ("Encountered a section with no Package: header");
- Rename(DestFile,DestFile + ".FAILED");
- return;
- }
+ // Only test for correctness if the file is not empty (empty is ok)
+ if (fd.FileSize() > 0)
+ {
+ pkgTagSection sec;
+ pkgTagFile tag(&fd);
+
+ // all our current indexes have a field 'Package' in each section
+ if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false)
+ {
+ RenameOnError(InvalidFormat);
+ return;
+ }
}
}
@@ -1719,34 +1736,40 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
}
// check if we have one trusted source for the package. if so, switch
- // to "TrustedOnly" mode
+ // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode
+ bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false);
+ bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false);
+ bool seenUntrusted = false;
for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
{
pkgIndexFile *Index;
if (Sources->FindIndex(i.File(),Index) == false)
continue;
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
+
+ if (debugAuth == true)
std::cerr << "Checking index: " << Index->Describe()
- << "(Trusted=" << Index->IsTrusted() << ")\n";
- }
- if (Index->IsTrusted()) {
+ << "(Trusted=" << Index->IsTrusted() << ")" << std::endl;
+
+ if (Index->IsTrusted() == true)
+ {
Trusted = true;
- break;
+ if (allowUnauth == false)
+ break;
}
+ else
+ seenUntrusted = true;
}
// "allow-unauthenticated" restores apts old fetching behaviour
// that means that e.g. unauthenticated file:// uris are higher
// priority than authenticated http:// uris
- if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
+ if (allowUnauth == true && seenUntrusted == true)
Trusted = false;
// Select a source
if (QueueNext() == false && _error->PendingError() == false)
- _error->Error(_("I wasn't able to locate a file for the %s package. "
- "This might mean you need to manually fix this package."),
- Version.ParentPkg().Name());
+ _error->Error(_("Can't find a source to download version '%s' of '%s'"),
+ Version.VerStr(), Version.ParentPkg().FullName(false).c_str());
}
/*}}}*/
// AcqArchive::QueueNext - Queue the next file source /*{{{*/
@@ -1900,18 +1923,14 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
// Check the size
if (Size != Version->Size)
{
- Status = StatError;
- ErrorText = _("Size mismatch");
+ RenameOnError(SizeMismatch);
return;
}
// Check the hash
if(ExpectedHash.toStr() != CalcHash)
{
- Status = StatError;
- ErrorText = _("Hash Sum mismatch");
- if(FileExists(DestFile))
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
return;
}
@@ -2051,9 +2070,7 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash,
// Check the hash
if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
{
- Status = StatError;
- ErrorText = _("Hash Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
return;
}
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index 10c855e63..6b4f73708 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -83,7 +83,7 @@ class pkgAcquire::Item : public WeakPointable
* overwritten.
*/
void Rename(std::string From,std::string To);
-
+
public:
/** \brief The current status of this item. */
@@ -281,6 +281,21 @@ class pkgAcquire::Item : public WeakPointable
* pkgAcquire::Remove.
*/
virtual ~Item();
+
+ protected:
+
+ enum RenameOnErrorState {
+ HashSumMismatch,
+ SizeMismatch,
+ InvalidFormat
+ };
+
+ /** \brief Rename failed file and set error
+ *
+ * \param state respresenting the error we encountered
+ * \param errorMsg a message describing the error
+ */
+ bool RenameOnError(RenameOnErrorState const state);
};
/*}}}*/
/** \brief Information about an index patch (aka diff). */ /*{{{*/
@@ -982,7 +997,7 @@ class pkgAcqArchive : public pkgAcquire::Item
*
* \param Version The package version to download.
*
- * \param StoreFilename A location in which the actual filename of
+ * \param[out] StoreFilename A location in which the actual filename of
* the package should be stored. It will be set to a guessed
* basename in the constructor, and filled in with a fully
* qualified filename once the download finishes.
diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc
index 4f9b84e00..115d11616 100644
--- a/apt-pkg/aptconfiguration.cc
+++ b/apt-pkg/aptconfiguration.cc
@@ -453,7 +453,7 @@ void Configuration::setDefaultConfigurationForCompressors() {
_config->CndSet("Dir::Bin::bzip2", "/bin/bzip2");
_config->CndSet("Dir::Bin::xz", "/usr/bin/xz");
if (FileExists(_config->FindFile("Dir::Bin::xz")) == true) {
- _config->Clear("Dir::Bin::lzma");
+ _config->Set("Dir::Bin::lzma", _config->FindFile("Dir::Bin::xz"));
_config->Set("APT::Compressor::lzma::Binary", "xz");
if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) {
_config->Set("APT::Compressor::lzma::CompressArg::", "--format=lzma");
diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc
index 87aab6ee2..68d544e1f 100644
--- a/apt-pkg/deb/deblistparser.cc
+++ b/apt-pkg/deb/deblistparser.cc
@@ -635,7 +635,7 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver,
string Version;
unsigned int Op;
- Start = ParseDepends(Start,Stop,Package,Version,Op,false,!MultiArchEnabled);
+ Start = ParseDepends(Start, Stop, Package, Version, Op, false, false);
if (Start == 0)
return _error->Error("Problem parsing dependency %s",Tag);
size_t const found = Package.rfind(':');
@@ -717,9 +717,7 @@ bool debListParser::ParseProvides(pkgCache::VerIterator &Ver)
}
}
- if (MultiArchEnabled == false)
- return true;
- else if ((Ver->MultiArch & pkgCache::Version::Allowed) == pkgCache::Version::Allowed)
+ if ((Ver->MultiArch & pkgCache::Version::Allowed) == pkgCache::Version::Allowed)
{
string const Package = string(Ver.ParentPkg().Name()).append(":").append("any");
return NewProvidesAllArch(Ver, Package, Ver.VerStr());
diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc
index 978a893f7..a06789cdf 100644
--- a/apt-pkg/depcache.cc
+++ b/apt-pkg/depcache.cc
@@ -896,6 +896,7 @@ char const* PrintMode(char const mode)
case pkgDepCache::ModeInstall: return "Install";
case pkgDepCache::ModeKeep: return "Keep";
case pkgDepCache::ModeDelete: return "Delete";
+ case pkgDepCache::ModeGarbage: return "Garbage";
default: return "UNKNOWN";
}
}
@@ -1726,8 +1727,6 @@ bool pkgDepCache::MarkRequired(InRootSetFunc &userFunc)
follow_recommends = MarkFollowsRecommends();
follow_suggests = MarkFollowsSuggests();
-
-
// do the mark part, this is the core bit of the algorithm
for(PkgIterator p = PkgBegin(); !p.end(); ++p)
{
@@ -1738,7 +1737,9 @@ bool pkgDepCache::MarkRequired(InRootSetFunc &userFunc)
// be nice even then a required package violates the policy (#583517)
// and do the full mark process also for required packages
(p.CurrentVer().end() != true &&
- p.CurrentVer()->Priority == pkgCache::State::Required))
+ p.CurrentVer()->Priority == pkgCache::State::Required) ||
+ // packages which can't be changed (like holds) can't be garbage
+ (IsModeChangeOk(ModeGarbage, p, 0, false) == false))
{
// the package is installed (and set to keep)
if(PkgState[p->ID].Keep() && !p.CurrentVer().end())
diff --git a/apt-pkg/depcache.h b/apt-pkg/depcache.h
index d9c95349b..61c9aa559 100644
--- a/apt-pkg/depcache.h
+++ b/apt-pkg/depcache.h
@@ -128,7 +128,7 @@ class pkgDepCache : protected pkgCache::Namespace
enum InternalFlags {AutoKept = (1 << 0), Purge = (1 << 1), ReInstall = (1 << 2), Protected = (1 << 3)};
enum VersionTypes {NowVersion, InstallVersion, CandidateVersion};
- enum ModeList {ModeDelete = 0, ModeKeep = 1, ModeInstall = 2};
+ enum ModeList {ModeDelete = 0, ModeKeep = 1, ModeInstall = 2, ModeGarbage = 3};
/** \brief Represents an active action group.
*
diff --git a/apt-pkg/tagfile.cc b/apt-pkg/tagfile.cc
index b91e868e2..e0802e3d5 100644
--- a/apt-pkg/tagfile.cc
+++ b/apt-pkg/tagfile.cc
@@ -164,7 +164,7 @@ bool pkgTagFile::Fill()
unsigned long long const dataSize = d->Size - ((d->End - d->Buffer) + 1);
if (d->Fd.Read(d->End, dataSize, &Actual) == false)
return false;
- if (Actual != dataSize || d->Fd.Eof() == true)
+ if (Actual != dataSize)
d->Done = true;
d->End += Actual;
}
diff --git a/apt-pkg/upgrade.cc b/apt-pkg/upgrade.cc
index 84c781c61..f06f6d40d 100644
--- a/apt-pkg/upgrade.cc
+++ b/apt-pkg/upgrade.cc
@@ -251,7 +251,7 @@ bool APT::Upgrade::Upgrade(pkgDepCache &Cache, int mode)
{
return pkgAllUpgradeWithNewPackages(Cache);
}
- else if ((mode & ~(FORBID_REMOVE_PACKAGES|FORBID_NEW_INSTALL_PACKAGES)) == 0)
+ else if ((mode & ~(FORBID_REMOVE_PACKAGES|FORBID_INSTALL_NEW_PACKAGES)) == 0)
{
return pkgAllUpgradeNoNewPackages(Cache);
}
diff --git a/apt-pkg/upgrade.h b/apt-pkg/upgrade.h
index 9fdfac2fd..c4973472f 100644
--- a/apt-pkg/upgrade.h
+++ b/apt-pkg/upgrade.h
@@ -15,12 +15,13 @@ namespace APT {
// FIXME: make this "enum class UpgradeMode {" once we enable c++11
enum UpgradeMode {
FORBID_REMOVE_PACKAGES = 1,
- FORBID_NEW_INSTALL_PACKAGES = 2,
+ FORBID_INSTALL_NEW_PACKAGES = 2,
};
bool Upgrade(pkgDepCache &Cache, int UpgradeMode);
}
}
+// please use APT::Upgrade::Upgrade() instead
bool pkgDistUpgrade(pkgDepCache &Cache);
bool pkgAllUpgrade(pkgDepCache &Cache);
bool pkgMinimizeUpgrade(pkgDepCache &Cache);
diff --git a/apt-private/makefile b/apt-private/makefile
index 8feb1ce6c..1d179f0b2 100644
--- a/apt-private/makefile
+++ b/apt-private/makefile
@@ -17,7 +17,7 @@ MAJOR=0.0
MINOR=0
SLIBS=$(PTHREADLIB) -lapt-pkg
-PRIVATES=list install output cachefile cacheset update upgrade cmndline moo search show main
+PRIVATES=list install download output cachefile cacheset update upgrade cmndline moo search show main
SOURCE += $(foreach private, $(PRIVATES), private-$(private).cc)
HEADERS += $(foreach private, $(PRIVATES), private-$(private).h)
diff --git a/apt-private/private-download.cc b/apt-private/private-download.cc
new file mode 100644
index 000000000..f02991cde
--- /dev/null
+++ b/apt-private/private-download.cc
@@ -0,0 +1,96 @@
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/acquire.h>
+#include <apt-pkg/acquire-item.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/strutl.h>
+
+#include "private-output.h"
+
+#include <locale.h>
+
+#include <fstream>
+#include <string>
+#include <vector>
+
+#include <apti18n.h>
+ /*}}}*/
+
+// CheckAuth - check if each download comes form a trusted source /*{{{*/
+bool CheckAuth(pkgAcquire& Fetcher, bool const PromptUser)
+{
+ std::string UntrustedList;
+ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I < Fetcher.ItemsEnd(); ++I)
+ if (!(*I)->IsTrusted())
+ UntrustedList += std::string((*I)->ShortDesc()) + " ";
+
+ if (UntrustedList == "")
+ return true;
+
+ ShowList(c2out,_("WARNING: The following packages cannot be authenticated!"),UntrustedList,"");
+
+ if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
+ {
+ c2out << _("Authentication warning overridden.\n");
+ return true;
+ }
+
+ if (PromptUser == false)
+ return _error->Error(_("Some packages could not be authenticated"));
+
+ if (_config->FindI("quiet",0) < 2
+ && _config->FindB("APT::Get::Assume-Yes",false) == false)
+ {
+ c2out << _("Install these packages without verification?") << std::flush;
+ if (!YnPrompt(false))
+ return _error->Error(_("Some packages could not be authenticated"));
+
+ return true;
+ }
+ else if (_config->FindB("APT::Get::Force-Yes",false) == true)
+ return true;
+
+ return _error->Error(_("There are problems and -y was used without --force-yes"));
+}
+ /*}}}*/
+bool AcquireRun(pkgAcquire &Fetcher, int const PulseInterval, bool * const Failure, bool * const TransientNetworkFailure)/*{{{*/
+{
+ pkgAcquire::RunResult res;
+ if(PulseInterval > 0)
+ res = Fetcher.Run(PulseInterval);
+ else
+ res = Fetcher.Run();
+
+ if (res == pkgAcquire::Failed)
+ return false;
+
+ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin();
+ I != Fetcher.ItemsEnd(); ++I)
+ {
+
+ if ((*I)->Status == pkgAcquire::Item::StatDone &&
+ (*I)->Complete == true)
+ continue;
+
+ if (TransientNetworkFailure != NULL && (*I)->Status == pkgAcquire::Item::StatIdle)
+ {
+ *TransientNetworkFailure = true;
+ continue;
+ }
+
+ ::URI uri((*I)->DescURI());
+ uri.User.clear();
+ uri.Password.clear();
+ std::string descUri = std::string(uri);
+ _error->Error(_("Failed to fetch %s %s\n"), descUri.c_str(),
+ (*I)->ErrorText.c_str());
+
+ if (Failure != NULL)
+ *Failure = true;
+ }
+
+ return true;
+}
+ /*}}}*/
diff --git a/apt-private/private-download.h b/apt-private/private-download.h
new file mode 100644
index 000000000..b8cc8da1e
--- /dev/null
+++ b/apt-private/private-download.h
@@ -0,0 +1,9 @@
+#ifndef APT_PRIVATE_DOWNLOAD_H
+#define APT_PRIVATE_DOWNLOAD_H
+
+#include <apt-pkg/acquire.h>
+
+bool CheckAuth(pkgAcquire& Fetcher, bool const PromptUser);
+bool AcquireRun(pkgAcquire &Fetcher, int const PulseInterval, bool * const Failure, bool * const TransientNetworkFailure);
+
+#endif
diff --git a/apt-private/private-install.cc b/apt-private/private-install.cc
index 9808c3dde..643a6b370 100644
--- a/apt-private/private-install.cc
+++ b/apt-private/private-install.cc
@@ -42,6 +42,7 @@
#include <sstream>
#include "private-install.h"
+#include "private-download.h"
#include "private-cachefile.h"
#include "private-output.h"
#include "private-cacheset.h"
@@ -50,50 +51,6 @@
#include <apti18n.h>
/*}}}*/
-// CheckAuth - check if each download comes form a trusted source /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-static bool CheckAuth(pkgAcquire& Fetcher)
-{
- std::string UntrustedList;
- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I < Fetcher.ItemsEnd(); ++I)
- {
- if (!(*I)->IsTrusted())
- {
- UntrustedList += std::string((*I)->ShortDesc()) + " ";
- }
- }
-
- if (UntrustedList == "")
- {
- return true;
- }
-
- ShowList(c2out,_("WARNING: The following packages cannot be authenticated!"),UntrustedList,"");
-
- if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
- {
- c2out << _("Authentication warning overridden.\n");
- return true;
- }
-
- if (_config->FindI("quiet",0) < 2
- && _config->FindB("APT::Get::Assume-Yes",false) == false)
- {
- c2out << _("Install these packages without verification?") << std::flush;
- if (!YnPrompt(false))
- return _error->Error(_("Some packages could not be authenticated"));
-
- return true;
- }
- else if (_config->FindB("APT::Get::Force-Yes",false) == true)
- {
- return true;
- }
-
- return _error->Error(_("There are problems and -y was used without --force-yes"));
-}
- /*}}}*/
// InstallPackages - Actually download and install the packages /*{{{*/
// ---------------------------------------------------------------------
/* This displays the informative messages describing what is going to
@@ -299,12 +256,12 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask, bool Safety)
{
pkgAcquire::UriIterator I = Fetcher.UriBegin();
for (; I != Fetcher.UriEnd(); ++I)
- c1out << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
+ std::cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
I->Owner->FileSize << ' ' << I->Owner->HashSum() << std::endl;
return true;
}
- if (!CheckAuth(Fetcher))
+ if (!CheckAuth(Fetcher, true))
return false;
/* Unlock the dpkg lock if we are not going to be doing an install
@@ -336,29 +293,10 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask, bool Safety)
I = Fetcher.ItemsBegin();
}
}
-
- if (Fetcher.Run() == pkgAcquire::Failed)
- return false;
-
- // Print out errors
- bool Failed = false;
- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I)
- {
- if ((*I)->Status == pkgAcquire::Item::StatDone &&
- (*I)->Complete == true)
- continue;
-
- if ((*I)->Status == pkgAcquire::Item::StatIdle)
- {
- Transient = true;
- // Failed = true;
- continue;
- }
- fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(),
- (*I)->ErrorText.c_str());
- Failed = true;
- }
+ bool Failed = false;
+ if (AcquireRun(Fetcher, 0, &Failed, &Transient) == false)
+ return false;
/* If we are in no download mode and missing files and there were
'failures' then the user must specify -m. Furthermore, there
@@ -500,15 +438,15 @@ bool DoAutomaticRemove(CacheFile &Cache)
do {
Changed = false;
for (APT::PackageSet::const_iterator Pkg = tooMuch.begin();
- Pkg != tooMuch.end() && Changed == false; ++Pkg)
+ Pkg != tooMuch.end(); ++Pkg)
{
APT::PackageSet too;
too.insert(*Pkg);
for (pkgCache::PrvIterator Prv = Cache[Pkg].CandidateVerIter(Cache).ProvidesList();
Prv.end() == false; ++Prv)
too.insert(Prv.ParentPkg());
- for (APT::PackageSet::const_iterator P = too.begin();
- P != too.end() && Changed == false; ++P) {
+ for (APT::PackageSet::const_iterator P = too.begin(); P != too.end(); ++P)
+ {
for (pkgCache::DepIterator R = P.RevDependsList();
R.end() == false; ++R)
{
@@ -527,7 +465,11 @@ bool DoAutomaticRemove(CacheFile &Cache)
Changed = true;
break;
}
+ if (Changed == true)
+ break;
}
+ if (Changed == true)
+ break;
}
} while (Changed == true);
}
diff --git a/apt-private/private-show.cc b/apt-private/private-show.cc
index 7f4beb7d0..ddc75dbeb 100644
--- a/apt-private/private-show.cc
+++ b/apt-private/private-show.cc
@@ -103,7 +103,7 @@ bool ShowPackage(CommandLine &CmdL) /*{{{*/
Pkg != helper.virtualPkgs.end(); ++Pkg)
{
c1out << "Package: " << Pkg.FullName(true) << std::endl;
- c1out << "State: " << _("not a real pacakge (virtual)") << std::endl;
+ c1out << "State: " << _("not a real package (virtual)") << std::endl;
// FIXME: show providers, see private-cacheset.h
// CacheSetHelperAPTGet::showVirtualPackageErrors()
}
diff --git a/apt-private/private-upgrade.cc b/apt-private/private-upgrade.cc
index 456cbc0fa..e76b5d7fc 100644
--- a/apt-private/private-upgrade.cc
+++ b/apt-private/private-upgrade.cc
@@ -48,7 +48,7 @@ bool DoUpgradeNoNewPackages(CommandLine &CmdL)
// Do the upgrade
return UpgradeHelper(CmdL,
APT::Upgrade::FORBID_REMOVE_PACKAGES|
- APT::Upgrade::FORBID_NEW_INSTALL_PACKAGES);
+ APT::Upgrade::FORBID_INSTALL_NEW_PACKAGES);
}
/*}}}*/
// DoSafeUpgrade - Upgrade all packages with install but not remove /*{{{*/
diff --git a/cmdline/apt-get.cc b/cmdline/apt-get.cc
index 1025005a2..b150cf541 100644
--- a/cmdline/apt-get.cc
+++ b/cmdline/apt-get.cc
@@ -51,6 +51,7 @@
#include <apt-pkg/indexfile.h>
#include <apt-pkg/upgrade.h>
+#include <apt-private/private-download.h>
#include <apt-private/private-install.h>
#include <apt-private/private-upgrade.h>
#include <apt-private/private-output.h>
@@ -62,9 +63,11 @@
#include <apt-private/acqprogress.h>
#include <set>
+#include <fstream>
+#include <sstream>
+
#include <locale.h>
#include <langinfo.h>
-#include <fstream>
#include <termios.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
@@ -76,7 +79,6 @@
#include <errno.h>
#include <regex.h>
#include <sys/wait.h>
-#include <sstream>
#include <apt-private/private-output.h>
#include <apt-private/private-main.h>
@@ -498,7 +500,7 @@ bool DoDownload(CommandLine &CmdL)
CacheFile Cache;
if (Cache.ReadOnlyOpen() == false)
return false;
-
+
APT::CacheSetHelper helper(c0out);
APT::VersionList verset = APT::VersionList::FromCommandLine(Cache,
CmdL.FileList + 1, APT::VersionList::CANDIDATE, helper);
@@ -506,67 +508,57 @@ bool DoDownload(CommandLine &CmdL)
if (verset.empty() == true)
return false;
+ AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet", 0));
pkgAcquire Fetcher;
- AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0));
- if (_config->FindB("APT::Get::Print-URIs") == false)
- Fetcher.Setup(&Stat);
+ if (Fetcher.Setup(&Stat) == false)
+ return false;
pkgRecords Recs(Cache);
pkgSourceList *SrcList = Cache.GetSourceList();
- bool gotAll = true;
- for (APT::VersionList::const_iterator Ver = verset.begin();
- Ver != verset.end();
- ++Ver)
+ // reuse the usual acquire methods for deb files, but don't drop them into
+ // the usual directories - keep everything in the current directory
+ std::vector<std::string> storefile(verset.size());
+ std::string const cwd = SafeGetCWD();
+ _config->Set("Dir::Cache::Archives", cwd);
+ int i = 0;
+ for (APT::VersionList::const_iterator Ver = verset.begin();
+ Ver != verset.end(); ++Ver, ++i)
{
- string descr;
- // get the right version
- pkgCache::PkgIterator Pkg = Ver.ParentPkg();
- pkgRecords::Parser &rec=Recs.Lookup(Ver.FileList());
- pkgCache::VerFileIterator Vf = Ver.FileList();
- if (Vf.end() == true)
- {
- _error->Error("Can not find VerFile for %s in version %s", Pkg.FullName().c_str(), Ver.VerStr());
- gotAll = false;
- continue;
- }
- pkgCache::PkgFileIterator F = Vf.File();
- pkgIndexFile *index;
- if(SrcList->FindIndex(F, index) == false)
- {
- _error->Error(_("Can't find a source to download version '%s' of '%s'"), Ver.VerStr(), Pkg.FullName().c_str());
- gotAll = false;
- continue;
- }
- string uri = index->ArchiveURI(rec.FileName());
- strprintf(descr, _("Downloading %s %s"), Pkg.Name(), Ver.VerStr());
- // get the most appropriate hash
- HashString hash;
- if (rec.SHA512Hash() != "")
- hash = HashString("sha512", rec.SHA512Hash());
- else if (rec.SHA256Hash() != "")
- hash = HashString("sha256", rec.SHA256Hash());
- else if (rec.SHA1Hash() != "")
- hash = HashString("sha1", rec.SHA1Hash());
- else if (rec.MD5Hash() != "")
- hash = HashString("md5", rec.MD5Hash());
- // get the file
- new pkgAcqFile(&Fetcher, uri, hash.toStr(), (*Ver)->Size, descr, Pkg.Name(), ".");
+ pkgAcquire::Item *I = new pkgAcqArchive(&Fetcher, SrcList, &Recs, *Ver, storefile[i]);
+ std::string const filename = cwd + flNotDir(storefile[i]);
+ storefile[i].assign(filename);
+ I->DestFile.assign(filename);
}
- if (gotAll == false)
- return false;
// Just print out the uris and exit if the --print-uris flag was used
if (_config->FindB("APT::Get::Print-URIs") == true)
{
pkgAcquire::UriIterator I = Fetcher.UriBegin();
for (; I != Fetcher.UriEnd(); ++I)
- cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
+ cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
I->Owner->FileSize << ' ' << I->Owner->HashSum() << endl;
return true;
}
- return (Fetcher.Run() == pkgAcquire::Continue);
+ if (_error->PendingError() == true || CheckAuth(Fetcher, false) == false)
+ return false;
+
+ bool Failed = false;
+ if (AcquireRun(Fetcher, 0, &Failed, NULL) == false)
+ return false;
+
+ // copy files in local sources to the current directory
+ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I)
+ if ((*I)->Local == true && (*I)->Status == pkgAcquire::Item::StatDone)
+ {
+ std::string const filename = cwd + flNotDir((*I)->DestFile);
+ std::ifstream src((*I)->DestFile.c_str(), std::ios::binary);
+ std::ofstream dst(filename.c_str(), std::ios::binary);
+ dst << src.rdbuf();
+ }
+
+ return Failed == false;
}
/*}}}*/
// DoCheck - Perform the check operation /*{{{*/
@@ -786,27 +778,10 @@ bool DoSource(CommandLine &CmdL)
delete[] Dsc;
return true;
}
-
- // Run it
- if (Fetcher.Run() == pkgAcquire::Failed)
- {
- delete[] Dsc;
- return false;
- }
- // Print error messages
+ // Run it
bool Failed = false;
- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I)
- {
- if ((*I)->Status == pkgAcquire::Item::StatDone &&
- (*I)->Complete == true)
- continue;
-
- fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(),
- (*I)->ErrorText.c_str());
- Failed = true;
- }
- if (Failed == true)
+ if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true)
{
delete[] Dsc;
return _error->Error(_("Failed to fetch some archives."));
diff --git a/debian/changelog b/debian/changelog
index 40160c303..6e31784f2 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,22 @@
+apt (0.9.11.5) UNRELEASED; urgency=low
+
+ * Fix typo in apt-private/private-show.cc. Thanks to Benjamin
+ Keresa. Closes: #724073
+
+ -- Christian Perrier <bubulle@debian.org> Mon, 23 Sep 2013 07:05:34 +0200
+
+apt (0.9.11.4) unstable; urgency=low
+
+ [ Oskari Saarenmaa ]
+ * don't truncate 100 char long paths in tar extraction.
+ Thanks to Mika Eloranta for the testcase! (Closes: #689582)
+
+ [ David Kalnischkies ]
+ * do not trust FileFd::Eof() in pkgTagFile::Fill()
+ Thanks to Cyril Brulebois (Closes: 723705)
+
+ -- Michael Vogt <mvo@debian.org> Fri, 20 Sep 2013 16:12:07 +0200
+
apt (0.9.11.3) unstable; urgency=low
[ Michael Vogt ]
diff --git a/methods/http.cc b/methods/http.cc
index 278ddb290..71a02e53a 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -58,15 +58,6 @@
/*}}}*/
using namespace std;
-string HttpMethod::FailFile;
-int HttpMethod::FailFd = -1;
-time_t HttpMethod::FailTime = 0;
-unsigned long PipelineDepth = 0;
-unsigned long TimeOut = 120;
-bool AllowRedirect = false;
-bool Debug = false;
-URI Proxy;
-
unsigned long long CircleBuf::BwReadLimit=0;
unsigned long long CircleBuf::BwTickReadData=0;
struct timeval CircleBuf::BwReadTick={0,0};
@@ -296,20 +287,17 @@ CircleBuf::~CircleBuf()
delete Hash;
}
-// ServerState::ServerState - Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-ServerState::ServerState(URI Srv,HttpMethod *Owner) : Owner(Owner),
- In(64*1024), Out(4*1024),
- ServerName(Srv)
+// HttpServerState::HttpServerState - Constructor /*{{{*/
+HttpServerState::HttpServerState(URI Srv,HttpMethod *Owner) : ServerState(Srv, Owner), In(64*1024), Out(4*1024)
{
+ TimeOut = _config->FindI("Acquire::http::Timeout",TimeOut);
Reset();
}
/*}}}*/
-// ServerState::Open - Open a connection to the server /*{{{*/
+// HttpServerState::Open - Open a connection to the server /*{{{*/
// ---------------------------------------------------------------------
/* This opens a connection to the server. */
-bool ServerState::Open()
+bool HttpServerState::Open()
{
// Use the already open connection if possible.
if (ServerFd != -1)
@@ -373,72 +361,18 @@ bool ServerState::Open()
return true;
}
/*}}}*/
-// ServerState::Close - Close a connection to the server /*{{{*/
+// HttpServerState::Close - Close a connection to the server /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool ServerState::Close()
+bool HttpServerState::Close()
{
close(ServerFd);
ServerFd = -1;
return true;
}
/*}}}*/
-// ServerState::RunHeaders - Get the headers before the data /*{{{*/
-// ---------------------------------------------------------------------
-/* Returns 0 if things are OK, 1 if an IO error occurred and 2 if a header
- parse error occurred */
-ServerState::RunHeadersResult ServerState::RunHeaders()
-{
- State = Header;
-
- Owner->Status(_("Waiting for headers"));
-
- Major = 0;
- Minor = 0;
- Result = 0;
- Size = 0;
- StartPos = 0;
- Encoding = Closes;
- HaveContent = false;
- time(&Date);
-
- do
- {
- string Data;
- if (In.WriteTillEl(Data) == false)
- continue;
-
- if (Debug == true)
- clog << Data;
-
- for (string::const_iterator I = Data.begin(); I < Data.end(); ++I)
- {
- string::const_iterator J = I;
- for (; J != Data.end() && *J != '\n' && *J != '\r'; ++J);
- if (HeaderLine(string(I,J)) == false)
- return RUN_HEADERS_PARSE_ERROR;
- I = J;
- }
-
- // 100 Continue is a Nop...
- if (Result == 100)
- continue;
-
- // Tidy up the connection persistance state.
- if (Encoding == Closes && HaveContent == true)
- Persistent = false;
-
- return RUN_HEADERS_OK;
- }
- while (Owner->Go(false,this) == true);
-
- return RUN_HEADERS_IO_ERROR;
-}
- /*}}}*/
-// ServerState::RunData - Transfer the data from the socket /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool ServerState::RunData()
+// HttpServerState::RunData - Transfer the data from the socket /*{{{*/
+bool HttpServerState::RunData(FileFd * const File)
{
State = Data;
@@ -456,7 +390,7 @@ bool ServerState::RunData()
if (In.WriteTillEl(Data,true) == true)
break;
}
- while ((Last = Owner->Go(false,this)) == true);
+ while ((Last = Go(false, File)) == true);
if (Last == false)
return false;
@@ -474,7 +408,7 @@ bool ServerState::RunData()
if (In.WriteTillEl(Data,true) == true && Data.length() <= 2)
break;
}
- while ((Last = Owner->Go(false,this)) == true);
+ while ((Last = Go(false, File)) == true);
if (Last == false)
return false;
return !_error->PendingError();
@@ -482,7 +416,7 @@ bool ServerState::RunData()
// Transfer the block
In.Limit(Len);
- while (Owner->Go(true,this) == true)
+ while (Go(true, File) == true)
if (In.IsLimit() == true)
break;
@@ -498,7 +432,7 @@ bool ServerState::RunData()
if (In.WriteTillEl(Data,true) == true)
break;
}
- while ((Last = Owner->Go(false,this)) == true);
+ while ((Last = Go(false, File)) == true);
if (Last == false)
return false;
}
@@ -521,139 +455,217 @@ bool ServerState::RunData()
In.Limit(-1);
return !_error->PendingError();
}
- while (Owner->Go(true,this) == true);
+ while (Go(true, File) == true);
}
- return Owner->Flush(this) && !_error->PendingError();
+ return Owner->Flush() && !_error->PendingError();
}
/*}}}*/
-// ServerState::HeaderLine - Process a header line /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool ServerState::HeaderLine(string Line)
+bool HttpServerState::ReadHeaderLines(std::string &Data) /*{{{*/
{
- if (Line.empty() == true)
- return true;
+ return In.WriteTillEl(Data);
+}
+ /*}}}*/
+bool HttpServerState::LoadNextResponse(bool const ToFile, FileFd * const File)/*{{{*/
+{
+ return Go(ToFile, File);
+}
+ /*}}}*/
+bool HttpServerState::WriteResponse(const std::string &Data) /*{{{*/
+{
+ return Out.Read(Data);
+}
+ /*}}}*/
+bool HttpServerState::IsOpen() /*{{{*/
+{
+ return (ServerFd != -1);
+}
+ /*}}}*/
+bool HttpServerState::InitHashes(FileFd &File) /*{{{*/
+{
+ delete In.Hash;
+ In.Hash = new Hashes;
- string::size_type Pos = Line.find(' ');
- if (Pos == string::npos || Pos+1 > Line.length())
+ // Set the expected size and read file for the hashes
+ if (StartPos >= 0)
{
- // Blah, some servers use "connection:closes", evil.
- Pos = Line.find(':');
- if (Pos == string::npos || Pos + 2 > Line.length())
- return _error->Error(_("Bad header line"));
- Pos++;
+ File.Truncate(StartPos);
+
+ return In.Hash->AddFD(File, StartPos);
}
+ return true;
+}
+ /*}}}*/
+Hashes * HttpServerState::GetHashes() /*{{{*/
+{
+ return In.Hash;
+}
+ /*}}}*/
+// HttpServerState::Die - The server has closed the connection. /*{{{*/
+bool HttpServerState::Die(FileFd &File)
+{
+ unsigned int LErrno = errno;
- // Parse off any trailing spaces between the : and the next word.
- string::size_type Pos2 = Pos;
- while (Pos2 < Line.length() && isspace(Line[Pos2]) != 0)
- Pos2++;
-
- string Tag = string(Line,0,Pos);
- string Val = string(Line,Pos2);
-
- if (stringcasecmp(Tag.c_str(),Tag.c_str()+4,"HTTP") == 0)
+ // Dump the buffer to the file
+ if (State == ServerState::Data)
{
- // Evil servers return no version
- if (Line[4] == '/')
- {
- int const elements = sscanf(Line.c_str(),"HTTP/%3u.%3u %3u%359[^\n]",&Major,&Minor,&Result,Code);
- if (elements == 3)
- {
- Code[0] = '\0';
- if (Debug == true)
- clog << "HTTP server doesn't give Reason-Phrase for " << Result << std::endl;
- }
- else if (elements != 4)
- return _error->Error(_("The HTTP server sent an invalid reply header"));
- }
- else
+ // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking
+ // can't be set
+ if (File.Name() != "/dev/null")
+ SetNonBlock(File.Fd(),false);
+ while (In.WriteSpace() == true)
{
- Major = 0;
- Minor = 9;
- if (sscanf(Line.c_str(),"HTTP %3u%359[^\n]",&Result,Code) != 2)
- return _error->Error(_("The HTTP server sent an invalid reply header"));
- }
+ if (In.Write(File.Fd()) == false)
+ return _error->Errno("write",_("Error writing to the file"));
- /* Check the HTTP response header to get the default persistance
- state. */
- if (Major < 1)
- Persistent = false;
- else
- {
- if (Major == 1 && Minor == 0)
- Persistent = false;
- else
- Persistent = true;
+ // Done
+ if (In.IsLimit() == true)
+ return true;
}
+ }
- return true;
- }
-
- if (stringcasecmp(Tag,"Content-Length:") == 0)
+ // See if this is because the server finished the data stream
+ if (In.IsLimit() == false && State != HttpServerState::Header &&
+ Encoding != HttpServerState::Closes)
{
- if (Encoding == Closes)
- Encoding = Stream;
- HaveContent = true;
-
- // The length is already set from the Content-Range header
- if (StartPos != 0)
- return true;
+ Close();
+ if (LErrno == 0)
+ return _error->Error(_("Error reading from server. Remote end closed connection"));
+ errno = LErrno;
+ return _error->Errno("read",_("Error reading from server"));
+ }
+ else
+ {
+ In.Limit(-1);
+
+ // Nothing left in the buffer
+ if (In.WriteSpace() == false)
+ return false;
- Size = strtoull(Val.c_str(), NULL, 10);
- if (Size >= std::numeric_limits<unsigned long long>::max())
- return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header"));
+ // We may have got multiple responses back in one packet..
+ Close();
return true;
}
- if (stringcasecmp(Tag,"Content-Type:") == 0)
+ return false;
+}
+ /*}}}*/
+// HttpServerState::Flush - Dump the buffer into the file /*{{{*/
+// ---------------------------------------------------------------------
+/* This takes the current input buffer from the Server FD and writes it
+ into the file */
+bool HttpServerState::Flush(FileFd * const File)
+{
+ if (File != NULL)
{
- HaveContent = true;
- return true;
+ // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking
+ // can't be set
+ if (File->Name() != "/dev/null")
+ SetNonBlock(File->Fd(),false);
+ if (In.WriteSpace() == false)
+ return true;
+
+ while (In.WriteSpace() == true)
+ {
+ if (In.Write(File->Fd()) == false)
+ return _error->Errno("write",_("Error writing to file"));
+ if (In.IsLimit() == true)
+ return true;
+ }
+
+ if (In.IsLimit() == true || Encoding == ServerState::Closes)
+ return true;
}
+ return false;
+}
+ /*}}}*/
+// HttpServerState::Go - Run a single loop /*{{{*/
+// ---------------------------------------------------------------------
+/* This runs the select loop over the server FDs, Output file FDs and
+ stdin. */
+bool HttpServerState::Go(bool ToFile, FileFd * const File)
+{
+ // Server has closed the connection
+ if (ServerFd == -1 && (In.WriteSpace() == false ||
+ ToFile == false))
+ return false;
+
+ fd_set rfds,wfds;
+ FD_ZERO(&rfds);
+ FD_ZERO(&wfds);
+
+ /* Add the server. We only send more requests if the connection will
+ be persisting */
+ if (Out.WriteSpace() == true && ServerFd != -1
+ && Persistent == true)
+ FD_SET(ServerFd,&wfds);
+ if (In.ReadSpace() == true && ServerFd != -1)
+ FD_SET(ServerFd,&rfds);
+
+ // Add the file
+ int FileFD = -1;
+ if (File != NULL)
+ FileFD = File->Fd();
- if (stringcasecmp(Tag,"Content-Range:") == 0)
+ if (In.WriteSpace() == true && ToFile == true && FileFD != -1)
+ FD_SET(FileFD,&wfds);
+
+ // Add stdin
+ if (_config->FindB("Acquire::http::DependOnSTDIN", true) == true)
+ FD_SET(STDIN_FILENO,&rfds);
+
+ // Figure out the max fd
+ int MaxFd = FileFD;
+ if (MaxFd < ServerFd)
+ MaxFd = ServerFd;
+
+ // Select
+ struct timeval tv;
+ tv.tv_sec = TimeOut;
+ tv.tv_usec = 0;
+ int Res = 0;
+ if ((Res = select(MaxFd+1,&rfds,&wfds,0,&tv)) < 0)
{
- HaveContent = true;
-
- if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2)
- return _error->Error(_("The HTTP server sent an invalid Content-Range header"));
- if ((unsigned long long)StartPos > Size)
- return _error->Error(_("This HTTP server has broken range support"));
- return true;
+ if (errno == EINTR)
+ return true;
+ return _error->Errno("select",_("Select failed"));
}
- if (stringcasecmp(Tag,"Transfer-Encoding:") == 0)
+ if (Res == 0)
{
- HaveContent = true;
- if (stringcasecmp(Val,"chunked") == 0)
- Encoding = Chunked;
- return true;
+ _error->Error(_("Connection timed out"));
+ return Die(*File);
}
-
- if (stringcasecmp(Tag,"Connection:") == 0)
+
+ // Handle server IO
+ if (ServerFd != -1 && FD_ISSET(ServerFd,&rfds))
{
- if (stringcasecmp(Val,"close") == 0)
- Persistent = false;
- if (stringcasecmp(Val,"keep-alive") == 0)
- Persistent = true;
- return true;
+ errno = 0;
+ if (In.Read(ServerFd) == false)
+ return Die(*File);
}
-
- if (stringcasecmp(Tag,"Last-Modified:") == 0)
+
+ if (ServerFd != -1 && FD_ISSET(ServerFd,&wfds))
{
- if (RFC1123StrToTime(Val.c_str(), Date) == false)
- return _error->Error(_("Unknown date format"));
- return true;
+ errno = 0;
+ if (Out.Write(ServerFd) == false)
+ return Die(*File);
}
- if (stringcasecmp(Tag,"Location:") == 0)
+ // Send data to the file
+ if (FileFD != -1 && FD_ISSET(FileFD,&wfds))
{
- Location = Val;
- return true;
+ if (In.Write(FileFD) == false)
+ return _error->Errno("write",_("Error writing to output file"));
}
+ // Handle commands from APT
+ if (FD_ISSET(STDIN_FILENO,&rfds))
+ {
+ if (Owner->Run(true) != -1)
+ exit(100);
+ }
+
return true;
}
/*}}}*/
@@ -661,7 +673,7 @@ bool ServerState::HeaderLine(string Line)
// HttpMethod::SendReq - Send the HTTP request /*{{{*/
// ---------------------------------------------------------------------
/* This places the http request in the outbound buffer */
-void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
+void HttpMethod::SendReq(FetchItem *Itm)
{
URI Uri = Itm->Uri;
@@ -687,7 +699,7 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
but while its a must for all servers to accept absolute URIs,
it is assumed clients will sent an absolute path for non-proxies */
std::string requesturi;
- if (Proxy.empty() == true || Proxy.Host.empty())
+ if (Server->Proxy.empty() == true || Server->Proxy.Host.empty())
requesturi = Uri.Path;
else
requesturi = Itm->Uri;
@@ -742,7 +754,7 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
{
// In this case we send an if-range query with a range header
- sprintf(Buf,"Range: bytes=%lli-\r\nIf-Range: %s\r\n",(long long)SBuf.st_size - 1,
+ sprintf(Buf,"Range: bytes=%lli-\r\nIf-Range: %s\r\n",(long long)SBuf.st_size,
TimeRFC1123(SBuf.st_mtime).c_str());
Req += Buf;
}
@@ -755,9 +767,9 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
}
}
- if (Proxy.User.empty() == false || Proxy.Password.empty() == false)
+ if (Server->Proxy.User.empty() == false || Server->Proxy.Password.empty() == false)
Req += string("Proxy-Authorization: Basic ") +
- Base64Encode(Proxy.User + ":" + Proxy.Password) + "\r\n";
+ Base64Encode(Server->Proxy.User + ":" + Server->Proxy.Password) + "\r\n";
maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc"));
if (Uri.User.empty() == false || Uri.Password.empty() == false)
@@ -771,340 +783,18 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
if (Debug == true)
cerr << Req << endl;
- Out.Read(Req);
+ Server->WriteResponse(Req);
}
/*}}}*/
-// HttpMethod::Go - Run a single loop /*{{{*/
-// ---------------------------------------------------------------------
-/* This runs the select loop over the server FDs, Output file FDs and
- stdin. */
-bool HttpMethod::Go(bool ToFile,ServerState *Srv)
-{
- // Server has closed the connection
- if (Srv->ServerFd == -1 && (Srv->In.WriteSpace() == false ||
- ToFile == false))
- return false;
-
- fd_set rfds,wfds;
- FD_ZERO(&rfds);
- FD_ZERO(&wfds);
-
- /* Add the server. We only send more requests if the connection will
- be persisting */
- if (Srv->Out.WriteSpace() == true && Srv->ServerFd != -1
- && Srv->Persistent == true)
- FD_SET(Srv->ServerFd,&wfds);
- if (Srv->In.ReadSpace() == true && Srv->ServerFd != -1)
- FD_SET(Srv->ServerFd,&rfds);
-
- // Add the file
- int FileFD = -1;
- if (File != 0)
- FileFD = File->Fd();
-
- if (Srv->In.WriteSpace() == true && ToFile == true && FileFD != -1)
- FD_SET(FileFD,&wfds);
-
- // Add stdin
- if (_config->FindB("Acquire::http::DependOnSTDIN", true) == true)
- FD_SET(STDIN_FILENO,&rfds);
-
- // Figure out the max fd
- int MaxFd = FileFD;
- if (MaxFd < Srv->ServerFd)
- MaxFd = Srv->ServerFd;
-
- // Select
- struct timeval tv;
- tv.tv_sec = TimeOut;
- tv.tv_usec = 0;
- int Res = 0;
- if ((Res = select(MaxFd+1,&rfds,&wfds,0,&tv)) < 0)
- {
- if (errno == EINTR)
- return true;
- return _error->Errno("select",_("Select failed"));
- }
-
- if (Res == 0)
- {
- _error->Error(_("Connection timed out"));
- return ServerDie(Srv);
- }
-
- // Handle server IO
- if (Srv->ServerFd != -1 && FD_ISSET(Srv->ServerFd,&rfds))
- {
- errno = 0;
- if (Srv->In.Read(Srv->ServerFd) == false)
- return ServerDie(Srv);
- }
-
- if (Srv->ServerFd != -1 && FD_ISSET(Srv->ServerFd,&wfds))
- {
- errno = 0;
- if (Srv->Out.Write(Srv->ServerFd) == false)
- return ServerDie(Srv);
- }
-
- // Send data to the file
- if (FileFD != -1 && FD_ISSET(FileFD,&wfds))
- {
- if (Srv->In.Write(FileFD) == false)
- return _error->Errno("write",_("Error writing to output file"));
- }
-
- // Handle commands from APT
- if (FD_ISSET(STDIN_FILENO,&rfds))
- {
- if (Run(true) != -1)
- exit(100);
- }
-
- return true;
-}
- /*}}}*/
-// HttpMethod::Flush - Dump the buffer into the file /*{{{*/
-// ---------------------------------------------------------------------
-/* This takes the current input buffer from the Server FD and writes it
- into the file */
-bool HttpMethod::Flush(ServerState *Srv)
-{
- if (File != 0)
- {
- // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking
- // can't be set
- if (File->Name() != "/dev/null")
- SetNonBlock(File->Fd(),false);
- if (Srv->In.WriteSpace() == false)
- return true;
-
- while (Srv->In.WriteSpace() == true)
- {
- if (Srv->In.Write(File->Fd()) == false)
- return _error->Errno("write",_("Error writing to file"));
- if (Srv->In.IsLimit() == true)
- return true;
- }
-
- if (Srv->In.IsLimit() == true || Srv->Encoding == ServerState::Closes)
- return true;
- }
- return false;
-}
- /*}}}*/
-// HttpMethod::ServerDie - The server has closed the connection. /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool HttpMethod::ServerDie(ServerState *Srv)
-{
- unsigned int LErrno = errno;
-
- // Dump the buffer to the file
- if (Srv->State == ServerState::Data)
- {
- // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking
- // can't be set
- if (File->Name() != "/dev/null")
- SetNonBlock(File->Fd(),false);
- while (Srv->In.WriteSpace() == true)
- {
- if (Srv->In.Write(File->Fd()) == false)
- return _error->Errno("write",_("Error writing to the file"));
-
- // Done
- if (Srv->In.IsLimit() == true)
- return true;
- }
- }
-
- // See if this is because the server finished the data stream
- if (Srv->In.IsLimit() == false && Srv->State != ServerState::Header &&
- Srv->Encoding != ServerState::Closes)
- {
- Srv->Close();
- if (LErrno == 0)
- return _error->Error(_("Error reading from server. Remote end closed connection"));
- errno = LErrno;
- return _error->Errno("read",_("Error reading from server"));
- }
- else
- {
- Srv->In.Limit(-1);
-
- // Nothing left in the buffer
- if (Srv->In.WriteSpace() == false)
- return false;
-
- // We may have got multiple responses back in one packet..
- Srv->Close();
- return true;
- }
-
- return false;
-}
- /*}}}*/
-// HttpMethod::DealWithHeaders - Handle the retrieved header data /*{{{*/
-// ---------------------------------------------------------------------
-/* We look at the header data we got back from the server and decide what
- to do. Returns DealWithHeadersResult (see http.h for details).
- */
-HttpMethod::DealWithHeadersResult
-HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv)
-{
- // Not Modified
- if (Srv->Result == 304)
- {
- unlink(Queue->DestFile.c_str());
- Res.IMSHit = true;
- Res.LastModified = Queue->LastModified;
- return IMS_HIT;
- }
-
- /* Redirect
- *
- * Note that it is only OK for us to treat all redirection the same
- * because we *always* use GET, not other HTTP methods. There are
- * three redirection codes for which it is not appropriate that we
- * redirect. Pass on those codes so the error handling kicks in.
- */
- if (AllowRedirect
- && (Srv->Result > 300 && Srv->Result < 400)
- && (Srv->Result != 300 // Multiple Choices
- && Srv->Result != 304 // Not Modified
- && Srv->Result != 306)) // (Not part of HTTP/1.1, reserved)
- {
- if (Srv->Location.empty() == true);
- else if (Srv->Location[0] == '/' && Queue->Uri.empty() == false)
- {
- URI Uri = Queue->Uri;
- if (Uri.Host.empty() == false)
- NextURI = URI::SiteOnly(Uri);
- else
- NextURI.clear();
- NextURI.append(DeQuoteString(Srv->Location));
- return TRY_AGAIN_OR_REDIRECT;
- }
- else
- {
- NextURI = DeQuoteString(Srv->Location);
- URI tmpURI = NextURI;
- // Do not allow a redirection to switch protocol
- if (tmpURI.Access == "http")
- return TRY_AGAIN_OR_REDIRECT;
- }
- /* else pass through for error message */
- }
-
- /* We have a reply we dont handle. This should indicate a perm server
- failure */
- if (Srv->Result < 200 || Srv->Result >= 300)
- {
- char err[255];
- snprintf(err,sizeof(err)-1,"HttpError%i",Srv->Result);
- SetFailReason(err);
- _error->Error("%u %s",Srv->Result,Srv->Code);
- if (Srv->HaveContent == true)
- return ERROR_WITH_CONTENT_PAGE;
- return ERROR_UNRECOVERABLE;
- }
-
- // This is some sort of 2xx 'data follows' reply
- Res.LastModified = Srv->Date;
- Res.Size = Srv->Size;
-
- // Open the file
- delete File;
- File = new FileFd(Queue->DestFile,FileFd::WriteAny);
- if (_error->PendingError() == true)
- return ERROR_NOT_FROM_SERVER;
-
- FailFile = Queue->DestFile;
- FailFile.c_str(); // Make sure we dont do a malloc in the signal handler
- FailFd = File->Fd();
- FailTime = Srv->Date;
-
- delete Srv->In.Hash;
- Srv->In.Hash = new Hashes;
-
- // Set the expected size and read file for the hashes
- if (Srv->StartPos >= 0)
- {
- Res.ResumePoint = Srv->StartPos;
- File->Truncate(Srv->StartPos);
-
- if (Srv->In.Hash->AddFD(*File,Srv->StartPos) == false)
- {
- _error->Errno("read",_("Problem hashing file"));
- return ERROR_NOT_FROM_SERVER;
- }
- }
-
- SetNonBlock(File->Fd(),true);
- return FILE_IS_OPEN;
-}
- /*}}}*/
-// HttpMethod::SigTerm - Handle a fatal signal /*{{{*/
-// ---------------------------------------------------------------------
-/* This closes and timestamps the open file. This is neccessary to get
- resume behavoir on user abort */
-void HttpMethod::SigTerm(int)
-{
- if (FailFd == -1)
- _exit(100);
- close(FailFd);
-
- // Timestamp
- struct utimbuf UBuf;
- UBuf.actime = FailTime;
- UBuf.modtime = FailTime;
- utime(FailFile.c_str(),&UBuf);
-
- _exit(100);
-}
- /*}}}*/
-// HttpMethod::Fetch - Fetch an item /*{{{*/
-// ---------------------------------------------------------------------
-/* This adds an item to the pipeline. We keep the pipeline at a fixed
- depth. */
-bool HttpMethod::Fetch(FetchItem *)
-{
- if (Server == 0)
- return true;
-
- // Queue the requests
- int Depth = -1;
- for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth;
- I = I->Next, Depth++)
- {
- // If pipelining is disabled, we only queue 1 request
- if (Server->Pipeline == false && Depth >= 0)
- break;
-
- // Make sure we stick with the same server
- if (Server->Comp(I->Uri) == false)
- break;
- if (QueueBack == I)
- {
- QueueBack = I->Next;
- SendReq(I,Server->Out);
- continue;
- }
- }
-
- return true;
-};
- /*}}}*/
// HttpMethod::Configuration - Handle a configuration message /*{{{*/
// ---------------------------------------------------------------------
/* We stash the desired pipeline depth */
bool HttpMethod::Configuration(string Message)
{
- if (pkgAcqMethod::Configuration(Message) == false)
+ if (ServerMethod::Configuration(Message) == false)
return false;
-
+
AllowRedirect = _config->FindB("Acquire::http::AllowRedirect",true);
- TimeOut = _config->FindI("Acquire::http::Timeout",TimeOut);
PipelineDepth = _config->FindI("Acquire::http::Pipeline-Depth",
PipelineDepth);
Debug = _config->FindB("Debug::Acquire::http",false);
@@ -1116,258 +806,6 @@ bool HttpMethod::Configuration(string Message)
return true;
}
/*}}}*/
-// HttpMethod::Loop - Main loop /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-int HttpMethod::Loop()
-{
- typedef vector<string> StringVector;
- typedef vector<string>::iterator StringVectorIterator;
- map<string, StringVector> Redirected;
-
- signal(SIGTERM,SigTerm);
- signal(SIGINT,SigTerm);
-
- Server = 0;
-
- int FailCounter = 0;
- while (1)
- {
- // We have no commands, wait for some to arrive
- if (Queue == 0)
- {
- if (WaitFd(STDIN_FILENO) == false)
- return 0;
- }
-
- /* Run messages, we can accept 0 (no message) if we didn't
- do a WaitFd above.. Otherwise the FD is closed. */
- int Result = Run(true);
- if (Result != -1 && (Result != 0 || Queue == 0))
- {
- if(FailReason.empty() == false ||
- _config->FindB("Acquire::http::DependOnSTDIN", true) == true)
- return 100;
- else
- return 0;
- }
-
- if (Queue == 0)
- continue;
-
- // Connect to the server
- if (Server == 0 || Server->Comp(Queue->Uri) == false)
- {
- delete Server;
- Server = new ServerState(Queue->Uri,this);
- }
- /* If the server has explicitly said this is the last connection
- then we pre-emptively shut down the pipeline and tear down
- the connection. This will speed up HTTP/1.0 servers a tad
- since we don't have to wait for the close sequence to
- complete */
- if (Server->Persistent == false)
- Server->Close();
-
- // Reset the pipeline
- if (Server->ServerFd == -1)
- QueueBack = Queue;
-
- // Connnect to the host
- if (Server->Open() == false)
- {
- Fail(true);
- delete Server;
- Server = 0;
- continue;
- }
-
- // Fill the pipeline.
- Fetch(0);
-
- // Fetch the next URL header data from the server.
- switch (Server->RunHeaders())
- {
- case ServerState::RUN_HEADERS_OK:
- break;
-
- // The header data is bad
- case ServerState::RUN_HEADERS_PARSE_ERROR:
- {
- _error->Error(_("Bad header data"));
- Fail(true);
- RotateDNS();
- continue;
- }
-
- // The server closed a connection during the header get..
- default:
- case ServerState::RUN_HEADERS_IO_ERROR:
- {
- FailCounter++;
- _error->Discard();
- Server->Close();
- Server->Pipeline = false;
-
- if (FailCounter >= 2)
- {
- Fail(_("Connection failed"),true);
- FailCounter = 0;
- }
-
- RotateDNS();
- continue;
- }
- };
-
- // Decide what to do.
- FetchResult Res;
- Res.Filename = Queue->DestFile;
- switch (DealWithHeaders(Res,Server))
- {
- // Ok, the file is Open
- case FILE_IS_OPEN:
- {
- URIStart(Res);
-
- // Run the data
- bool Result = Server->RunData();
-
- /* If the server is sending back sizeless responses then fill in
- the size now */
- if (Res.Size == 0)
- Res.Size = File->Size();
-
- // Close the file, destroy the FD object and timestamp it
- FailFd = -1;
- delete File;
- File = 0;
-
- // Timestamp
- struct utimbuf UBuf;
- time(&UBuf.actime);
- UBuf.actime = Server->Date;
- UBuf.modtime = Server->Date;
- utime(Queue->DestFile.c_str(),&UBuf);
-
- // Send status to APT
- if (Result == true)
- {
- Res.TakeHashes(*Server->In.Hash);
- URIDone(Res);
- }
- else
- {
- if (Server->ServerFd == -1)
- {
- FailCounter++;
- _error->Discard();
- Server->Close();
-
- if (FailCounter >= 2)
- {
- Fail(_("Connection failed"),true);
- FailCounter = 0;
- }
-
- QueueBack = Queue;
- }
- else
- Fail(true);
- }
- break;
- }
-
- // IMS hit
- case IMS_HIT:
- {
- URIDone(Res);
- break;
- }
-
- // Hard server error, not found or something
- case ERROR_UNRECOVERABLE:
- {
- Fail();
- break;
- }
-
- // Hard internal error, kill the connection and fail
- case ERROR_NOT_FROM_SERVER:
- {
- delete File;
- File = 0;
-
- Fail();
- RotateDNS();
- Server->Close();
- break;
- }
-
- // We need to flush the data, the header is like a 404 w/ error text
- case ERROR_WITH_CONTENT_PAGE:
- {
- Fail();
-
- // Send to content to dev/null
- File = new FileFd("/dev/null",FileFd::WriteExists);
- Server->RunData();
- delete File;
- File = 0;
- break;
- }
-
- // Try again with a new URL
- case TRY_AGAIN_OR_REDIRECT:
- {
- // Clear rest of response if there is content
- if (Server->HaveContent)
- {
- File = new FileFd("/dev/null",FileFd::WriteExists);
- Server->RunData();
- delete File;
- File = 0;
- }
-
- /* Detect redirect loops. No more redirects are allowed
- after the same URI is seen twice in a queue item. */
- StringVector &R = Redirected[Queue->DestFile];
- bool StopRedirects = false;
- if (R.empty() == true)
- R.push_back(Queue->Uri);
- else if (R[0] == "STOP" || R.size() > 10)
- StopRedirects = true;
- else
- {
- for (StringVectorIterator I = R.begin(); I != R.end(); ++I)
- if (Queue->Uri == *I)
- {
- R[0] = "STOP";
- break;
- }
-
- R.push_back(Queue->Uri);
- }
-
- if (StopRedirects == false)
- Redirect(NextURI);
- else
- Fail();
-
- break;
- }
-
- default:
- Fail(_("Internal error"));
- break;
- }
-
- FailCounter = 0;
- }
-
- return 0;
-}
- /*}}}*/
// HttpMethod::AutoDetectProxy - auto detect proxy /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -1420,5 +858,13 @@ bool HttpMethod::AutoDetectProxy()
return true;
}
/*}}}*/
-
-
+ServerState * HttpMethod::CreateServerState(URI uri) /*{{{*/
+{
+ return new HttpServerState(uri, this);
+}
+ /*}}}*/
+void HttpMethod::RotateDNS() /*{{{*/
+{
+ ::RotateDNS();
+}
+ /*}}}*/
diff --git a/methods/http.h b/methods/http.h
index 7446119cd..02c04e8ae 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -15,6 +15,8 @@
#include <string>
+#include "server.h"
+
using std::cout;
using std::endl;
@@ -31,7 +33,7 @@ class CircleBuf
unsigned long long StrPos;
unsigned long long MaxGet;
struct timeval Start;
-
+
static unsigned long long BwReadLimit;
static unsigned long long BwTickReadData;
static struct timeval BwReadTick;
@@ -54,21 +56,20 @@ class CircleBuf
return Sz;
}
void FillOut();
-
+
public:
-
Hashes *Hash;
-
+
// Read data in
bool Read(int Fd);
bool Read(std::string Data);
-
+
// Write data out
bool Write(int Fd);
bool WriteTillEl(std::string &Data,bool Single = false);
-
+
// Control the write limit
- void Limit(long long Max) {if (Max == -1) MaxGet = 0-1; else MaxGet = OutP + Max;}
+ void Limit(long long Max) {if (Max == -1) MaxGet = 0-1; else MaxGet = OutP + Max;}
bool IsLimit() const {return MaxGet == OutP;};
void Print() const {cout << MaxGet << ',' << OutP << endl;};
@@ -84,114 +85,56 @@ class CircleBuf
~CircleBuf();
};
-struct ServerState
+struct HttpServerState: public ServerState
{
- // This is the last parsed Header Line
- unsigned int Major;
- unsigned int Minor;
- unsigned int Result;
- char Code[360];
-
- // These are some statistics from the last parsed header lines
- unsigned long long Size;
- signed long long StartPos;
- time_t Date;
- bool HaveContent;
- enum {Chunked,Stream,Closes} Encoding;
- enum {Header, Data} State;
- bool Persistent;
- std::string Location;
-
- // This is a Persistent attribute of the server itself.
- bool Pipeline;
-
- HttpMethod *Owner;
-
// This is the connection itself. Output is data FROM the server
CircleBuf In;
CircleBuf Out;
int ServerFd;
- URI ServerName;
-
- bool HeaderLine(std::string Line);
- bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
- void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0;
- StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
- State = Header; Persistent = false; ServerFd = -1;
- Pipeline = true;};
-
- /** \brief Result of the header acquire */
- enum RunHeadersResult {
- /** \brief Header ok */
- RUN_HEADERS_OK,
- /** \brief IO error while retrieving */
- RUN_HEADERS_IO_ERROR,
- /** \brief Parse error after retrieving */
- RUN_HEADERS_PARSE_ERROR,
- };
- /** \brief Get the headers before the data */
- RunHeadersResult RunHeaders();
- /** \brief Transfer the data from the socket */
- bool RunData();
-
- bool Open();
- bool Close();
-
- ServerState(URI Srv,HttpMethod *Owner);
- ~ServerState() {Close();};
+
+ protected:
+ virtual bool ReadHeaderLines(std::string &Data);
+ virtual bool LoadNextResponse(bool const ToFile, FileFd * const File);
+ virtual bool WriteResponse(std::string const &Data);
+
+ public:
+ virtual void Reset() { ServerState::Reset(); ServerFd = -1; };
+
+ virtual bool RunData(FileFd * const File);
+
+ virtual bool Open();
+ virtual bool IsOpen();
+ virtual bool Close();
+ virtual bool InitHashes(FileFd &File);
+ virtual Hashes * GetHashes();
+ virtual bool Die(FileFd &File);
+ virtual bool Flush(FileFd * const File);
+ virtual bool Go(bool ToFile, FileFd * const File);
+
+ HttpServerState(URI Srv, HttpMethod *Owner);
+ virtual ~HttpServerState() {Close();};
};
-class HttpMethod : public pkgAcqMethod
+class HttpMethod : public ServerMethod
{
- void SendReq(FetchItem *Itm,CircleBuf &Out);
- bool Go(bool ToFile,ServerState *Srv);
- bool Flush(ServerState *Srv);
- bool ServerDie(ServerState *Srv);
-
- /** \brief Result of the header parsing */
- enum DealWithHeadersResult {
- /** \brief The file is open and ready */
- FILE_IS_OPEN,
- /** \brief We got a IMS hit, the file has not changed */
- IMS_HIT,
- /** \brief The server reported a unrecoverable error */
- ERROR_UNRECOVERABLE,
- /** \brief The server reported a error with a error content page */
- ERROR_WITH_CONTENT_PAGE,
- /** \brief An error on the client side */
- ERROR_NOT_FROM_SERVER,
- /** \brief A redirect or retry request */
- TRY_AGAIN_OR_REDIRECT
- };
- /** \brief Handle the retrieved header data */
- DealWithHeadersResult DealWithHeaders(FetchResult &Res,ServerState *Srv);
+ public:
+ virtual void SendReq(FetchItem *Itm);
/** \brief Try to AutoDetect the proxy */
bool AutoDetectProxy();
virtual bool Configuration(std::string Message);
-
- // In the event of a fatal signal this file will be closed and timestamped.
- static std::string FailFile;
- static int FailFd;
- static time_t FailTime;
- static void SigTerm(int);
+
+ virtual ServerState * CreateServerState(URI uri);
+ virtual void RotateDNS();
protected:
- virtual bool Fetch(FetchItem *);
-
- std::string NextURI;
std::string AutoDetectProxyCmd;
public:
- friend struct ServerState;
-
- FileFd *File;
- ServerState *Server;
-
- int Loop();
-
- HttpMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig)
+ friend struct HttpServerState;
+
+ HttpMethod() : ServerMethod("1.2",Pipeline | SendConfig)
{
File = 0;
Server = 0;
diff --git a/methods/https.cc b/methods/https.cc
index 84ce2d68f..2a562434b 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -36,6 +36,41 @@
/*}}}*/
using namespace std;
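+
+// HttpsMethod::parse_header - libcurl header callback
+// ---------------------------------------------------------------------
+/* Strips trailing whitespace from each received header line and feeds it
+   to ServerState::HeaderLine. The empty line ending the header block is
+   used to settle the resume offset: a granted range request keeps the
+   offset from Content-Range, a 416 whose reported size matches the
+   partial file is treated as an already completed download, and anything
+   else restarts from zero; the partial file is then truncated and
+   positioned accordingly. */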
+size_t
+HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
+{
+ size_t len = size * nmemb;
+ HttpsMethod *me = (HttpsMethod *)userp;
+ std::string line((char*) buffer, len);
+ for (--len; len > 0; --len)
+ if (isspace(line[len]) == 0)
+ {
+ ++len;
+ break;
+ }
+ line.erase(len);
+
+ if (line.empty() == true)
+ {
+ if (me->Server->Result != 416 && me->Server->StartPos != 0)
+ ;
+ else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize())
+ {
+ me->Server->Result = 200;
+ me->Server->StartPos = me->Server->Size;
+ }
+ else
+ me->Server->StartPos = 0;
+
+ me->File->Truncate(me->Server->StartPos);
+ me->File->Seek(me->Server->StartPos);
+ }
+ else if (me->Server->HeaderLine(line) == false)
+ return 0;
+
+ return size*nmemb;
+}
+
size_t
HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
{
@@ -59,6 +94,14 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double dlnow,
return 0;
}
+// HttpsServerState::HttpsServerState - Constructor /*{{{*/
+HttpsServerState::HttpsServerState(URI Srv,HttpsMethod *Owner) : ServerState(Srv, NULL)
+{
+ TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
+ Reset();
+}
+ /*}}}*/
+
void HttpsMethod::SetupProxy() /*{{{*/
{
URI ServerName = Queue->Uri;
@@ -121,7 +164,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
struct stat SBuf;
struct curl_slist *headers=NULL;
char curl_errorstr[CURL_ERROR_SIZE];
- long curl_responsecode;
URI Uri = Itm->Uri;
string remotehost = Uri.Host;
@@ -137,6 +179,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// callbacks
curl_easy_setopt(curl, CURLOPT_URL, static_cast<string>(Uri).c_str());
+ curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header);
+ curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, this);
curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback);
@@ -277,7 +321,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
{
char Buf[1000];
- sprintf(Buf, "Range: bytes=%li-", (long) SBuf.st_size - 1);
+ sprintf(Buf, "Range: bytes=%li-", (long) SBuf.st_size);
headers = curl_slist_append(headers, Buf);
sprintf(Buf, "If-Range: %s", TimeRFC1123(SBuf.st_mtime).c_str());
headers = curl_slist_append(headers, Buf);
@@ -290,18 +334,13 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// go for it - if the file exists, append on it
File = new FileFd(Itm->DestFile, FileFd::WriteAny);
- if (File->Size() > 0)
- File->Seek(File->Size() - 1);
-
+ Server = new HttpsServerState(Itm->Uri, this);
+
// keep apt updated
Res.Filename = Itm->DestFile;
// get it!
CURLcode success = curl_easy_perform(curl);
- curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &curl_responsecode);
-
- long curl_servdate;
- curl_easy_getinfo(curl, CURLINFO_FILETIME, &curl_servdate);
// If the server returns 200 OK but the If-Modified-Since condition is not
// met, CURLINFO_CONDITION_UNMET will be set to 1
@@ -309,57 +348,83 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
curl_easy_getinfo(curl, CURLINFO_CONDITION_UNMET, &curl_condition_unmet);
File->Close();
+ curl_slist_free_all(headers);
// cleanup
- if(success != 0 || (curl_responsecode != 200 && curl_responsecode != 304))
+ if (success != 0)
{
_error->Error("%s", curl_errorstr);
- // unlink, no need keep 401/404 page content in partial/
unlink(File->Name().c_str());
- Fail();
+ return false;
+ }
+
+ // server says file not modified
+ if (Server->Result == 304 || curl_condition_unmet == 1)
+ {
+ unlink(File->Name().c_str());
+ Res.IMSHit = true;
+ Res.LastModified = Itm->LastModified;
+ Res.Size = 0;
+ URIDone(Res);
return true;
}
+ Res.IMSHit = false;
- // Timestamp
- struct utimbuf UBuf;
- if (curl_servdate != -1) {
- UBuf.actime = curl_servdate;
- UBuf.modtime = curl_servdate;
- utime(File->Name().c_str(),&UBuf);
+ if (Server->Result != 200 && // OK
+ Server->Result != 206 && // Partial
+ Server->Result != 416) // invalid Range
+ {
+ char err[255];
+ snprintf(err, sizeof(err) - 1, "HttpError%i", Server->Result);
+ SetFailReason(err);
+ _error->Error("%s", err);
+      // unlink, no need to keep 401/404 page content in partial/
+ unlink(File->Name().c_str());
+ return false;
}
- // check the downloaded result
- struct stat Buf;
- if (stat(File->Name().c_str(),&Buf) == 0)
+ struct stat resultStat;
+ if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
{
- Res.Filename = File->Name();
- Res.LastModified = Buf.st_mtime;
- Res.IMSHit = false;
- if (curl_responsecode == 304 || curl_condition_unmet)
- {
- unlink(File->Name().c_str());
- Res.IMSHit = true;
- Res.LastModified = Itm->LastModified;
- Res.Size = 0;
- URIDone(Res);
- return true;
- }
- Res.Size = Buf.st_size;
+ _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
+ return false;
+ }
+ Res.Size = resultStat.st_size;
+
+ // invalid range-request
+ if (Server->Result == 416)
+ {
+ unlink(File->Name().c_str());
+ Res.Size = 0;
+ delete File;
+ Redirect(Itm->Uri);
+ return true;
+ }
+
+ // Timestamp
+ curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
+ if (Res.LastModified != -1)
+ {
+ struct utimbuf UBuf;
+ UBuf.actime = Res.LastModified;
+ UBuf.modtime = Res.LastModified;
+ utime(File->Name().c_str(),&UBuf);
}
+ else
+ Res.LastModified = resultStat.st_mtime;
// take hashes
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
Hash.AddFD(Fd);
Res.TakeHashes(Hash);
-
+
// keep apt updated
URIDone(Res);
// cleanup
Res.Size = 0;
delete File;
- curl_slist_free_all(headers);
return true;
};
@@ -374,4 +439,3 @@ int main()
return Mth.Run();
}
-
diff --git a/methods/https.h b/methods/https.h
index 293e288e0..8632d6d02 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -14,24 +14,53 @@
#include <iostream>
#include <curl/curl.h>
+#include "server.h"
+
using std::cout;
using std::endl;
class HttpsMethod;
class FileFd;
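+/** \brief Dummy state for the https method: libcurl owns the connection,
+ *  so all socket level operations are stubbed out and only the header
+ *  bookkeeping inherited from ServerState is used. */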
+class HttpsServerState : public ServerState
+{
+ protected:
+ virtual bool ReadHeaderLines(std::string &Data) { return false; }
+ virtual bool LoadNextResponse(bool const ToFile, FileFd * const File) { return false; }
+
+ public:
+ virtual bool WriteResponse(std::string const &Data) { return false; }
+
+ /** \brief Transfer the data from the socket */
+ virtual bool RunData(FileFd * const File) { return false; }
+
+ virtual bool Open() { return false; }
+ virtual bool IsOpen() { return false; }
+ virtual bool Close() { return false; }
+ virtual bool InitHashes(FileFd &File) { return false; }
+ virtual Hashes * GetHashes() { return NULL; }
+ virtual bool Die(FileFd &File) { return false; }
+ virtual bool Flush(FileFd * const File) { return false; }
+ virtual bool Go(bool ToFile, FileFd * const File) { return false; }
+
+ HttpsServerState(URI Srv, HttpsMethod *Owner);
+ virtual ~HttpsServerState() {Close();};
+};
+
class HttpsMethod : public pkgAcqMethod
{
   // minimum speed in bytes/sec that triggers download timeout handling
static const int DL_MIN_SPEED = 10;
virtual bool Fetch(FetchItem *);
+ static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp);
static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp);
static int progress_callback(void *clientp, double dltotal, double dlnow,
double ultotal, double ulnow);
void SetupProxy();
CURL *curl;
FetchResult Res;
+ HttpsServerState *Server;
public:
FileFd *File;
diff --git a/methods/makefile b/methods/makefile
index 294c55d23..6b7781294 100644
--- a/methods/makefile
+++ b/methods/makefile
@@ -48,14 +48,14 @@ include $(PROGRAM_H)
PROGRAM=http
SLIBS = -lapt-pkg $(SOCKETLIBS) $(INTLLIBS)
LIB_MAKES = apt-pkg/makefile
-SOURCE = http.cc http_main.cc rfc2553emu.cc connect.cc
+SOURCE = http.cc http_main.cc rfc2553emu.cc connect.cc server.cc
include $(PROGRAM_H)
# The https method
PROGRAM=https
SLIBS = -lapt-pkg -lcurl $(INTLLIBS)
LIB_MAKES = apt-pkg/makefile
-SOURCE = https.cc
+SOURCE = https.cc server.cc
include $(PROGRAM_H)
# The ftp method
@@ -83,7 +83,7 @@ include $(PROGRAM_H)
PROGRAM=mirror
SLIBS = -lapt-pkg $(SOCKETLIBS)
LIB_MAKES = apt-pkg/makefile
-SOURCE = mirror.cc http.cc rfc2553emu.cc connect.cc
+SOURCE = mirror.cc http.cc rfc2553emu.cc connect.cc server.cc
include $(PROGRAM_H)
# SSH method symlink
diff --git a/methods/server.cc b/methods/server.cc
new file mode 100644
index 000000000..a2128441c
--- /dev/null
+++ b/methods/server.cc
@@ -0,0 +1,665 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ HTTP and HTTPS share a lot of common code and these classes are
+ exactly the dumping ground for this common code
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/acquire-method.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/netrc.h>
+
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <utime.h>
+#include <unistd.h>
+#include <signal.h>
+#include <stdio.h>
+#include <errno.h>
+#include <string.h>
+#include <climits>
+#include <iostream>
+#include <map>
+
+// Internet stuff
+#include <netdb.h>
+
+#include "config.h"
+#include "connect.h"
+#include "rfc2553emu.h"
+#include "http.h"
+
+#include <apti18n.h>
+ /*}}}*/
+using namespace std;
+
+string ServerMethod::FailFile;
+int ServerMethod::FailFd = -1;
+time_t ServerMethod::FailTime = 0;
+
+// ServerState::RunHeaders - Get the headers before the data /*{{{*/
+// ---------------------------------------------------------------------
+/* Returns RUN_HEADERS_OK if things are OK, RUN_HEADERS_IO_ERROR if an IO
+   error occurred and RUN_HEADERS_PARSE_ERROR on a header parse error */
+ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File)
+{
+ State = Header;
+
+ Owner->Status(_("Waiting for headers"));
+
+ Major = 0;
+ Minor = 0;
+ Result = 0;
+ Size = 0;
+ StartPos = 0;
+ Encoding = Closes;
+ HaveContent = false;
+ time(&Date);
+
+ do
+ {
+ string Data;
+ if (ReadHeaderLines(Data) == false)
+ continue;
+
+ if (Owner->Debug == true)
+ clog << Data;
+
+ for (string::const_iterator I = Data.begin(); I < Data.end(); ++I)
+ {
+ string::const_iterator J = I;
+ for (; J != Data.end() && *J != '\n' && *J != '\r'; ++J);
+ if (HeaderLine(string(I,J)) == false)
+ return RUN_HEADERS_PARSE_ERROR;
+ I = J;
+ }
+
+ // 100 Continue is a Nop...
+ if (Result == 100)
+ continue;
+
+      // Tidy up the connection persistence state.
+ if (Encoding == Closes && HaveContent == true)
+ Persistent = false;
+
+ return RUN_HEADERS_OK;
+ }
+ while (LoadNextResponse(false, File) == true);
+
+ return RUN_HEADERS_IO_ERROR;
+}
+ /*}}}*/
+// ServerState::HeaderLine - Process a header line /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ServerState::HeaderLine(string Line)
+{
+ if (Line.empty() == true)
+ return true;
+
+ string::size_type Pos = Line.find(' ');
+ if (Pos == string::npos || Pos+1 > Line.length())
+ {
+ // Blah, some servers use "connection:closes", evil.
+ Pos = Line.find(':');
+ if (Pos == string::npos || Pos + 2 > Line.length())
+ return _error->Error(_("Bad header line"));
+ Pos++;
+ }
+
+ // Parse off any trailing spaces between the : and the next word.
+ string::size_type Pos2 = Pos;
+ while (Pos2 < Line.length() && isspace(Line[Pos2]) != 0)
+ Pos2++;
+
+ string Tag = string(Line,0,Pos);
+ string Val = string(Line,Pos2);
+
+ if (stringcasecmp(Tag.c_str(),Tag.c_str()+4,"HTTP") == 0)
+ {
+ // Evil servers return no version
+ if (Line[4] == '/')
+ {
+ int const elements = sscanf(Line.c_str(),"HTTP/%3u.%3u %3u%359[^\n]",&Major,&Minor,&Result,Code);
+ if (elements == 3)
+ {
+ Code[0] = '\0';
+ if (Owner->Debug == true)
+ clog << "HTTP server doesn't give Reason-Phrase for " << Result << std::endl;
+ }
+ else if (elements != 4)
+ return _error->Error(_("The HTTP server sent an invalid reply header"));
+ }
+ else
+ {
+ Major = 0;
+ Minor = 9;
+ if (sscanf(Line.c_str(),"HTTP %3u%359[^\n]",&Result,Code) != 2)
+ return _error->Error(_("The HTTP server sent an invalid reply header"));
+ }
+
+      /* Check the HTTP response header to get the default persistence
+ state. */
+ if (Major < 1)
+ Persistent = false;
+ else
+ {
+ if (Major == 1 && Minor == 0)
+ Persistent = false;
+ else
+ Persistent = true;
+ }
+
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Content-Length:") == 0)
+ {
+ if (Encoding == Closes)
+ Encoding = Stream;
+ HaveContent = true;
+
+ // The length is already set from the Content-Range header
+ if (StartPos != 0)
+ return true;
+
+ Size = strtoull(Val.c_str(), NULL, 10);
+ if (Size >= std::numeric_limits<unsigned long long>::max())
+ return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header"));
+ else if (Size == 0)
+ HaveContent = false;
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Content-Type:") == 0)
+ {
+ HaveContent = true;
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Content-Range:") == 0)
+ {
+ HaveContent = true;
+
+ // §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416
+ if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1)
+ {
+ StartPos = 1; // ignore Content-Length, it would override Size
+ HaveContent = false;
+ }
+ else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2)
+ return _error->Error(_("The HTTP server sent an invalid Content-Range header"));
+ if ((unsigned long long)StartPos > Size)
+ return _error->Error(_("This HTTP server has broken range support"));
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Transfer-Encoding:") == 0)
+ {
+ HaveContent = true;
+ if (stringcasecmp(Val,"chunked") == 0)
+ Encoding = Chunked;
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Connection:") == 0)
+ {
+ if (stringcasecmp(Val,"close") == 0)
+ Persistent = false;
+ if (stringcasecmp(Val,"keep-alive") == 0)
+ Persistent = true;
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Last-Modified:") == 0)
+ {
+ if (RFC1123StrToTime(Val.c_str(), Date) == false)
+ return _error->Error(_("Unknown date format"));
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Location:") == 0)
+ {
+ Location = Val;
+ return true;
+ }
+
+ return true;
+}
+ /*}}}*/
+// ServerState::ServerState - Constructor /*{{{*/
+ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOut(120), Owner(Owner)
+{
+ Reset();
+}
+ /*}}}*/
+
+bool ServerMethod::Configuration(string Message) /*{{{*/
+{
+ return pkgAcqMethod::Configuration(Message);
+}
+ /*}}}*/
+
+// ServerMethod::DealWithHeaders - Handle the retrieved header data /*{{{*/
+// ---------------------------------------------------------------------
+/* We look at the header data we got back from the server and decide what
+   to do. Returns DealWithHeadersResult (see server.h for details).
+ */
+ServerMethod::DealWithHeadersResult
+ServerMethod::DealWithHeaders(FetchResult &Res)
+{
+ // Not Modified
+ if (Server->Result == 304)
+ {
+ unlink(Queue->DestFile.c_str());
+ Res.IMSHit = true;
+ Res.LastModified = Queue->LastModified;
+ return IMS_HIT;
+ }
+
+ /* Redirect
+ *
+ * Note that it is only OK for us to treat all redirection the same
+ * because we *always* use GET, not other HTTP methods. There are
+ * three redirection codes for which it is not appropriate that we
+ * redirect. Pass on those codes so the error handling kicks in.
+ */
+ if (AllowRedirect
+ && (Server->Result > 300 && Server->Result < 400)
+ && (Server->Result != 300 // Multiple Choices
+ && Server->Result != 304 // Not Modified
+ && Server->Result != 306)) // (Not part of HTTP/1.1, reserved)
+ {
+ if (Server->Location.empty() == true);
+ else if (Server->Location[0] == '/' && Queue->Uri.empty() == false)
+ {
+ URI Uri = Queue->Uri;
+ if (Uri.Host.empty() == false)
+ NextURI = URI::SiteOnly(Uri);
+ else
+ NextURI.clear();
+ NextURI.append(DeQuoteString(Server->Location));
+ return TRY_AGAIN_OR_REDIRECT;
+ }
+ else
+ {
+ NextURI = DeQuoteString(Server->Location);
+ URI tmpURI = NextURI;
+ // Do not allow a redirection to switch protocol
+ if (tmpURI.Access == "http")
+ return TRY_AGAIN_OR_REDIRECT;
+ }
+ /* else pass through for error message */
+ }
+ // retry after an invalid range response without partial data
+ else if (Server->Result == 416)
+ {
+ struct stat SBuf;
+ if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
+ {
+ if ((unsigned long long)SBuf.st_size == Server->Size)
+ {
+ // the file is completely downloaded, but was not moved
+ Server->StartPos = Server->Size;
+ Server->Result = 200;
+ Server->HaveContent = false;
+ }
+ else if (unlink(Queue->DestFile.c_str()) == 0)
+ {
+ NextURI = Queue->Uri;
+ return TRY_AGAIN_OR_REDIRECT;
+ }
+ }
+ }
+
+   /* We have a reply we don't handle. This should indicate a permanent
+      server failure */
+ if (Server->Result < 200 || Server->Result >= 300)
+ {
+ char err[255];
+ snprintf(err,sizeof(err)-1,"HttpError%i",Server->Result);
+ SetFailReason(err);
+ _error->Error("%u %s",Server->Result,Server->Code);
+ if (Server->HaveContent == true)
+ return ERROR_WITH_CONTENT_PAGE;
+ return ERROR_UNRECOVERABLE;
+ }
+
+ // This is some sort of 2xx 'data follows' reply
+ Res.LastModified = Server->Date;
+ Res.Size = Server->Size;
+
+ // Open the file
+ delete File;
+ File = new FileFd(Queue->DestFile,FileFd::WriteAny);
+ if (_error->PendingError() == true)
+ return ERROR_NOT_FROM_SERVER;
+
+ FailFile = Queue->DestFile;
+   FailFile.c_str();   // Make sure we don't do a malloc in the signal handler
+ FailFd = File->Fd();
+ FailTime = Server->Date;
+
+ if (Server->InitHashes(*File) == false)
+ {
+ _error->Errno("read",_("Problem hashing file"));
+ return ERROR_NOT_FROM_SERVER;
+ }
+ if (Server->StartPos > 0)
+ Res.ResumePoint = Server->StartPos;
+
+ SetNonBlock(File->Fd(),true);
+ return FILE_IS_OPEN;
+}
+ /*}}}*/
+// ServerMethod::SigTerm - Handle a fatal signal /*{{{*/
+// ---------------------------------------------------------------------
+/* This closes and timestamps the open file. This is necessary to get
+   resume behavior on user abort */
+void ServerMethod::SigTerm(int)
+{
+ if (FailFd == -1)
+ _exit(100);
+ close(FailFd);
+
+ // Timestamp
+ struct utimbuf UBuf;
+ UBuf.actime = FailTime;
+ UBuf.modtime = FailTime;
+ utime(FailFile.c_str(),&UBuf);
+
+ _exit(100);
+}
+ /*}}}*/
+// ServerMethod::Fetch - Fetch an item /*{{{*/
+// ---------------------------------------------------------------------
+/* This adds an item to the pipeline. We keep the pipeline at a fixed
+ depth. */
+bool ServerMethod::Fetch(FetchItem *)
+{
+ if (Server == 0)
+ return true;
+
+ // Queue the requests
+ int Depth = -1;
+ for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth;
+ I = I->Next, Depth++)
+ {
+ // If pipelining is disabled, we only queue 1 request
+ if (Server->Pipeline == false && Depth >= 0)
+ break;
+
+ // Make sure we stick with the same server
+ if (Server->Comp(I->Uri) == false)
+ break;
+ if (QueueBack == I)
+ {
+ QueueBack = I->Next;
+ SendReq(I);
+ continue;
+ }
+ }
+
+ return true;
+};
+ /*}}}*/
+// ServerMethod::Loop - Main loop /*{{{*/
+int ServerMethod::Loop()
+{
+ typedef vector<string> StringVector;
+ typedef vector<string>::iterator StringVectorIterator;
+ map<string, StringVector> Redirected;
+
+ signal(SIGTERM,SigTerm);
+ signal(SIGINT,SigTerm);
+
+ Server = 0;
+
+ int FailCounter = 0;
+ while (1)
+ {
+ // We have no commands, wait for some to arrive
+ if (Queue == 0)
+ {
+ if (WaitFd(STDIN_FILENO) == false)
+ return 0;
+ }
+
+ /* Run messages, we can accept 0 (no message) if we didn't
+         do a WaitFd above. Otherwise the FD is closed. */
+ int Result = Run(true);
+ if (Result != -1 && (Result != 0 || Queue == 0))
+ {
+ if(FailReason.empty() == false ||
+ _config->FindB("Acquire::http::DependOnSTDIN", true) == true)
+ return 100;
+ else
+ return 0;
+ }
+
+ if (Queue == 0)
+ continue;
+
+ // Connect to the server
+ if (Server == 0 || Server->Comp(Queue->Uri) == false)
+ {
+ delete Server;
+ Server = CreateServerState(Queue->Uri);
+ }
+ /* If the server has explicitly said this is the last connection
+ then we pre-emptively shut down the pipeline and tear down
+ the connection. This will speed up HTTP/1.0 servers a tad
+ since we don't have to wait for the close sequence to
+ complete */
+ if (Server->Persistent == false)
+ Server->Close();
+
+ // Reset the pipeline
+ if (Server->IsOpen() == false)
+ QueueBack = Queue;
+
+      // Connect to the host
+ if (Server->Open() == false)
+ {
+ Fail(true);
+ delete Server;
+ Server = 0;
+ continue;
+ }
+
+ // Fill the pipeline.
+ Fetch(0);
+
+ // Fetch the next URL header data from the server.
+ switch (Server->RunHeaders(File))
+ {
+ case ServerState::RUN_HEADERS_OK:
+ break;
+
+ // The header data is bad
+ case ServerState::RUN_HEADERS_PARSE_ERROR:
+ {
+ _error->Error(_("Bad header data"));
+ Fail(true);
+ RotateDNS();
+ continue;
+ }
+
+ // The server closed a connection during the header get..
+ default:
+ case ServerState::RUN_HEADERS_IO_ERROR:
+ {
+ FailCounter++;
+ _error->Discard();
+ Server->Close();
+ Server->Pipeline = false;
+
+ if (FailCounter >= 2)
+ {
+ Fail(_("Connection failed"),true);
+ FailCounter = 0;
+ }
+
+ RotateDNS();
+ continue;
+ }
+ };
+
+ // Decide what to do.
+ FetchResult Res;
+ Res.Filename = Queue->DestFile;
+ switch (DealWithHeaders(Res))
+ {
+ // Ok, the file is Open
+ case FILE_IS_OPEN:
+ {
+ URIStart(Res);
+
+ // Run the data
+ bool Result = true;
+ if (Server->HaveContent)
+ Result = Server->RunData(File);
+
+ /* If the server is sending back sizeless responses then fill in
+ the size now */
+ if (Res.Size == 0)
+ Res.Size = File->Size();
+
+ // Close the file, destroy the FD object and timestamp it
+ FailFd = -1;
+ delete File;
+ File = 0;
+
+ // Timestamp
+ struct utimbuf UBuf;
+ time(&UBuf.actime);
+ UBuf.actime = Server->Date;
+ UBuf.modtime = Server->Date;
+ utime(Queue->DestFile.c_str(),&UBuf);
+
+ // Send status to APT
+ if (Result == true)
+ {
+ Res.TakeHashes(*Server->GetHashes());
+ URIDone(Res);
+ }
+ else
+ {
+ if (Server->IsOpen() == false)
+ {
+ FailCounter++;
+ _error->Discard();
+ Server->Close();
+
+ if (FailCounter >= 2)
+ {
+ Fail(_("Connection failed"),true);
+ FailCounter = 0;
+ }
+
+ QueueBack = Queue;
+ }
+ else
+ Fail(true);
+ }
+ break;
+ }
+
+ // IMS hit
+ case IMS_HIT:
+ {
+ URIDone(Res);
+ break;
+ }
+
+ // Hard server error, not found or something
+ case ERROR_UNRECOVERABLE:
+ {
+ Fail();
+ break;
+ }
+
+ // Hard internal error, kill the connection and fail
+ case ERROR_NOT_FROM_SERVER:
+ {
+ delete File;
+ File = 0;
+
+ Fail();
+ RotateDNS();
+ Server->Close();
+ break;
+ }
+
+ // We need to flush the data, the header is like a 404 w/ error text
+ case ERROR_WITH_CONTENT_PAGE:
+ {
+ Fail();
+
+	 // Send the content to /dev/null
+ File = new FileFd("/dev/null",FileFd::WriteExists);
+ Server->RunData(File);
+ delete File;
+ File = 0;
+ break;
+ }
+
+ // Try again with a new URL
+ case TRY_AGAIN_OR_REDIRECT:
+ {
+ // Clear rest of response if there is content
+ if (Server->HaveContent)
+ {
+ File = new FileFd("/dev/null",FileFd::WriteExists);
+ Server->RunData(File);
+ delete File;
+ File = 0;
+ }
+
+ /* Detect redirect loops. No more redirects are allowed
+ after the same URI is seen twice in a queue item. */
+ StringVector &R = Redirected[Queue->DestFile];
+ bool StopRedirects = false;
+ if (R.empty() == true)
+ R.push_back(Queue->Uri);
+ else if (R[0] == "STOP" || R.size() > 10)
+ StopRedirects = true;
+ else
+ {
+ for (StringVectorIterator I = R.begin(); I != R.end(); ++I)
+ if (Queue->Uri == *I)
+ {
+ R[0] = "STOP";
+ break;
+ }
+
+ R.push_back(Queue->Uri);
+ }
+
+ if (StopRedirects == false)
+ Redirect(NextURI);
+ else
+ Fail();
+
+ break;
+ }
+
+ default:
+ Fail(_("Internal error"));
+ break;
+ }
+
+ FailCounter = 0;
+ }
+
+ return 0;
+}
+ /*}}}*/
diff --git a/methods/server.h b/methods/server.h
new file mode 100644
index 000000000..4dc6a1f2f
--- /dev/null
+++ b/methods/server.h
@@ -0,0 +1,144 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+   Classes dealing with the abstraction of talking to a remote end via a text
+ protocol like HTTP (which is used by the http and https methods)
+
+ ##################################################################### */
+ /*}}}*/
+
+#ifndef APT_SERVER_H
+#define APT_SERVER_H
+
+#include <apt-pkg/strutl.h>
+
+#include <string>
+
+using std::cout;
+using std::endl;
+
+class Hashes;
+class ServerMethod;
+class FileFd;
+
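+/** \brief Parsed state of the last response received from a server together
+ *  with the connection attributes (persistence, pipelining, timeout) shared
+ *  by the http and https methods. */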
+struct ServerState
+{
+ // This is the last parsed Header Line
+ unsigned int Major;
+ unsigned int Minor;
+ unsigned int Result;
+ char Code[360];
+
+ // These are some statistics from the last parsed header lines
+ unsigned long long Size;
+ signed long long StartPos;
+ time_t Date;
+ bool HaveContent;
+ enum {Chunked,Stream,Closes} Encoding;
+ enum {Header, Data} State;
+ bool Persistent;
+ std::string Location;
+
+ // This is a Persistent attribute of the server itself.
+ bool Pipeline;
+ URI ServerName;
+ URI Proxy;
+ unsigned long TimeOut;
+
+ protected:
+ ServerMethod *Owner;
+
+ virtual bool ReadHeaderLines(std::string &Data) = 0;
+ virtual bool LoadNextResponse(bool const ToFile, FileFd * const File) = 0;
+
+ public:
+ bool HeaderLine(std::string Line);
+
+ /** \brief Result of the header acquire */
+ enum RunHeadersResult {
+ /** \brief Header ok */
+ RUN_HEADERS_OK,
+ /** \brief IO error while retrieving */
+ RUN_HEADERS_IO_ERROR,
+ /** \brief Parse error after retrieving */
+ RUN_HEADERS_PARSE_ERROR,
+ };
+ /** \brief Get the headers before the data */
+ RunHeadersResult RunHeaders(FileFd * const File);
+
+ bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
+ virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0;
+ StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
+ State = Header; Persistent = false; Pipeline = true;};
+ virtual bool WriteResponse(std::string const &Data) = 0;
+
+ /** \brief Transfer the data from the socket */
+ virtual bool RunData(FileFd * const File) = 0;
+
+ virtual bool Open() = 0;
+ virtual bool IsOpen() = 0;
+ virtual bool Close() = 0;
+ virtual bool InitHashes(FileFd &File) = 0;
+ virtual Hashes * GetHashes() = 0;
+ virtual bool Die(FileFd &File) = 0;
+ virtual bool Flush(FileFd * const File) = 0;
+ virtual bool Go(bool ToFile, FileFd * const File) = 0;
+
+ ServerState(URI Srv, ServerMethod *Owner);
+ virtual ~ServerState() {};
+};
+
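+/** \brief Base class for methods talking to such a server: it owns the
+ *  ServerState, runs the fetch loop and interprets the received headers,
+ *  while the protocol specific work is delegated to the pure virtual
+ *  SendReq, CreateServerState and RotateDNS hooks. */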
+class ServerMethod : public pkgAcqMethod
+{
+ protected:
+ virtual bool Fetch(FetchItem *);
+
+ ServerState *Server;
+ std::string NextURI;
+ FileFd *File;
+
+ unsigned long PipelineDepth;
+ bool AllowRedirect;
+
+ public:
+ bool Debug;
+
+ /** \brief Result of the header parsing */
+ enum DealWithHeadersResult {
+ /** \brief The file is open and ready */
+ FILE_IS_OPEN,
+      /** \brief We got an IMS hit, the file has not changed */
+      IMS_HIT,
+      /** \brief The server reported an unrecoverable error */
+      ERROR_UNRECOVERABLE,
+      /** \brief The server reported an error with an error content page */
+ ERROR_WITH_CONTENT_PAGE,
+ /** \brief An error on the client side */
+ ERROR_NOT_FROM_SERVER,
+ /** \brief A redirect or retry request */
+ TRY_AGAIN_OR_REDIRECT
+ };
+ /** \brief Handle the retrieved header data */
+ DealWithHeadersResult DealWithHeaders(FetchResult &Res);
+
+ // In the event of a fatal signal this file will be closed and timestamped.
+ static std::string FailFile;
+ static int FailFd;
+ static time_t FailTime;
+ static void SigTerm(int);
+
+ virtual bool Configuration(std::string Message);
+ virtual bool Flush() { return Server->Flush(File); };
+
+ int Loop();
+
+ virtual void SendReq(FetchItem *Itm) = 0;
+ virtual ServerState * CreateServerState(URI uri) = 0;
+ virtual void RotateDNS() = 0;
+
+ ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), PipelineDepth(0), AllowRedirect(false), Debug(false) {};
+ virtual ~ServerMethod() {};
+};
+
+#endif
diff --git a/test/integration/Packages-bug-723705-tagfile-truncates-fields b/test/integration/Packages-bug-723705-tagfile-truncates-fields
new file mode 100644
index 000000000..c42b85072
--- /dev/null
+++ b/test/integration/Packages-bug-723705-tagfile-truncates-fields
@@ -0,0 +1,167 @@
+Package: cdebconf-gtk-udeb
+Source: cdebconf
+Version: 0.185
+Installed-Size: 92
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Description: Gtk+ frontend for Debian Configuration Management System
+Description-md5: 75d036e0a245499123544e2254b92e9c
+Section: debian-installer
+Priority: optional
+Filename: pool/main/c/cdebconf/cdebconf-gtk-udeb_0.185_amd64.udeb
+Size: 27278
+MD5sum: a1bbbc1d4fb8e0615b5621abac021924
+SHA1: b1a7ab55a90f61e5337847d02ff1d12d73559def
+SHA256: cd79f3205304a7932b3309c4df9898c9a53929bc651912659858e087ebe1c18a
+
+Package: cdebconf-newt-udeb
+Source: cdebconf
+Version: 0.185
+Installed-Size: 58
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Description: Newt frontend for Debian Configuration Management System
+Description-md5: e080be5e38cb8c57bca2f3effe9ee030
+Section: debian-installer
+Priority: optional
+Filename: pool/main/c/cdebconf/cdebconf-newt-udeb_0.185_amd64.udeb
+Size: 19192
+MD5sum: de27807f56dae2f2403b3322d5fe6bd2
+SHA1: 57883e223d46a9f25966f9b986e6a3bc2f67d8ef
+SHA256: 5f8b9c3a5430f2ec879484a7736582b152d76cc8ba9bc19328268f3635759a1b
+
+Package: cdebconf-udeb
+Source: cdebconf
+Version: 0.185
+Installed-Size: 245
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Provides: debconf-2.0
+Description: Debian Configuration Management System (C-implementation)
+Description-md5: 9f3579e9d9f86ac89e667a8707d3cbd3
+Section: debian-installer
+Priority: standard
+Filename: pool/main/c/cdebconf/cdebconf-udeb_0.185_amd64.udeb
+Size: 77376
+MD5sum: e3883706fdbf54c2e5ea959c92b2d37f
+SHA1: 0232f1bdf1531db628516ed3a46a27466b267fdc
+SHA256: 96345575417a3e4df8a2cadaa55784ec8f6c042defb1e2fc002d941b6116ceab
+
+Package: cdebconf-gtk-terminal
+Source: cdebconf-terminal
+Version: 0.22
+Installed-Size: 64
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Provides: cdebconf-terminal
+Depends: cdebconf-gtk-udeb, libc6-udeb (>= 2.17), libglib2.0-udeb (>= 2.36.4), libgtk2.0-0-udeb (>= 2.24.0), libvte9-udeb (>= 1:0.28.0), cdebconf-udeb, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal
+Description: cdebconf gtk plugin displaying a terminal
+Description-md5: 18c4446758aec003eb8cd0a43419f1aa
+Section: debian-installer
+Priority: extra
+Filename: pool/main/c/cdebconf-terminal/cdebconf-gtk-terminal_0.22_amd64.udeb
+Size: 14734
+MD5sum: f9c3a7354560cb88e0396e2b7ba54363
+SHA1: 9c1c93328e758bfd9de2752466b271aaf38c8177
+SHA256: ca749853fc3b93db1d08ccdc6b46de27633de52bc5b880fa65275897ebcaaf69
+
+Package: cdebconf-newt-terminal
+Source: cdebconf-terminal
+Version: 0.22
+Installed-Size: 43
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Provides: cdebconf-terminal
+Depends: cdebconf-newt-udeb (>= 0.146), libc6-udeb (>= 2.17), libnewt0.52
+Description: cdebconf newt plugin to provide a clean terminal
+Description-md5: 4109a053022081b573d864d84d6eb16d
+Section: debian-installer
+Priority: extra
+Filename: pool/main/c/cdebconf-terminal/cdebconf-newt-terminal_0.22_amd64.udeb
+Size: 4538
+MD5sum: 20db6152fce5081fcbf49c7c08f21246
+SHA1: fa2a40f777a2f48b9634866bc780fb059e60b2fe
+SHA256: c4d99ef27285f0c9090005313165627e56e0972e687af7e68c2b1d1538e2ae09
+
+Package: libc6-udeb
+Source: eglibc (2.17-92)
+Version: 2.17-92+b1
+Installed-Size: 3126
+Maintainer: GNU Libc Maintainers <debian-glibc@lists.debian.org>
+Architecture: amd64
+Provides: glibc-2.17-1, libc-udeb, libc6
+Description: Embedded GNU C Library: Shared libraries - udeb
+Description-md5: 9552ce73b7b3fb466e3d89fe8db9a563
+Section: debian-installer
+Priority: extra
+Filename: pool/main/e/eglibc/libc6-udeb_2.17-92+b1_amd64.udeb
+Size: 1056000
+MD5sum: 7fd7032eeeecf7f76eff79a0543fbd72
+SHA1: 724b6a81b8fbc9d4d2bb43d656c08de73f7ada25
+SHA256: 137d4c001bbfde8161315c36e6cb8653ae2c50a8d6b6d2d27396c492d91a1723
+
+Package: libglib2.0-udeb
+Source: glib2.0
+Version: 2.36.4-1
+Installed-Size: 10070
+Maintainer: Debian GNOME Maintainers <pkg-gnome-maintainers@lists.alioth.debian.org>
+Architecture: amd64
+Description: GLib library of C routines - minimal runtime
+Description-md5: 0244040042870a89aa49f037cce3f1e9
+Section: debian-installer
+Priority: optional
+Filename: pool/main/g/glib2.0/libglib2.0-udeb_2.36.4-1_amd64.udeb
+Size: 1714604
+MD5sum: 72da029f1bbb36057d874f1f82a5d00a
+SHA1: 32bce78a052ef19a620f43ecbe12404fa570c0f1
+SHA256: 8edbc7cb872c0a82705913563f93f9eec5750881e4378c5a48770cde840cd6eb
+
+Package: libgtk2.0-0-udeb
+Source: gtk+2.0
+Version: 2.24.20-1
+Installed-Size: 5035
+Maintainer: Debian GNOME Maintainers <pkg-gnome-maintainers@lists.alioth.debian.org>
+Architecture: amd64
+Provides: gtk2.0-binver-2.10.0
+Description: GTK+ graphical user interface library - minimal runtime
+Description-md5: 32e5112b80c02578837cff4f65dfec84
+Section: debian-installer
+Priority: extra
+Filename: pool/main/g/gtk+2.0/libgtk2.0-0-udeb_2.24.20-1_amd64.udeb
+Size: 1643046
+MD5sum: 25513478eb2e02e5766c0eea0b411ca9
+SHA1: 9274f05bfa930a3406403441ce061bade04e2064
+SHA256: d5f611f48928ae02f759105cf8cff467cde1cb44df56ad31067168b46a80f8bc
+
+Package: libvte9-udeb
+Source: vte
+Version: 1:0.28.2-5
+Installed-Size: 628
+Maintainer: Debian GNOME Maintainers <pkg-gnome-maintainers@lists.alioth.debian.org>
+Architecture: amd64
+Description: Terminal emulator widget for GTK+ 2.0 - minimal runtime
+Description-md5: e7993385c30bae6e96c8cb87795a513c
+Section: debian-installer
+Priority: extra
+Filename: pool/main/v/vte/libvte9-udeb_0.28.2-5_amd64.udeb
+Size: 216968
+MD5sum: 7da7201effaf5ced19abd9d0b45aa2c6
+SHA1: a424cf779e7614d79740c422b6342de04fed3646
+SHA256: 4963033cbda5a8ba7eb8ebf1debae34463b8e63b821259860cfb51c1ab99562d
+
+Package: zlib1g-udeb
+Source: zlib
+Version: 1:1.2.8.dfsg-1
+Installed-Size: 115
+Maintainer: Mark Brown <broonie@debian.org>
+Architecture: amd64
+Description: compression library - runtime for Debian installer
+Description-md5: 9cab974e3eab657c53bc17611b894c7a
+Section: debian-installer
+Priority: optional
+Filename: pool/main/z/zlib/zlib1g-udeb_1.2.8.dfsg-1_amd64.udeb
+Size: 45270
+MD5sum: c02884420f79a3ae4569cf67782f3e74
+SHA1: 7cd1a7c8be4e086de733a0ce76f87d42b8b2173b
+SHA256: 61641ee2b5e185232108333438b72bec71ef549fe0e0df1b2b3afa37174e53a7
+
diff --git a/test/integration/apt.pem b/test/integration/apt.pem
new file mode 100644
index 000000000..f48df054d
--- /dev/null
+++ b/test/integration/apt.pem
@@ -0,0 +1,49 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCt4R1Q2oYF6utL
+19GBhnlHW8L2BI7PRFWge/ZpqIZWsaFcb30FV86Z6aXXZmgfEJ2814ZZYD1IKeCe
+JsJpns7B4vYe1v64r995ZNBQAAKIYjICkKZOBgOphV+ChBfrCctVXgfLbMP6iBdH
+J02wHzSCCdZm0sdVl9tB5l/OyJU8Mb4KB3btBhfZfY2M6lU+FOjcXs1LOduUrv7K
+fZ+DEalvVGkomLtHtD0qb2vkqFrTjVCkziUVWhhxFFflt08oQ01Clxpl+uv7rOQo
+jtkJ1LrMuv7iPfaZ/z3qLiFxZYG1BCGEwTOKCtJo6bgFzXiN3q7Q5FFlmv851x2J
+Dn8C7Qm7AgMBAAECggEAE3q6vAofJZ6Ryadd8zLLd3ESQFl2XkX7icUZb/DPS/sO
+ZrqeuPCDVr7UM3NnisNjyHoktPKRKvp2DYGuGgMOiq4QgJf5ZVten8zpgWze28SU
+cbEe0HLgCifE8Ww2+b/ZJbEpEmMW+YQxh2khzO9SBJdxi4dliXM/vvw+E35pKZsB
+s6glrz6VQAxxa9fY4fLnB2DafHy+pUvRVw8gC6PCM9jXN9tMYAqztsJu7aaanNyT
+HX2UDWa8hxVx6t5UQZuxvst9N+RcEwmVCR2qlfZt/VRBRibBm62crEKbTD00mNHQ
+4AIDn3g6Y3SXpDlgtNpjLyBL3fODPIwqwGdblaSKkQKBgQDYXecu0Eda7kbR5ciW
+IAn8XOxsBIkkh8YVl2gRiiajRVoeiYBHaW9TyuQiaWrftiDQxB/N4G2focTXy/7O
+VJn6e/SUoO/ZGRw2GbTxLUQptgvFsejYCcW9XpC8MCwE/y2swiY7JM0WR8cV2nCk
+a/Cls6f1LjL13aFO0PAorEcahQKBgQDNuth6EHZVwfDgUuqhRw4HIIpfsfiA3UOd
+b5k/NsfQIev1YUqnfucgInNPDq2Jf8eTQw3TKaszo2DCjDffCsEgM09Tym143Bd6
+AsMuqAStsE3IEC7pnmh95l29/7mh4OuG5cp5JUx0Pi5PkuJ6ywA8P1rM1MB9Zf52
+NGJCo1pnPwKBgQCx/n4i+uDYo1DLd/dN2UmdvGwaaJjR3ohMVuQcGcSzaGg82u0W
+0lvtWOnYjVSIeXIBjHaFjW1hd1lSFdWms96AO9z3MHZf6NJWh0tdZNnAXqzMlBFz
+OIbdxJ/Y0OBFtA9FIesFmL7G54GWLr+f49Ry3Jr9jmYJ8au0BRqsux07aQKBgC4q
+CT2KyCMCO/z6XjAGc71hres/UlYIUI3ZZvfqYPfxRLNxO4FOVqq9UEajMomyJKSE
+3WtO5F3YAXRmZnskPKXvHZPdzqbaLGJykD298h7PewSzrPM7WpM1yD9ETPFoOTGy
+CrcYiYlkEpxEh5GqT8k1JjjkXLVG18zKgGoXocedAoGAQyU2DCNfxwzIJfFHKZEG
+zpni72cR68Tu3AhW/38vMR2ZPca4KzXrUA52T+j7vkQC38LHm/mzNXNP7Vya0PJ3
+WoYOcLtg2uFPh0P/35ArEzuNooLsvulgg1jsamPbF8KAvJZKZHr30hlC/JGYSBbV
+bnkzJTShsKzHIUiLtQ8Ja+E=
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIJAJ39xapQo0vLMA0GCSqGSIb3DQEBBQUAMFMxCzAJBgNV
+BAYTAkRFMRMwEQYDVQQIDApTb21lLVN0YXRlMRswGQYDVQQKDBJBUFQgVGVzdGNh
+c2VzIEdtYkgxEjAQBgNVBAMMCWxvY2FsaG9zdDAgFw0xMzA5MTYwODQ4MzVaGA80
+NzUxMDgxMzA4NDgzNVowUzELMAkGA1UEBhMCREUxEzARBgNVBAgMClNvbWUtU3Rh
+dGUxGzAZBgNVBAoMEkFQVCBUZXN0Y2FzZXMgR21iSDESMBAGA1UEAwwJbG9jYWxo
+b3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAreEdUNqGBerrS9fR
+gYZ5R1vC9gSOz0RVoHv2aaiGVrGhXG99BVfOmeml12ZoHxCdvNeGWWA9SCngnibC
+aZ7OweL2Htb+uK/feWTQUAACiGIyApCmTgYDqYVfgoQX6wnLVV4Hy2zD+ogXRydN
+sB80ggnWZtLHVZfbQeZfzsiVPDG+Cgd27QYX2X2NjOpVPhTo3F7NSznblK7+yn2f
+gxGpb1RpKJi7R7Q9Km9r5Kha041QpM4lFVoYcRRX5bdPKENNQpcaZfrr+6zkKI7Z
+CdS6zLr+4j32mf896i4hcWWBtQQhhMEzigrSaOm4Bc14jd6u0ORRZZr/OdcdiQ5/
+Au0JuwIDAQABo1AwTjAdBgNVHQ4EFgQUhd26E7ykEYRTDbgMzkYtFtENhSkwHwYD
+VR0jBBgwFoAUhd26E7ykEYRTDbgMzkYtFtENhSkwDAYDVR0TBAUwAwEB/zANBgkq
+hkiG9w0BAQUFAAOCAQEAWcyMKi0Vc4beGV7w4Qft0/2P68jjMlQRdgkz+gGXbMVr
+//KhqR3PbgFmHHpUsZ718AHeerNNdfFzOUptiAiOqH2muyAGdeWCxJ8KcU0sic8x
+/h3TOzMYfEozhgMSJp9YW1z655uHcb15S7jb4zZwXwGyQzxwXT35SKj2mCqSbjIb
+G987DGI+MtyoGRXhIwnBEsGTI1ck3NoeXBJ/tS/Ma8gUUC2xldMSprtHjeUHvZV2
+iz/HTqGlMLGW96AVeZiFNiC1fJ6pvref2XW5MkkvQm8tOi2cSrwJc9CgnCpCxkLp
+liRsbwAduwkA26XzEomMR7yyYS5pm0Eu0cO9X39FKQ==
+-----END CERTIFICATE-----
diff --git a/test/integration/framework b/test/integration/framework
index 4003d932c..d899bb574 100644
--- a/test/integration/framework
+++ b/test/integration/framework
@@ -216,14 +216,13 @@ getarchitectures() {
}
configarchitecture() {
- local CONFFILE=rootdir/etc/apt/apt.conf.d/01multiarch.conf
- rm -f $CONFFILE
- echo "APT::Architecture \"$(getarchitecture $1)\";" > $CONFFILE
- shift
- while [ -n "$1" ]; do
- echo "APT::Architectures:: \"$(getarchitecture $1)\";" >> $CONFFILE
- shift
- done
+ {
+ echo "APT::Architecture \"$(getarchitecture $1)\";"
+ while [ -n "$1" ]; do
+ echo "APT::Architectures:: \"$(getarchitecture $1)\";"
+ shift
+ done
+ } >rootdir/etc/apt/apt.conf.d/01multiarch.conf
configdpkg
}
@@ -236,16 +235,17 @@ configdpkg() {
echo -n > rootdir/var/lib/dpkg/status
fi
fi
+ rm -f rootdir/etc/apt/apt.conf.d/00foreigndpkg
if $(which dpkg) --assert-multi-arch >/dev/null 2>&1; then
local ARCHS="$(getarchitectures)"
if echo "$ARCHS" | grep -E -q '[^ ]+ [^ ]+'; then
DPKGARCH="$(dpkg --print-architecture)"
for ARCH in ${ARCHS}; do
if [ "${ARCH}" != "${DPKGARCH}" ]; then
- if ! dpkg --add-architecture ${ARCH}; then
+ if ! dpkg --add-architecture ${ARCH} >/dev/null 2>&1; then
# old-style used e.g. in Ubuntu-P – and as it seems travis
- echo "DPKG::options:: \"--foreign-architecture\";" >> aptconfig.conf
- echo "DPKG::options:: \"${ARCH}\";" >> aptconfig.conf
+ echo "DPKG::options:: \"--foreign-architecture\";" >> rootdir/etc/apt/apt.conf.d/00foreigndpkg
+ echo "DPKG::options:: \"${ARCH}\";" >> rootdir/etc/apt/apt.conf.d/00foreigndpkg
fi
fi
done
@@ -278,7 +278,11 @@ setupsimplenativepackage() {
local VERSION="$3"
local RELEASE="${4:-unstable}"
local DEPENDENCIES="$5"
- local DESCRIPTION="$6"
+ local DESCRIPTION="${6:-"Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
+ If you find such a package installed on your system,
+ something went horribly wrong! They are autogenerated
+	 and used only by testcases and serve no other purpose…"}"
+
local SECTION="${7:-others}"
local DISTSECTION
if [ "$SECTION" = "$(echo "$SECTION" | cut -d'/' -f 2)" ]; then
@@ -310,14 +314,8 @@ Package: $NAME" > debian/control
echo "Architecture: any" >> debian/control
fi
test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> debian/control
- if [ -z "$DESCRIPTION" ]; then
- echo "Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
- If you find such a package installed on your system,
- YOU did something horribly wrong! They are autogenerated
- und used only by testcases for APT and surf no other propose…" >> debian/control
- else
- echo "Description: $DESCRIPTION" >> debian/control
- fi
+ echo "Description: $DESCRIPTION" >> debian/control
+
test -e debian/compat || echo "7" > debian/compat
test -e debian/source/format || echo "3.0 (native)" > debian/source/format
test -e debian/rules || cp /usr/share/doc/debhelper/examples/rules.tiny debian/rules
@@ -330,7 +328,11 @@ buildsimplenativepackage() {
local VERSION="$3"
local RELEASE="${4:-unstable}"
local DEPENDENCIES="$5"
- local DESCRIPTION="$6"
+ local DESCRIPTION="${6:-"Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
+ If you find such a package installed on your system,
+ something went horribly wrong! They are autogenerated
+	 and used only by testcases and serve no other purpose…"}"
+
local SECTION="${7:-others}"
local PRIORITY="${8:-optional}"
local DISTSECTION
@@ -370,14 +372,7 @@ Package: $NAME" >> ${BUILDDIR}/debian/control
fi
local DEPS="$(echo "$DEPENDENCIES" | grep -v '^Build-')"
test -z "$DEPS" || echo "$DEPS" >> ${BUILDDIR}/debian/control
- if [ -z "$DESCRIPTION" ]; then
- echo "Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
- If you find such a package installed on your system,
- YOU did something horribly wrong! They are autogenerated
- und used only by testcases for APT and surf no other propose…" >> ${BUILDDIR}/debian/control
- else
- echo "Description: $DESCRIPTION" >> ${BUILDDIR}/debian/control
- fi
+ echo "Description: $DESCRIPTION" >> ${BUILDDIR}/debian/control
echo '3.0 (native)' > ${BUILDDIR}/debian/source/format
(cd ${BUILDDIR}/..; dpkg-source -b ${NAME}-${VERSION} 2>&1) | sed -n 's#^dpkg-source: info: building [^ ]\+ in ##p' \
@@ -526,7 +521,10 @@ insertpackage() {
local VERSION="$4"
local DEPENDENCIES="$5"
local PRIORITY="${6:-optional}"
- local DESCRIPTION="${7}"
+ local DESCRIPTION="${7:-"Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
+ If you find such a package installed on your system,
+ something went horribly wrong! They are autogenerated
+	 and used only by testcases and serve no other purpose…"}"
local ARCHS=""
for arch in $(echo "$ARCH" | sed -e 's#,#\n#g' | sed -e "s#^native\$#$(getarchitecture 'native')#"); do
if [ "$arch" = 'all' -o "$arch" = 'none' ]; then
@@ -548,15 +546,7 @@ Maintainer: Joe Sixpack <joe@example.org>" >> $FILE
echo "Version: $VERSION
Filename: pool/main/${NAME}/${NAME}_${VERSION}_${arch}.deb" >> $FILE
test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> $FILE
- echo -n 'Description: ' >> $FILE
- if [ -z "$DESCRIPTION" ]; then
- echo "an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
- If you find such a package installed on your system,
- YOU did something horribly wrong! They are autogenerated
- und used only by testcases for APT and surf no other propose…" >> $FILE
- else
- echo "$DESCRIPTION" >> $FILE
- fi
+ echo "Description: $DESCRIPTION" >> $FILE
echo >> $FILE
done
done
@@ -591,6 +581,11 @@ insertinstalledpackage() {
local DEPENDENCIES="$4"
local PRIORITY="${5:-optional}"
local STATUS="${6:-install ok installed}"
+ local DESCRIPTION="${7:-"Description: an autogenerated dummy ${NAME}=${VERSION}/installed
+ If you find such a package installed on your system,
+ something went horribly wrong! They are autogenerated
+	 and used only by testcases and serve no other purpose…"}"
+
local FILE='rootdir/var/lib/dpkg/status'
local INFO='rootdir/var/lib/dpkg/info'
for arch in $(echo "$ARCH" | sed -e 's#,#\n#g' | sed -e "s#^native\$#$(getarchitecture 'native')#"); do
@@ -603,11 +598,8 @@ Maintainer: Joe Sixpack <joe@example.org>
Version: $VERSION" >> $FILE
test "$arch" = 'none' || echo "Architecture: $arch" >> $FILE
test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> $FILE
- echo "Description: an autogenerated dummy ${NAME}=${VERSION}/installed
- If you find such a package installed on your system,
- YOU did something horribly wrong! They are autogenerated
- und used only by testcases for APT and surf no other propose…
-" >> $FILE
+ echo "Description: $DESCRIPTION" >> $FILE
+ echo >> $FILE
if [ "$(dpkg-query -W --showformat='${Multi-Arch}')" = 'same' ]; then
echo -n > ${INFO}/${NAME}:${arch}.list
else
@@ -795,6 +787,13 @@ signreleasefiles() {
msgdone "info"
}
+rewritesourceslist() {
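+	# point the generated sources.list entries at the given base URI,
+	# regardless of which transport they were using before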
+ local APTARCHIVE="file://$(readlink -f "${TMPWORKINGDIRECTORY}/aptarchive")"
+ for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do
+ sed -i $LIST -e "s#$APTARCHIVE#${1}#" -e "s#http://localhost:8080/#${1}#" -e "s#http://localhost:4433/#${1}#"
+ done
+}
+
changetowebserver() {
local LOG='/dev/null'
if test -x ${BUILDDIRECTORY}/aptwebserver; then
@@ -806,31 +805,32 @@ changetowebserver() {
fi
addtrap "kill $PID;"
cd - > /dev/null
- elif [ $# -gt 0 ]; then
- msgdie 'Need the aptwebserver when passing arguments for the webserver'
- elif which weborf > /dev/null; then
- weborf -xb aptarchive/ >$LOG 2>&1 &
- addtrap "kill $!;"
- elif which gatling > /dev/null; then
- cd aptarchive
- gatling -p 8080 -F -S >$LOG 2>&1 &
- addtrap "kill $!;"
- cd - > /dev/null
- elif which lighttpd > /dev/null; then
- echo "server.document-root = \"$(readlink -f ./aptarchive)\"
-server.port = 8080
-server.stat-cache-engine = \"disable\"" > lighttpd.conf
- lighttpd -t -f lighttpd.conf >/dev/null || msgdie 'Can not change to webserver: our lighttpd config is invalid'
- lighttpd -D -f lighttpd.conf >$LOG 2>&1 &
- addtrap "kill $!;"
else
	msgdie 'You have to build aptwebserver or install a webserver'
fi
- local APTARCHIVE="file://$(readlink -f ./aptarchive)"
- for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do
- sed -i $LIST -e "s#$APTARCHIVE#http://localhost:8080/#"
- done
- return 0
+ if [ "$1" != '--no-rewrite' ]; then
+ rewritesourceslist 'http://localhost:8080/'
+ fi
+}
+
+changetohttpswebserver() {
+ if ! which stunnel4 >/dev/null; then
+ msgdie 'You need to install stunnel4 for https testcases'
+ fi
+ if [ ! -e "${TMPWORKINGDIRECTORY}/aptarchive/aptwebserver.pid" ]; then
+ changetowebserver --no-rewrite
+ fi
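+	# stunnel4 terminates TLS on port 4433 and forwards the decrypted
+	# traffic to the plain aptwebserver already listening on port 8080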
+ echo "pid = ${TMPWORKINGDIRECTORY}/aptarchive/stunnel.pid
+cert = ${TESTDIRECTORY}/apt.pem
+
+[https]
+accept = 4433
+connect = 8080
+" > ${TMPWORKINGDIRECTORY}/stunnel.conf
+ stunnel4 "${TMPWORKINGDIRECTORY}/stunnel.conf"
+ local PID="$(cat ${TMPWORKINGDIRECTORY}/aptarchive/stunnel.pid)"
+ addtrap 'prefix' "kill ${PID};"
+ rewritesourceslist 'https://localhost:4433/'
}
changetocdrom() {
@@ -848,6 +848,46 @@ changetocdrom() {
find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list' -delete
}
+downloadfile() {
+ PROTO="$(echo "$1" | cut -d':' -f 1)"
+ local DOWNLOG="${TMPWORKINGDIRECTORY}/download.log"
+ rm -f "$DOWNLOG"
+ touch "$DOWNLOG"
+ {
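+		# talk the acquire-method protocol to the method binary on its
+		# stdin: 601 passes configuration items, each 600 requests a URI
+		# (the worker below waits for 201 URI Done or 400 URI Failure)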
+ echo "601 Configuration
+Config-Item: Acquire::https::CaInfo=${TESTDIR}/apt.pem
+Config-Item: Debug::Acquire::${PROTO}=1
+
+600 Acquire URI
+URI: $1
+Filename: ${2}
+"
+ # simple worker keeping stdin open until we are done (201) or error (400)
+	# and requesting new URIs on try-agains/redirects in between
+ { tail -n 999 -f "$DOWNLOG" & echo "TAILPID: $!"; } | while read f1 f2; do
+ if [ "$f1" = 'TAILPID:' ]; then
+ TAILPID="$f2"
+ elif [ "$f1" = 'New-URI:' ]; then
+ echo "600 Acquire URI
+URI: $f2
+Filename: ${2}
+"
+ elif [ "$f1" = '201' ] || [ "$f1" = '400' ]; then
+ # tail would only die on next read – which never happens
+ test -z "$TAILPID" || kill -s HUP "$TAILPID"
+ break
+ fi
+ done
+ } | LD_LIBRARY_PATH=${BUILDDIRECTORY} ${BUILDDIRECTORY}/methods/${PROTO} 2>&1 | tee "$DOWNLOG"
+ rm "$DOWNLOG"
+	# the download was successful only if the file exists
+ if [ -e "$2" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
checkdiff() {
local DIFFTEXT="$($(which diff) -u $* | sed -e '/^---/ d' -e '/^+++/ d' -e '/^@@/ d')"
if [ -n "$DIFFTEXT" ]; then
@@ -893,7 +933,7 @@ testequalor2() {
echo "$2" > $COMPAREFILE2
shift 2
msgtest "Test for equality OR of" "$*"
- $* >$COMPAREAGAINST 2>&1
+ $* >$COMPAREAGAINST 2>&1 || true
(checkdiff $COMPAREFILE1 $COMPAREAGAINST 1> /dev/null ||
checkdiff $COMPAREFILE2 $COMPAREAGAINST 1> /dev/null) && msgpass ||
( echo "\n${CINFO}Diff against OR 1${CNORMAL}" "$(checkdiff $COMPAREFILE1 $COMPAREAGAINST)" \
diff --git a/test/integration/test-bug-601016-description-translation b/test/integration/skip-bug-601016-description-translation
index 33c209e9d..33c209e9d 100755
--- a/test/integration/test-bug-601016-description-translation
+++ b/test/integration/skip-bug-601016-description-translation
diff --git a/test/integration/status-bug-723705-tagfile-truncates-fields b/test/integration/status-bug-723705-tagfile-truncates-fields
new file mode 100644
index 000000000..fe18506c8
--- /dev/null
+++ b/test/integration/status-bug-723705-tagfile-truncates-fields
@@ -0,0 +1,62 @@
+Package: libc6
+Status: install ok installed
+Priority: required
+Section: libs
+Installed-Size: 10164
+Maintainer: GNU Libc Maintainers <debian-glibc@lists.debian.org>
+Architecture: amd64
+Multi-Arch: same
+Source: eglibc (2.17-92)
+Version: 2.17-92+b1
+Replaces: libc6-amd64
+Provides: glibc-2.17-1
+Suggests: glibc-doc, debconf | debconf-2.0, locales
+Breaks: locales (<< 2.17), locales-all (<< 2.17), lsb-core (<= 3.2-27), nscd (<< 2.17)
+Conflicts: prelink (<= 0.0.20090311-1), tzdata (<< 2007k-1), tzdata-etch
+Conffiles:
+ /etc/ld.so.conf.d/x86_64-linux-gnu.conf 593ad12389ab2b6f952e7ede67b8fbbf
+Description: Embedded GNU C Library: Shared libraries
+ Contains the standard libraries that are used by nearly all programs on
+ the system. This package includes shared versions of the standard C library
+ and the standard math library, as well as many others.
+Homepage: http://www.eglibc.org
+
+Package: libnewt0.52
+Status: install ok installed
+Priority: important
+Section: libs
+Installed-Size: 820
+Maintainer: Alastair McKinstry <mckinstry@debian.org>
+Architecture: amd64
+Multi-Arch: same
+Source: newt
+Version: 0.52.15-3
+Recommends: libfribidi0
+Conffiles:
+ /etc/newt/palette.original d41d8cd98f00b204e9800998ecf8427e
+Description: Not Erik's Windowing Toolkit - text mode windowing with slang
+ Newt is a windowing toolkit for text mode built from the slang library.
+ It allows color text mode applications to easily use stackable windows,
+ push buttons, check boxes, radio buttons, lists, entry fields, labels,
+ and displayable text. Scrollbars are supported, and forms may be nested
+ to provide extra functionality. This package contains the shared library
+ for programs that have been built with newt.
+Homepage: https://fedorahosted.org/newt/
+
+Package: libgcc1
+Status: install ok installed
+Priority: required
+Section: libs
+Installed-Size: 128
+Maintainer: Debian GCC Maintainers <debian-gcc@lists.debian.org>
+Architecture: amd64
+Multi-Arch: same
+Source: gcc-4.8 (4.8.1-10)
+Version: 1:4.8.1-10
+Breaks: gcc-4.1, gcc-4.3 (<< 4.3.6-1), gcc-4.4 (<< 4.4.6-4), gcc-4.5 (<< 4.5.3-2)
+Description: GCC support library
+ Shared version of the support library, a library of internal subroutines
+ that GCC uses to overcome shortcomings of particular machines, or
+ special needs for some languages.
+Homepage: http://gcc.gnu.org/
+
diff --git a/test/integration/test-apt-get-autoremove b/test/integration/test-apt-get-autoremove
index dc30cde34..68ea1c574 100755
--- a/test/integration/test-apt-get-autoremove
+++ b/test/integration/test-apt-get-autoremove
@@ -49,3 +49,23 @@ Install: unrelated:i386 (1), debhelper:i386 (8.0.0), po-debconf:i386 (1.0.16, au
Remove: debhelper:i386 (8.0.0)
Remove: po-debconf:i386 (1.0.16)'
+
+testsuccess aptget install debhelper -y
+testdpkginstalled 'unrelated' 'debhelper' 'po-debconf'
+testsuccess aptmark auto debhelper
+
+testmarkedauto 'debhelper' 'po-debconf'
+testequal 'Reading package lists...
+Building dependency tree...
+Reading state information...
+The following packages will be REMOVED:
+ debhelper po-debconf
+0 upgraded, 0 newly installed, 2 to remove and 0 not upgraded.
+Remv debhelper [8.0.0]
+Remv po-debconf [1.0.16]' aptget autoremove -s
+
+testsuccess aptmark hold debhelper
+testequal 'Reading package lists...
+Building dependency tree...
+Reading state information...
+0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.' aptget autoremove -s
diff --git a/test/integration/test-apt-get-download b/test/integration/test-apt-get-download
index 420b2e380..6eac079f3 100755
--- a/test/integration/test-apt-get-download
+++ b/test/integration/test-apt-get-download
@@ -20,13 +20,14 @@ testdownload() {
fi
msgtest "Test download of package file $1 with" "$APT"
aptget -qq download ${APT} && test -f $1 && msgpass || msgfail
+ rm $1
}
testdownload apt_1.0_all.deb apt stable
testdownload apt_2.0_all.deb apt
DEBFILE="$(readlink -f aptarchive)/pool/apt_2.0_all.deb"
-testequal "'file://${DEBFILE}' apt_2.0_all.deb $(stat -c%s $DEBFILE) sha512:$(sha512sum $DEBFILE | cut -d' ' -f 1)" aptget download apt --print-uris
+testequal "'file://${DEBFILE}' apt_2.0_all.deb $(stat -c%s $DEBFILE) SHA512:$(sha512sum $DEBFILE | cut -d' ' -f 1)" aptget download apt --print-uris
# deb:677887
testequal "E: Can't find a source to download version '1.0' of 'vrms:i386'" aptget download vrms
diff --git a/test/integration/test-bug-254770-segfault-if-cache-not-buildable b/test/integration/test-bug-254770-segfault-if-cache-not-buildable
index 8fa337ccc..59102ddc9 100755
--- a/test/integration/test-bug-254770-segfault-if-cache-not-buildable
+++ b/test/integration/test-bug-254770-segfault-if-cache-not-buildable
@@ -18,7 +18,7 @@ testsegfault() {
msgpass
else
echo
- echo $TEST
+ echo "$TEST"
msgfail
fi
}
diff --git a/test/integration/test-bug-596498-trusted-unsigned-repo b/test/integration/test-bug-596498-trusted-unsigned-repo
index 5c643c40e..06c9c8285 100755
--- a/test/integration/test-bug-596498-trusted-unsigned-repo
+++ b/test/integration/test-bug-596498-trusted-unsigned-repo
@@ -21,6 +21,9 @@ DEBFILE='rootdir/etc/apt/sources.list.d/apt-test-unstable-deb.list'
testequal "$PKGTEXT
Download complete and in download only mode" aptget install cool --assume-no -d
+testequal "$PKGTEXT
+Download complete and in download only mode" aptget install cool --assume-no -d --allow-unauthenticated
+
sed -i -e 's#deb#deb [trusted=no]#' $DEBFILE
aptgetupdate
@@ -40,6 +43,12 @@ WARNING: The following packages cannot be authenticated!
Install these packages without verification? [y/N] N
E: Some packages could not be authenticated" aptget install cool --assume-no -d
+testequal "$PKGTEXT
+WARNING: The following packages cannot be authenticated!
+ cool
+Authentication warning overridden.
+Download complete and in download only mode" aptget install cool --assume-no -d --allow-unauthenticated
+
sed -i -e 's#deb#deb [trusted=yes]#' $DEBFILE
aptgetupdate
diff --git a/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted b/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted
new file mode 100755
index 000000000..633c197c0
--- /dev/null
+++ b/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted
@@ -0,0 +1,63 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'i386'
+
+buildsimplenativepackage 'cool' 'i386' '1.0' 'unstable'
+
+setupaptarchive --no-update
+
+testfileexists() {
+ msgtest 'Test for existence of file' "$1"
+ test -e "$1" && msgpass || msgfail
+ rm -f "$1"
+}
+
+testfilemissing() {
+ msgtest 'Test for non-existence of file' "$1"
+ test -e "$1" && msgfail || msgpass
+ rm -f "$1"
+}
+
+testrun() {
+ rm -rf rootdir/var/lib/apt
+ testsuccess aptget update
+
+ if [ "$1" = 'trusted' ]; then
+ testsuccess aptget download cool
+ testfileexists 'cool_1.0_i386.deb'
+
+ testsuccess aptget download cool --allow-unauthenticated
+ testfileexists 'cool_1.0_i386.deb'
+ else
+ testfailure aptget download cool
+ testfilemissing 'cool_1.0_i386.deb'
+
+ testsuccess aptget download cool --allow-unauthenticated
+ testfileexists 'cool_1.0_i386.deb'
+ fi
+
+ mv aptarchive/pool/cool_1.0_i386.deb aptarchive/pool/cool_1.0_i386.deb.bak
+ echo 'this is not a good package' > aptarchive/pool/cool_1.0_i386.deb
+ testfailure aptget download cool
+ testfilemissing cool_1.0_i386.deb
+
+ testfailure aptget download cool --allow-unauthenticated # unauthenticated doesn't mean unchecked
+ testfilemissing cool_1.0_i386.deb
+
+ rm -f aptarchive/pool/cool_1.0_i386.deb
+ mv aptarchive/pool/cool_1.0_i386.deb.bak aptarchive/pool/cool_1.0_i386.deb
+ testsuccess aptget download cool --allow-unauthenticated
+ testfileexists 'cool_1.0_i386.deb'
+}
+
+testrun 'trusted'
+
+find aptarchive/ \( -name 'Release.gpg' -o -name 'InRelease' \) -delete
+testrun 'untrusted'
+
+changetowebserver
+testrun 'untrusted'
diff --git a/test/integration/test-bug-633350-do-not-kill-last-char-in-Release b/test/integration/test-bug-633350-do-not-kill-last-char-in-Release
index 2aae7cfcc..988f8c9d0 100755
--- a/test/integration/test-bug-633350-do-not-kill-last-char-in-Release
+++ b/test/integration/test-bug-633350-do-not-kill-last-char-in-Release
@@ -8,7 +8,7 @@ configarchitecture 'amd64'
insertpackage 'unstable' 'cool' 'amd64' '1.0'
-setupaptarchive 2> /dev/null
+setupaptarchive --no-update
echo 'NotAutomatic: yes' >> aptarchive/dists/unstable/Release
diff --git a/test/integration/test-bug-679371-apt-get-autoclean-multiarch b/test/integration/test-bug-679371-apt-get-autoclean-multiarch
index b62d437aa..3de7d69f9 100755
--- a/test/integration/test-bug-679371-apt-get-autoclean-multiarch
+++ b/test/integration/test-bug-679371-apt-get-autoclean-multiarch
@@ -17,7 +17,10 @@ changetowebserver
testsuccess aptget update
testsuccess aptget install pkgall pkgnative pkgforeign -y
-testdpkginstalled pkgall pkgnative pkgforeign
+# if we work with an old dpkg, pkgforeign will be listed differently,
+# so check the install status with aptcache instead
+testdpkginstalled pkgall pkgnative
+testsuccess aptcache show pkgforeign/installed
testequal 'Reading package lists...
Building dependency tree...
diff --git a/test/integration/test-bug-686346-package-missing-architecture b/test/integration/test-bug-686346-package-missing-architecture
index 3b02811ca..dc51861ab 100755
--- a/test/integration/test-bug-686346-package-missing-architecture
+++ b/test/integration/test-bug-686346-package-missing-architecture
@@ -73,7 +73,7 @@ insertinstalledpackage 'pkgb' 'none' '1'
insertinstalledpackage 'pkgf' 'none' '1' 'Conflicts: pkgb'
insertinstalledpackage 'pkgg' 'amd64' '1' 'Conflicts: pkgb'
insertinstalledpackage 'pkgb' 'amd64' '2'
-testequal "Reading package lists...
+testequalor2 "Reading package lists...
Building dependency tree...
Reading state information...
You might want to run 'apt-get -f install' to correct these.
@@ -84,6 +84,17 @@ The following packages have unmet dependencies:
Conflicts: pkgb but 2 is installed
pkgg : Conflicts: pkgb but 2 is installed
Conflicts: pkgb:none but 1 is installed
+E: Unmet dependencies. Try using -f." "Reading package lists...
+Building dependency tree...
+Reading state information...
+You might want to run 'apt-get -f install' to correct these.
+The following packages have unmet dependencies:
+ pkgb : Conflicts: pkgb:none but 1 is installed
+ pkgb:none : Conflicts: pkgb but 2 is installed
+ pkgf:none : Conflicts: pkgb but 2 is installed
+ Conflicts: pkgb:none but 1 is installed
+ pkgg : Conflicts: pkgb but 2 is installed
+ Conflicts: pkgb:none but 1 is installed
E: Unmet dependencies. Try using -f." aptget check
# check that dependencies are generated for none-packages
diff --git a/test/integration/test-bug-722207-print-uris-even-if-very-quiet b/test/integration/test-bug-722207-print-uris-even-if-very-quiet
new file mode 100755
index 000000000..f2d95da19
--- /dev/null
+++ b/test/integration/test-bug-722207-print-uris-even-if-very-quiet
@@ -0,0 +1,30 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture 'amd64'
+
+insertinstalledpackage 'apt' 'all' '1'
+insertpackage 'unstable' 'apt' 'all' '2'
+insertsource 'unstable' 'apt' 'all' '2'
+insertsource 'unstable' 'apt2' 'all' '1'
+
+setupaptarchive
+
+APTARCHIVE=$(readlink -f ./aptarchive)
+
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget upgrade -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget dist-upgrade -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget install apt -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget download apt -qq --print-uris
+testequal "'file://${APTARCHIVE}/apt_2.dsc' apt_2.dsc 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e
+'file://${APTARCHIVE}/apt_2.tar.gz' apt_2.tar.gz 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e" aptget source apt -qq --print-uris
+testequal "'http://packages.debian.org/changelogs/pool/main/apt/apt_2/changelog'" aptget changelog apt -qq --print-uris
+
+testequal "'file://${APTARCHIVE}/apt_2.dsc' apt_2.dsc 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e
+'file://${APTARCHIVE}/apt_2.tar.gz' apt_2.tar.gz 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e
+'file://${APTARCHIVE}/apt2_1.dsc' apt2_1.dsc 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e
+'file://${APTARCHIVE}/apt2_1.tar.gz' apt2_1.tar.gz 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e" aptget source apt apt2 -qq --print-uris
diff --git a/test/integration/test-bug-723586-any-stripped-in-single-arch b/test/integration/test-bug-723586-any-stripped-in-single-arch
new file mode 100755
index 000000000..392b88e9f
--- /dev/null
+++ b/test/integration/test-bug-723586-any-stripped-in-single-arch
@@ -0,0 +1,54 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+insertinstalledpackage 'python3' 'all' '3.2.3-6'
+
+insertpackage 'unstable' 'python3' 'amd64' '3.3.2-16' 'Multi-Arch: allowed'
+insertpackage 'stable' 'python3-gnupg' 'all' '0.3.5-2' 'Depends: python3:any (>= 3.2.3-3~)'
+
+insertpackage 'unstable' 'python-mips' 'amd64' '3' 'Depends: python3:mips'
+
+setupaptarchive
+
+INSTALLLOG='Reading package lists...
+Building dependency tree...
+The following extra packages will be installed:
+ python3
+The following NEW packages will be installed:
+ python3-gnupg
+The following packages will be upgraded:
+ python3
+1 upgraded, 1 newly installed, 0 to remove and 0 not upgraded.
+Inst python3 [3.2.3-6] (3.3.2-16 unstable [amd64])
+Inst python3-gnupg (0.3.5-2 stable [all])
+Conf python3 (3.3.2-16 unstable [amd64])
+Conf python3-gnupg (0.3.5-2 stable [all])'
+
+FAILLOG='Reading package lists...
+Building dependency tree...
+Some packages could not be installed. This may mean that you have
+requested an impossible situation or if you are using the unstable
+distribution that some required packages have not yet been created
+or been moved out of Incoming.
+The following information may help to resolve the situation:
+
+The following packages have unmet dependencies:
+ python-mips : Depends: python3:mips but it is not installable
+E: Unable to correct problems, you have held broken packages.'
+
+testequal "$INSTALLLOG" aptget install python3-gnupg -s
+aptcache showpkg python3 > showpkg.log
+testequal "$FAILLOG" aptget install python-mips -s
+
+# same test, but this time in a multi-arch environment
+configarchitecture 'amd64' 'armhf'
+rm rootdir/var/cache/apt/*.bin
+
+testequal "$INSTALLLOG" aptget install python3-gnupg -s
+testequal "$(sed 's#3.3.2-16 - python3#3.3.2-16 - python3:any:armhf python3#' showpkg.log)" aptcache showpkg python3
+testequal "$FAILLOG" aptget install python-mips -s
diff --git a/test/integration/test-bug-723705-tagfile-truncates-fields b/test/integration/test-bug-723705-tagfile-truncates-fields
new file mode 100755
index 000000000..3180e7fc9
--- /dev/null
+++ b/test/integration/test-bug-723705-tagfile-truncates-fields
@@ -0,0 +1,33 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+setupaptarchive
+
+aptget install --print-uris -y cdebconf-newt-terminal cdebconf-gtk-terminal 2>&1 | sed 's#file:///tmp/tmp.[^/]\+#file:///tmp#g' > filename.log
+
+testfileequal filename.log "Reading package lists...
+Building dependency tree...
+The following extra packages will be installed:
+ cdebconf-gtk-udeb cdebconf-newt-udeb cdebconf-udeb libc6-udeb
+ libglib2.0-udeb libgtk2.0-0-udeb libvte9-udeb
+The following NEW packages will be installed:
+ cdebconf-gtk-terminal cdebconf-gtk-udeb cdebconf-newt-terminal
+ cdebconf-newt-udeb cdebconf-udeb libc6-udeb libglib2.0-udeb libgtk2.0-0-udeb
+ libvte9-udeb
+0 upgraded, 9 newly installed, 0 to remove and 0 not upgraded.
+Need to get 0 B/4774 kB of archives.
+After this operation, 19.8 MB of additional disk space will be used.
+'file:///tmp/aptarchive/pool/main/c/cdebconf/cdebconf-udeb_0.185_amd64.udeb' cdebconf-udeb_0.185_amd64.udeb 77376 MD5Sum:e3883706fdbf54c2e5ea959c92b2d37f
+'file:///tmp/aptarchive/pool/main/c/cdebconf/cdebconf-gtk-udeb_0.185_amd64.udeb' cdebconf-gtk-udeb_0.185_amd64.udeb 27278 MD5Sum:a1bbbc1d4fb8e0615b5621abac021924
+'file:///tmp/aptarchive/pool/main/c/cdebconf/cdebconf-newt-udeb_0.185_amd64.udeb' cdebconf-newt-udeb_0.185_amd64.udeb 19192 MD5Sum:de27807f56dae2f2403b3322d5fe6bd2
+'file:///tmp/aptarchive/pool/main/g/glib2.0/libglib2.0-udeb_2.36.4-1_amd64.udeb' libglib2.0-udeb_2.36.4-1_amd64.udeb 1714604 MD5Sum:72da029f1bbb36057d874f1f82a5d00a
+'file:///tmp/aptarchive/pool/main/e/eglibc/libc6-udeb_2.17-92+b1_amd64.udeb' libc6-udeb_2.17-92+b1_amd64.udeb 1056000 MD5Sum:7fd7032eeeecf7f76eff79a0543fbd72
+'file:///tmp/aptarchive/pool/main/g/gtk+2.0/libgtk2.0-0-udeb_2.24.20-1_amd64.udeb' libgtk2.0-0-udeb_2.24.20-1_amd64.udeb 1643046 MD5Sum:25513478eb2e02e5766c0eea0b411ca9
+'file:///tmp/aptarchive/pool/main/v/vte/libvte9-udeb_0.28.2-5_amd64.udeb' libvte9-udeb_1%3a0.28.2-5_amd64.udeb 216968 MD5Sum:7da7201effaf5ced19abd9d0b45aa2c6
+'file:///tmp/aptarchive/pool/main/c/cdebconf-terminal/cdebconf-gtk-terminal_0.22_amd64.udeb' cdebconf-gtk-terminal_0.22_amd64.udeb 14734 MD5Sum:f9c3a7354560cb88e0396e2b7ba54363
+'file:///tmp/aptarchive/pool/main/c/cdebconf-terminal/cdebconf-newt-terminal_0.22_amd64.udeb' cdebconf-newt-terminal_0.22_amd64.udeb 4538 MD5Sum:20db6152fce5081fcbf49c7c08f21246"
diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support
new file mode 100755
index 000000000..8d1c51ae0
--- /dev/null
+++ b/test/integration/test-partial-file-support
@@ -0,0 +1,107 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+changetowebserver
+
+copysource() {
+ dd if="$1" bs=1 count="$2" of="$3" 2>/dev/null
+ touch -d "$(stat --format '%y' "${TESTFILE}")" "$3"
+}
+
+testdownloadfile() {
+ local DOWNLOG='download-testfile.log'
+ rm -f "$DOWNLOG"
+ msgtest "Testing download of file $2 with" "$1"
+ if ! downloadfile "$2" "$3" > "$DOWNLOG"; then
+ cat "$DOWNLOG"
+ msgfail
+ else
+ msgpass
+ fi
+ cat "$DOWNLOG" | while read field hash; do
+ local EXPECTED
+ case "$field" in
+ 'MD5Sum-Hash:') EXPECTED="$(md5sum "$TESTFILE" | cut -d' ' -f 1)";;
+ 'SHA1-Hash:') EXPECTED="$(sha1sum "$TESTFILE" | cut -d' ' -f 1)";;
+ 'SHA256-Hash:') EXPECTED="$(sha256sum "$TESTFILE" | cut -d' ' -f 1)";;
+ 'SHA512-Hash:') EXPECTED="$(sha512sum "$TESTFILE" | cut -d' ' -f 1)";;
+ *) continue;;
+ esac
+ if [ "$4" = '=' ]; then
+ msgtest 'Test downloaded file for correct' "$field"
+ else
+ msgtest 'Test downloaded file does not match in' "$field"
+ fi
+ if [ "$EXPECTED" "$4" "$hash" ]; then
+ msgpass
+ else
+ cat "$DOWNLOG"
+ msgfail "expected: $EXPECTED ; got: $hash"
+ fi
+ done
+}
+
+testwebserverlaststatuscode() {
+ STATUS="$(mktemp)"
+ addtrap "rm $STATUS;"
+ msgtest 'Test last status code from the webserver was' "$1"
+ downloadfile "http://localhost:8080/_config/find/aptwebserver::last-status-code" "$STATUS" >/dev/null
+ if [ "$(cat "$STATUS")" = "$1" ]; then
+ msgpass
+ else
+ cat download-testfile.log
+ msgfail "Status was $(cat "$STATUS")"
+ fi
+}
+
+
+TESTFILE='aptarchive/testfile'
+cp -a ${TESTDIR}/framework $TESTFILE
+
+testrun() {
+ downloadfile "$1/_config/set/aptwebserver::support::range/true" '/dev/null' >/dev/null
+ testwebserverlaststatuscode '200'
+
+ copysource $TESTFILE 0 ./testfile
+ testdownloadfile 'no data' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '200'
+
+ copysource $TESTFILE 20 ./testfile
+ testdownloadfile 'valid partial data' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '206'
+
+ copysource /dev/zero 20 ./testfile
+ testdownloadfile 'invalid partial data' "${1}/testfile" './testfile' '!='
+ testwebserverlaststatuscode '206'
+
+ copysource $TESTFILE 1M ./testfile
+ testdownloadfile 'completely downloaded file' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '416'
+
+ copysource /dev/zero 1M ./testfile
+ testdownloadfile 'too-big partial file' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '200'
+
+ copysource /dev/zero 20 ./testfile
+ touch ./testfile
+ testdownloadfile 'old data' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '200'
+
+ downloadfile "$1/_config/set/aptwebserver::support::range/false" '/dev/null' >/dev/null
+ testwebserverlaststatuscode '200'
+
+ copysource $TESTFILE 20 ./testfile
+ testdownloadfile 'no server support' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '200'
+}
+
+testrun 'http://localhost:8080'
+
+changetohttpswebserver
+
+testrun 'https://localhost:4433'
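The timestamp handling is what steers the If-Range cases above: copysource clones the mtime of the served ${TESTFILE}, so the webserver considers the partial data current and answers with 206, while the extra touch in the 'old data' case makes the local copy newer, the If-Range check fails and a full 200 is sent instead. Condensed into a hypothetical standalone snippet that reuses the helpers defined above:

    copysource $TESTFILE 20 ./testfile   # same mtime as the served copy -> ranged 206
    touch ./testfile                     # newer mtime -> If-Range mismatch -> full 200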
diff --git a/test/integration/test-releasefile-verification b/test/integration/test-releasefile-verification
index a9f4b9775..9d34a521a 100755
--- a/test/integration/test-releasefile-verification
+++ b/test/integration/test-releasefile-verification
@@ -11,18 +11,24 @@ buildaptarchive
setupflataptarchive
changetowebserver
+downloadfile "http://localhost:8080/_config/set/aptwebserver::support::range/false" '/dev/null' >/dev/null
+
prepare() {
local DATE="${2:-now}"
- if [ "$DATE" = 'now' -a "$1" = "${PKGFILE}-new" ]; then
- DATE='now + 6 days'
+ if [ "$DATE" = 'now' ]; then
+ if [ "$1" = "${PKGFILE}-new" ]; then
+ DATE='now - 1 day'
+ else
+ DATE='now - 7 day'
+ fi
fi
for release in $(find rootdir/var/lib/apt/lists 2> /dev/null); do
- touch -d 'now - 6 hours' $release
+ touch -d 'now - 1 year' $release
done
aptget clean
cp $1 aptarchive/Packages
find aptarchive -name 'Release' -delete
- compressfile 'aptarchive/Packages'
+ compressfile 'aptarchive/Packages' "$DATE"
generatereleasefiles "$DATE"
}
@@ -85,13 +91,34 @@ touch aptarchive/apt.deb
PKGFILE="${TESTDIR}/$(echo "$(basename $0)" | sed 's#^test-#Packages-#')"
+updatesuccess() {
+ local LOG='update.log'
+ if aptget update >$LOG 2>&1 && ! grep -q -E '^(W|E): ' $LOG; then
+ msgpass
+ else
+ cat $LOG
+ msgfail
+ fi
+}
+
+updatefailure() {
+ local LOG='update.log'
+ aptget update >$LOG 2>&1 || true
+ if grep -q -E "$1" $LOG; then
+ msgpass
+ else
+ cat $LOG
+ msgfail
+ fi
+}
+
runtest() {
prepare ${PKGFILE}
rm -rf rootdir/var/lib/apt/lists
signreleasefiles 'Joe Sixpack'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Cold archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE})
" aptcache show apt
installaptold
@@ -100,7 +127,7 @@ runtest() {
signreleasefiles 'Joe Sixpack'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Good warm archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
installaptnew
@@ -111,7 +138,7 @@ runtest() {
signreleasefiles 'Rex Expired'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Cold archive signed by' 'Rex Expired'
- aptget update 2>&1 | grep -E '^W: .* KEYEXPIRED' > /dev/null && msgpass || msgfail
+ updatefailure '^W: .* KEYEXPIRED'
testequal "$(cat ${PKGFILE})
" aptcache show apt
failaptold
@@ -122,7 +149,7 @@ runtest() {
signreleasefiles 'Marvin Paranoid'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Cold archive signed by' 'Marvin Paranoid'
- aptget update 2>&1 | grep -E '^W: .* NO_PUBKEY' > /dev/null && msgpass || msgfail
+ updatefailure '^W: .* NO_PUBKEY'
testequal "$(cat ${PKGFILE})
" aptcache show apt
failaptold
@@ -136,7 +163,7 @@ runtest() {
signreleasefiles 'Joe Sixpack'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Bad warm archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
installaptnew
@@ -147,7 +174,7 @@ runtest() {
signreleasefiles 'Joe Sixpack'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Cold archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE})
" aptcache show apt
installaptold
@@ -156,7 +183,7 @@ runtest() {
signreleasefiles 'Marvin Paranoid'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Good warm archive signed by' 'Marvin Paranoid'
- aptget update 2>&1 | grep -E '^W: .* NO_PUBKEY' > /dev/null && msgpass || msgfail
+ updatefailure '^W: .* NO_PUBKEY'
testequal "$(cat ${PKGFILE})
" aptcache show apt
installaptold
@@ -166,7 +193,7 @@ runtest() {
signreleasefiles 'Rex Expired'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Good warm archive signed by' 'Rex Expired'
- aptget update 2>&1 | grep -E '^W: .* KEYEXPIRED' > /dev/null && msgpass || msgfail
+ updatefailure '^W: .* KEYEXPIRED'
testequal "$(cat ${PKGFILE})
" aptcache show apt
installaptold
@@ -176,7 +203,7 @@ runtest() {
signreleasefiles
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Good warm archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
installaptnew
@@ -187,7 +214,7 @@ runtest2() {
rm -rf rootdir/var/lib/apt/lists
signreleasefiles 'Joe Sixpack'
msgtest 'Cold archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
# New .deb but now an unsigned archive. For example MITM to circumvent
# package verification.
@@ -195,7 +222,7 @@ runtest2() {
find aptarchive/ -name InRelease -delete
find aptarchive/ -name Release.gpg -delete
msgtest 'Warm archive signed by' 'nobody'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
failaptnew
@@ -203,7 +230,7 @@ runtest2() {
# Unsigned archive from the beginning must also be detected.
rm -rf rootdir/var/lib/apt/lists
msgtest 'Cold archive signed by' 'nobody'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
failaptnew
diff --git a/test/integration/test-ubuntu-bug-859188-multiarch-reinstall b/test/integration/test-ubuntu-bug-859188-multiarch-reinstall
index 0fdf97485..be86f2e91 100755
--- a/test/integration/test-ubuntu-bug-859188-multiarch-reinstall
+++ b/test/integration/test-ubuntu-bug-859188-multiarch-reinstall
@@ -8,14 +8,13 @@ configarchitecture 'amd64' 'i386' 'armel'
buildsimplenativepackage 'libsame' 'amd64,i386,armel' '1.0' 'unstable' 'Multi-Arch: same'
-# FIXME: hack around dpkg's current inability to handle multiarch, a clean install would be better…
-insertinstalledpackage 'libsame' 'amd64,i386' '1.0' 'Multi-Arch: same'
-sed -e 's#/installed#/unstable#' -e 's#Installed-Size: 42#Installed-Size: 1#' -i rootdir/var/lib/dpkg/status
-
setupaptarchive
+testsuccess aptget install libsame libsame:i386
+
REINSTALL='Reading package lists...
Building dependency tree...
+Reading state information...
0 upgraded, 0 newly installed, 2 reinstalled, 0 to remove and 0 not upgraded.
Inst libsame [1.0] (1.0 unstable [amd64])
Inst libsame:i386 [1.0] (1.0 unstable [i386])
diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc
index fde95fec9..4dae342dd 100644
--- a/test/interactive-helper/aptwebserver.cc
+++ b/test/interactive-helper/aptwebserver.cc
@@ -100,8 +100,13 @@ bool sendHead(int const client, int const httpcode, std::list<std::string> &head
std::string response("HTTP/1.1 ");
response.append(httpcodeToStr(httpcode));
headers.push_front(response);
+ _config->Set("APTWebserver::Last-Status-Code", httpcode);
- headers.push_back("Server: APT webserver");
+ std::stringstream buffer;
+ _config->Dump(buffer, "aptwebserver::response-header", "%t: %v%n", false);
+ std::vector<std::string> addheaders = VectorizeString(buffer.str(), '\n');
+ for (std::vector<std::string>::const_iterator h = addheaders.begin(); h != addheaders.end(); ++h)
+ headers.push_back(*h);
std::string date("Date: ");
date.append(TimeRFC1123(time(NULL)));
@@ -156,15 +161,29 @@ void sendError(int const client, int const httpcode, std::string const &request,
std::string response("<html><head><title>");
response.append(httpcodeToStr(httpcode)).append("</title></head>");
response.append("<body><h1>").append(httpcodeToStr(httpcode)).append("</h1>");
- if (error.empty() == false)
- response.append("<p><em>Error</em>: ").append(error).append("</p>");
- response.append("This error is a result of the request: <pre>");
+ if (httpcode != 200)
+ {
+ if (error.empty() == false)
+ response.append("<p><em>Error</em>: ").append(error).append("</p>");
+ response.append("This error is a result of the request: <pre>");
+ }
+ else
+ {
+ if (error.empty() == false)
+ response.append("<p><em>Success</em>: ").append(error).append("</p>");
+ response.append("The successfully executed operation was requested by: <pre>");
+ }
response.append(request).append("</pre></body></html>");
addDataHeaders(headers, response);
sendHead(client, httpcode, headers);
if (content == true)
sendData(client, response);
}
+void sendSuccess(int const client, std::string const &request,
+ bool content, std::string const &error = "")
+{
+ sendError(client, 200, request, content, error);
+}
/*}}}*/
void sendRedirect(int const client, int const httpcode, std::string const &uri,/*{{{*/
std::string const &request, bool content)
@@ -365,6 +384,49 @@ bool parseFirstLine(int const client, std::string const &request, /*{{{*/
return true;
}
/*}}}*/
+bool handleOnTheFlyReconfiguration(int const client, std::string const &request, std::vector<std::string> const &parts)/*{{{*/
+{
+ size_t const pcount = parts.size();
+ if (pcount == 4 && parts[1] == "set")
+ {
+ _config->Set(parts[2], parts[3]);
+ sendSuccess(client, request, true, "Option '" + parts[2] + "' was set to '" + parts[3] + "'!");
+ return true;
+ }
+ else if (pcount == 4 && parts[1] == "find")
+ {
+ std::list<std::string> headers;
+ std::string response = _config->Find(parts[2], parts[3]);
+ addDataHeaders(headers, response);
+ sendHead(client, 200, headers);
+ sendData(client, response);
+ return true;
+ }
+ else if (pcount == 3 && parts[1] == "find")
+ {
+ std::list<std::string> headers;
+ if (_config->Exists(parts[2]) == true)
+ {
+ std::string response = _config->Find(parts[2]);
+ addDataHeaders(headers, response);
+ sendHead(client, 200, headers);
+ sendData(client, response);
+ return true;
+ }
+ sendError(client, 404, request, true, "Requested Configuration option doesn't exist.");
+ return false;
+ }
+ else if (pcount == 3 && parts[1] == "clear")
+ {
+ _config->Clear(parts[2]);
+ sendSuccess(client, request, true, "Option '" + parts[2] + "' was cleared.");
+ return true;
+ }
+
+ sendError(client, 400, request, true, "Unknown on-the-fly configuration request");
+ return false;
+}
+ /*}}}*/
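This gives tests an HTTP knob into the webserver's configuration space; the shell side already talks to it through downloadfile. Since the request path is simply split on '/', option values must not contain a slash. The three supported forms as a usage sketch:

    # set an option (this is how test-partial-file-support toggles Range support)
    downloadfile 'http://localhost:8080/_config/set/aptwebserver::support::range/false' '/dev/null'
    # read it back: '/_config/find/<option>' returns the current value as the response body
    # (404 if unset); '/_config/find/<option>/<default>' passes the extra component on as the default
    downloadfile 'http://localhost:8080/_config/find/aptwebserver::support::range' './value'
    # clear it again
    downloadfile 'http://localhost:8080/_config/clear/aptwebserver::support::range' '/dev/null'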
int main(int const argc, const char * argv[])
{
CommandLine::Args Args[] = {
@@ -455,6 +517,9 @@ int main(int const argc, const char * argv[])
listen(sock, 1);
/*}}}*/
+ _config->CndSet("aptwebserver::response-header::Server", "APT webserver");
+ _config->CndSet("aptwebserver::response-header::Accept-Ranges", "bytes");
+
std::vector<std::string> messages;
int client;
while ((client = accept(sock, NULL, NULL)) != -1)
@@ -475,6 +540,17 @@ int main(int const argc, const char * argv[])
if (parseFirstLine(client, *m, filename, sendContent, closeConnection) == false)
continue;
+ // special webserver command request
+ if (filename.length() > 1 && filename[0] == '_')
+ {
+ std::vector<std::string> parts = VectorizeString(filename, '/');
+ if (parts[0] == "_config")
+ {
+ handleOnTheFlyReconfiguration(client, *m, parts);
+ continue;
+ }
+ }
+
// string replacements in the requested filename
::Configuration::Item const *Replaces = _config->Tree("aptwebserver::redirect::replace");
if (Replaces != NULL)
@@ -532,6 +608,60 @@ int main(int const argc, const char * argv[])
}
}
+ if (_config->FindB("aptwebserver::support::range", true) == true)
+ condition = LookupTag(*m, "Range", "");
+ else
+ condition.clear();
+ if (condition.empty() == false && strncmp(condition.c_str(), "bytes=", 6) == 0)
+ {
+ time_t cache;
+ std::string ifrange;
+ if (_config->FindB("aptwebserver::support::if-range", true) == true)
+ ifrange = LookupTag(*m, "If-Range", "");
+ bool validrange = (ifrange.empty() == true ||
+ (RFC1123StrToTime(ifrange.c_str(), cache) == true &&
+ cache <= data.ModificationTime()));
+
+ // FIXME: support multiple byte-ranges (APT clients do not do this)
+ if (condition.find(',') == std::string::npos)
+ {
+ size_t start = 6;
+ unsigned long long filestart = strtoull(condition.c_str() + start, NULL, 10);
+ // FIXME: no support for last-byte-pos being not the end of the file (APT clients do not do this)
+ size_t dash = condition.find('-') + 1;
+ unsigned long long fileend = strtoull(condition.c_str() + dash, NULL, 10);
+ unsigned long long filesize = data.FileSize();
+ if ((fileend == 0 || (fileend == filesize && fileend >= filestart)) &&
+ validrange == true)
+ {
+ if (filesize > filestart)
+ {
+ data.Skip(filestart);
+ std::ostringstream contentlength;
+ contentlength << "Content-Length: " << (filesize - filestart);
+ headers.push_back(contentlength.str());
+ std::ostringstream contentrange;
+ contentrange << "Content-Range: bytes " << filestart << "-"
+ << filesize - 1 << "/" << filesize;
+ headers.push_back(contentrange.str());
+ sendHead(client, 206, headers);
+ if (sendContent == true)
+ sendFile(client, data);
+ continue;
+ }
+ else
+ {
+ headers.push_back("Content-Length: 0");
+ std::ostringstream contentrange;
+ contentrange << "Content-Range: bytes */" << filesize;
+ headers.push_back(contentrange.str());
+ sendHead(client, 416, headers);
+ continue;
+ }
+ }
+ }
+ }
+
addFileHeaders(headers, data);
sendHead(client, 200, headers);
if (sendContent == true)
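For completeness, the range branch above can be poked at by hand against the test webserver. A hypothetical sketch, assuming the server from changetowebserver is listening on :8080, a file aptarchive/testfile exists, and a netcat variant that understands -q is installed:

    printf 'GET /testfile HTTP/1.1\r\nHost: localhost:8080\r\nRange: bytes=20-\r\nConnection: close\r\n\r\n' \
        | nc -q 1 localhost 8080
    # expected: 'HTTP/1.1 206 Partial Content' plus 'Content-Range: bytes 20-<size-1>/<size>';
    # a start offset at or beyond the file size yields 416 with 'Content-Range: bytes */<size>',
    # and with aptwebserver::support::range set to false the server falls back to a plain 200.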