-rw-r--r--  .travis.yml | 2
-rw-r--r--  BUGS | 9
-rw-r--r--  apt-inst/contrib/arfile.cc | 6
-rw-r--r--  apt-inst/contrib/extracttar.cc | 24
-rw-r--r--  apt-inst/deb/debfile.cc | 3
-rw-r--r--  apt-pkg/acquire-item.cc | 103
-rw-r--r--  apt-pkg/acquire-item.h | 19
-rw-r--r--  apt-pkg/aptconfiguration.cc | 2
-rw-r--r--  apt-pkg/contrib/fileutl.cc | 38
-rw-r--r--  apt-pkg/contrib/strutl.cc | 31
-rw-r--r--  apt-pkg/contrib/strutl.h | 25
-rw-r--r--  apt-pkg/deb/deblistparser.cc | 6
-rw-r--r--  apt-pkg/deb/dpkgpm.cc | 159
-rw-r--r--  apt-pkg/deb/dpkgpm.h | 4
-rw-r--r--  apt-pkg/depcache.cc | 7
-rw-r--r--  apt-pkg/depcache.h | 2
-rw-r--r--  apt-pkg/tagfile.cc | 2
-rw-r--r--  apt-private/makefile | 2
-rw-r--r--  apt-private/private-cmndline.cc | 13
-rw-r--r--  apt-private/private-download.cc | 96
-rw-r--r--  apt-private/private-download.h | 9
-rw-r--r--  apt-private/private-install.cc | 161
-rw-r--r--  apt-private/private-install.h | 3
-rw-r--r--  apt-private/private-list.cc | 15
-rw-r--r--  apt-private/private-output.cc | 38
-rw-r--r--  apt-private/private-search.cc | 6
-rw-r--r--  apt-private/private-show.cc | 7
-rw-r--r--  apt-private/private-upgrade.cc | 21
-rw-r--r--  cmdline/apt-get.cc | 125
-rw-r--r--  debian/apt.cron.daily | 2
-rw-r--r--  debian/apt.postinst | 2
-rw-r--r--  debian/changelog | 128
-rw-r--r--  debian/control | 3
-rw-r--r--  doc/apt-get.8.xml | 22
-rwxr-xr-x  dselect/install | 9
-rw-r--r--  methods/http.cc | 968
-rw-r--r--  methods/http.h | 137
-rw-r--r--  methods/https.cc | 138
-rw-r--r--  methods/https.h | 29
-rw-r--r--  methods/makefile | 6
-rw-r--r--  methods/server.cc | 665
-rw-r--r--  methods/server.h | 144
-rw-r--r--  po/it.po | 175
-rw-r--r--  po/vi.po | 98
-rw-r--r--  test/integration/Packages-bug-723705-tagfile-truncates-fields | 167
-rw-r--r--  test/integration/apt.pem | 49
-rw-r--r--  test/integration/framework | 224
-rwxr-xr-x  test/integration/skip-bug-601016-description-translation (renamed from test/integration/test-bug-601016-description-translation) | 0
-rw-r--r--  test/integration/status-bug-723705-tagfile-truncates-fields | 62
-rwxr-xr-x  test/integration/test-apt-cdrom | 5
-rwxr-xr-x  test/integration/test-apt-get-autoremove | 20
-rwxr-xr-x  test/integration/test-apt-get-download | 3
-rwxr-xr-x  test/integration/test-apt-get-upgrade | 78
-rwxr-xr-x  test/integration/test-bug-254770-segfault-if-cache-not-buildable | 2
-rwxr-xr-x  test/integration/test-bug-596498-trusted-unsigned-repo | 9
-rwxr-xr-x  test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted | 63
-rwxr-xr-x  test/integration/test-bug-633350-do-not-kill-last-char-in-Release | 2
-rwxr-xr-x  test/integration/test-bug-679371-apt-get-autoclean-multiarch | 5
-rwxr-xr-x  test/integration/test-bug-686346-package-missing-architecture | 13
-rwxr-xr-x  test/integration/test-bug-689582-100-char-long-path-names | 35
-rwxr-xr-x  test/integration/test-bug-722207-print-uris-even-if-very-quiet | 30
-rwxr-xr-x  test/integration/test-bug-723586-any-stripped-in-single-arch | 54
-rwxr-xr-x  test/integration/test-bug-723705-tagfile-truncates-fields | 33
-rwxr-xr-x  test/integration/test-hashsum-verification | 4
-rwxr-xr-x  test/integration/test-partial-file-support | 107
-rwxr-xr-x  test/integration/test-pdiff-usage | 4
-rwxr-xr-x  test/integration/test-releasefile-verification | 61
-rwxr-xr-x  test/integration/test-ubuntu-bug-859188-multiarch-reinstall | 7
-rw-r--r--  test/interactive-helper/aptwebserver.cc | 138
-rw-r--r--  test/libapt/hashsums_test.cc | 32
-rw-r--r--  test/libapt/strutil_test.cc | 27
71 files changed, 3083 insertions, 1615 deletions
diff --git a/.travis.yml b/.travis.yml
index 56536837f..2d9194c28 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,5 +1,5 @@
language: cpp
before_install:
- sudo apt-get update -q
- - sudo apt-get install -q dpkg-dev debhelper libdb-dev gettext libcurl4-gnutls-dev zlib1g-dev libbz2-dev xsltproc docbook-xsl docbook-xml po4a autotools-dev autoconf automake doxygen debiandoc-sgml
+ - sudo apt-get install -q --no-install-recommends dpkg-dev debhelper libdb-dev gettext libcurl4-gnutls-dev zlib1g-dev libbz2-dev xsltproc docbook-xsl docbook-xml po4a autotools-dev autoconf automake doxygen debiandoc-sgml stunnel4
script: make && make test && test/integration/run-tests
diff --git a/BUGS b/BUGS
deleted file mode 100644
index a7b6b1114..000000000
--- a/BUGS
+++ /dev/null
@@ -1,9 +0,0 @@
-
-DDTP problems:
---------------
-- apt-get update clean the /var/lib/apt/lists dir
- from all Translation-$index that are not in the current
- enviroment or Translations apt variable
-- there needs to be a list of locales (pt, sv, en) that need
- both language and country code to get the right file
- (is in the code in indexfile::LanguageCode(), just a bit ugly
diff --git a/apt-inst/contrib/arfile.cc b/apt-inst/contrib/arfile.cc
index d7ee528ba..9d84c1784 100644
--- a/apt-inst/contrib/arfile.cc
+++ b/apt-inst/contrib/arfile.cc
@@ -64,7 +64,7 @@ ARArchive::~ARArchive()
byte plain text header then the file data, another header, data, etc */
bool ARArchive::LoadHeaders()
{
- signed long Left = File.Size();
+ off_t Left = File.Size();
// Check the magic byte
char Magic[8];
@@ -123,7 +123,7 @@ bool ARArchive::LoadHeaders()
}
// Account for the AR header alignment
- unsigned Skip = Memb->Size % 2;
+ off_t Skip = Memb->Size % 2;
// Add it to the list
Memb->Next = List;
@@ -131,7 +131,7 @@ bool ARArchive::LoadHeaders()
Memb->Start = File.Tell();
if (File.Skip(Memb->Size + Skip) == false)
return false;
- if (Left < (signed)(Memb->Size + Skip))
+ if (Left < (off_t)(Memb->Size + Skip))
return _error->Error(_("Archive is too short"));
Left -= Memb->Size + Skip;
}
diff --git a/apt-inst/contrib/extracttar.cc b/apt-inst/contrib/extracttar.cc
index 49ed5db56..fb4db42f8 100644
--- a/apt-inst/contrib/extracttar.cc
+++ b/apt-inst/contrib/extracttar.cc
@@ -161,8 +161,8 @@ bool ExtractTar::Go(pkgDirStream &Stream)
return false;
// Loop over all blocks
- string LastLongLink;
- string LastLongName;
+ string LastLongLink, ItemLink;
+ string LastLongName, ItemName;
while (1)
{
bool BadRecord = false;
@@ -208,25 +208,23 @@ bool ExtractTar::Go(pkgDirStream &Stream)
StrToNum(Tar->Major,Itm.Major,sizeof(Tar->Major),8) == false ||
StrToNum(Tar->Minor,Itm.Minor,sizeof(Tar->Minor),8) == false)
return _error->Error(_("Corrupted archive"));
-
- // Grab the filename
+
+ // Grab the filename and link target: use last long name if one was
+ // set, otherwise use the header value as-is, but remember that it may
+ // fill the entire 100-byte block and needs to be zero-terminated.
+ // See Debian Bug #689582.
if (LastLongName.empty() == false)
Itm.Name = (char *)LastLongName.c_str();
else
- {
- Tar->Name[sizeof(Tar->Name)-1] = 0;
- Itm.Name = Tar->Name;
- }
+ Itm.Name = (char *)ItemName.assign(Tar->Name, sizeof(Tar->Name)).c_str();
if (Itm.Name[0] == '.' && Itm.Name[1] == '/' && Itm.Name[2] != 0)
Itm.Name += 2;
-
- // Grab the link target
- Tar->Name[sizeof(Tar->LinkName)-1] = 0;
- Itm.LinkTarget = Tar->LinkName;
if (LastLongLink.empty() == false)
Itm.LinkTarget = (char *)LastLongLink.c_str();
-
+ else
+ Itm.LinkTarget = (char *)ItemLink.assign(Tar->LinkName, sizeof(Tar->LinkName)).c_str();
+
// Convert the type over
switch (Tar->LinkFlag)
{
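
The extracttar.cc hunk above copies the fixed-width Name/LinkName header fields with an explicit length instead of relying on in-place NUL termination, because a path that fills all 100 bytes carries no terminating NUL (Debian bug #689582). A minimal stand-alone sketch of that length-bounded copy, illustrative only and not part of this commit:

// illustrative sketch of a length-bounded copy of a fixed-width tar field
#include <cstring>
#include <iostream>
#include <string>

int main()
{
   char Name[100];                        // stand-in for the tar header Name field
   std::memset(Name, 'a', sizeof(Name));  // a path that is exactly 100 chars long, no NUL
   std::string Item;
   Item.assign(Name, sizeof(Name));       // copies exactly 100 bytes; c_str() is always terminated
   std::cout << Item.size() << std::endl; // prints 100; shorter names would keep trailing NULs here
   return 0;
}
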
diff --git a/apt-inst/deb/debfile.cc b/apt-inst/deb/debfile.cc
index ab4037915..79434d8b5 100644
--- a/apt-inst/deb/debfile.cc
+++ b/apt-inst/deb/debfile.cc
@@ -51,8 +51,7 @@ debDebFile::debDebFile(FileFd &File) : File(File), AR(File)
!CheckMember("data.tar.bz2") &&
!CheckMember("data.tar.lzma") &&
!CheckMember("data.tar.xz")) {
- // FIXME: add data.tar.xz here - adding it now would require a Translation round for a very small gain
- _error->Error(_("This is not a valid DEB archive, it has no '%s', '%s' or '%s' member"), "data.tar.gz", "data.tar.bz2", "data.tar.lzma");
+ _error->Error(_("This is not a valid DEB archive, missing '%s' member"), "data.tar");
return;
}
}
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index da00c9dd5..b76921312 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -143,6 +143,32 @@ void pkgAcquire::Item::Rename(string From,string To)
}
}
/*}}}*/
+bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/
+{
+ if(FileExists(DestFile))
+ Rename(DestFile, DestFile + ".FAILED");
+
+ switch (error)
+ {
+ case HashSumMismatch:
+ ErrorText = _("Hash Sum mismatch");
+ Status = StatAuthError;
+ ReportMirrorFailure("HashChecksumFailure");
+ break;
+ case SizeMismatch:
+ ErrorText = _("Size mismatch");
+ Status = StatAuthError;
+ ReportMirrorFailure("SizeFailure");
+ break;
+ case InvalidFormat:
+ ErrorText = _("Invalid file format");
+ Status = StatError;
+ // do not report it as usually it's not the mirror's fault, but Portal/Proxy
+ break;
+ }
+ return false;
+}
+ /*}}}*/
// Acquire::Item::ReportMirrorFailure /*{{{*/
// ---------------------------------------------------------------------
void pkgAcquire::Item::ReportMirrorFailure(string FailCode)
@@ -595,9 +621,7 @@ void pkgAcqIndexDiffs::Finish(bool allDone)
if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile))
{
- Status = StatAuthError;
- ErrorText = _("MD5Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
Dequeue();
return;
}
@@ -866,10 +890,7 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash)
{
- Status = StatAuthError;
- ErrorText = _("Hash Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
- ReportMirrorFailure("HashChecksumFailure");
+ RenameOnError(HashSumMismatch);
return;
}
@@ -878,22 +899,18 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash,
if (Verify == true)
{
FileFd fd(DestFile, FileFd::ReadOnly);
- pkgTagSection sec;
- pkgTagFile tag(&fd);
-
- // Only test for correctness if the file is not empty (empty is ok)
- if (fd.Size() > 0) {
- if (_error->PendingError() || !tag.Step(sec)) {
- Status = StatError;
- _error->DumpErrors();
- Rename(DestFile,DestFile + ".FAILED");
- return;
- } else if (!sec.Exists("Package")) {
- Status = StatError;
- ErrorText = ("Encountered a section with no Package: header");
- Rename(DestFile,DestFile + ".FAILED");
- return;
- }
+ // Only test for correctness if the file is not empty (empty is ok)
+ if (fd.FileSize() > 0)
+ {
+ pkgTagSection sec;
+ pkgTagFile tag(&fd);
+
+ // all our current indexes have a field 'Package' in each section
+ if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false)
+ {
+ RenameOnError(InvalidFormat);
+ return;
+ }
}
}
@@ -1719,34 +1736,40 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
}
// check if we have one trusted source for the package. if so, switch
- // to "TrustedOnly" mode
+ // to "TrustedOnly" mode - but only if not in AllowUnauthenticated mode
+ bool const allowUnauth = _config->FindB("APT::Get::AllowUnauthenticated", false);
+ bool const debugAuth = _config->FindB("Debug::pkgAcquire::Auth", false);
+ bool seenUntrusted = false;
for (pkgCache::VerFileIterator i = Version.FileList(); i.end() == false; ++i)
{
pkgIndexFile *Index;
if (Sources->FindIndex(i.File(),Index) == false)
continue;
- if (_config->FindB("Debug::pkgAcquire::Auth", false))
- {
+
+ if (debugAuth == true)
std::cerr << "Checking index: " << Index->Describe()
- << "(Trusted=" << Index->IsTrusted() << ")\n";
- }
- if (Index->IsTrusted()) {
+ << "(Trusted=" << Index->IsTrusted() << ")" << std::endl;
+
+ if (Index->IsTrusted() == true)
+ {
Trusted = true;
- break;
+ if (allowUnauth == false)
+ break;
}
+ else
+ seenUntrusted = true;
}
// "allow-unauthenticated" restores apts old fetching behaviour
// that means that e.g. unauthenticated file:// uris are higher
// priority than authenticated http:// uris
- if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
+ if (allowUnauth == true && seenUntrusted == true)
Trusted = false;
// Select a source
if (QueueNext() == false && _error->PendingError() == false)
- _error->Error(_("I wasn't able to locate a file for the %s package. "
- "This might mean you need to manually fix this package."),
- Version.ParentPkg().Name());
+ _error->Error(_("Can't find a source to download version '%s' of '%s'"),
+ Version.VerStr(), Version.ParentPkg().FullName(false).c_str());
}
/*}}}*/
// AcqArchive::QueueNext - Queue the next file source /*{{{*/
@@ -1900,18 +1923,14 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash,
// Check the size
if (Size != Version->Size)
{
- Status = StatError;
- ErrorText = _("Size mismatch");
+ RenameOnError(SizeMismatch);
return;
}
// Check the hash
if(ExpectedHash.toStr() != CalcHash)
{
- Status = StatError;
- ErrorText = _("Hash Sum mismatch");
- if(FileExists(DestFile))
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
return;
}
@@ -2051,9 +2070,7 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash,
// Check the hash
if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash)
{
- Status = StatError;
- ErrorText = _("Hash Sum mismatch");
- Rename(DestFile,DestFile + ".FAILED");
+ RenameOnError(HashSumMismatch);
return;
}
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index 10c855e63..6b4f73708 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -83,7 +83,7 @@ class pkgAcquire::Item : public WeakPointable
* overwritten.
*/
void Rename(std::string From,std::string To);
-
+
public:
/** \brief The current status of this item. */
@@ -281,6 +281,21 @@ class pkgAcquire::Item : public WeakPointable
* pkgAcquire::Remove.
*/
virtual ~Item();
+
+ protected:
+
+ enum RenameOnErrorState {
+ HashSumMismatch,
+ SizeMismatch,
+ InvalidFormat
+ };
+
+ /** \brief Rename failed file and set error
+ *
+ * \param state the error we encountered
+ * \return false so callers can use it directly in their error path
+ */
+ bool RenameOnError(RenameOnErrorState const state);
};
/*}}}*/
/** \brief Information about an index patch (aka diff). */ /*{{{*/
@@ -982,7 +997,7 @@ class pkgAcqArchive : public pkgAcquire::Item
*
* \param Version The package version to download.
*
- * \param StoreFilename A location in which the actual filename of
+ * \param[out] StoreFilename A location in which the actual filename of
* the package should be stored. It will be set to a guessed
* basename in the constructor, and filled in with a fully
* qualified filename once the download finishes.
diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc
index 4f9b84e00..115d11616 100644
--- a/apt-pkg/aptconfiguration.cc
+++ b/apt-pkg/aptconfiguration.cc
@@ -453,7 +453,7 @@ void Configuration::setDefaultConfigurationForCompressors() {
_config->CndSet("Dir::Bin::bzip2", "/bin/bzip2");
_config->CndSet("Dir::Bin::xz", "/usr/bin/xz");
if (FileExists(_config->FindFile("Dir::Bin::xz")) == true) {
- _config->Clear("Dir::Bin::lzma");
+ _config->Set("Dir::Bin::lzma", _config->FindFile("Dir::Bin::xz"));
_config->Set("APT::Compressor::lzma::Binary", "xz");
if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) {
_config->Set("APT::Compressor::lzma::CompressArg::", "--format=lzma");
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index 47a91c294..0261119ba 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -656,9 +656,9 @@ string flNoLink(string File)
while (1)
{
// Read the link
- int Res;
+ ssize_t Res;
if ((Res = readlink(NFile.c_str(),Buffer,sizeof(Buffer))) <= 0 ||
- (unsigned)Res >= sizeof(Buffer))
+ (size_t)Res >= sizeof(Buffer))
return File;
// Append or replace the previous path
@@ -946,9 +946,6 @@ bool FileFd::Open(string FileName,unsigned int const Mode,APT::Configuration::Co
if ((Mode & Atomic) == Atomic)
{
Flags |= Replace;
- char *name = strdup((FileName + ".XXXXXX").c_str());
- TemporaryFileName = string(mktemp(name));
- free(name);
}
else if ((Mode & (Exclusive | Create)) == (Exclusive | Create))
{
@@ -971,11 +968,24 @@ bool FileFd::Open(string FileName,unsigned int const Mode,APT::Configuration::Co
if_FLAGGED_SET(Create, O_CREAT);
if_FLAGGED_SET(Empty, O_TRUNC);
if_FLAGGED_SET(Exclusive, O_EXCL);
- else if_FLAGGED_SET(Atomic, O_EXCL);
#undef if_FLAGGED_SET
- if (TemporaryFileName.empty() == false)
- iFd = open(TemporaryFileName.c_str(), fileflags, Perms);
+ if ((Mode & Atomic) == Atomic)
+ {
+ char *name = strdup((FileName + ".XXXXXX").c_str());
+
+ if((iFd = mkstemp(name)) == -1)
+ {
+ free(name);
+ return FileFdErrno("mkstemp", "Could not create temporary file for %s", FileName.c_str());
+ }
+
+ TemporaryFileName = string(name);
+ free(name);
+
+ if(Perms != 600 && fchmod(iFd, Perms) == -1)
+ return FileFdErrno("fchmod", "Could not change permissions for temporary file %s", TemporaryFileName.c_str());
+ }
else
iFd = open(FileName.c_str(), fileflags, Perms);
@@ -1234,7 +1244,7 @@ FileFd::~FileFd()
gracefully. */
bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
{
- int Res;
+ ssize_t Res;
errno = 0;
if (Actual != 0)
*Actual = 0;
@@ -1334,7 +1344,7 @@ char* FileFd::ReadLine(char *To, unsigned long long const Size)
/* */
bool FileFd::Write(const void *From,unsigned long long Size)
{
- int Res;
+ ssize_t Res;
errno = 0;
do
{
@@ -1388,7 +1398,7 @@ bool FileFd::Write(const void *From,unsigned long long Size)
}
bool FileFd::Write(int Fd, const void *From, unsigned long long Size)
{
- int Res;
+ ssize_t Res;
errno = 0;
do
{
@@ -1461,14 +1471,14 @@ bool FileFd::Seek(unsigned long long To)
d->seekpos = To;
return true;
}
- int res;
+ off_t res;
#ifdef HAVE_ZLIB
if (d != NULL && d->gz)
res = gzseek(d->gz,To,SEEK_SET);
else
#endif
res = lseek(iFd,To,SEEK_SET);
- if (res != (signed)To)
+ if (res != (off_t)To)
return FileFdError("Unable to seek to %llu", To);
if (d != NULL)
@@ -1499,7 +1509,7 @@ bool FileFd::Skip(unsigned long long Over)
return true;
}
- int res;
+ off_t res;
#ifdef HAVE_ZLIB
if (d != NULL && d->gz != NULL)
res = gzseek(d->gz,Over,SEEK_CUR);
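
The FileFd::Open() hunk above replaces the predictable mktemp() name generation with mkstemp(), which creates the temporary file atomically and returns an open descriptor. A stand-alone sketch of that pattern, illustrative only and not part of this commit (the target file name is hypothetical):

// illustrative sketch of atomic temporary-file creation with mkstemp()
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <string>
#include <sys/stat.h>
#include <unistd.h>

int main()
{
   std::string const FileName = "target.conf";          // hypothetical target file
   char *name = strdup((FileName + ".XXXXXX").c_str());
   int const fd = mkstemp(name);                        // fills in XXXXXX and creates the file (mode 0600)
   if (fd == -1)
   {
      perror("mkstemp");
      free(name);
      return 1;
   }
   std::string const TemporaryFileName(name);
   free(name);
   fchmod(fd, 0644);                                    // widen permissions if the target needs them
   // ... write the new content to fd, then atomically replace the target:
   // rename(TemporaryFileName.c_str(), FileName.c_str());
   close(fd);
   unlink(TemporaryFileName.c_str());                   // demo only: clean up instead of renaming
   return 0;
}
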
diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc
index 0955b69f7..77e48962c 100644
--- a/apt-pkg/contrib/strutl.cc
+++ b/apt-pkg/contrib/strutl.cc
@@ -1118,6 +1118,37 @@ vector<string> VectorizeString(string const &haystack, char const &split)
return exploded;
}
/*}}}*/
+// StringSplit - split a string into a string vector by token /*{{{*/
+// ---------------------------------------------------------------------
+/* See header for details.
+ */
+vector<string> StringSplit(std::string const &s, std::string const &sep,
+ unsigned int maxsplit)
+{
+ vector<string> split;
+ size_t start, pos;
+
+ // no separator given, this is bogus
+ if(sep.size() == 0)
+ return split;
+
+ start = pos = 0;
+ while (pos != string::npos)
+ {
+ pos = s.find(sep, start);
+ split.push_back(s.substr(start, pos-start));
+
+ // if maxsplit is reached, the remaining string is the last item
+ if(split.size() >= maxsplit)
+ {
+ split[split.size()-1] = s.substr(start);
+ break;
+ }
+ start = pos+sep.size();
+ }
+ return split;
+}
+ /*}}}*/
// RegexChoice - Simple regex list/list matcher /*{{{*/
// ---------------------------------------------------------------------
/* */
diff --git a/apt-pkg/contrib/strutl.h b/apt-pkg/contrib/strutl.h
index 530896141..b42e06491 100644
--- a/apt-pkg/contrib/strutl.h
+++ b/apt-pkg/contrib/strutl.h
@@ -17,7 +17,7 @@
#define STRUTL_H
-
+#include <limits>
#include <stdlib.h>
#include <string>
#include <cstring>
@@ -62,9 +62,32 @@ bool StrToNum(const char *Str,unsigned long &Res,unsigned Len,unsigned Base = 0)
bool StrToNum(const char *Str,unsigned long long &Res,unsigned Len,unsigned Base = 0);
bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len);
bool Hex2Num(const std::string &Str,unsigned char *Num,unsigned int Length);
+
+// input changing string split
bool TokSplitString(char Tok,char *Input,char **List,
unsigned long ListMax);
+
+// split a given string by a char
std::vector<std::string> VectorizeString(std::string const &haystack, char const &split) __attrib_const;
+
+/* \brief Return a vector of strings from string "input" where "sep"
+ * is used as the delimiter string.
+ *
+ * \param input The input string.
+ *
+ * \param sep The separator to use.
+ *
+ * \param maxsplit (optional) The maximum number of splits that
+ * should be performed.
+ *
+ * The optional "maxsplit" argument can be used to limit the splitting:
+ * if used, the string is split at no more than maxsplit places and the
+ * last item in the vector contains the remainder of the string.
+ */
+std::vector<std::string> StringSplit(std::string const &input,
+ std::string const &sep,
+ unsigned int maxsplit=std::numeric_limits<unsigned int>::max()) __attrib_const;
+
void ioprintf(std::ostream &out,const char *format,...) __like_printf(2);
void strprintf(std::string &out,const char *format,...) __like_printf(2);
char *safe_snprintf(char *Buffer,char *End,const char *Format,...) __like_printf(3);
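
A short usage sketch for the StringSplit() helper declared above, in the spirit of the dpkg status-fd parsing this commit introduces. It is illustrative only and assumes a build against this libapt-pkg revision (e.g. g++ demo.cc -lapt-pkg); the sample line is made up:

// illustrative usage of StringSplit() with a maxsplit limit
#include <apt-pkg/strutl.h>
#include <cassert>
#include <string>
#include <vector>

int main()
{
   std::string const line = "status: libc6:amd64: error: failed in write(fd) (10, ret=-1): backend dpkg-deb during ...";
   // split on ": " (not on ':') so the pkgname:arch field stays intact,
   // and stop after 4 fields so the error message keeps its embedded ": "
   std::vector<std::string> list = StringSplit(line, ": ", 4);
   assert(list.size() == 4);
   assert(list[0] == "status");
   assert(list[1] == "libc6:amd64");
   assert(list[2] == "error");
   // list[3] == "failed in write(fd) (10, ret=-1): backend dpkg-deb during ..."
   return 0;
}
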
diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc
index 87aab6ee2..68d544e1f 100644
--- a/apt-pkg/deb/deblistparser.cc
+++ b/apt-pkg/deb/deblistparser.cc
@@ -635,7 +635,7 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver,
string Version;
unsigned int Op;
- Start = ParseDepends(Start,Stop,Package,Version,Op,false,!MultiArchEnabled);
+ Start = ParseDepends(Start, Stop, Package, Version, Op, false, false);
if (Start == 0)
return _error->Error("Problem parsing dependency %s",Tag);
size_t const found = Package.rfind(':');
@@ -717,9 +717,7 @@ bool debListParser::ParseProvides(pkgCache::VerIterator &Ver)
}
}
- if (MultiArchEnabled == false)
- return true;
- else if ((Ver->MultiArch & pkgCache::Version::Allowed) == pkgCache::Version::Allowed)
+ if ((Ver->MultiArch & pkgCache::Version::Allowed) == pkgCache::Version::Allowed)
{
string const Package = string(Ver.ParentPkg().Name()).append(":").append("any");
return NewProvidesAllArch(Ver, Package, Ver.VerStr());
diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc
index 4b5467eff..b4d812d26 100644
--- a/apt-pkg/deb/dpkgpm.cc
+++ b/apt-pkg/deb/dpkgpm.cc
@@ -37,6 +37,7 @@
#include <map>
#include <pwd.h>
#include <grp.h>
+#include <iomanip>
#include <termios.h>
#include <unistd.h>
@@ -52,9 +53,16 @@ class pkgDPkgPMPrivate
{
public:
pkgDPkgPMPrivate() : stdin_is_dev_null(false), dpkgbuf_pos(0),
- term_out(NULL), history_out(NULL)
+ term_out(NULL), history_out(NULL),
+ last_reported_progress(0.0), nr_terminal_rows(0),
+ fancy_progress_output(false)
{
dpkgbuf[0] = '\0';
+ if(_config->FindB("Dpkg::Progress-Fancy", false) == true)
+ {
+ fancy_progress_output = true;
+ _config->Set("DpkgPM::Progress", true);
+ }
}
bool stdin_is_dev_null;
// the buffer we use for the dpkg status-fd reading
@@ -63,6 +71,10 @@ public:
FILE *term_out;
FILE *history_out;
string dpkg_error;
+
+ float last_reported_progress;
+ int nr_terminal_rows;
+ bool fancy_progress_output;
};
namespace
@@ -512,7 +524,8 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line)
/* dpkg sends strings like this:
- 'status: <pkg>: <pkg qstate>'
+ 'status: <pkg>: <pkg qstate>'
+ 'status: <pkg>:<arch>: <pkg qstate>'
errors look like this:
'status: /var/cache/apt/archives/krecipes_0.8.1-0ubuntu1_i386.deb : error : trying to overwrite `/usr/share/doc/kde/HTML/en/krecipes/krectip.png', which is also in package krecipes-data
and conffile-prompt like this
@@ -527,29 +540,36 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line)
'processing: trigproc: trigger'
*/
- char* list[6];
- // dpkg sends multiline error messages sometimes (see
- // #374195 for a example. we should support this by
- // either patching dpkg to not send multiline over the
- // statusfd or by rewriting the code here to deal with
- // it. for now we just ignore it and not crash
- TokSplitString(':', line, list, sizeof(list)/sizeof(list[0]));
- if( list[0] == NULL || list[1] == NULL || list[2] == NULL)
+ // we need to split on ": " (note the appended space) as the ':' is
+ // part of the pkgname:arch information that dpkg sends
+ //
+ // A dpkg error message may contain additional ":" (like
+ // "failed in buffer_write(fd) (10, ret=-1): backend dpkg-deb ..."
+ // so we need to ensure to not split too much
+ std::vector<std::string> list = StringSplit(line, ": ", 3);
+ if(list.size() != 3)
{
if (Debug == true)
std::clog << "ignoring line: not enough ':'" << std::endl;
return;
}
- const char* const pkg = list[1];
- const char* action = _strstrip(list[2]);
+ // dpkg does not always send "pkgname:arch", so we add the architecture here if needed
+ std::string pkgname = list[1];
+ if (pkgname.find(":") == std::string::npos)
+ {
+ string const nativeArch = _config->Find("APT::Architecture");
+ pkgname = pkgname + ":" + nativeArch;
+ }
+ const char* const pkg = pkgname.c_str();
+ const char* action = list[2].c_str();
// 'processing' from dpkg looks like
// 'processing: action: pkg'
- if(strncmp(list[0], "processing", strlen("processing")) == 0)
+ if(strncmp(list[0].c_str(), "processing", strlen("processing")) == 0)
{
char s[200];
- const char* const pkg_or_trigger = _strstrip(list[2]);
- action = _strstrip( list[1]);
+ const char* const pkg_or_trigger = list[2].c_str();
+ action = list[1].c_str();
const std::pair<const char *, const char *> * const iter =
std::find_if(PackageProcessingOpsBegin,
PackageProcessingOpsEnd,
@@ -578,14 +598,6 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line)
if(strncmp(action,"error",strlen("error")) == 0)
{
- // urgs, sometime has ":" in its error string so that we
- // end up with the error message split between list[3]
- // and list[4], e.g. the message:
- // "failed in buffer_write(fd) (10, ret=-1): backend dpkg-deb ..."
- // concat them again
- if( list[4] != NULL )
- list[3][strlen(list[3])] = ':';
-
status << "pmerror:" << list[1]
<< ":" << (PackagesDone/float(PackagesTotal)*100.0)
<< ":" << list[3]
@@ -595,7 +607,7 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line)
if (Debug == true)
std::clog << "send: '" << status.str() << "'" << endl;
pkgFailures++;
- WriteApportReport(list[1], list[3]);
+ WriteApportReport(list[1].c_str(), list[3].c_str());
return;
}
else if(strncmp(action,"conffile",strlen("conffile")) == 0)
@@ -632,6 +644,9 @@ void pkgDPkgPM::ProcessDpkgStatusLine(int OutStatusFd, char *line)
<< ":" << (PackagesDone/float(PackagesTotal)*100.0)
<< ":" << s
<< endl;
+ if(_config->FindB("DPkgPM::Progress", false) == true)
+ SendTerminalProgress(PackagesDone/float(PackagesTotal)*100.0);
+
if(OutStatusFd > 0)
FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size());
if (Debug == true)
@@ -874,6 +889,51 @@ bool pkgDPkgPM::CloseLog()
return true;
}
/*}}}*/
+// DPkgPM::SendTerminalProgress /*{{{*/
+// ---------------------------------------------------------------------
+/* Send progress info to the terminal
+ */
+void pkgDPkgPM::SendTerminalProgress(float percentage)
+{
+ int reporting_steps = _config->FindI("DpkgPM::Reporting-Steps", 1);
+
+ if(percentage < (d->last_reported_progress + reporting_steps))
+ return;
+
+ std::string progress_str;
+ strprintf(progress_str, "Progress: [%3i%%]", (int)percentage);
+ if (d->fancy_progress_output)
+ {
+ int row = d->nr_terminal_rows;
+
+ static string save_cursor = "\033[s";
+ static string restore_cursor = "\033[u";
+
+ static string set_bg_color = "\033[42m"; // green
+ static string set_fg_color = "\033[30m"; // black
+
+ static string restore_bg = "\033[49m";
+ static string restore_fg = "\033[39m";
+
+ std::cout << save_cursor
+ // move cursor position to last row
+ << "\033[" << row << ";0f"
+ << set_bg_color
+ << set_fg_color
+ << progress_str
+ << restore_cursor
+ << restore_bg
+ << restore_fg;
+ }
+ else
+ {
+ std::cout << progress_str << "\r\n";
+ }
+ std::flush(std::cout);
+
+ d->last_reported_progress = percentage;
+}
+ /*}}}*/
/*{{{*/
// This implements a racy version of pselect for those architectures
// that don't have a working implementation.
@@ -895,6 +955,29 @@ static int racy_pselect(int nfds, fd_set *readfds, fd_set *writefds,
return retval;
}
/*}}}*/
+
+void pkgDPkgPM::SetupTerminalScrollArea(int nr_rows)
+{
+ if(!d->fancy_progress_output)
+ return;
+
+ // scroll down a bit to avoid visual glitch when the screen
+ // area shrinks by one row
+ std::cout << "\n";
+
+ // save cursor
+ std::cout << "\033[s";
+
+ // set scroll region (this will place the cursor in the top left)
+ std::cout << "\033[1;" << nr_rows - 1 << "r";
+
+ // restore cursor, but ensure it is inside the scrolling area
+ std::cout << "\033[u";
+ static const char *move_cursor_up = "\033[1A";
+ std::cout << move_cursor_up;
+ std::flush(std::cout);
+}
+
// DPkgPM::Go - Run the sequence /*{{{*/
// ---------------------------------------------------------------------
/* This globs the operations and calls dpkg
@@ -1020,7 +1103,7 @@ bool pkgDPkgPM::Go(int OutStatusFd)
if((*I).Pkg.end() == true)
continue;
- string const name = (*I).Pkg.Name();
+ string const name = (*I).Pkg.FullName();
PackageOpsDone[name] = 0;
for(int i=0; (DpkgStatesOpMap[(*I).Op][i]).state != NULL; ++i)
{
@@ -1251,7 +1334,8 @@ bool pkgDPkgPM::Go(int OutStatusFd)
_error->PushToStack();
if (tcgetattr(STDOUT_FILENO, &tt) == 0)
{
- ioctl(0, TIOCGWINSZ, (char *)&win);
+ ioctl(1, TIOCGWINSZ, (char *)&win);
+ d->nr_terminal_rows = win.ws_row;
if (openpty(&master, &slave, NULL, &tt, &win) < 0)
{
_error->Errno("openpty", _("Can not write log (%s)"), _("Is /dev/pts mounted?"));
@@ -1293,11 +1377,12 @@ bool pkgDPkgPM::Go(int OutStatusFd)
<< endl;
FileFd::Write(OutStatusFd, status.str().c_str(), status.str().size());
}
+
Child = ExecFork();
-
// This is the child
if (Child == 0)
{
+
if(slave >= 0 && master >= 0)
{
setsid();
@@ -1314,7 +1399,7 @@ bool pkgDPkgPM::Go(int OutStatusFd)
if (chdir(_config->FindDir("DPkg::Run-Directory","/").c_str()) != 0)
_exit(100);
-
+
if (_config->FindB("DPkg::FlushSTDIN",true) == true && isatty(STDIN_FILENO))
{
int Flags,dummy;
@@ -1330,6 +1415,7 @@ bool pkgDPkgPM::Go(int OutStatusFd)
if (fcntl(STDIN_FILENO,F_SETFL,Flags & (~(long)O_NONBLOCK)) < 0)
_exit(100);
}
+ SetupTerminalScrollArea(d->nr_terminal_rows);
/* No Job Control Stop Env is a magic dpkg var that prevents it
from using sigstop */
@@ -1424,12 +1510,21 @@ bool pkgDPkgPM::Go(int OutStatusFd)
signal(SIGHUP,old_SIGHUP);
+ // reset scroll area
+ SetupTerminalScrollArea(d->nr_terminal_rows + 1);
+ if(d->fancy_progress_output)
+ {
+ // override the progress line (sledgehammer)
+ static const char* clear_screen_below_cursor = "\033[J";
+ std::cout << clear_screen_below_cursor;
+ }
+
if(master >= 0)
{
tcsetattr(0, TCSAFLUSH, &tt);
close(master);
}
-
+
// Check for an error code.
if (WIFEXITED(Status) == 0 || WEXITSTATUS(Status) != 0)
{
@@ -1459,7 +1554,11 @@ bool pkgDPkgPM::Go(int OutStatusFd)
}
}
CloseLog();
-
+
+ // dpkg is done at this point
+ if(_config->FindB("DPkgPM::Progress", false) == true)
+ SendTerminalProgress(100);
+
if (pkgPackageManager::SigINTStop)
_error->Warning(_("Operation was interrupted before it could finish"));
diff --git a/apt-pkg/deb/dpkgpm.h b/apt-pkg/deb/dpkgpm.h
index c31d56f8e..3b8d36623 100644
--- a/apt-pkg/deb/dpkgpm.h
+++ b/apt-pkg/deb/dpkgpm.h
@@ -84,6 +84,10 @@ class pkgDPkgPM : public pkgPackageManager
bool SendPkgsInfo(FILE * const F, unsigned int const &Version);
void WriteHistoryTag(std::string const &tag, std::string value);
+ // Terminal progress
+ void SetupTerminalScrollArea(int nr_scrolled_rows);
+ void SendTerminalProgress(float percentage);
+
// apport integration
void WriteApportReport(const char *pkgpath, const char *errormsg);
diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc
index 978a893f7..a06789cdf 100644
--- a/apt-pkg/depcache.cc
+++ b/apt-pkg/depcache.cc
@@ -896,6 +896,7 @@ char const* PrintMode(char const mode)
case pkgDepCache::ModeInstall: return "Install";
case pkgDepCache::ModeKeep: return "Keep";
case pkgDepCache::ModeDelete: return "Delete";
+ case pkgDepCache::ModeGarbage: return "Garbage";
default: return "UNKNOWN";
}
}
@@ -1726,8 +1727,6 @@ bool pkgDepCache::MarkRequired(InRootSetFunc &userFunc)
follow_recommends = MarkFollowsRecommends();
follow_suggests = MarkFollowsSuggests();
-
-
// do the mark part, this is the core bit of the algorithm
for(PkgIterator p = PkgBegin(); !p.end(); ++p)
{
@@ -1738,7 +1737,9 @@ bool pkgDepCache::MarkRequired(InRootSetFunc &userFunc)
// be nice even then a required package violates the policy (#583517)
// and do the full mark process also for required packages
(p.CurrentVer().end() != true &&
- p.CurrentVer()->Priority == pkgCache::State::Required))
+ p.CurrentVer()->Priority == pkgCache::State::Required) ||
+ // packages which can't be changed (like holds) can't be garbage
+ (IsModeChangeOk(ModeGarbage, p, 0, false) == false))
{
// the package is installed (and set to keep)
if(PkgState[p->ID].Keep() && !p.CurrentVer().end())
diff --git a/apt-pkg/depcache.h b/apt-pkg/depcache.h
index d9c95349b..61c9aa559 100644
--- a/apt-pkg/depcache.h
+++ b/apt-pkg/depcache.h
@@ -128,7 +128,7 @@ class pkgDepCache : protected pkgCache::Namespace
enum InternalFlags {AutoKept = (1 << 0), Purge = (1 << 1), ReInstall = (1 << 2), Protected = (1 << 3)};
enum VersionTypes {NowVersion, InstallVersion, CandidateVersion};
- enum ModeList {ModeDelete = 0, ModeKeep = 1, ModeInstall = 2};
+ enum ModeList {ModeDelete = 0, ModeKeep = 1, ModeInstall = 2, ModeGarbage = 3};
/** \brief Represents an active action group.
*
diff --git a/apt-pkg/tagfile.cc b/apt-pkg/tagfile.cc
index b91e868e2..e0802e3d5 100644
--- a/apt-pkg/tagfile.cc
+++ b/apt-pkg/tagfile.cc
@@ -164,7 +164,7 @@ bool pkgTagFile::Fill()
unsigned long long const dataSize = d->Size - ((d->End - d->Buffer) + 1);
if (d->Fd.Read(d->End, dataSize, &Actual) == false)
return false;
- if (Actual != dataSize || d->Fd.Eof() == true)
+ if (Actual != dataSize)
d->Done = true;
d->End += Actual;
}
diff --git a/apt-private/makefile b/apt-private/makefile
index 8feb1ce6c..1d179f0b2 100644
--- a/apt-private/makefile
+++ b/apt-private/makefile
@@ -17,7 +17,7 @@ MAJOR=0.0
MINOR=0
SLIBS=$(PTHREADLIB) -lapt-pkg
-PRIVATES=list install output cachefile cacheset update upgrade cmndline moo search show main
+PRIVATES=list install download output cachefile cacheset update upgrade cmndline moo search show main
SOURCE += $(foreach private, $(PRIVATES), private-$(private).cc)
HEADERS += $(foreach private, $(PRIVATES), private-$(private).h)
diff --git a/apt-private/private-cmndline.cc b/apt-private/private-cmndline.cc
index aceb865d5..8ba6629a8 100644
--- a/apt-private/private-cmndline.cc
+++ b/apt-private/private-cmndline.cc
@@ -62,8 +62,12 @@ bool addArgumentsAPTCache(std::vector<CommandLine::Args> &Args, char const * con
{
addArg(0, "all-names", "APT::Cache::AllNames", 0);
}
+ else if (CmdMatches("unmet"))
+ {
+ addArg('i', "important", "APT::Cache::Important", 0);
+ }
else if (CmdMatches("gencaches", "showsrc", "showpkg", "stats", "dump",
- "dumpavail", "unmet", "showauto", "policy", "madison"))
+ "dumpavail", "showauto", "policy", "madison"))
;
else
return false;
@@ -114,9 +118,9 @@ bool addArgumentsAPTConfig(std::vector<CommandLine::Args> &Args, char const * co
bool addArgumentsAPTGet(std::vector<CommandLine::Args> &Args, char const * const Cmd)/*{{{*/
{
if (CmdMatches("install", "remove", "purge", "upgrade", "dist-upgrade",
- "deselect-upgrade", "autoremove"))
+ "dselect-upgrade", "autoremove"))
{
- addArg(0, "dpkg-progress", "DpkgPM::Progress", 0);
+ addArg(0, "show-progress", "DpkgPM::Progress", 0);
addArg('f', "fix-broken", "APT::Get::Fix-Broken", 0);
addArg(0, "purge", "APT::Get::Purge", 0);
addArg('V',"verbose-versions","APT::Get::Show-Versions",0);
@@ -125,7 +129,8 @@ bool addArgumentsAPTGet(std::vector<CommandLine::Args> &Args, char const * const
addArg(0, "solver", "APT::Solver", CommandLine::HasArg);
if (CmdMatches("upgrade"))
{
- addArg(0, "allow-new", "APT::Get::UpgradeAllowNew", 0);
+ addArg(0, "new-pkgs", "APT::Get::Upgrade-Allow-New",
+ CommandLine::Boolean);
}
}
else if (CmdMatches("update"))
diff --git a/apt-private/private-download.cc b/apt-private/private-download.cc
new file mode 100644
index 000000000..f02991cde
--- /dev/null
+++ b/apt-private/private-download.cc
@@ -0,0 +1,96 @@
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/acquire.h>
+#include <apt-pkg/acquire-item.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/strutl.h>
+
+#include "private-output.h"
+
+#include <locale.h>
+
+#include <fstream>
+#include <string>
+#include <vector>
+
+#include <apti18n.h>
+ /*}}}*/
+
+// CheckAuth - check if each download comes from a trusted source /*{{{*/
+bool CheckAuth(pkgAcquire& Fetcher, bool const PromptUser)
+{
+ std::string UntrustedList;
+ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I < Fetcher.ItemsEnd(); ++I)
+ if (!(*I)->IsTrusted())
+ UntrustedList += std::string((*I)->ShortDesc()) + " ";
+
+ if (UntrustedList == "")
+ return true;
+
+ ShowList(c2out,_("WARNING: The following packages cannot be authenticated!"),UntrustedList,"");
+
+ if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
+ {
+ c2out << _("Authentication warning overridden.\n");
+ return true;
+ }
+
+ if (PromptUser == false)
+ return _error->Error(_("Some packages could not be authenticated"));
+
+ if (_config->FindI("quiet",0) < 2
+ && _config->FindB("APT::Get::Assume-Yes",false) == false)
+ {
+ c2out << _("Install these packages without verification?") << std::flush;
+ if (!YnPrompt(false))
+ return _error->Error(_("Some packages could not be authenticated"));
+
+ return true;
+ }
+ else if (_config->FindB("APT::Get::Force-Yes",false) == true)
+ return true;
+
+ return _error->Error(_("There are problems and -y was used without --force-yes"));
+}
+ /*}}}*/
+bool AcquireRun(pkgAcquire &Fetcher, int const PulseInterval, bool * const Failure, bool * const TransientNetworkFailure)/*{{{*/
+{
+ pkgAcquire::RunResult res;
+ if(PulseInterval > 0)
+ res = Fetcher.Run(PulseInterval);
+ else
+ res = Fetcher.Run();
+
+ if (res == pkgAcquire::Failed)
+ return false;
+
+ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin();
+ I != Fetcher.ItemsEnd(); ++I)
+ {
+
+ if ((*I)->Status == pkgAcquire::Item::StatDone &&
+ (*I)->Complete == true)
+ continue;
+
+ if (TransientNetworkFailure != NULL && (*I)->Status == pkgAcquire::Item::StatIdle)
+ {
+ *TransientNetworkFailure = true;
+ continue;
+ }
+
+ ::URI uri((*I)->DescURI());
+ uri.User.clear();
+ uri.Password.clear();
+ std::string descUri = std::string(uri);
+ _error->Error(_("Failed to fetch %s %s\n"), descUri.c_str(),
+ (*I)->ErrorText.c_str());
+
+ if (Failure != NULL)
+ *Failure = true;
+ }
+
+ return true;
+}
+ /*}}}*/
diff --git a/apt-private/private-download.h b/apt-private/private-download.h
new file mode 100644
index 000000000..b8cc8da1e
--- /dev/null
+++ b/apt-private/private-download.h
@@ -0,0 +1,9 @@
+#ifndef APT_PRIVATE_DOWNLOAD_H
+#define APT_PRIVATE_DOWNLOAD_H
+
+#include <apt-pkg/acquire.h>
+
+bool CheckAuth(pkgAcquire& Fetcher, bool const PromptUser);
+bool AcquireRun(pkgAcquire &Fetcher, int const PulseInterval, bool * const Failure, bool * const TransientNetworkFailure);
+
+#endif
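
A small caller sketch for the two helpers declared above, illustrative only: apt-private is an internal convenience library, so this assumes being built inside the apt source tree, and the FetchAll() wrapper is hypothetical, not part of the commit.

// illustrative caller of AcquireRun(), folding per-item results into one answer
#include <apt-pkg/acquire.h>
#include "private-download.h"

bool FetchAll(pkgAcquire &Fetcher)
{
   bool Failed = false, Transient = false;
   if (AcquireRun(Fetcher, 0, &Failed, &Transient) == false)
      return false;                     // the whole run failed (pkgAcquire::Failed)
   if (Transient == true && Failed == false)
      return true;                      // only transient network problems; the caller may retry
   return Failed == false;              // false if any item had a hard fetch error
}
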
diff --git a/apt-private/private-install.cc b/apt-private/private-install.cc
index d5052fcc0..643a6b370 100644
--- a/apt-private/private-install.cc
+++ b/apt-private/private-install.cc
@@ -42,6 +42,7 @@
#include <sstream>
#include "private-install.h"
+#include "private-download.h"
#include "private-cachefile.h"
#include "private-output.h"
#include "private-cacheset.h"
@@ -50,52 +51,6 @@
#include <apti18n.h>
/*}}}*/
-// CheckAuth - check if each download comes form a trusted source /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-static bool CheckAuth(pkgAcquire& Fetcher)
-{
- std::string UntrustedList;
- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I < Fetcher.ItemsEnd(); ++I)
- {
- if (!(*I)->IsTrusted())
- {
- UntrustedList += std::string((*I)->ShortDesc()) + " ";
- }
- }
-
- if (UntrustedList == "")
- {
- return true;
- }
-
- ShowList(c2out,_("WARNING: The following packages cannot be authenticated!"),UntrustedList,"");
-
- if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true)
- {
- c2out << _("Authentication warning overridden.\n");
- return true;
- }
-
- if (_config->FindI("quiet",0) < 2
- && _config->FindB("APT::Get::Assume-Yes",false) == false)
- {
- c2out << _("Install these packages without verification?") << std::flush;
- if (!YnPrompt(false))
- return _error->Error(_("Some packages could not be authenticated"));
-
- return true;
- }
- else if (_config->FindB("APT::Get::Force-Yes",false) == true)
- {
- return true;
- }
-
- return _error->Error(_("There are problems and -y was used without --force-yes"));
-}
- /*}}}*/
-
-
// InstallPackages - Actually download and install the packages /*{{{*/
// ---------------------------------------------------------------------
/* This displays the informative messages describing what is going to
@@ -301,12 +256,12 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask, bool Safety)
{
pkgAcquire::UriIterator I = Fetcher.UriBegin();
for (; I != Fetcher.UriEnd(); ++I)
- c1out << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
+ std::cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
I->Owner->FileSize << ' ' << I->Owner->HashSum() << std::endl;
return true;
}
- if (!CheckAuth(Fetcher))
+ if (!CheckAuth(Fetcher, true))
return false;
/* Unlock the dpkg lock if we are not going to be doing an install
@@ -338,29 +293,10 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask, bool Safety)
I = Fetcher.ItemsBegin();
}
}
-
- if (Fetcher.Run() == pkgAcquire::Failed)
- return false;
-
- // Print out errors
- bool Failed = false;
- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I)
- {
- if ((*I)->Status == pkgAcquire::Item::StatDone &&
- (*I)->Complete == true)
- continue;
-
- if ((*I)->Status == pkgAcquire::Item::StatIdle)
- {
- Transient = true;
- // Failed = true;
- continue;
- }
- fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(),
- (*I)->ErrorText.c_str());
- Failed = true;
- }
+ bool Failed = false;
+ if (AcquireRun(Fetcher, 0, &Failed, &Transient) == false)
+ return false;
/* If we are in no download mode and missing files and there were
'failures' then the user must specify -m. Furthermore, there
@@ -429,8 +365,6 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask, bool Safety)
return true;
}
/*}}}*/
-
-
// DoAutomaticRemove - Remove all automatic unused packages /*{{{*/
// ---------------------------------------------------------------------
/* Remove unused automatic packages */
@@ -504,15 +438,15 @@ bool DoAutomaticRemove(CacheFile &Cache)
do {
Changed = false;
for (APT::PackageSet::const_iterator Pkg = tooMuch.begin();
- Pkg != tooMuch.end() && Changed == false; ++Pkg)
+ Pkg != tooMuch.end(); ++Pkg)
{
APT::PackageSet too;
too.insert(*Pkg);
for (pkgCache::PrvIterator Prv = Cache[Pkg].CandidateVerIter(Cache).ProvidesList();
Prv.end() == false; ++Prv)
too.insert(Prv.ParentPkg());
- for (APT::PackageSet::const_iterator P = too.begin();
- P != too.end() && Changed == false; ++P) {
+ for (APT::PackageSet::const_iterator P = too.begin(); P != too.end(); ++P)
+ {
for (pkgCache::DepIterator R = P.RevDependsList();
R.end() == false; ++R)
{
@@ -531,7 +465,11 @@ bool DoAutomaticRemove(CacheFile &Cache)
Changed = true;
break;
}
+ if (Changed == true)
+ break;
}
+ if (Changed == true)
+ break;
}
} while (Changed == true);
}
@@ -576,32 +514,28 @@ bool DoAutomaticRemove(CacheFile &Cache)
return true;
}
/*}}}*/
+// DoCacheManipulationFromCommandLine /*{{{*/
+static const unsigned short MOD_REMOVE = 1;
+static const unsigned short MOD_INSTALL = 2;
-
-
-
-// DoInstall - Install packages from the command line /*{{{*/
-// ---------------------------------------------------------------------
-/* Install named packages */
-bool DoInstall(CommandLine &CmdL)
+bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache)
{
- CacheFile Cache;
- if (Cache.OpenForInstall() == false ||
- Cache.CheckDeps(CmdL.FileSize() != 1) == false)
- return false;
-
+ std::map<unsigned short, APT::VersionSet> verset;
+ return DoCacheManipulationFromCommandLine(CmdL, Cache, verset);
+}
+bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache,
+ std::map<unsigned short, APT::VersionSet> &verset)
+{
+
// Enter the special broken fixing mode if the user specified arguments
bool BrokenFix = false;
if (Cache->BrokenCount() != 0)
BrokenFix = true;
- pkgProblemResolver* Fix = NULL;
+ SPtr<pkgProblemResolver> Fix;
if (_config->FindB("APT::Get::CallResolver", true) == true)
Fix = new pkgProblemResolver(Cache);
- static const unsigned short MOD_REMOVE = 1;
- static const unsigned short MOD_INSTALL = 2;
-
unsigned short fallback = MOD_INSTALL;
if (strcasecmp(CmdL.FileList[0],"remove") == 0)
fallback = MOD_REMOVE;
@@ -622,14 +556,12 @@ bool DoInstall(CommandLine &CmdL)
mods.push_back(APT::VersionSet::Modifier(MOD_REMOVE, "-",
APT::VersionSet::Modifier::POSTFIX, APT::VersionSet::NEWEST));
CacheSetHelperAPTGet helper(c0out);
- std::map<unsigned short, APT::VersionSet> verset = APT::VersionSet::GroupedFromCommandLine(Cache,
+ verset = APT::VersionSet::GroupedFromCommandLine(Cache,
CmdL.FileList + 1, mods, fallback, helper);
if (_error->PendingError() == true)
{
helper.showVirtualPackageErrors(Cache);
- if (Fix != NULL)
- delete Fix;
return false;
}
@@ -663,8 +595,6 @@ bool DoInstall(CommandLine &CmdL)
if (_error->PendingError() == true)
{
- if (Fix != NULL)
- delete Fix;
return false;
}
@@ -675,8 +605,6 @@ bool DoInstall(CommandLine &CmdL)
{
c1out << _("You might want to run 'apt-get -f install' to correct these:") << std::endl;
ShowBroken(c1out,Cache,false);
- if (Fix != NULL)
- delete Fix;
return _error->Error(_("Unmet dependencies. Try 'apt-get -f install' with no packages (or specify a solution)."));
}
@@ -684,7 +612,6 @@ bool DoInstall(CommandLine &CmdL)
{
// Call the scored problem resolver
Fix->Resolve(true);
- delete Fix;
}
// Now we check the state of the packages,
@@ -718,6 +645,33 @@ bool DoInstall(CommandLine &CmdL)
if (!DoAutomaticRemove(Cache))
return false;
+ // if nothing changed in the cache, but only the automark information
+ // we write the StateFile here, otherwise it will be written in
+ // cache.commit()
+ if (InstallAction.AutoMarkChanged > 0 &&
+ Cache->DelCount() == 0 && Cache->InstCount() == 0 &&
+ Cache->BadCount() == 0 &&
+ _config->FindB("APT::Get::Simulate",false) == false)
+ Cache->writeStateFile(NULL);
+
+ return true;
+}
+ /*}}}*/
+// DoInstall - Install packages from the command line /*{{{*/
+// ---------------------------------------------------------------------
+/* Install named packages */
+bool DoInstall(CommandLine &CmdL)
+{
+ CacheFile Cache;
+ if (Cache.OpenForInstall() == false ||
+ Cache.CheckDeps(CmdL.FileSize() != 1) == false)
+ return false;
+
+ std::map<unsigned short, APT::VersionSet> verset;
+
+ if(!DoCacheManipulationFromCommandLine(CmdL, Cache, verset))
+ return false;
+
/* Print out a list of packages that are going to be installed extra
to what the user asked */
if (Cache->InstCount() != verset[MOD_INSTALL].size())
@@ -833,15 +787,6 @@ bool DoInstall(CommandLine &CmdL)
}
- // if nothing changed in the cache, but only the automark information
- // we write the StateFile here, otherwise it will be written in
- // cache.commit()
- if (InstallAction.AutoMarkChanged > 0 &&
- Cache->DelCount() == 0 && Cache->InstCount() == 0 &&
- Cache->BadCount() == 0 &&
- _config->FindB("APT::Get::Simulate",false) == false)
- Cache->writeStateFile(NULL);
-
// See if we need to prompt
// FIXME: check if really the packages in the set are going to be installed
if (Cache->InstCount() == verset[MOD_INSTALL].size() && Cache->DelCount() == 0)
diff --git a/apt-private/private-install.h b/apt-private/private-install.h
index fcf4cbced..439c89712 100644
--- a/apt-private/private-install.h
+++ b/apt-private/private-install.h
@@ -14,6 +14,9 @@
bool DoInstall(CommandLine &Cmd);
+bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache,
+ std::map<unsigned short, APT::VersionSet> &verset);
+bool DoCacheManipulationFromCommandLine(CommandLine &CmdL, CacheFile &Cache);
bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
bool Safety = true);
diff --git a/apt-private/private-list.cc b/apt-private/private-list.cc
index c3a21aafc..8c61fcae8 100644
--- a/apt-private/private-list.cc
+++ b/apt-private/private-list.cc
@@ -42,7 +42,7 @@
#include <apti18n.h>
/*}}}*/
-struct PackageSortAlphabetic
+struct PackageSortAlphabetic /*{{{*/
{
bool operator () (const pkgCache::PkgIterator &p_lhs,
const pkgCache::PkgIterator &p_rhs)
@@ -52,12 +52,12 @@ struct PackageSortAlphabetic
return (l_name < r_name);
}
};
-
+ /*}}}*/
+class PackageNameMatcher : public Matcher /*{{{*/
+{
#ifdef PACKAGE_MATCHER_ABI_COMPAT
#define PackageMatcher PackageNameMatchesFnmatch
#endif
-class PackageNameMatcher : public Matcher
-{
public:
PackageNameMatcher(const char **patterns)
{
@@ -98,9 +98,8 @@ private:
std::vector<APT::CacheFilter::PackageMatcher*>::const_iterator J;
#undef PackageMatcher
};
-
-
-void ListAllVersions(pkgCacheFile &CacheFile, pkgRecords &records,
+ /*}}}*/
+void ListAllVersions(pkgCacheFile &CacheFile, pkgRecords &records, /*{{{*/
pkgCache::PkgIterator P,
std::ostream &outs)
{
@@ -108,7 +107,7 @@ void ListAllVersions(pkgCacheFile &CacheFile, pkgRecords &records,
Ver.end() == false; Ver++)
ListSingleVersion(CacheFile, records, Ver, outs);
}
-
+ /*}}}*/
// list - list package based on criteria /*{{{*/
// ---------------------------------------------------------------------
bool List(CommandLine &Cmd)
diff --git a/apt-private/private-output.cc b/apt-private/private-output.cc
index 659975476..6fadf7274 100644
--- a/apt-private/private-output.cc
+++ b/apt-private/private-output.cc
@@ -28,8 +28,7 @@ std::ostream c2out(0);
std::ofstream devnull("/dev/null");
unsigned int ScreenWidth = 80 - 1; /* - 1 for the cursor */
-
-bool InitOutput()
+bool InitOutput() /*{{{*/
{
c0out.rdbuf(cout.rdbuf());
c1out.rdbuf(cout.rdbuf());
@@ -60,8 +59,8 @@ bool InitOutput()
return true;
}
-
-std::string GetArchiveSuite(pkgCacheFile &CacheFile, pkgCache::VerIterator ver)
+ /*}}}*/
+std::string GetArchiveSuite(pkgCacheFile &CacheFile, pkgCache::VerIterator ver) /*{{{*/
{
std::string suite = "";
if (ver && ver.FileList() && ver.FileList())
@@ -77,8 +76,8 @@ std::string GetArchiveSuite(pkgCacheFile &CacheFile, pkgCache::VerIterator ver)
}
return suite;
}
-
-std::string GetFlagsStr(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)
+ /*}}}*/
+std::string GetFlagsStr(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)/*{{{*/
{
pkgDepCache *DepCache = CacheFile.GetDepCache();
pkgDepCache::StateCache &state = (*DepCache)[P];
@@ -94,23 +93,23 @@ std::string GetFlagsStr(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)
flags_str = "-";
return flags_str;
}
-
-std::string GetCandidateVersion(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)
+ /*}}}*/
+std::string GetCandidateVersion(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)/*{{{*/
{
pkgPolicy *policy = CacheFile.GetPolicy();
pkgCache::VerIterator cand = policy->GetCandidateVer(P);
return cand ? cand.VerStr() : "(none)";
}
-
-std::string GetInstalledVersion(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)
+ /*}}}*/
+std::string GetInstalledVersion(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)/*{{{*/
{
pkgCache::VerIterator inst = P.CurrentVer();
return inst ? inst.VerStr() : "(none)";
}
-
-std::string GetVersion(pkgCacheFile &CacheFile, pkgCache::VerIterator V)
+ /*}}}*/
+std::string GetVersion(pkgCacheFile &CacheFile, pkgCache::VerIterator V)/*{{{*/
{
pkgCache::PkgIterator P = V.ParentPkg();
if (V == P.CurrentVer())
@@ -127,8 +126,8 @@ std::string GetVersion(pkgCacheFile &CacheFile, pkgCache::VerIterator V)
return DeNull(V.VerStr());
return "(none)";
}
-
-std::string GetArchitecture(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)
+ /*}}}*/
+std::string GetArchitecture(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)/*{{{*/
{
pkgPolicy *policy = CacheFile.GetPolicy();
pkgCache::VerIterator inst = P.CurrentVer();
@@ -136,8 +135,8 @@ std::string GetArchitecture(pkgCacheFile &CacheFile, pkgCache::PkgIterator P)
return inst ? inst.Arch() : cand.Arch();
}
-
-std::string GetShortDescription(pkgCacheFile &CacheFile, pkgRecords &records, pkgCache::PkgIterator P)
+ /*}}}*/
+std::string GetShortDescription(pkgCacheFile &CacheFile, pkgRecords &records, pkgCache::PkgIterator P)/*{{{*/
{
pkgPolicy *policy = CacheFile.GetPolicy();
@@ -157,8 +156,8 @@ std::string GetShortDescription(pkgCacheFile &CacheFile, pkgRecords &records, pk
}
return ShortDescription;
}
-
-void ListSingleVersion(pkgCacheFile &CacheFile, pkgRecords &records,
+ /*}}}*/
+void ListSingleVersion(pkgCacheFile &CacheFile, pkgRecords &records, /*{{{*/
pkgCache::VerIterator V, std::ostream &out)
{
pkgCache::PkgIterator P = V.ParentPkg();
@@ -230,8 +229,7 @@ void ListSingleVersion(pkgCacheFile &CacheFile, pkgRecords &records,
<< std::endl;
}
}
-
-
+ /*}}}*/
// ShowList - Show a list /*{{{*/
// ---------------------------------------------------------------------
/* This prints out a string of space separated words with a title and
diff --git a/apt-private/private-search.cc b/apt-private/private-search.cc
index 6881f482f..ff4140fa7 100644
--- a/apt-private/private-search.cc
+++ b/apt-private/private-search.cc
@@ -1,3 +1,4 @@
+// Includes /*{{{*/
#include <apt-pkg/error.h>
#include <apt-pkg/cachefile.h>
#include <apt-pkg/cachefilter.h>
@@ -34,9 +35,9 @@
#include "private-search.h"
#include "private-cacheset.h"
+ /*}}}*/
-
-bool FullTextSearch(CommandLine &CmdL)
+bool FullTextSearch(CommandLine &CmdL) /*{{{*/
{
pkgCacheFile CacheFile;
pkgCache *Cache = CacheFile.GetPkgCache();
@@ -97,3 +98,4 @@ bool FullTextSearch(CommandLine &CmdL)
return true;
}
+ /*}}}*/
diff --git a/apt-private/private-show.cc b/apt-private/private-show.cc
index e26a2b30a..ddc75dbeb 100644
--- a/apt-private/private-show.cc
+++ b/apt-private/private-show.cc
@@ -1,3 +1,4 @@
+// Includes /*{{{*/
#include <apt-pkg/error.h>
#include <apt-pkg/cachefile.h>
#include <apt-pkg/cachefilter.h>
@@ -23,6 +24,7 @@
#include "private-output.h"
#include "private-cacheset.h"
+ /*}}}*/
namespace APT {
namespace Cmd {
@@ -87,8 +89,7 @@ bool DisplayRecord(pkgCacheFile &CacheFile, pkgCache::VerIterator V,
return true;
}
/*}}}*/
-
-bool ShowPackage(CommandLine &CmdL)
+bool ShowPackage(CommandLine &CmdL) /*{{{*/
{
pkgCacheFile CacheFile;
CacheSetHelperVirtuals helper(true, GlobalError::NOTICE);
@@ -102,7 +103,7 @@ bool ShowPackage(CommandLine &CmdL)
Pkg != helper.virtualPkgs.end(); ++Pkg)
{
c1out << "Package: " << Pkg.FullName(true) << std::endl;
- c1out << "State: " << _("not a real pacakge (virtual)") << std::endl;
+ c1out << "State: " << _("not a real package (virtual)") << std::endl;
// FIXME: show providers, see private-cacheset.h
// CacheSetHelperAPTGet::showVirtualPackageErrors()
}
diff --git a/apt-private/private-upgrade.cc b/apt-private/private-upgrade.cc
index eb546e3e3..9a5286b57 100644
--- a/apt-private/private-upgrade.cc
+++ b/apt-private/private-upgrade.cc
@@ -1,21 +1,18 @@
-
+// Includes /*{{{*/
#include <apt-pkg/algorithms.h>
#include "private-install.h"
#include "private-cachefile.h"
#include "private-upgrade.h"
#include "private-output.h"
+ /*}}}*/
-
-// DoUpgradeNoNewPackages - Upgrade all packages /*{{{*/
+// DoUpgradeNoNewPackages - Upgrade all packages /*{{{*/
// ---------------------------------------------------------------------
/* Upgrade all packages without installing new packages or erasing old
packages */
bool DoUpgradeNoNewPackages(CommandLine &CmdL)
{
- if (CmdL.FileSize() != 1)
- return _error->Error(_("The upgrade command takes no arguments"));
-
CacheFile Cache;
if (Cache.OpenForInstall() == false || Cache.CheckDeps() == false)
return false;
@@ -26,17 +23,17 @@ bool DoUpgradeNoNewPackages(CommandLine &CmdL)
ShowBroken(c1out,Cache,false);
return _error->Error(_("Internal error, AllUpgrade broke stuff"));
}
+
+ // parse additional cmdline pkg manipulation switches
+ if(!DoCacheManipulationFromCommandLine(CmdL, Cache))
+ return false;
return InstallPackages(Cache,true);
}
/*}}}*/
-
// DoSafeUpgrade - Upgrade all packages with install but not remove /*{{{*/
bool DoUpgradeWithAllowNewPackages(CommandLine &CmdL)
{
- if (CmdL.FileSize() != 1)
- return _error->Error(_("The upgrade command takes no arguments"));
-
CacheFile Cache;
if (Cache.OpenForInstall() == false || Cache.CheckDeps() == false)
return false;
@@ -47,6 +44,10 @@ bool DoUpgradeWithAllowNewPackages(CommandLine &CmdL)
ShowBroken(c1out,Cache,false);
return _error->Error(_("Internal error, AllUpgrade broke stuff"));
}
+
+ // parse additional cmdline pkg manipulation switches
+ if(!DoCacheManipulationFromCommandLine(CmdL, Cache))
+ return false;
return InstallPackages(Cache,true);
}
diff --git a/cmdline/apt-get.cc b/cmdline/apt-get.cc
index 93c21651f..e5e22e166 100644
--- a/cmdline/apt-get.cc
+++ b/cmdline/apt-get.cc
@@ -50,7 +50,7 @@
#include <apt-pkg/pkgrecords.h>
#include <apt-pkg/indexfile.h>
-
+#include <apt-private/private-download.h>
#include <apt-private/private-install.h>
#include <apt-private/private-upgrade.h>
#include <apt-private/private-output.h>
@@ -62,9 +62,11 @@
#include <apt-private/acqprogress.h>
#include <set>
+#include <fstream>
+#include <sstream>
+
#include <locale.h>
#include <langinfo.h>
-#include <fstream>
#include <termios.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
@@ -76,7 +78,6 @@
#include <errno.h>
#include <regex.h>
#include <sys/wait.h>
-#include <sstream>
#include <apt-private/private-output.h>
#include <apt-private/private-main.h>
@@ -84,11 +85,8 @@
#include <apti18n.h>
/*}}}*/
-
using namespace std;
-
-
// TryToInstallBuildDep - Try to install a single package /*{{{*/
// ---------------------------------------------------------------------
/* This used to be inlined in DoInstall, but with the advent of regex package
@@ -350,9 +348,6 @@ bool DoMarkAuto(CommandLine &CmdL)
/* Intelligent upgrader that will install and remove packages at will */
bool DoDistUpgrade(CommandLine &CmdL)
{
- if (CmdL.FileSize() != 1)
- return _error->Error(_("The dist-upgrade command takes no arguments"));
-
CacheFile Cache;
if (Cache.OpenForInstall() == false || Cache.CheckDeps() == false)
return false;
@@ -365,6 +360,10 @@ bool DoDistUpgrade(CommandLine &CmdL)
return false;
}
+ // parse additional cmdline pkg manipulation switches
+ if(!DoCacheManipulationFromCommandLine(CmdL, Cache))
+ return false;
+
c0out << _("Done") << endl;
return InstallPackages(Cache,true);
@@ -526,7 +525,7 @@ bool DoDownload(CommandLine &CmdL)
CacheFile Cache;
if (Cache.ReadOnlyOpen() == false)
return false;
-
+
APT::CacheSetHelper helper(c0out);
APT::VersionList verset = APT::VersionList::FromCommandLine(Cache,
CmdL.FileList + 1, APT::VersionList::CANDIDATE, helper);
@@ -534,67 +533,57 @@ bool DoDownload(CommandLine &CmdL)
if (verset.empty() == true)
return false;
+ AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet", 0));
pkgAcquire Fetcher;
- AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0));
- if (_config->FindB("APT::Get::Print-URIs") == false)
- Fetcher.Setup(&Stat);
+ if (Fetcher.Setup(&Stat) == false)
+ return false;
pkgRecords Recs(Cache);
pkgSourceList *SrcList = Cache.GetSourceList();
- bool gotAll = true;
- for (APT::VersionList::const_iterator Ver = verset.begin();
- Ver != verset.end();
- ++Ver)
+ // reuse the usual acquire methods for deb files, but don't drop them into
+ // the usual directories - keep everything in the current directory
+ std::vector<std::string> storefile(verset.size());
+ std::string const cwd = SafeGetCWD();
+ _config->Set("Dir::Cache::Archives", cwd);
+ int i = 0;
+ for (APT::VersionList::const_iterator Ver = verset.begin();
+ Ver != verset.end(); ++Ver, ++i)
{
- string descr;
- // get the right version
- pkgCache::PkgIterator Pkg = Ver.ParentPkg();
- pkgRecords::Parser &rec=Recs.Lookup(Ver.FileList());
- pkgCache::VerFileIterator Vf = Ver.FileList();
- if (Vf.end() == true)
- {
- _error->Error("Can not find VerFile for %s in version %s", Pkg.FullName().c_str(), Ver.VerStr());
- gotAll = false;
- continue;
- }
- pkgCache::PkgFileIterator F = Vf.File();
- pkgIndexFile *index;
- if(SrcList->FindIndex(F, index) == false)
- {
- _error->Error(_("Can't find a source to download version '%s' of '%s'"), Ver.VerStr(), Pkg.FullName().c_str());
- gotAll = false;
- continue;
- }
- string uri = index->ArchiveURI(rec.FileName());
- strprintf(descr, _("Downloading %s %s"), Pkg.Name(), Ver.VerStr());
- // get the most appropriate hash
- HashString hash;
- if (rec.SHA512Hash() != "")
- hash = HashString("sha512", rec.SHA512Hash());
- else if (rec.SHA256Hash() != "")
- hash = HashString("sha256", rec.SHA256Hash());
- else if (rec.SHA1Hash() != "")
- hash = HashString("sha1", rec.SHA1Hash());
- else if (rec.MD5Hash() != "")
- hash = HashString("md5", rec.MD5Hash());
- // get the file
- new pkgAcqFile(&Fetcher, uri, hash.toStr(), (*Ver)->Size, descr, Pkg.Name(), ".");
+ pkgAcquire::Item *I = new pkgAcqArchive(&Fetcher, SrcList, &Recs, *Ver, storefile[i]);
+ std::string const filename = cwd + flNotDir(storefile[i]);
+ storefile[i].assign(filename);
+ I->DestFile.assign(filename);
}
- if (gotAll == false)
- return false;
// Just print out the uris and exit if the --print-uris flag was used
if (_config->FindB("APT::Get::Print-URIs") == true)
{
pkgAcquire::UriIterator I = Fetcher.UriBegin();
for (; I != Fetcher.UriEnd(); ++I)
- cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
+ cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' <<
I->Owner->FileSize << ' ' << I->Owner->HashSum() << endl;
return true;
}
- return (Fetcher.Run() == pkgAcquire::Continue);
+ if (_error->PendingError() == true || CheckAuth(Fetcher, false) == false)
+ return false;
+
+ bool Failed = false;
+ if (AcquireRun(Fetcher, 0, &Failed, NULL) == false)
+ return false;
+
+ // copy files in local sources to the current directory
+ for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I)
+ if ((*I)->Local == true && (*I)->Status == pkgAcquire::Item::StatDone)
+ {
+ std::string const filename = cwd + flNotDir((*I)->DestFile);
+ std::ifstream src((*I)->DestFile.c_str(), std::ios::binary);
+ std::ofstream dst(filename.c_str(), std::ios::binary);
+ dst << src.rdbuf();
+ }
+
+ return Failed == false;
}
/*}}}*/
// DoCheck - Perform the check operation /*{{{*/
@@ -814,27 +803,10 @@ bool DoSource(CommandLine &CmdL)
delete[] Dsc;
return true;
}
-
- // Run it
- if (Fetcher.Run() == pkgAcquire::Failed)
- {
- delete[] Dsc;
- return false;
- }
- // Print error messages
+ // Run it
bool Failed = false;
- for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I)
- {
- if ((*I)->Status == pkgAcquire::Item::StatDone &&
- (*I)->Complete == true)
- continue;
-
- fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(),
- (*I)->ErrorText.c_str());
- Failed = true;
- }
- if (Failed == true)
+ if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true)
{
delete[] Dsc;
return _error->Error(_("Failed to fetch some archives."));
@@ -1624,15 +1596,14 @@ void SigWinch(int)
#endif
}
/*}}}*/
-
-bool DoUpgrade(CommandLine &CmdL)
+bool DoUpgrade(CommandLine &CmdL) /*{{{*/
{
- if (_config->FindB("APT::Get::UpgradeAllowNew", false) == true)
+ if (_config->FindB("APT::Get::Upgrade-Allow-New", false) == true)
return DoUpgradeWithAllowNewPackages(CmdL);
else
return DoUpgradeNoNewPackages(CmdL);
}
-
+ /*}}}*/
int main(int argc,const char *argv[]) /*{{{*/
{
CommandLine::Dispatch Cmds[] = {{"update",&DoUpdate},
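The rewritten DoDownload above points Dir::Cache::Archives at the working directory and then copies anything that was satisfied from a local source with a plain stream copy. A minimal, self-contained sketch of that copy idiom (the file names are hypothetical, not taken from apt):

  // stream-copy idiom as used in DoDownload; illustrative only
  #include <fstream>

  int main()
  {
     // path a local acquire item might report (hypothetical)
     std::ifstream src("/var/cache/apt/archives/foo_1.0_amd64.deb", std::ios::binary);
     // destination in the current working directory (hypothetical)
     std::ofstream dst("./foo_1.0_amd64.deb", std::ios::binary);
     if (!src.is_open() || !dst.is_open())
        return 1;
     // inserting the source streambuf copies the whole file in one statement
     dst << src.rdbuf();
     return dst.good() ? 0 : 1;
  }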
diff --git a/debian/apt.cron.daily b/debian/apt.cron.daily
index 2665b6579..3f9df9d7e 100644
--- a/debian/apt.cron.daily
+++ b/debian/apt.cron.daily
@@ -375,7 +375,7 @@ fi
check_power || exit 0
# check if we can lock the cache and if the cache is clean
-if which apt-get >/dev/null && ! eval apt-get check -f $XAPTOPT $XSTDERR ; then
+if which apt-get >/dev/null && ! eval apt-get check $XAPTOPT $XSTDERR ; then
debug_echo "error encountered in cron job with \"apt-get check\"."
exit 0
fi
diff --git a/debian/apt.postinst b/debian/apt.postinst
index 70de237d0..fd3e273bb 100644
--- a/debian/apt.postinst
+++ b/debian/apt.postinst
@@ -33,7 +33,7 @@ case "$1" in
fi
# create kernel autoremoval blacklist on update
- if dpkg --compare-versions "$2" lt-nl 0.9.9.3; then
+ if dpkg --compare-versions "$2" lt 0.9.9.3; then
/etc/kernel/postinst.d/apt-auto-removal
fi
;;
diff --git a/debian/changelog b/debian/changelog
index 5d3758850..287db9b28 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,18 +1,123 @@
-apt (0.9.11~exp3) UNRELEASEDexperimental; urgency=low
+apt (0.9.12) UNRELEASED; urgency=low
- * fix incorrect bugnumber for the Pre-Install-Pkgs hook
+ [ Christian Perrier ]
+ * Fix typo in apt-private/private-show.cc. Thanks to Benjamin
+ Keresa. Closes: #724073
+
+ [ Mark Hymers ]
+ * fix libapt-inst for >2G debs (closes: #725483)
+
+ [ David Kalnischkies ]
+ * don't strip :any from dependencies in single-arch (Closes: 723586)
+ * pkg from only trusted sources keeps being trusted (Closes: 617690)
+ * compression-neutral message for missing data.tar member (Closes: 722710)
+ * print-uris prints regardless of quiet-level again (Closes: 722207)
+ * retry without partial data after a 416 response (Closes: 710924)
+ * replace "filesize - 1" trick in http with proper 416 handling
+ * fix partial (206 and 416) support in https
+ * handle complete responses to https range requests (Closes: 617643, 667699)
+ (LP: 1157943)
+ * don't consider holds for autoremoval (Closes: 724995)
+ * put fetch errors in 'source' on our errorstack
+ * use pkgAcqArchive in 'download' for proper errors
+ * fix lzma-support detection via xz binary
+ * do not ++ on erased package pointers in autoremove
+
+ [ Michael Vogt ]
+ * Add new "apt-get upgrade --with-new-pkgs" option (and add man-page for it).
+ So "apt-get upgrade --with-new-pkgs" will pull in new dependencies but
+ never remove packages
+ * Rename "--dpkg-progress" to "--show-progress" and document it in
+ apt-get.8. This will show global install progress information in the
+ terminal window.
+ * Fix status-fd progress calculation for certain multi-arch install/upgrade
+ situations
+ * add new -o DpkgPM::Progress-Fancy for nicer dpkg progress output
+ on vt100+ terminals
+ * fix libapt-inst for >2G debs (closes: #725483), thanks to Mark Hymers
+ * debian/apt.postinst: use --compare-versions lt instead of lt-nl,
+ to ensure the apt-auto-removal file is correctly created,
+ thanks to Ben Hutchings
+ * update Uploaders to match recent uploaders better
+
+ -- Michael Vogt <mvo@debian.org> Tue, 08 Oct 2013 20:38:55 +0200
+
+apt (0.9.11.4) unstable; urgency=low
+
+ [ Oskari Saarenmaa ]
+ * don't truncate 100 char long paths in tar extraction.
+ Thanks to Mika Eloranta for the testcase! (Closes: #689582)
+
+ [ David Kalnischkies ]
+ * do not trust FileFd::Eof() in pkgTagFile::Fill()
+ Thanks to Cyril Brulebois (Closes: 723705)
+
+ -- Michael Vogt <mvo@debian.org> Fri, 20 Sep 2013 16:12:07 +0200
+
+apt (0.9.11.3) unstable; urgency=low
+
+ [ Michael Vogt ]
+ * Add DPkgPM::Progress option to enable terminal install
+ progress
+ * fix typo (mkostemp->mkstemp)
+ * Remove invalid "-f" option for apt-get check, thanks to
+ Philipp Weis (closes: #721477)
+ * Fix regression of "apt-cache unmet -i", thanks to Daniel Schepler
+ (closes: #722324)
+
+ [ David Kalnischkies ]
+ * use FileFd in HashSum test to unbreak non-linux ports.
+ Thanks to Aaron M. Ucko (Closes: 721723)
- -- Michael Vogt <michael.vogt@ubuntu.com> Mon, 19 Aug 2013 15:53:47 +0200
+ -- Michael Vogt <mvo@debian.org> Tue, 10 Sep 2013 17:32:02 +0200
-apt (0.9.11~exp2) experimental; urgency=low
+apt (0.9.11.2) unstable; urgency=low
+
+ [ Milo Casagrande ]
+ * Update Italian translation. Closes: #721030
+
+ [ Trần Ngọc Quân ]
+ * Update Vietnamese translation. Closes: #720752
+
+ [ Michael Vogt ]
+ * dselect/install:
+ - remove "-f" option for apt-get clean/auto-clean (closes: #720532)
+ * apt-private/private-cmndline.cc:
+ - fix typo in CmdMatches() selection for dselect-upgrade (closes: #720532)
+ * use SPtr<pkgProblemResolver> in DoInstall() to simplify the code
+ * allow pkg manipulation in the upgrade/dist-upgrade commandline, like
+ apt-get dist-upgrade 2vcard- 4g8+ (thanks to Thorsten Glaser for the
+ suggestion)
+
+ [ Angel Guzman Maeso ]
+ * replace usage of potential dangerous mktemp with mkstemp
+
+ -- Michael Vogt <mvo@debian.org> Sat, 31 Aug 2013 16:45:31 +0200
+
+apt (0.9.11.1) unstable; urgency=low
+
+ [ Michael Vogt ]
+ * more coverity fixes:
+ - explicit init
+ - always chdir("/") after chroot()
+ - ftparchive/override.cc: fix "skip empty lines" code, the pointer
+ needs to get de-referenced first
+ * dselect/update:
+ - remove "-f" option for apt-get update to fix breakage (closes: 720532)
+
+ [ Christopher Baines ]
+ * Add test for bug #507998
+
+ [ David Kalnischkies ]
+ * add a breaks libapt-inst for FileFd changes in 0.9.9 (Closes: 720449)
+ * add versions to manpages-it Replaces+Breaks
- * add lintian override for no-shlibs-control-file so that
- the internal libapt-private.so.0.0.0 can be shipped
- * adjust apt.install.in to only install libapt-private.so.*
+ [ Ángel Guzmán Maeso ]
+ * apt-pkg:contrib Avoid compiler warning about sign-compare
- -- Michael Vogt <mvo@debian.org> Mon, 19 Aug 2013 14:54:58 +0200
+ -- Michael Vogt <mvo@debian.org> Sat, 24 Aug 2013 09:13:27 +0200
-apt (0.9.11~exp1) experimental; urgency=low
+apt (0.9.11) unstable; urgency=low
[ Daniel Hartwig ]
* Clarify units of Acquire::http::Dl-Limit (closes: #705445)
@@ -31,6 +136,9 @@ apt (0.9.11~exp1) experimental; urgency=low
- install libapt-private* into the apt binary
- add PACKAGE_MATCHER_ABI_COMPAT define so that this branch can be
merged without breaking ABI
+ - add lintian override for no-shlibs-control-file so that
+ the internal libapt-private.so.0.0.0 can be shipped
+ - adjust apt.install.in to only install libapt-private.so.*
[ David Kalnischkies ]
* ensure that pkgTagFile isn't writing past Buffer length (Closes: 719629)
@@ -40,7 +148,7 @@ apt (0.9.11~exp1) experimental; urgency=low
[ Christian PERRIER ]
* French translation update.
- -- Michael Vogt <mvo@debian.org> Mon, 19 Aug 2013 13:25:45 +0200
+ -- Michael Vogt <mvo@debian.org> Wed, 21 Aug 2013 17:51:09 +0200
apt (0.9.10) unstable; urgency=low
diff --git a/debian/control b/debian/control
index 3c5d14fe8..673fba477 100644
--- a/debian/control
+++ b/debian/control
@@ -2,8 +2,7 @@ Source: apt
Section: admin
Priority: important
Maintainer: APT Development Team <deity@lists.debian.org>
-Uploaders: Michael Vogt <mvo@debian.org>, Otavio Salvador <otavio@debian.org>,
- Christian Perrier <bubulle@debian.org>, Daniel Burrows <dburrows@debian.org>,
+Uploaders: Michael Vogt <mvo@debian.org>, Christian Perrier <bubulle@debian.org>,
Julian Andres Klode <jak@debian.org>
Standards-Version: 3.9.4
Build-Depends: dpkg-dev (>= 1.15.8), debhelper (>= 8.1.3~), libdb-dev,
diff --git a/doc/apt-get.8.xml b/doc/apt-get.8.xml
index cc4e82255..4c050ec03 100644
--- a/doc/apt-get.8.xml
+++ b/doc/apt-get.8.xml
@@ -389,6 +389,18 @@
Configuration Item: <literal>APT::Ignore-Hold</literal>.</para></listitem>
</varlistentry>
+ <varlistentry><term><option>--with-new-pkgs</option></term>
+ <listitem><para>Allow installing new packages when used in
+ conjunction with <literal>upgrade</literal>. This is useful if
+ the update of a installed package requires new dependencies to be
+ installed. Instead of holding the package back <literal>upgrade</literal>
+ will upgrade the package and install the new dependencies. Note that
+ <literal>upgrade</literal> with this option will never remove packages,
+ only allow adding new ones.
+ Configuration Item: <literal>APT::Get::Upgrade-Allow-New</literal>.
+ </para></listitem>
+ </varlistentry>
+
<varlistentry><term><option>--no-upgrade</option></term>
<listitem><para>Do not upgrade packages; when used in conjunction with <literal>install</literal>,
<literal>no-upgrade</literal> will prevent packages on the command line
@@ -508,7 +520,15 @@
This is useful for tools like pbuilder.
Configuration Item: <literal>APT::Get::AllowUnauthenticated</literal>.</para></listitem>
</varlistentry>
-
+
+ <varlistentry><term><option>--show-progress</option></term>
+ <listitem><para>Show user-friendly progress information in the
+ terminal window when packages are installed, upgraded or
+ removed. For a machine-parsable version of this data see
+ README.progress-reporting in the apt doc directory.
+ Configuration Item: <literal>DpkgPM::Progress</literal>.</para></listitem>
+ </varlistentry>
+
&apt-commonoptions;
diff --git a/dselect/install b/dselect/install
index 3ef213550..7104ee280 100755
--- a/dselect/install
+++ b/dselect/install
@@ -5,7 +5,8 @@ TEXTDOMAIN="apt"
# Get the configuration from /etc/apt/apt.conf
CLEAN="prompt"
-OPTS="-f"
+OPTS=""
+DSELECT_UPGRADE_OPTS="-f"
APTGET="/usr/bin/apt-get"
DPKG="/usr/bin/dpkg"
DPKG_OPTS="--admindir=$1"
@@ -47,12 +48,12 @@ yesno() {
}
if [ "$WAIT" = "true" ]; then
- $APTGET $OPTS "$APT_OPT0" "$APT_OPT1" -d dselect-upgrade
+ $APTGET $DSELECT_UPGRADE_OPTS $OPTS "$APT_OPT0" "$APT_OPT1" -d dselect-upgrade
echo $"Press enter to continue." && read RES
- $APTGET $OPTS "$APT_OPT0" "$APT_OPT1" dselect-upgrade
+ $APTGET $DSELECT_UPGRADE_OPTS $OPTS "$APT_OPT0" "$APT_OPT1" dselect-upgrade
RES=$?
else
- $APTGET $OPTS "$APT_OPT0" "$APT_OPT1" dselect-upgrade
+ $APTGET $DSELECT_UPGRADE_OPTS $OPTS "$APT_OPT0" "$APT_OPT1" dselect-upgrade
RES=$?
fi
diff --git a/methods/http.cc b/methods/http.cc
index 278ddb290..71a02e53a 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -58,15 +58,6 @@
/*}}}*/
using namespace std;
-string HttpMethod::FailFile;
-int HttpMethod::FailFd = -1;
-time_t HttpMethod::FailTime = 0;
-unsigned long PipelineDepth = 0;
-unsigned long TimeOut = 120;
-bool AllowRedirect = false;
-bool Debug = false;
-URI Proxy;
-
unsigned long long CircleBuf::BwReadLimit=0;
unsigned long long CircleBuf::BwTickReadData=0;
struct timeval CircleBuf::BwReadTick={0,0};
@@ -296,20 +287,17 @@ CircleBuf::~CircleBuf()
delete Hash;
}
-// ServerState::ServerState - Constructor /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-ServerState::ServerState(URI Srv,HttpMethod *Owner) : Owner(Owner),
- In(64*1024), Out(4*1024),
- ServerName(Srv)
+// HttpServerState::HttpServerState - Constructor /*{{{*/
+HttpServerState::HttpServerState(URI Srv,HttpMethod *Owner) : ServerState(Srv, Owner), In(64*1024), Out(4*1024)
{
+ TimeOut = _config->FindI("Acquire::http::Timeout",TimeOut);
Reset();
}
/*}}}*/
-// ServerState::Open - Open a connection to the server /*{{{*/
+// HttpServerState::Open - Open a connection to the server /*{{{*/
// ---------------------------------------------------------------------
/* This opens a connection to the server. */
-bool ServerState::Open()
+bool HttpServerState::Open()
{
// Use the already open connection if possible.
if (ServerFd != -1)
@@ -373,72 +361,18 @@ bool ServerState::Open()
return true;
}
/*}}}*/
-// ServerState::Close - Close a connection to the server /*{{{*/
+// HttpServerState::Close - Close a connection to the server /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool ServerState::Close()
+bool HttpServerState::Close()
{
close(ServerFd);
ServerFd = -1;
return true;
}
/*}}}*/
-// ServerState::RunHeaders - Get the headers before the data /*{{{*/
-// ---------------------------------------------------------------------
-/* Returns 0 if things are OK, 1 if an IO error occurred and 2 if a header
- parse error occurred */
-ServerState::RunHeadersResult ServerState::RunHeaders()
-{
- State = Header;
-
- Owner->Status(_("Waiting for headers"));
-
- Major = 0;
- Minor = 0;
- Result = 0;
- Size = 0;
- StartPos = 0;
- Encoding = Closes;
- HaveContent = false;
- time(&Date);
-
- do
- {
- string Data;
- if (In.WriteTillEl(Data) == false)
- continue;
-
- if (Debug == true)
- clog << Data;
-
- for (string::const_iterator I = Data.begin(); I < Data.end(); ++I)
- {
- string::const_iterator J = I;
- for (; J != Data.end() && *J != '\n' && *J != '\r'; ++J);
- if (HeaderLine(string(I,J)) == false)
- return RUN_HEADERS_PARSE_ERROR;
- I = J;
- }
-
- // 100 Continue is a Nop...
- if (Result == 100)
- continue;
-
- // Tidy up the connection persistance state.
- if (Encoding == Closes && HaveContent == true)
- Persistent = false;
-
- return RUN_HEADERS_OK;
- }
- while (Owner->Go(false,this) == true);
-
- return RUN_HEADERS_IO_ERROR;
-}
- /*}}}*/
-// ServerState::RunData - Transfer the data from the socket /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool ServerState::RunData()
+// HttpServerState::RunData - Transfer the data from the socket /*{{{*/
+bool HttpServerState::RunData(FileFd * const File)
{
State = Data;
@@ -456,7 +390,7 @@ bool ServerState::RunData()
if (In.WriteTillEl(Data,true) == true)
break;
}
- while ((Last = Owner->Go(false,this)) == true);
+ while ((Last = Go(false, File)) == true);
if (Last == false)
return false;
@@ -474,7 +408,7 @@ bool ServerState::RunData()
if (In.WriteTillEl(Data,true) == true && Data.length() <= 2)
break;
}
- while ((Last = Owner->Go(false,this)) == true);
+ while ((Last = Go(false, File)) == true);
if (Last == false)
return false;
return !_error->PendingError();
@@ -482,7 +416,7 @@ bool ServerState::RunData()
// Transfer the block
In.Limit(Len);
- while (Owner->Go(true,this) == true)
+ while (Go(true, File) == true)
if (In.IsLimit() == true)
break;
@@ -498,7 +432,7 @@ bool ServerState::RunData()
if (In.WriteTillEl(Data,true) == true)
break;
}
- while ((Last = Owner->Go(false,this)) == true);
+ while ((Last = Go(false, File)) == true);
if (Last == false)
return false;
}
@@ -521,139 +455,217 @@ bool ServerState::RunData()
In.Limit(-1);
return !_error->PendingError();
}
- while (Owner->Go(true,this) == true);
+ while (Go(true, File) == true);
}
- return Owner->Flush(this) && !_error->PendingError();
+ return Owner->Flush() && !_error->PendingError();
}
/*}}}*/
-// ServerState::HeaderLine - Process a header line /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool ServerState::HeaderLine(string Line)
+bool HttpServerState::ReadHeaderLines(std::string &Data) /*{{{*/
{
- if (Line.empty() == true)
- return true;
+ return In.WriteTillEl(Data);
+}
+ /*}}}*/
+bool HttpServerState::LoadNextResponse(bool const ToFile, FileFd * const File)/*{{{*/
+{
+ return Go(ToFile, File);
+}
+ /*}}}*/
+bool HttpServerState::WriteResponse(const std::string &Data) /*{{{*/
+{
+ return Out.Read(Data);
+}
+ /*}}}*/
+bool HttpServerState::IsOpen() /*{{{*/
+{
+ return (ServerFd != -1);
+}
+ /*}}}*/
+bool HttpServerState::InitHashes(FileFd &File) /*{{{*/
+{
+ delete In.Hash;
+ In.Hash = new Hashes;
- string::size_type Pos = Line.find(' ');
- if (Pos == string::npos || Pos+1 > Line.length())
+ // Set the expected size and read file for the hashes
+ if (StartPos >= 0)
{
- // Blah, some servers use "connection:closes", evil.
- Pos = Line.find(':');
- if (Pos == string::npos || Pos + 2 > Line.length())
- return _error->Error(_("Bad header line"));
- Pos++;
+ File.Truncate(StartPos);
+
+ return In.Hash->AddFD(File, StartPos);
}
+ return true;
+}
+ /*}}}*/
+Hashes * HttpServerState::GetHashes() /*{{{*/
+{
+ return In.Hash;
+}
+ /*}}}*/
+// HttpServerState::Die - The server has closed the connection. /*{{{*/
+bool HttpServerState::Die(FileFd &File)
+{
+ unsigned int LErrno = errno;
- // Parse off any trailing spaces between the : and the next word.
- string::size_type Pos2 = Pos;
- while (Pos2 < Line.length() && isspace(Line[Pos2]) != 0)
- Pos2++;
-
- string Tag = string(Line,0,Pos);
- string Val = string(Line,Pos2);
-
- if (stringcasecmp(Tag.c_str(),Tag.c_str()+4,"HTTP") == 0)
+ // Dump the buffer to the file
+ if (State == ServerState::Data)
{
- // Evil servers return no version
- if (Line[4] == '/')
- {
- int const elements = sscanf(Line.c_str(),"HTTP/%3u.%3u %3u%359[^\n]",&Major,&Minor,&Result,Code);
- if (elements == 3)
- {
- Code[0] = '\0';
- if (Debug == true)
- clog << "HTTP server doesn't give Reason-Phrase for " << Result << std::endl;
- }
- else if (elements != 4)
- return _error->Error(_("The HTTP server sent an invalid reply header"));
- }
- else
+ // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking
+ // can't be set
+ if (File.Name() != "/dev/null")
+ SetNonBlock(File.Fd(),false);
+ while (In.WriteSpace() == true)
{
- Major = 0;
- Minor = 9;
- if (sscanf(Line.c_str(),"HTTP %3u%359[^\n]",&Result,Code) != 2)
- return _error->Error(_("The HTTP server sent an invalid reply header"));
- }
+ if (In.Write(File.Fd()) == false)
+ return _error->Errno("write",_("Error writing to the file"));
- /* Check the HTTP response header to get the default persistance
- state. */
- if (Major < 1)
- Persistent = false;
- else
- {
- if (Major == 1 && Minor == 0)
- Persistent = false;
- else
- Persistent = true;
+ // Done
+ if (In.IsLimit() == true)
+ return true;
}
+ }
- return true;
- }
-
- if (stringcasecmp(Tag,"Content-Length:") == 0)
+ // See if this is because the server finished the data stream
+ if (In.IsLimit() == false && State != HttpServerState::Header &&
+ Encoding != HttpServerState::Closes)
{
- if (Encoding == Closes)
- Encoding = Stream;
- HaveContent = true;
-
- // The length is already set from the Content-Range header
- if (StartPos != 0)
- return true;
+ Close();
+ if (LErrno == 0)
+ return _error->Error(_("Error reading from server. Remote end closed connection"));
+ errno = LErrno;
+ return _error->Errno("read",_("Error reading from server"));
+ }
+ else
+ {
+ In.Limit(-1);
+
+ // Nothing left in the buffer
+ if (In.WriteSpace() == false)
+ return false;
- Size = strtoull(Val.c_str(), NULL, 10);
- if (Size >= std::numeric_limits<unsigned long long>::max())
- return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header"));
+ // We may have got multiple responses back in one packet..
+ Close();
return true;
}
- if (stringcasecmp(Tag,"Content-Type:") == 0)
+ return false;
+}
+ /*}}}*/
+// HttpServerState::Flush - Dump the buffer into the file /*{{{*/
+// ---------------------------------------------------------------------
+/* This takes the current input buffer from the Server FD and writes it
+ into the file */
+bool HttpServerState::Flush(FileFd * const File)
+{
+ if (File != NULL)
{
- HaveContent = true;
- return true;
+ // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking
+ // can't be set
+ if (File->Name() != "/dev/null")
+ SetNonBlock(File->Fd(),false);
+ if (In.WriteSpace() == false)
+ return true;
+
+ while (In.WriteSpace() == true)
+ {
+ if (In.Write(File->Fd()) == false)
+ return _error->Errno("write",_("Error writing to file"));
+ if (In.IsLimit() == true)
+ return true;
+ }
+
+ if (In.IsLimit() == true || Encoding == ServerState::Closes)
+ return true;
}
+ return false;
+}
+ /*}}}*/
+// HttpServerState::Go - Run a single loop /*{{{*/
+// ---------------------------------------------------------------------
+/* This runs the select loop over the server FDs, Output file FDs and
+ stdin. */
+bool HttpServerState::Go(bool ToFile, FileFd * const File)
+{
+ // Server has closed the connection
+ if (ServerFd == -1 && (In.WriteSpace() == false ||
+ ToFile == false))
+ return false;
+
+ fd_set rfds,wfds;
+ FD_ZERO(&rfds);
+ FD_ZERO(&wfds);
+
+ /* Add the server. We only send more requests if the connection will
+ be persisting */
+ if (Out.WriteSpace() == true && ServerFd != -1
+ && Persistent == true)
+ FD_SET(ServerFd,&wfds);
+ if (In.ReadSpace() == true && ServerFd != -1)
+ FD_SET(ServerFd,&rfds);
+
+ // Add the file
+ int FileFD = -1;
+ if (File != NULL)
+ FileFD = File->Fd();
- if (stringcasecmp(Tag,"Content-Range:") == 0)
+ if (In.WriteSpace() == true && ToFile == true && FileFD != -1)
+ FD_SET(FileFD,&wfds);
+
+ // Add stdin
+ if (_config->FindB("Acquire::http::DependOnSTDIN", true) == true)
+ FD_SET(STDIN_FILENO,&rfds);
+
+ // Figure out the max fd
+ int MaxFd = FileFD;
+ if (MaxFd < ServerFd)
+ MaxFd = ServerFd;
+
+ // Select
+ struct timeval tv;
+ tv.tv_sec = TimeOut;
+ tv.tv_usec = 0;
+ int Res = 0;
+ if ((Res = select(MaxFd+1,&rfds,&wfds,0,&tv)) < 0)
{
- HaveContent = true;
-
- if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2)
- return _error->Error(_("The HTTP server sent an invalid Content-Range header"));
- if ((unsigned long long)StartPos > Size)
- return _error->Error(_("This HTTP server has broken range support"));
- return true;
+ if (errno == EINTR)
+ return true;
+ return _error->Errno("select",_("Select failed"));
}
- if (stringcasecmp(Tag,"Transfer-Encoding:") == 0)
+ if (Res == 0)
{
- HaveContent = true;
- if (stringcasecmp(Val,"chunked") == 0)
- Encoding = Chunked;
- return true;
+ _error->Error(_("Connection timed out"));
+ return Die(*File);
}
-
- if (stringcasecmp(Tag,"Connection:") == 0)
+
+ // Handle server IO
+ if (ServerFd != -1 && FD_ISSET(ServerFd,&rfds))
{
- if (stringcasecmp(Val,"close") == 0)
- Persistent = false;
- if (stringcasecmp(Val,"keep-alive") == 0)
- Persistent = true;
- return true;
+ errno = 0;
+ if (In.Read(ServerFd) == false)
+ return Die(*File);
}
-
- if (stringcasecmp(Tag,"Last-Modified:") == 0)
+
+ if (ServerFd != -1 && FD_ISSET(ServerFd,&wfds))
{
- if (RFC1123StrToTime(Val.c_str(), Date) == false)
- return _error->Error(_("Unknown date format"));
- return true;
+ errno = 0;
+ if (Out.Write(ServerFd) == false)
+ return Die(*File);
}
- if (stringcasecmp(Tag,"Location:") == 0)
+ // Send data to the file
+ if (FileFD != -1 && FD_ISSET(FileFD,&wfds))
{
- Location = Val;
- return true;
+ if (In.Write(FileFD) == false)
+ return _error->Errno("write",_("Error writing to output file"));
}
+ // Handle commands from APT
+ if (FD_ISSET(STDIN_FILENO,&rfds))
+ {
+ if (Owner->Run(true) != -1)
+ exit(100);
+ }
+
return true;
}
/*}}}*/
@@ -661,7 +673,7 @@ bool ServerState::HeaderLine(string Line)
// HttpMethod::SendReq - Send the HTTP request /*{{{*/
// ---------------------------------------------------------------------
/* This places the http request in the outbound buffer */
-void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
+void HttpMethod::SendReq(FetchItem *Itm)
{
URI Uri = Itm->Uri;
@@ -687,7 +699,7 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
but while its a must for all servers to accept absolute URIs,
it is assumed clients will sent an absolute path for non-proxies */
std::string requesturi;
- if (Proxy.empty() == true || Proxy.Host.empty())
+ if (Server->Proxy.empty() == true || Server->Proxy.Host.empty())
requesturi = Uri.Path;
else
requesturi = Itm->Uri;
@@ -742,7 +754,7 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
{
// In this case we send an if-range query with a range header
- sprintf(Buf,"Range: bytes=%lli-\r\nIf-Range: %s\r\n",(long long)SBuf.st_size - 1,
+ sprintf(Buf,"Range: bytes=%lli-\r\nIf-Range: %s\r\n",(long long)SBuf.st_size,
TimeRFC1123(SBuf.st_mtime).c_str());
Req += Buf;
}
@@ -755,9 +767,9 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
}
}
- if (Proxy.User.empty() == false || Proxy.Password.empty() == false)
+ if (Server->Proxy.User.empty() == false || Server->Proxy.Password.empty() == false)
Req += string("Proxy-Authorization: Basic ") +
- Base64Encode(Proxy.User + ":" + Proxy.Password) + "\r\n";
+ Base64Encode(Server->Proxy.User + ":" + Server->Proxy.Password) + "\r\n";
maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc"));
if (Uri.User.empty() == false || Uri.Password.empty() == false)
@@ -771,340 +783,18 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out)
if (Debug == true)
cerr << Req << endl;
- Out.Read(Req);
+ Server->WriteResponse(Req);
}
/*}}}*/
-// HttpMethod::Go - Run a single loop /*{{{*/
-// ---------------------------------------------------------------------
-/* This runs the select loop over the server FDs, Output file FDs and
- stdin. */
-bool HttpMethod::Go(bool ToFile,ServerState *Srv)
-{
- // Server has closed the connection
- if (Srv->ServerFd == -1 && (Srv->In.WriteSpace() == false ||
- ToFile == false))
- return false;
-
- fd_set rfds,wfds;
- FD_ZERO(&rfds);
- FD_ZERO(&wfds);
-
- /* Add the server. We only send more requests if the connection will
- be persisting */
- if (Srv->Out.WriteSpace() == true && Srv->ServerFd != -1
- && Srv->Persistent == true)
- FD_SET(Srv->ServerFd,&wfds);
- if (Srv->In.ReadSpace() == true && Srv->ServerFd != -1)
- FD_SET(Srv->ServerFd,&rfds);
-
- // Add the file
- int FileFD = -1;
- if (File != 0)
- FileFD = File->Fd();
-
- if (Srv->In.WriteSpace() == true && ToFile == true && FileFD != -1)
- FD_SET(FileFD,&wfds);
-
- // Add stdin
- if (_config->FindB("Acquire::http::DependOnSTDIN", true) == true)
- FD_SET(STDIN_FILENO,&rfds);
-
- // Figure out the max fd
- int MaxFd = FileFD;
- if (MaxFd < Srv->ServerFd)
- MaxFd = Srv->ServerFd;
-
- // Select
- struct timeval tv;
- tv.tv_sec = TimeOut;
- tv.tv_usec = 0;
- int Res = 0;
- if ((Res = select(MaxFd+1,&rfds,&wfds,0,&tv)) < 0)
- {
- if (errno == EINTR)
- return true;
- return _error->Errno("select",_("Select failed"));
- }
-
- if (Res == 0)
- {
- _error->Error(_("Connection timed out"));
- return ServerDie(Srv);
- }
-
- // Handle server IO
- if (Srv->ServerFd != -1 && FD_ISSET(Srv->ServerFd,&rfds))
- {
- errno = 0;
- if (Srv->In.Read(Srv->ServerFd) == false)
- return ServerDie(Srv);
- }
-
- if (Srv->ServerFd != -1 && FD_ISSET(Srv->ServerFd,&wfds))
- {
- errno = 0;
- if (Srv->Out.Write(Srv->ServerFd) == false)
- return ServerDie(Srv);
- }
-
- // Send data to the file
- if (FileFD != -1 && FD_ISSET(FileFD,&wfds))
- {
- if (Srv->In.Write(FileFD) == false)
- return _error->Errno("write",_("Error writing to output file"));
- }
-
- // Handle commands from APT
- if (FD_ISSET(STDIN_FILENO,&rfds))
- {
- if (Run(true) != -1)
- exit(100);
- }
-
- return true;
-}
- /*}}}*/
-// HttpMethod::Flush - Dump the buffer into the file /*{{{*/
-// ---------------------------------------------------------------------
-/* This takes the current input buffer from the Server FD and writes it
- into the file */
-bool HttpMethod::Flush(ServerState *Srv)
-{
- if (File != 0)
- {
- // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking
- // can't be set
- if (File->Name() != "/dev/null")
- SetNonBlock(File->Fd(),false);
- if (Srv->In.WriteSpace() == false)
- return true;
-
- while (Srv->In.WriteSpace() == true)
- {
- if (Srv->In.Write(File->Fd()) == false)
- return _error->Errno("write",_("Error writing to file"));
- if (Srv->In.IsLimit() == true)
- return true;
- }
-
- if (Srv->In.IsLimit() == true || Srv->Encoding == ServerState::Closes)
- return true;
- }
- return false;
-}
- /*}}}*/
-// HttpMethod::ServerDie - The server has closed the connection. /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-bool HttpMethod::ServerDie(ServerState *Srv)
-{
- unsigned int LErrno = errno;
-
- // Dump the buffer to the file
- if (Srv->State == ServerState::Data)
- {
- // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking
- // can't be set
- if (File->Name() != "/dev/null")
- SetNonBlock(File->Fd(),false);
- while (Srv->In.WriteSpace() == true)
- {
- if (Srv->In.Write(File->Fd()) == false)
- return _error->Errno("write",_("Error writing to the file"));
-
- // Done
- if (Srv->In.IsLimit() == true)
- return true;
- }
- }
-
- // See if this is because the server finished the data stream
- if (Srv->In.IsLimit() == false && Srv->State != ServerState::Header &&
- Srv->Encoding != ServerState::Closes)
- {
- Srv->Close();
- if (LErrno == 0)
- return _error->Error(_("Error reading from server. Remote end closed connection"));
- errno = LErrno;
- return _error->Errno("read",_("Error reading from server"));
- }
- else
- {
- Srv->In.Limit(-1);
-
- // Nothing left in the buffer
- if (Srv->In.WriteSpace() == false)
- return false;
-
- // We may have got multiple responses back in one packet..
- Srv->Close();
- return true;
- }
-
- return false;
-}
- /*}}}*/
-// HttpMethod::DealWithHeaders - Handle the retrieved header data /*{{{*/
-// ---------------------------------------------------------------------
-/* We look at the header data we got back from the server and decide what
- to do. Returns DealWithHeadersResult (see http.h for details).
- */
-HttpMethod::DealWithHeadersResult
-HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv)
-{
- // Not Modified
- if (Srv->Result == 304)
- {
- unlink(Queue->DestFile.c_str());
- Res.IMSHit = true;
- Res.LastModified = Queue->LastModified;
- return IMS_HIT;
- }
-
- /* Redirect
- *
- * Note that it is only OK for us to treat all redirection the same
- * because we *always* use GET, not other HTTP methods. There are
- * three redirection codes for which it is not appropriate that we
- * redirect. Pass on those codes so the error handling kicks in.
- */
- if (AllowRedirect
- && (Srv->Result > 300 && Srv->Result < 400)
- && (Srv->Result != 300 // Multiple Choices
- && Srv->Result != 304 // Not Modified
- && Srv->Result != 306)) // (Not part of HTTP/1.1, reserved)
- {
- if (Srv->Location.empty() == true);
- else if (Srv->Location[0] == '/' && Queue->Uri.empty() == false)
- {
- URI Uri = Queue->Uri;
- if (Uri.Host.empty() == false)
- NextURI = URI::SiteOnly(Uri);
- else
- NextURI.clear();
- NextURI.append(DeQuoteString(Srv->Location));
- return TRY_AGAIN_OR_REDIRECT;
- }
- else
- {
- NextURI = DeQuoteString(Srv->Location);
- URI tmpURI = NextURI;
- // Do not allow a redirection to switch protocol
- if (tmpURI.Access == "http")
- return TRY_AGAIN_OR_REDIRECT;
- }
- /* else pass through for error message */
- }
-
- /* We have a reply we dont handle. This should indicate a perm server
- failure */
- if (Srv->Result < 200 || Srv->Result >= 300)
- {
- char err[255];
- snprintf(err,sizeof(err)-1,"HttpError%i",Srv->Result);
- SetFailReason(err);
- _error->Error("%u %s",Srv->Result,Srv->Code);
- if (Srv->HaveContent == true)
- return ERROR_WITH_CONTENT_PAGE;
- return ERROR_UNRECOVERABLE;
- }
-
- // This is some sort of 2xx 'data follows' reply
- Res.LastModified = Srv->Date;
- Res.Size = Srv->Size;
-
- // Open the file
- delete File;
- File = new FileFd(Queue->DestFile,FileFd::WriteAny);
- if (_error->PendingError() == true)
- return ERROR_NOT_FROM_SERVER;
-
- FailFile = Queue->DestFile;
- FailFile.c_str(); // Make sure we dont do a malloc in the signal handler
- FailFd = File->Fd();
- FailTime = Srv->Date;
-
- delete Srv->In.Hash;
- Srv->In.Hash = new Hashes;
-
- // Set the expected size and read file for the hashes
- if (Srv->StartPos >= 0)
- {
- Res.ResumePoint = Srv->StartPos;
- File->Truncate(Srv->StartPos);
-
- if (Srv->In.Hash->AddFD(*File,Srv->StartPos) == false)
- {
- _error->Errno("read",_("Problem hashing file"));
- return ERROR_NOT_FROM_SERVER;
- }
- }
-
- SetNonBlock(File->Fd(),true);
- return FILE_IS_OPEN;
-}
- /*}}}*/
-// HttpMethod::SigTerm - Handle a fatal signal /*{{{*/
-// ---------------------------------------------------------------------
-/* This closes and timestamps the open file. This is neccessary to get
- resume behavoir on user abort */
-void HttpMethod::SigTerm(int)
-{
- if (FailFd == -1)
- _exit(100);
- close(FailFd);
-
- // Timestamp
- struct utimbuf UBuf;
- UBuf.actime = FailTime;
- UBuf.modtime = FailTime;
- utime(FailFile.c_str(),&UBuf);
-
- _exit(100);
-}
- /*}}}*/
-// HttpMethod::Fetch - Fetch an item /*{{{*/
-// ---------------------------------------------------------------------
-/* This adds an item to the pipeline. We keep the pipeline at a fixed
- depth. */
-bool HttpMethod::Fetch(FetchItem *)
-{
- if (Server == 0)
- return true;
-
- // Queue the requests
- int Depth = -1;
- for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth;
- I = I->Next, Depth++)
- {
- // If pipelining is disabled, we only queue 1 request
- if (Server->Pipeline == false && Depth >= 0)
- break;
-
- // Make sure we stick with the same server
- if (Server->Comp(I->Uri) == false)
- break;
- if (QueueBack == I)
- {
- QueueBack = I->Next;
- SendReq(I,Server->Out);
- continue;
- }
- }
-
- return true;
-};
- /*}}}*/
// HttpMethod::Configuration - Handle a configuration message /*{{{*/
// ---------------------------------------------------------------------
/* We stash the desired pipeline depth */
bool HttpMethod::Configuration(string Message)
{
- if (pkgAcqMethod::Configuration(Message) == false)
+ if (ServerMethod::Configuration(Message) == false)
return false;
-
+
AllowRedirect = _config->FindB("Acquire::http::AllowRedirect",true);
- TimeOut = _config->FindI("Acquire::http::Timeout",TimeOut);
PipelineDepth = _config->FindI("Acquire::http::Pipeline-Depth",
PipelineDepth);
Debug = _config->FindB("Debug::Acquire::http",false);
@@ -1116,258 +806,6 @@ bool HttpMethod::Configuration(string Message)
return true;
}
/*}}}*/
-// HttpMethod::Loop - Main loop /*{{{*/
-// ---------------------------------------------------------------------
-/* */
-int HttpMethod::Loop()
-{
- typedef vector<string> StringVector;
- typedef vector<string>::iterator StringVectorIterator;
- map<string, StringVector> Redirected;
-
- signal(SIGTERM,SigTerm);
- signal(SIGINT,SigTerm);
-
- Server = 0;
-
- int FailCounter = 0;
- while (1)
- {
- // We have no commands, wait for some to arrive
- if (Queue == 0)
- {
- if (WaitFd(STDIN_FILENO) == false)
- return 0;
- }
-
- /* Run messages, we can accept 0 (no message) if we didn't
- do a WaitFd above.. Otherwise the FD is closed. */
- int Result = Run(true);
- if (Result != -1 && (Result != 0 || Queue == 0))
- {
- if(FailReason.empty() == false ||
- _config->FindB("Acquire::http::DependOnSTDIN", true) == true)
- return 100;
- else
- return 0;
- }
-
- if (Queue == 0)
- continue;
-
- // Connect to the server
- if (Server == 0 || Server->Comp(Queue->Uri) == false)
- {
- delete Server;
- Server = new ServerState(Queue->Uri,this);
- }
- /* If the server has explicitly said this is the last connection
- then we pre-emptively shut down the pipeline and tear down
- the connection. This will speed up HTTP/1.0 servers a tad
- since we don't have to wait for the close sequence to
- complete */
- if (Server->Persistent == false)
- Server->Close();
-
- // Reset the pipeline
- if (Server->ServerFd == -1)
- QueueBack = Queue;
-
- // Connnect to the host
- if (Server->Open() == false)
- {
- Fail(true);
- delete Server;
- Server = 0;
- continue;
- }
-
- // Fill the pipeline.
- Fetch(0);
-
- // Fetch the next URL header data from the server.
- switch (Server->RunHeaders())
- {
- case ServerState::RUN_HEADERS_OK:
- break;
-
- // The header data is bad
- case ServerState::RUN_HEADERS_PARSE_ERROR:
- {
- _error->Error(_("Bad header data"));
- Fail(true);
- RotateDNS();
- continue;
- }
-
- // The server closed a connection during the header get..
- default:
- case ServerState::RUN_HEADERS_IO_ERROR:
- {
- FailCounter++;
- _error->Discard();
- Server->Close();
- Server->Pipeline = false;
-
- if (FailCounter >= 2)
- {
- Fail(_("Connection failed"),true);
- FailCounter = 0;
- }
-
- RotateDNS();
- continue;
- }
- };
-
- // Decide what to do.
- FetchResult Res;
- Res.Filename = Queue->DestFile;
- switch (DealWithHeaders(Res,Server))
- {
- // Ok, the file is Open
- case FILE_IS_OPEN:
- {
- URIStart(Res);
-
- // Run the data
- bool Result = Server->RunData();
-
- /* If the server is sending back sizeless responses then fill in
- the size now */
- if (Res.Size == 0)
- Res.Size = File->Size();
-
- // Close the file, destroy the FD object and timestamp it
- FailFd = -1;
- delete File;
- File = 0;
-
- // Timestamp
- struct utimbuf UBuf;
- time(&UBuf.actime);
- UBuf.actime = Server->Date;
- UBuf.modtime = Server->Date;
- utime(Queue->DestFile.c_str(),&UBuf);
-
- // Send status to APT
- if (Result == true)
- {
- Res.TakeHashes(*Server->In.Hash);
- URIDone(Res);
- }
- else
- {
- if (Server->ServerFd == -1)
- {
- FailCounter++;
- _error->Discard();
- Server->Close();
-
- if (FailCounter >= 2)
- {
- Fail(_("Connection failed"),true);
- FailCounter = 0;
- }
-
- QueueBack = Queue;
- }
- else
- Fail(true);
- }
- break;
- }
-
- // IMS hit
- case IMS_HIT:
- {
- URIDone(Res);
- break;
- }
-
- // Hard server error, not found or something
- case ERROR_UNRECOVERABLE:
- {
- Fail();
- break;
- }
-
- // Hard internal error, kill the connection and fail
- case ERROR_NOT_FROM_SERVER:
- {
- delete File;
- File = 0;
-
- Fail();
- RotateDNS();
- Server->Close();
- break;
- }
-
- // We need to flush the data, the header is like a 404 w/ error text
- case ERROR_WITH_CONTENT_PAGE:
- {
- Fail();
-
- // Send to content to dev/null
- File = new FileFd("/dev/null",FileFd::WriteExists);
- Server->RunData();
- delete File;
- File = 0;
- break;
- }
-
- // Try again with a new URL
- case TRY_AGAIN_OR_REDIRECT:
- {
- // Clear rest of response if there is content
- if (Server->HaveContent)
- {
- File = new FileFd("/dev/null",FileFd::WriteExists);
- Server->RunData();
- delete File;
- File = 0;
- }
-
- /* Detect redirect loops. No more redirects are allowed
- after the same URI is seen twice in a queue item. */
- StringVector &R = Redirected[Queue->DestFile];
- bool StopRedirects = false;
- if (R.empty() == true)
- R.push_back(Queue->Uri);
- else if (R[0] == "STOP" || R.size() > 10)
- StopRedirects = true;
- else
- {
- for (StringVectorIterator I = R.begin(); I != R.end(); ++I)
- if (Queue->Uri == *I)
- {
- R[0] = "STOP";
- break;
- }
-
- R.push_back(Queue->Uri);
- }
-
- if (StopRedirects == false)
- Redirect(NextURI);
- else
- Fail();
-
- break;
- }
-
- default:
- Fail(_("Internal error"));
- break;
- }
-
- FailCounter = 0;
- }
-
- return 0;
-}
- /*}}}*/
// HttpMethod::AutoDetectProxy - auto detect proxy /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -1420,5 +858,13 @@ bool HttpMethod::AutoDetectProxy()
return true;
}
/*}}}*/
-
-
+ServerState * HttpMethod::CreateServerState(URI uri) /*{{{*/
+{
+ return new HttpServerState(uri, this);
+}
+ /*}}}*/
+void HttpMethod::RotateDNS() /*{{{*/
+{
+ ::RotateDNS();
+}
+ /*}}}*/
diff --git a/methods/http.h b/methods/http.h
index 7446119cd..02c04e8ae 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -15,6 +15,8 @@
#include <string>
+#include "server.h"
+
using std::cout;
using std::endl;
@@ -31,7 +33,7 @@ class CircleBuf
unsigned long long StrPos;
unsigned long long MaxGet;
struct timeval Start;
-
+
static unsigned long long BwReadLimit;
static unsigned long long BwTickReadData;
static struct timeval BwReadTick;
@@ -54,21 +56,20 @@ class CircleBuf
return Sz;
}
void FillOut();
-
+
public:
-
Hashes *Hash;
-
+
// Read data in
bool Read(int Fd);
bool Read(std::string Data);
-
+
// Write data out
bool Write(int Fd);
bool WriteTillEl(std::string &Data,bool Single = false);
-
+
// Control the write limit
- void Limit(long long Max) {if (Max == -1) MaxGet = 0-1; else MaxGet = OutP + Max;}
+ void Limit(long long Max) {if (Max == -1) MaxGet = 0-1; else MaxGet = OutP + Max;}
bool IsLimit() const {return MaxGet == OutP;};
void Print() const {cout << MaxGet << ',' << OutP << endl;};
@@ -84,114 +85,56 @@ class CircleBuf
~CircleBuf();
};
-struct ServerState
+struct HttpServerState: public ServerState
{
- // This is the last parsed Header Line
- unsigned int Major;
- unsigned int Minor;
- unsigned int Result;
- char Code[360];
-
- // These are some statistics from the last parsed header lines
- unsigned long long Size;
- signed long long StartPos;
- time_t Date;
- bool HaveContent;
- enum {Chunked,Stream,Closes} Encoding;
- enum {Header, Data} State;
- bool Persistent;
- std::string Location;
-
- // This is a Persistent attribute of the server itself.
- bool Pipeline;
-
- HttpMethod *Owner;
-
// This is the connection itself. Output is data FROM the server
CircleBuf In;
CircleBuf Out;
int ServerFd;
- URI ServerName;
-
- bool HeaderLine(std::string Line);
- bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
- void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0;
- StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
- State = Header; Persistent = false; ServerFd = -1;
- Pipeline = true;};
-
- /** \brief Result of the header acquire */
- enum RunHeadersResult {
- /** \brief Header ok */
- RUN_HEADERS_OK,
- /** \brief IO error while retrieving */
- RUN_HEADERS_IO_ERROR,
- /** \brief Parse error after retrieving */
- RUN_HEADERS_PARSE_ERROR,
- };
- /** \brief Get the headers before the data */
- RunHeadersResult RunHeaders();
- /** \brief Transfer the data from the socket */
- bool RunData();
-
- bool Open();
- bool Close();
-
- ServerState(URI Srv,HttpMethod *Owner);
- ~ServerState() {Close();};
+
+ protected:
+ virtual bool ReadHeaderLines(std::string &Data);
+ virtual bool LoadNextResponse(bool const ToFile, FileFd * const File);
+ virtual bool WriteResponse(std::string const &Data);
+
+ public:
+ virtual void Reset() { ServerState::Reset(); ServerFd = -1; };
+
+ virtual bool RunData(FileFd * const File);
+
+ virtual bool Open();
+ virtual bool IsOpen();
+ virtual bool Close();
+ virtual bool InitHashes(FileFd &File);
+ virtual Hashes * GetHashes();
+ virtual bool Die(FileFd &File);
+ virtual bool Flush(FileFd * const File);
+ virtual bool Go(bool ToFile, FileFd * const File);
+
+ HttpServerState(URI Srv, HttpMethod *Owner);
+ virtual ~HttpServerState() {Close();};
};
-class HttpMethod : public pkgAcqMethod
+class HttpMethod : public ServerMethod
{
- void SendReq(FetchItem *Itm,CircleBuf &Out);
- bool Go(bool ToFile,ServerState *Srv);
- bool Flush(ServerState *Srv);
- bool ServerDie(ServerState *Srv);
-
- /** \brief Result of the header parsing */
- enum DealWithHeadersResult {
- /** \brief The file is open and ready */
- FILE_IS_OPEN,
- /** \brief We got a IMS hit, the file has not changed */
- IMS_HIT,
- /** \brief The server reported a unrecoverable error */
- ERROR_UNRECOVERABLE,
- /** \brief The server reported a error with a error content page */
- ERROR_WITH_CONTENT_PAGE,
- /** \brief An error on the client side */
- ERROR_NOT_FROM_SERVER,
- /** \brief A redirect or retry request */
- TRY_AGAIN_OR_REDIRECT
- };
- /** \brief Handle the retrieved header data */
- DealWithHeadersResult DealWithHeaders(FetchResult &Res,ServerState *Srv);
+ public:
+ virtual void SendReq(FetchItem *Itm);
/** \brief Try to AutoDetect the proxy */
bool AutoDetectProxy();
virtual bool Configuration(std::string Message);
-
- // In the event of a fatal signal this file will be closed and timestamped.
- static std::string FailFile;
- static int FailFd;
- static time_t FailTime;
- static void SigTerm(int);
+
+ virtual ServerState * CreateServerState(URI uri);
+ virtual void RotateDNS();
protected:
- virtual bool Fetch(FetchItem *);
-
- std::string NextURI;
std::string AutoDetectProxyCmd;
public:
- friend struct ServerState;
-
- FileFd *File;
- ServerState *Server;
-
- int Loop();
-
- HttpMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig)
+ friend struct HttpServerState;
+
+ HttpMethod() : ServerMethod("1.2",Pipeline | SendConfig)
{
File = 0;
Server = 0;
diff --git a/methods/https.cc b/methods/https.cc
index 84ce2d68f..2a562434b 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -36,6 +36,41 @@
/*}}}*/
using namespace std;
+size_t
+HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
+{
+ size_t len = size * nmemb;
+ HttpsMethod *me = (HttpsMethod *)userp;
+ std::string line((char*) buffer, len);
+ for (--len; len > 0; --len)
+ if (isspace(line[len]) == 0)
+ {
+ ++len;
+ break;
+ }
+ line.erase(len);
+
+ if (line.empty() == true)
+ {
+ if (me->Server->Result != 416 && me->Server->StartPos != 0)
+ ;
+ else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize())
+ {
+ me->Server->Result = 200;
+ me->Server->StartPos = me->Server->Size;
+ }
+ else
+ me->Server->StartPos = 0;
+
+ me->File->Truncate(me->Server->StartPos);
+ me->File->Seek(me->Server->StartPos);
+ }
+ else if (me->Server->HeaderLine(line) == false)
+ return 0;
+
+ return size*nmemb;
+}
+
size_t
HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
{
@@ -59,6 +94,14 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double dlnow,
return 0;
}
+// HttpsServerState::HttpsServerState - Constructor /*{{{*/
+HttpsServerState::HttpsServerState(URI Srv,HttpsMethod *Owner) : ServerState(Srv, NULL)
+{
+ TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
+ Reset();
+}
+ /*}}}*/
+
void HttpsMethod::SetupProxy() /*{{{*/
{
URI ServerName = Queue->Uri;
@@ -121,7 +164,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
struct stat SBuf;
struct curl_slist *headers=NULL;
char curl_errorstr[CURL_ERROR_SIZE];
- long curl_responsecode;
URI Uri = Itm->Uri;
string remotehost = Uri.Host;
@@ -137,6 +179,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// callbacks
curl_easy_setopt(curl, CURLOPT_URL, static_cast<string>(Uri).c_str());
+ curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header);
+ curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, this);
curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback);
@@ -277,7 +321,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
{
char Buf[1000];
- sprintf(Buf, "Range: bytes=%li-", (long) SBuf.st_size - 1);
+ sprintf(Buf, "Range: bytes=%li-", (long) SBuf.st_size);
headers = curl_slist_append(headers, Buf);
sprintf(Buf, "If-Range: %s", TimeRFC1123(SBuf.st_mtime).c_str());
headers = curl_slist_append(headers, Buf);
@@ -290,18 +334,13 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// go for it - if the file exists, append on it
File = new FileFd(Itm->DestFile, FileFd::WriteAny);
- if (File->Size() > 0)
- File->Seek(File->Size() - 1);
-
+ Server = new HttpsServerState(Itm->Uri, this);
+
// keep apt updated
Res.Filename = Itm->DestFile;
// get it!
CURLcode success = curl_easy_perform(curl);
- curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &curl_responsecode);
-
- long curl_servdate;
- curl_easy_getinfo(curl, CURLINFO_FILETIME, &curl_servdate);
// If the server returns 200 OK but the If-Modified-Since condition is not
// met, CURLINFO_CONDITION_UNMET will be set to 1
@@ -309,57 +348,83 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
curl_easy_getinfo(curl, CURLINFO_CONDITION_UNMET, &curl_condition_unmet);
File->Close();
+ curl_slist_free_all(headers);
// cleanup
- if(success != 0 || (curl_responsecode != 200 && curl_responsecode != 304))
+ if (success != 0)
{
_error->Error("%s", curl_errorstr);
- // unlink, no need keep 401/404 page content in partial/
unlink(File->Name().c_str());
- Fail();
+ return false;
+ }
+
+ // server says file not modified
+ if (Server->Result == 304 || curl_condition_unmet == 1)
+ {
+ unlink(File->Name().c_str());
+ Res.IMSHit = true;
+ Res.LastModified = Itm->LastModified;
+ Res.Size = 0;
+ URIDone(Res);
return true;
}
+ Res.IMSHit = false;
- // Timestamp
- struct utimbuf UBuf;
- if (curl_servdate != -1) {
- UBuf.actime = curl_servdate;
- UBuf.modtime = curl_servdate;
- utime(File->Name().c_str(),&UBuf);
+ if (Server->Result != 200 && // OK
+ Server->Result != 206 && // Partial
+ Server->Result != 416) // invalid Range
+ {
+ char err[255];
+ snprintf(err, sizeof(err) - 1, "HttpError%i", Server->Result);
+ SetFailReason(err);
+ _error->Error("%s", err);
+ // unlink, no need keep 401/404 page content in partial/
+ unlink(File->Name().c_str());
+ return false;
}
- // check the downloaded result
- struct stat Buf;
- if (stat(File->Name().c_str(),&Buf) == 0)
+ struct stat resultStat;
+ if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
{
- Res.Filename = File->Name();
- Res.LastModified = Buf.st_mtime;
- Res.IMSHit = false;
- if (curl_responsecode == 304 || curl_condition_unmet)
- {
- unlink(File->Name().c_str());
- Res.IMSHit = true;
- Res.LastModified = Itm->LastModified;
- Res.Size = 0;
- URIDone(Res);
- return true;
- }
- Res.Size = Buf.st_size;
+ _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
+ return false;
+ }
+ Res.Size = resultStat.st_size;
+
+ // invalid range-request
+ if (Server->Result == 416)
+ {
+ unlink(File->Name().c_str());
+ Res.Size = 0;
+ delete File;
+ Redirect(Itm->Uri);
+ return true;
+ }
+
+ // Timestamp
+ curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
+ if (Res.LastModified != -1)
+ {
+ struct utimbuf UBuf;
+ UBuf.actime = Res.LastModified;
+ UBuf.modtime = Res.LastModified;
+ utime(File->Name().c_str(),&UBuf);
}
+ else
+ Res.LastModified = resultStat.st_mtime;
// take hashes
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
Hash.AddFD(Fd);
Res.TakeHashes(Hash);
-
+
// keep apt updated
URIDone(Res);
// cleanup
Res.Size = 0;
delete File;
- curl_slist_free_all(headers);
return true;
};
@@ -374,4 +439,3 @@ int main()
return Mth.Run();
}
-
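parse_header above is wired in with CURLOPT_HEADERFUNCTION/CURLOPT_WRITEHEADER, so every response header line goes through ServerState::HeaderLine before any body bytes reach write_data; returning 0 from the callback is what aborts the transfer on a bad header. A stripped-down sketch of that libcurl contract, independent of apt (the URL and handler body are made up):

  // minimal CURLOPT_HEADERFUNCTION example; illustrative only
  #include <curl/curl.h>
  #include <iostream>
  #include <string>

  static size_t on_header(char *buffer, size_t size, size_t nitems, void *userdata)
  {
     std::string line(buffer, size * nitems);
     // strip trailing CR/LF the same way parse_header does before parsing
     while (line.empty() == false &&
            (line[line.size()-1] == '\r' || line[line.size()-1] == '\n'))
        line.erase(line.size()-1);
     static_cast<std::string *>(userdata)->append(line + "\n");
     return size * nitems;   // any other value makes curl abort the transfer
  }

  int main()
  {
     CURL *curl = curl_easy_init();
     if (curl == NULL)
        return 1;
     std::string headers;
     curl_easy_setopt(curl, CURLOPT_URL, "http://example.org/");   // hypothetical URL
     curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, on_header);
     curl_easy_setopt(curl, CURLOPT_HEADERDATA, &headers);         // CURLOPT_WRITEHEADER is the older alias
     CURLcode rc = curl_easy_perform(curl);
     curl_easy_cleanup(curl);
     std::cout << headers;
     return rc == CURLE_OK ? 0 : 1;
  }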
diff --git a/methods/https.h b/methods/https.h
index 293e288e0..8632d6d02 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -14,24 +14,53 @@
#include <iostream>
#include <curl/curl.h>
+#include "server.h"
+
using std::cout;
using std::endl;
class HttpsMethod;
class FileFd;
+class HttpsServerState : public ServerState
+{
+ protected:
+ virtual bool ReadHeaderLines(std::string &Data) { return false; }
+ virtual bool LoadNextResponse(bool const ToFile, FileFd * const File) { return false; }
+
+ public:
+ virtual bool WriteResponse(std::string const &Data) { return false; }
+
+ /** \brief Transfer the data from the socket */
+ virtual bool RunData(FileFd * const File) { return false; }
+
+ virtual bool Open() { return false; }
+ virtual bool IsOpen() { return false; }
+ virtual bool Close() { return false; }
+ virtual bool InitHashes(FileFd &File) { return false; }
+ virtual Hashes * GetHashes() { return NULL; }
+ virtual bool Die(FileFd &File) { return false; }
+ virtual bool Flush(FileFd * const File) { return false; }
+ virtual bool Go(bool ToFile, FileFd * const File) { return false; }
+
+ HttpsServerState(URI Srv, HttpsMethod *Owner);
+ virtual ~HttpsServerState() {Close();};
+};
+
class HttpsMethod : public pkgAcqMethod
{
// minimum speed in bytes/sec that triggers download timeout handling
static const int DL_MIN_SPEED = 10;
virtual bool Fetch(FetchItem *);
+ static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp);
static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp);
static int progress_callback(void *clientp, double dltotal, double dlnow,
double ultotal, double ulnow);
void SetupProxy();
CURL *curl;
FetchResult Res;
+ HttpsServerState *Server;
public:
FileFd *File;
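A note on the class above: HttpsServerState stubs out all socket handling because libcurl owns the transport; the https method only wants the shared header bookkeeping (Result, Size, StartPos, Persistent). The body of the new parse_header callback is not part of this hunk, but its likely shape is a thin bridge into ServerState::HeaderLine. A sketch under that assumption (the real callback is a static member and may be wired differently):

#include <string>
#include <curl/curl.h>
#include "https.h"

// Sketch: feed every header line curl delivers into the shared parser.
// Assumes userp points at the HttpsServerState instance.
static size_t parse_header_sketch(void *buffer, size_t size, size_t nmemb, void *userp)
{
   size_t const len = size * nmemb;
   HttpsServerState * const srv = static_cast<HttpsServerState *>(userp);
   std::string line(static_cast<char *>(buffer), len);

   // strip the CR/LF curl hands over together with each header line
   while (line.empty() == false &&
          (line[line.size() - 1] == '\r' || line[line.size() - 1] == '\n'))
      line.erase(line.size() - 1);

   // returning a short count makes curl abort the transfer
   if (srv->HeaderLine(line) == false)
      return 0;
   return len;
}

Registering it would be the usual pair of curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, …) and curl_easy_setopt(curl, CURLOPT_HEADERDATA, …) calls.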
diff --git a/methods/makefile b/methods/makefile
index 294c55d23..6b7781294 100644
--- a/methods/makefile
+++ b/methods/makefile
@@ -48,14 +48,14 @@ include $(PROGRAM_H)
PROGRAM=http
SLIBS = -lapt-pkg $(SOCKETLIBS) $(INTLLIBS)
LIB_MAKES = apt-pkg/makefile
-SOURCE = http.cc http_main.cc rfc2553emu.cc connect.cc
+SOURCE = http.cc http_main.cc rfc2553emu.cc connect.cc server.cc
include $(PROGRAM_H)
# The https method
PROGRAM=https
SLIBS = -lapt-pkg -lcurl $(INTLLIBS)
LIB_MAKES = apt-pkg/makefile
-SOURCE = https.cc
+SOURCE = https.cc server.cc
include $(PROGRAM_H)
# The ftp method
@@ -83,7 +83,7 @@ include $(PROGRAM_H)
PROGRAM=mirror
SLIBS = -lapt-pkg $(SOCKETLIBS)
LIB_MAKES = apt-pkg/makefile
-SOURCE = mirror.cc http.cc rfc2553emu.cc connect.cc
+SOURCE = mirror.cc http.cc rfc2553emu.cc connect.cc server.cc
include $(PROGRAM_H)
# SSH method symlink
diff --git a/methods/server.cc b/methods/server.cc
new file mode 100644
index 000000000..a2128441c
--- /dev/null
+++ b/methods/server.cc
@@ -0,0 +1,665 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ HTTP and HTTPS share a lot of common code and these classes are
+ exactly the dumping ground for this common code
+
+ ##################################################################### */
+ /*}}}*/
+// Include Files /*{{{*/
+#include <config.h>
+
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/acquire-method.h>
+#include <apt-pkg/configuration.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/hashes.h>
+#include <apt-pkg/netrc.h>
+
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <utime.h>
+#include <unistd.h>
+#include <signal.h>
+#include <stdio.h>
+#include <errno.h>
+#include <string.h>
+#include <climits>
+#include <iostream>
+#include <map>
+
+// Internet stuff
+#include <netdb.h>
+
+#include "config.h"
+#include "connect.h"
+#include "rfc2553emu.h"
+#include "http.h"
+
+#include <apti18n.h>
+ /*}}}*/
+using namespace std;
+
+string ServerMethod::FailFile;
+int ServerMethod::FailFd = -1;
+time_t ServerMethod::FailTime = 0;
+
+// ServerState::RunHeaders - Get the headers before the data /*{{{*/
+// ---------------------------------------------------------------------
+/* Returns RUN_HEADERS_OK if things are OK, RUN_HEADERS_IO_ERROR if an IO error
+ occurred and RUN_HEADERS_PARSE_ERROR if a header parse error occurred */
+ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File)
+{
+ State = Header;
+
+ Owner->Status(_("Waiting for headers"));
+
+ Major = 0;
+ Minor = 0;
+ Result = 0;
+ Size = 0;
+ StartPos = 0;
+ Encoding = Closes;
+ HaveContent = false;
+ time(&Date);
+
+ do
+ {
+ string Data;
+ if (ReadHeaderLines(Data) == false)
+ continue;
+
+ if (Owner->Debug == true)
+ clog << Data;
+
+ for (string::const_iterator I = Data.begin(); I < Data.end(); ++I)
+ {
+ string::const_iterator J = I;
+ for (; J != Data.end() && *J != '\n' && *J != '\r'; ++J);
+ if (HeaderLine(string(I,J)) == false)
+ return RUN_HEADERS_PARSE_ERROR;
+ I = J;
+ }
+
+ // 100 Continue is a Nop...
+ if (Result == 100)
+ continue;
+
+ // Tidy up the connection persistence state.
+ if (Encoding == Closes && HaveContent == true)
+ Persistent = false;
+
+ return RUN_HEADERS_OK;
+ }
+ while (LoadNextResponse(false, File) == true);
+
+ return RUN_HEADERS_IO_ERROR;
+}
+ /*}}}*/
+// ServerState::HeaderLine - Process a header line /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool ServerState::HeaderLine(string Line)
+{
+ if (Line.empty() == true)
+ return true;
+
+ string::size_type Pos = Line.find(' ');
+ if (Pos == string::npos || Pos+1 > Line.length())
+ {
+ // Blah, some servers use "connection:closes", evil.
+ Pos = Line.find(':');
+ if (Pos == string::npos || Pos + 2 > Line.length())
+ return _error->Error(_("Bad header line"));
+ Pos++;
+ }
+
+ // Skip any spaces between the ':' and the start of the value.
+ string::size_type Pos2 = Pos;
+ while (Pos2 < Line.length() && isspace(Line[Pos2]) != 0)
+ Pos2++;
+
+ string Tag = string(Line,0,Pos);
+ string Val = string(Line,Pos2);
+
+ if (stringcasecmp(Tag.c_str(),Tag.c_str()+4,"HTTP") == 0)
+ {
+ // Evil servers return no version
+ if (Line[4] == '/')
+ {
+ int const elements = sscanf(Line.c_str(),"HTTP/%3u.%3u %3u%359[^\n]",&Major,&Minor,&Result,Code);
+ if (elements == 3)
+ {
+ Code[0] = '\0';
+ if (Owner->Debug == true)
+ clog << "HTTP server doesn't give Reason-Phrase for " << Result << std::endl;
+ }
+ else if (elements != 4)
+ return _error->Error(_("The HTTP server sent an invalid reply header"));
+ }
+ else
+ {
+ Major = 0;
+ Minor = 9;
+ if (sscanf(Line.c_str(),"HTTP %3u%359[^\n]",&Result,Code) != 2)
+ return _error->Error(_("The HTTP server sent an invalid reply header"));
+ }
+
+ /* Check the HTTP response header to get the default persistence
+ state. */
+ if (Major < 1)
+ Persistent = false;
+ else
+ {
+ if (Major == 1 && Minor == 0)
+ Persistent = false;
+ else
+ Persistent = true;
+ }
+
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Content-Length:") == 0)
+ {
+ if (Encoding == Closes)
+ Encoding = Stream;
+ HaveContent = true;
+
+ // The length is already set from the Content-Range header
+ if (StartPos != 0)
+ return true;
+
+ Size = strtoull(Val.c_str(), NULL, 10);
+ if (Size >= std::numeric_limits<unsigned long long>::max())
+ return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header"));
+ else if (Size == 0)
+ HaveContent = false;
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Content-Type:") == 0)
+ {
+ HaveContent = true;
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Content-Range:") == 0)
+ {
+ HaveContent = true;
+
+ // §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416
+ if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1)
+ {
+ StartPos = 1; // ignore Content-Length, it would override Size
+ HaveContent = false;
+ }
+ else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2)
+ return _error->Error(_("The HTTP server sent an invalid Content-Range header"));
+ if ((unsigned long long)StartPos > Size)
+ return _error->Error(_("This HTTP server has broken range support"));
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Transfer-Encoding:") == 0)
+ {
+ HaveContent = true;
+ if (stringcasecmp(Val,"chunked") == 0)
+ Encoding = Chunked;
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Connection:") == 0)
+ {
+ if (stringcasecmp(Val,"close") == 0)
+ Persistent = false;
+ if (stringcasecmp(Val,"keep-alive") == 0)
+ Persistent = true;
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Last-Modified:") == 0)
+ {
+ if (RFC1123StrToTime(Val.c_str(), Date) == false)
+ return _error->Error(_("Unknown date format"));
+ return true;
+ }
+
+ if (stringcasecmp(Tag,"Location:") == 0)
+ {
+ Location = Val;
+ return true;
+ }
+
+ return true;
+}
+ /*}}}*/
+// ServerState::ServerState - Constructor /*{{{*/
+ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOut(120), Owner(Owner)
+{
+ Reset();
+}
+ /*}}}*/
+
+bool ServerMethod::Configuration(string Message) /*{{{*/
+{
+ return pkgAcqMethod::Configuration(Message);
+}
+ /*}}}*/
+
+// ServerMethod::DealWithHeaders - Handle the retrieved header data /*{{{*/
+// ---------------------------------------------------------------------
+/* We look at the header data we got back from the server and decide what
+ to do. Returns DealWithHeadersResult (see server.h for details).
+ */
+ServerMethod::DealWithHeadersResult
+ServerMethod::DealWithHeaders(FetchResult &Res)
+{
+ // Not Modified
+ if (Server->Result == 304)
+ {
+ unlink(Queue->DestFile.c_str());
+ Res.IMSHit = true;
+ Res.LastModified = Queue->LastModified;
+ return IMS_HIT;
+ }
+
+ /* Redirect
+ *
+ * Note that it is only OK for us to treat all redirection the same
+ * because we *always* use GET, not other HTTP methods. There are
+ * three redirection codes for which it is not appropriate that we
+ * redirect. Pass on those codes so the error handling kicks in.
+ */
+ if (AllowRedirect
+ && (Server->Result > 300 && Server->Result < 400)
+ && (Server->Result != 300 // Multiple Choices
+ && Server->Result != 304 // Not Modified
+ && Server->Result != 306)) // (Not part of HTTP/1.1, reserved)
+ {
+ if (Server->Location.empty() == true);
+ else if (Server->Location[0] == '/' && Queue->Uri.empty() == false)
+ {
+ URI Uri = Queue->Uri;
+ if (Uri.Host.empty() == false)
+ NextURI = URI::SiteOnly(Uri);
+ else
+ NextURI.clear();
+ NextURI.append(DeQuoteString(Server->Location));
+ return TRY_AGAIN_OR_REDIRECT;
+ }
+ else
+ {
+ NextURI = DeQuoteString(Server->Location);
+ URI tmpURI = NextURI;
+ // Do not allow a redirection to switch protocol
+ if (tmpURI.Access == "http")
+ return TRY_AGAIN_OR_REDIRECT;
+ }
+ /* else pass through for error message */
+ }
+ // retry after an invalid range response without partial data
+ else if (Server->Result == 416)
+ {
+ struct stat SBuf;
+ if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
+ {
+ if ((unsigned long long)SBuf.st_size == Server->Size)
+ {
+ // the file is completely downloaded, but was not moved
+ Server->StartPos = Server->Size;
+ Server->Result = 200;
+ Server->HaveContent = false;
+ }
+ else if (unlink(Queue->DestFile.c_str()) == 0)
+ {
+ NextURI = Queue->Uri;
+ return TRY_AGAIN_OR_REDIRECT;
+ }
+ }
+ }
+
+ /* We have a reply we don't handle. This should indicate a permanent server
+ failure */
+ if (Server->Result < 200 || Server->Result >= 300)
+ {
+ char err[255];
+ snprintf(err,sizeof(err)-1,"HttpError%i",Server->Result);
+ SetFailReason(err);
+ _error->Error("%u %s",Server->Result,Server->Code);
+ if (Server->HaveContent == true)
+ return ERROR_WITH_CONTENT_PAGE;
+ return ERROR_UNRECOVERABLE;
+ }
+
+ // This is some sort of 2xx 'data follows' reply
+ Res.LastModified = Server->Date;
+ Res.Size = Server->Size;
+
+ // Open the file
+ delete File;
+ File = new FileFd(Queue->DestFile,FileFd::WriteAny);
+ if (_error->PendingError() == true)
+ return ERROR_NOT_FROM_SERVER;
+
+ FailFile = Queue->DestFile;
+ FailFile.c_str(); // Make sure we don't do a malloc in the signal handler
+ FailFd = File->Fd();
+ FailTime = Server->Date;
+
+ if (Server->InitHashes(*File) == false)
+ {
+ _error->Errno("read",_("Problem hashing file"));
+ return ERROR_NOT_FROM_SERVER;
+ }
+ if (Server->StartPos > 0)
+ Res.ResumePoint = Server->StartPos;
+
+ SetNonBlock(File->Fd(),true);
+ return FILE_IS_OPEN;
+}
+ /*}}}*/
+// ServerMethod::SigTerm - Handle a fatal signal /*{{{*/
+// ---------------------------------------------------------------------
+/* This closes and timestamps the open file. This is necessary to get
+ resume behaviour on user abort */
+void ServerMethod::SigTerm(int)
+{
+ if (FailFd == -1)
+ _exit(100);
+ close(FailFd);
+
+ // Timestamp
+ struct utimbuf UBuf;
+ UBuf.actime = FailTime;
+ UBuf.modtime = FailTime;
+ utime(FailFile.c_str(),&UBuf);
+
+ _exit(100);
+}
+ /*}}}*/
+// ServerMethod::Fetch - Fetch an item /*{{{*/
+// ---------------------------------------------------------------------
+/* This adds an item to the pipeline. We keep the pipeline at a fixed
+ depth. */
+bool ServerMethod::Fetch(FetchItem *)
+{
+ if (Server == 0)
+ return true;
+
+ // Queue the requests
+ int Depth = -1;
+ for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth;
+ I = I->Next, Depth++)
+ {
+ // If pipelining is disabled, we only queue 1 request
+ if (Server->Pipeline == false && Depth >= 0)
+ break;
+
+ // Make sure we stick with the same server
+ if (Server->Comp(I->Uri) == false)
+ break;
+ if (QueueBack == I)
+ {
+ QueueBack = I->Next;
+ SendReq(I);
+ continue;
+ }
+ }
+
+ return true;
+};
+ /*}}}*/
+// ServerMethod::Loop - Main loop /*{{{*/
+int ServerMethod::Loop()
+{
+ typedef vector<string> StringVector;
+ typedef vector<string>::iterator StringVectorIterator;
+ map<string, StringVector> Redirected;
+
+ signal(SIGTERM,SigTerm);
+ signal(SIGINT,SigTerm);
+
+ Server = 0;
+
+ int FailCounter = 0;
+ while (1)
+ {
+ // We have no commands, wait for some to arrive
+ if (Queue == 0)
+ {
+ if (WaitFd(STDIN_FILENO) == false)
+ return 0;
+ }
+
+ /* Run messages; we can accept 0 (no message) if we didn't
+ do a WaitFd above. Otherwise the FD is closed. */
+ int Result = Run(true);
+ if (Result != -1 && (Result != 0 || Queue == 0))
+ {
+ if(FailReason.empty() == false ||
+ _config->FindB("Acquire::http::DependOnSTDIN", true) == true)
+ return 100;
+ else
+ return 0;
+ }
+
+ if (Queue == 0)
+ continue;
+
+ // Connect to the server
+ if (Server == 0 || Server->Comp(Queue->Uri) == false)
+ {
+ delete Server;
+ Server = CreateServerState(Queue->Uri);
+ }
+ /* If the server has explicitly said this is the last connection
+ then we pre-emptively shut down the pipeline and tear down
+ the connection. This will speed up HTTP/1.0 servers a tad
+ since we don't have to wait for the close sequence to
+ complete */
+ if (Server->Persistent == false)
+ Server->Close();
+
+ // Reset the pipeline
+ if (Server->IsOpen() == false)
+ QueueBack = Queue;
+
+ // Connect to the host
+ if (Server->Open() == false)
+ {
+ Fail(true);
+ delete Server;
+ Server = 0;
+ continue;
+ }
+
+ // Fill the pipeline.
+ Fetch(0);
+
+ // Fetch the next URL header data from the server.
+ switch (Server->RunHeaders(File))
+ {
+ case ServerState::RUN_HEADERS_OK:
+ break;
+
+ // The header data is bad
+ case ServerState::RUN_HEADERS_PARSE_ERROR:
+ {
+ _error->Error(_("Bad header data"));
+ Fail(true);
+ RotateDNS();
+ continue;
+ }
+
+ // The server closed a connection during the header get..
+ default:
+ case ServerState::RUN_HEADERS_IO_ERROR:
+ {
+ FailCounter++;
+ _error->Discard();
+ Server->Close();
+ Server->Pipeline = false;
+
+ if (FailCounter >= 2)
+ {
+ Fail(_("Connection failed"),true);
+ FailCounter = 0;
+ }
+
+ RotateDNS();
+ continue;
+ }
+ };
+
+ // Decide what to do.
+ FetchResult Res;
+ Res.Filename = Queue->DestFile;
+ switch (DealWithHeaders(Res))
+ {
+ // Ok, the file is Open
+ case FILE_IS_OPEN:
+ {
+ URIStart(Res);
+
+ // Run the data
+ bool Result = true;
+ if (Server->HaveContent)
+ Result = Server->RunData(File);
+
+ /* If the server is sending back sizeless responses then fill in
+ the size now */
+ if (Res.Size == 0)
+ Res.Size = File->Size();
+
+ // Close the file, destroy the FD object and timestamp it
+ FailFd = -1;
+ delete File;
+ File = 0;
+
+ // Timestamp
+ struct utimbuf UBuf;
+ time(&UBuf.actime);
+ UBuf.actime = Server->Date;
+ UBuf.modtime = Server->Date;
+ utime(Queue->DestFile.c_str(),&UBuf);
+
+ // Send status to APT
+ if (Result == true)
+ {
+ Res.TakeHashes(*Server->GetHashes());
+ URIDone(Res);
+ }
+ else
+ {
+ if (Server->IsOpen() == false)
+ {
+ FailCounter++;
+ _error->Discard();
+ Server->Close();
+
+ if (FailCounter >= 2)
+ {
+ Fail(_("Connection failed"),true);
+ FailCounter = 0;
+ }
+
+ QueueBack = Queue;
+ }
+ else
+ Fail(true);
+ }
+ break;
+ }
+
+ // IMS hit
+ case IMS_HIT:
+ {
+ URIDone(Res);
+ break;
+ }
+
+ // Hard server error, not found or something
+ case ERROR_UNRECOVERABLE:
+ {
+ Fail();
+ break;
+ }
+
+ // Hard internal error, kill the connection and fail
+ case ERROR_NOT_FROM_SERVER:
+ {
+ delete File;
+ File = 0;
+
+ Fail();
+ RotateDNS();
+ Server->Close();
+ break;
+ }
+
+ // We need to flush the data, the header is like a 404 w/ error text
+ case ERROR_WITH_CONTENT_PAGE:
+ {
+ Fail();
+
+ // Send the content to /dev/null
+ File = new FileFd("/dev/null",FileFd::WriteExists);
+ Server->RunData(File);
+ delete File;
+ File = 0;
+ break;
+ }
+
+ // Try again with a new URL
+ case TRY_AGAIN_OR_REDIRECT:
+ {
+ // Clear rest of response if there is content
+ if (Server->HaveContent)
+ {
+ File = new FileFd("/dev/null",FileFd::WriteExists);
+ Server->RunData(File);
+ delete File;
+ File = 0;
+ }
+
+ /* Detect redirect loops. No more redirects are allowed
+ after the same URI is seen twice in a queue item. */
+ StringVector &R = Redirected[Queue->DestFile];
+ bool StopRedirects = false;
+ if (R.empty() == true)
+ R.push_back(Queue->Uri);
+ else if (R[0] == "STOP" || R.size() > 10)
+ StopRedirects = true;
+ else
+ {
+ for (StringVectorIterator I = R.begin(); I != R.end(); ++I)
+ if (Queue->Uri == *I)
+ {
+ R[0] = "STOP";
+ break;
+ }
+
+ R.push_back(Queue->Uri);
+ }
+
+ if (StopRedirects == false)
+ Redirect(NextURI);
+ else
+ Fail();
+
+ break;
+ }
+
+ default:
+ Fail(_("Internal error"));
+ break;
+ }
+
+ FailCounter = 0;
+ }
+
+ return 0;
+}
+ /*}}}*/
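The piece the transports really share is HeaderLine(): it folds the status line and the individual header fields into the Result/Size/StartPos/Persistent state that DealWithHeaders() interprets afterwards. A compile-only illustration of the state a resumed download leaves behind; 'Srv' stands for any concrete ServerState subclass, and the comments restate what the parser above does:

#include "server.h"

// Illustration: what HeaderLine() records for a resumed (Range) download.
void IllustrateResumedResponse(ServerState &Srv)
{
   Srv.Reset();
   Srv.HeaderLine("HTTP/1.1 206 Partial Content");        // Major=1 Minor=1 Result=206, Persistent=true
   Srv.HeaderLine("Content-Range: bytes 1024-2047/2048"); // StartPos=1024, Size=2048, HaveContent=true
   Srv.HeaderLine("Content-Length: 1024");                // Size stays 2048: Content-Range already set it
   Srv.HeaderLine("Connection: keep-alive");              // keeps the connection persistent
   // DealWithHeaders() would then set Res.ResumePoint = 1024 and reopen the file.
}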
diff --git a/methods/server.h b/methods/server.h
new file mode 100644
index 000000000..4dc6a1f2f
--- /dev/null
+++ b/methods/server.h
@@ -0,0 +1,144 @@
+// -*- mode: cpp; mode: fold -*-
+// Description /*{{{*/
+/* ######################################################################
+
+ Classes dealing with the abstraction of talking to an endpoint via a text
+ protocol like HTTP (which is used by the http and https methods)
+
+ ##################################################################### */
+ /*}}}*/
+
+#ifndef APT_SERVER_H
+#define APT_SERVER_H
+
+#include <apt-pkg/strutl.h>
+
+#include <string>
+
+using std::cout;
+using std::endl;
+
+class Hashes;
+class ServerMethod;
+class FileFd;
+
+struct ServerState
+{
+ // This is the last parsed Header Line
+ unsigned int Major;
+ unsigned int Minor;
+ unsigned int Result;
+ char Code[360];
+
+ // These are some statistics from the last parsed header lines
+ unsigned long long Size;
+ signed long long StartPos;
+ time_t Date;
+ bool HaveContent;
+ enum {Chunked,Stream,Closes} Encoding;
+ enum {Header, Data} State;
+ bool Persistent;
+ std::string Location;
+
+ // This is a Persistent attribute of the server itself.
+ bool Pipeline;
+ URI ServerName;
+ URI Proxy;
+ unsigned long TimeOut;
+
+ protected:
+ ServerMethod *Owner;
+
+ virtual bool ReadHeaderLines(std::string &Data) = 0;
+ virtual bool LoadNextResponse(bool const ToFile, FileFd * const File) = 0;
+
+ public:
+ bool HeaderLine(std::string Line);
+
+ /** \brief Result of the header acquire */
+ enum RunHeadersResult {
+ /** \brief Header ok */
+ RUN_HEADERS_OK,
+ /** \brief IO error while retrieving */
+ RUN_HEADERS_IO_ERROR,
+ /** \brief Parse error after retrieving */
+ RUN_HEADERS_PARSE_ERROR,
+ };
+ /** \brief Get the headers before the data */
+ RunHeadersResult RunHeaders(FileFd * const File);
+
+ bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
+ virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0;
+ StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
+ State = Header; Persistent = false; Pipeline = true;};
+ virtual bool WriteResponse(std::string const &Data) = 0;
+
+ /** \brief Transfer the data from the socket */
+ virtual bool RunData(FileFd * const File) = 0;
+
+ virtual bool Open() = 0;
+ virtual bool IsOpen() = 0;
+ virtual bool Close() = 0;
+ virtual bool InitHashes(FileFd &File) = 0;
+ virtual Hashes * GetHashes() = 0;
+ virtual bool Die(FileFd &File) = 0;
+ virtual bool Flush(FileFd * const File) = 0;
+ virtual bool Go(bool ToFile, FileFd * const File) = 0;
+
+ ServerState(URI Srv, ServerMethod *Owner);
+ virtual ~ServerState() {};
+};
+
+class ServerMethod : public pkgAcqMethod
+{
+ protected:
+ virtual bool Fetch(FetchItem *);
+
+ ServerState *Server;
+ std::string NextURI;
+ FileFd *File;
+
+ unsigned long PipelineDepth;
+ bool AllowRedirect;
+
+ public:
+ bool Debug;
+
+ /** \brief Result of the header parsing */
+ enum DealWithHeadersResult {
+ /** \brief The file is open and ready */
+ FILE_IS_OPEN,
+ /** \brief We got an IMS hit, the file has not changed */
+ IMS_HIT,
+ /** \brief The server reported an unrecoverable error */
+ ERROR_UNRECOVERABLE,
+ /** \brief The server reported an error with an error content page */
+ ERROR_WITH_CONTENT_PAGE,
+ /** \brief An error on the client side */
+ ERROR_NOT_FROM_SERVER,
+ /** \brief A redirect or retry request */
+ TRY_AGAIN_OR_REDIRECT
+ };
+ /** \brief Handle the retrieved header data */
+ DealWithHeadersResult DealWithHeaders(FetchResult &Res);
+
+ // In the event of a fatal signal this file will be closed and timestamped.
+ static std::string FailFile;
+ static int FailFd;
+ static time_t FailTime;
+ static void SigTerm(int);
+
+ virtual bool Configuration(std::string Message);
+ virtual bool Flush() { return Server->Flush(File); };
+
+ int Loop();
+
+ virtual void SendReq(FetchItem *Itm) = 0;
+ virtual ServerState * CreateServerState(URI uri) = 0;
+ virtual void RotateDNS() = 0;
+
+ ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), PipelineDepth(0), AllowRedirect(false), Debug(false) {};
+ virtual ~ServerMethod() {};
+};
+
+#endif
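For orientation, the contract this header defines: Loop() and DealWithHeaders() are generic, while every transport has to supply SendReq(), CreateServerState() and RotateDNS() plus a concrete ServerState that does the actual I/O. A skeletal sketch of that contract with invented names (the real implementations are the http and https method classes; bodies omitted):

#include <apt-pkg/acquire-method.h>
#include "server.h"

class ExampleServerState; // would implement the ServerState pure virtuals

class ExampleMethod : public ServerMethod
{
   public:
   // write the request line and headers for the given item onto the wire
   virtual void SendReq(FetchItem *Itm);
   // hand Loop() a fresh per-server state whenever host or port change
   virtual ServerState * CreateServerState(URI uri);
   // switch to the next address of the server after connection trouble
   virtual void RotateDNS();

   ExampleMethod() : ServerMethod("1.2") {}
};

// A transport's main() then just constructs its method object and drives it,
// e.g. via ServerMethod::Loop() as the plain http method does.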
diff --git a/po/it.po b/po/it.po
index 1b9d6db32..abd5d6df2 100644
--- a/po/it.po
+++ b/po/it.po
@@ -1,15 +1,15 @@
# Italian translation of apt
-# Copyright (C) 2002-2010, 2011, 2012 The Free Software Foundation, Inc.
+# Copyright (C) 2002-2010, 2011, 2012, 2013 The Free Software Foundation, Inc.
# This file is distributed under the same license as the apt package.
# Samuele Giovanni Tonon <samu@debian.org>, 2002.
-# Milo Casagrande <milo@ubuntu.com>, 2009, 2010, 2011, 2012.
+# Milo Casagrande <milo@ubuntu.com>, 2009, 2010, 2011, 2012, 2013.
#
msgid ""
msgstr ""
"Project-Id-Version: apt\n"
"Report-Msgid-Bugs-To: APT Development Team <deity@lists.debian.org>\n"
"POT-Creation-Date: 2013-07-31 16:24+0200\n"
-"PO-Revision-Date: 2012-06-25 21:54+0200\n"
+"PO-Revision-Date: 2013-08-27 22:06+0200\n"
"Last-Translator: Milo Casagrande <milo@ubuntu.com>\n"
"Language-Team: Italian <tp@lists.linux.it>\n"
"Language: it\n"
@@ -18,7 +18,7 @@ msgstr ""
"Content-Transfer-Encoding: 8-bit\n"
"Plural-Forms: nplurals=2; plural=(n!=1);\n"
"X-Launchpad-Export-Date: 2012-06-25 19:48+0000\n"
-"X-Generator: Launchpad (build 15482)\n"
+"X-Generator: Gtranslator 2.91.6\n"
#: cmdline/apt-cache.cc:158
#, c-format
@@ -245,6 +245,12 @@ msgid ""
"You may try the --cdrom option to set the CD-ROM mount point. See 'man apt-"
"cdrom' for more information about the CD-ROM auto-detection and mount point."
msgstr ""
+"Impossibile rilevare automaticamente un CD-ROM oppure è stato trovato con "
+"il\n"
+"punto di mount predefinito.\n"
+"È possibile provare l'opzione --cdrom per impostare il punto di mount del\n"
+"CD-ROM. Per maggiori informazioni sull'autorilevamento e sul punto di mount\n"
+"del CD-ROM, consultare \"man apt-cdrom\"."
#: cmdline/apt-cdrom.cc:85
msgid "Please provide a name for this Disc, such as 'Debian 5.0.3 Disk 1'"
@@ -1547,10 +1553,9 @@ msgid "Temporary failure resolving '%s'"
msgstr "Risoluzione di \"%s\" temporaneamente non riuscita"
#: methods/connect.cc:209
-#, fuzzy, c-format
+#, c-format
msgid "System error resolving '%s:%s'"
-msgstr ""
-"Si è verificato qualcosa di anormale nella risoluzione di \"%s:%s\" (%i - %s)"
+msgstr "Errore di sistema nella risoluzione di \"%s:%s\""
#: methods/connect.cc:211
#, c-format
@@ -1587,6 +1592,8 @@ msgid ""
"Clearsigned file isn't valid, got '%s' (does the network require "
"authentication?)"
msgstr ""
+"Il file con la firma in chiaro non è valido, ottenuto \"%s\" (la rete "
+"richiede autenticazione?)"
#: methods/gpgv.cc:183
msgid "Unknown error executing gpgv"
@@ -1710,9 +1717,9 @@ msgid "Can not read mirror file '%s'"
msgstr "Impossibile leggere il file mirror \"%s\""
#: methods/mirror.cc:315
-#, fuzzy, c-format
+#, c-format
msgid "No entry found in mirror file '%s'"
-msgstr "Impossibile leggere il file mirror \"%s\""
+msgstr "Nessuna voce trovata nel file mirror \"%s\""
#: methods/mirror.cc:445
#, c-format
@@ -2508,13 +2515,13 @@ msgstr "%c%s... Fatto"
#: apt-pkg/contrib/progress.cc:179
msgid "..."
-msgstr ""
+msgstr "..."
#. Print the spinner
#: apt-pkg/contrib/progress.cc:195
-#, fuzzy, c-format
+#, c-format
msgid "%c%s... %u%%"
-msgstr "%c%s... Fatto"
+msgstr "%c%s... %u%%"
#: apt-pkg/contrib/cmndline.cc:80
#, c-format
@@ -3610,147 +3617,3 @@ msgstr ""
#: apt-pkg/deb/debsystem.cc:121
msgid "Not locked"
msgstr "Non bloccato"
-
-#~ msgid "File %s doesn't start with a clearsigned message"
-#~ msgstr "Il file %s non inizia con un messaggio di firma in chiaro"
-
-#~ msgid "Skipping nonexistent file %s"
-#~ msgstr "Saltato il file inesistente %s"
-
-#~ msgid "Failed to remove %s"
-#~ msgstr "Rimozione di %s non riuscita"
-
-#~ msgid "Unable to create %s"
-#~ msgstr "Impossibile creare %s"
-
-#~ msgid "Failed to stat %sinfo"
-#~ msgstr "Esecuzione di stat su %sinfo non riuscita"
-
-#~ msgid "The info and temp directories need to be on the same filesystem"
-#~ msgstr "Le directory temp e info devono essere sullo stesso file system"
-
-#~ msgid "Failed to change to the admin dir %sinfo"
-#~ msgstr "Cambio della directory admin %sinfo non riuscito"
-
-#~ msgid "Internal error getting a package name"
-#~ msgstr "Errore interno nel recuperare un nome di un pacchetto"
-
-#~ msgid "Reading file listing"
-#~ msgstr "Lettura elenco dei file"
-
-#~ msgid ""
-#~ "Failed to open the list file '%sinfo/%s'. If you cannot restore this file "
-#~ "then make it empty and immediately re-install the same version of the "
-#~ "package!"
-#~ msgstr ""
-#~ "Impossibile aprire il file dell'elenco \"%sinfo/%s\". Se non è possibile "
-#~ "ripristinarlo, crearne uno vuoto e reinstallare immediatamente la stessa "
-#~ "versione del pacchetto."
-
-#~ msgid "Failed reading the list file %sinfo/%s"
-#~ msgstr "Lettura del file dell'elenco %sinfo/%s non riuscita"
-
-#~ msgid "Internal error getting a node"
-#~ msgstr "Errore interno nel recuperare un nodo"
-
-#~ msgid "Failed to open the diversions file %sdiversions"
-#~ msgstr "Apertura del file di deviazione %sdiversions non riuscita"
-
-#~ msgid "The diversion file is corrupted"
-#~ msgstr "Il file di deviazione è danneggiato"
-
-#~ msgid "Invalid line in the diversion file: %s"
-#~ msgstr "Riga non valida nel file di diversion: %s"
-
-#~ msgid "Internal error adding a diversion"
-#~ msgstr "Errore interno nell'aggiungere una deviazioni"
-
-#~ msgid "The pkg cache must be initialized first"
-#~ msgstr "La cache dei pacchetti deve prima essere inizializzata"
-
-#~ msgid "Failed to find a Package: header, offset %lu"
-#~ msgstr "Impossibile trovare un Package: header, offset %lu"
-
-#~ msgid "Bad ConfFile section in the status file. Offset %lu"
-#~ msgstr "Sezione ConfFile nel file di stato non corretta. Offset %lu"
-
-#~ msgid "Error parsing MD5. Offset %lu"
-#~ msgstr "Errore nell'analisi MD5. Offset %lu"
-
-#~ msgid "Couldn't change to %s"
-#~ msgstr "Impossibile passare a %s"
-
-#~ msgid "Failed to locate a valid control file"
-#~ msgstr "Impossibile localizzare un file \"control\" valido"
-
-#~ msgid "Couldn't open pipe for %s"
-#~ msgstr "Impossibile aprire una pipe per %s"
-
-#~ msgid "Read error from %s process"
-#~ msgstr "Errore di lettura dal processo %s"
-
-#~ msgid "Got a single header line over %u chars"
-#~ msgstr "Ricevuta una singola riga header su %u caratteri"
-
-#~ msgid ""
-#~ "No apport report written because the error message indicates an issue on "
-#~ "the local system"
-#~ msgstr ""
-#~ "Non è stata scritta alcuna segnalazione di apport poiché il messaggio di "
-#~ "errore indica la presenza di un problema nel sistema locale"
-
-#~ msgid "Malformed override %s line %lu #1"
-#~ msgstr "Override non corretto: file %s riga %lu #1"
-
-#~ msgid "Malformed override %s line %lu #2"
-#~ msgstr "Override non corretto: file %s riga %lu #2"
-
-#~ msgid "Malformed override %s line %lu #3"
-#~ msgstr "Override non corretto: file %s riga %lu #3"
-
-#~ msgid "read, still have %lu to read but none left"
-#~ msgstr "lettura, c'erano ancora %lu da leggere ma non ne è rimasto alcuno"
-
-#~ msgid "write, still have %lu to write but couldn't"
-#~ msgstr "scrittura, c'erano ancora %lu da scrivere ma non è stato possibile"
-
-#~ msgid "Error occurred while processing %s (NewPackage)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (NewPackage)"
-
-#~ msgid "Error occurred while processing %s (UsePackage1)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (UsePackage1)"
-
-#~ msgid "Error occurred while processing %s (NewFileDesc1)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (NewFileDesc1)"
-
-#~ msgid "Error occurred while processing %s (UsePackage2)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (UsePackage2)"
-
-#~ msgid "Error occurred while processing %s (NewFileVer1)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (NewFileVer1)"
-
-#~ msgid "Error occurred while processing %s (UsePackage3)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (UsePackage3)"
-
-#~ msgid "Error occurred while processing %s (NewFileDesc2)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (NewFileDesc2)"
-
-#~ msgid "Error occurred while processing %s (FindPkg)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (FindPkg)"
-
-#~ msgid "Error occurred while processing %s (CollectFileProvides)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (CollectFileProvides)"
-
-#~ msgid "decompressor"
-#~ msgstr "de-compressore"
-
-#~ msgid "Error occurred while processing %s (NewVersion%d)"
-#~ msgstr "Si è verificato un errore nell'elaborare %s (NewVersion%d)"
-
-#~ msgid ""
-#~ "Could not perform immediate configuration on already unpacked '%s'. "
-#~ "Please see man 5 apt.conf under APT::Immediate-Configure for details."
-#~ msgstr ""
-#~ "Impossibile eseguire immediatamente la configurazione su \"%s\" già "
-#~ "estratto. Per maggiori informazioni, consultare \"man 5 apt.conf\" alla "
-#~ "sezione \"APT::Immediate-Configure\"."
diff --git a/po/vi.po b/po/vi.po
index 997fc90d9..44b7fec72 100644
--- a/po/vi.po
+++ b/po/vi.po
@@ -5,10 +5,10 @@
#
msgid ""
msgstr ""
-"Project-Id-Version: apt-0.9.9.4\n"
+"Project-Id-Version: apt-0.9.11\n"
"Report-Msgid-Bugs-To: APT Development Team <deity@lists.debian.org>\n"
"POT-Creation-Date: 2013-07-31 16:24+0200\n"
-"PO-Revision-Date: 2013-08-03 08:52+0700\n"
+"PO-Revision-Date: 2013-08-25 14:13+0700\n"
"Last-Translator: Trần Ngá»c Quân <vnwildman@gmail.com>\n"
"Language-Team: Vietnamese <translation-team-vi@lists.sourceforge.net>\n"
"Language: vi\n"
@@ -24,7 +24,7 @@ msgstr ""
#: cmdline/apt-cache.cc:158
#, c-format
msgid "Package %s version %s has an unmet dep:\n"
-msgstr "Gói %s phiên bản %s phụ thuá»™c vào phần má»m chÆ°a có:\n"
+msgstr "Gói %s phiên bản %s chÆ°a thá»a mãn quan hệ phụ thuá»™c:\n"
#: cmdline/apt-cache.cc:286
msgid "Total package names: "
@@ -112,7 +112,7 @@ msgstr "Bạn phải đưa ra ít nhất một mẫu tìm kiếm"
#: cmdline/apt-cache.cc:1431
msgid "This command is deprecated. Please use 'apt-mark showauto' instead."
msgstr ""
-"Lệnh này đã bị loại bá». Xin hãy dùng lệnh 'apt-mark showauto' để thay thế."
+"Lệnh này đã lạc hậu. Xin hãy dùng lệnh 'apt-mark showauto' để thay thế."
#: cmdline/apt-cache.cc:1526 apt-pkg/cacheset.cc:510
#, c-format
@@ -219,7 +219,7 @@ msgstr ""
" stats - Hiện phần thống kê cơ bản\n"
" dump - Hiện toàn bộ tập tin dạng ngắn (đổ)\n"
" dumpavail - In ra một tập tin sẵn dùng ra thiết bị xuất chuẩn\n"
-" unmet - Hiện các cách phụ thuá»™c chÆ°a thá»a mãn\n"
+" unmet - Hiện các gói chÆ°a thá»a mãn quan hệ phụ thuá»™c\n"
" search - Tìm kiếm danh sách các gói dựa trên biểu thức chính quy\n"
" show - Hiển thị bản ghi có thể Ä‘á»c cho những gói đó\n"
" depends - Hiện thông tin quan hệ phụ thuộc dạng thô cho gói\n"
@@ -336,7 +336,7 @@ msgstr "Lỗi biên dịch biểu thức chính quy - %s"
#: cmdline/apt-get.cc:289
msgid "The following packages have unmet dependencies:"
-msgstr "Những gói theo đây có phần phụ thuộc chưa có:"
+msgstr "Những gói theo đây chÆ°a thá»a mãn quan hệ phụ thuá»™c:"
#: cmdline/apt-get.cc:379
#, c-format
@@ -443,7 +443,7 @@ msgstr "Ghi chú: Ä‘ang chá»n “%s†cho biểu thức chính quy “%sâ€\n"
#: cmdline/apt-get.cc:686
#, c-format
msgid "Package %s is a virtual package provided by:\n"
-msgstr "Gói %s là gói ảo được cung cấp do:\n"
+msgstr "Gói %s là gói ảo được cung cấp bởi:\n"
#: cmdline/apt-get.cc:697
msgid " [Installed]"
@@ -560,9 +560,7 @@ msgstr "Bạn có thể chạy lệnh “apt-get -f install†để sửa nhữ
#: cmdline/apt-get.cc:1072
msgid "Unmet dependencies. Try using -f."
-msgstr ""
-"Còn có cách phụ thuá»™c vào phần má»m chÆ°a có. NhÆ° thế thì bạn hãy cố dùng tùy "
-"chá»n “-fâ€."
+msgstr "ChÆ°a thá»a mãn quan hệ phụ thuá»™c. Hãy thá»­ dùng tùy chá»n “-fâ€."
#: cmdline/apt-get.cc:1097
msgid "WARNING: The following packages cannot be authenticated!"
@@ -574,7 +572,7 @@ msgstr "Cảnh báo xác thực bị đè.\n"
#: cmdline/apt-get.cc:1108
msgid "Install these packages without verification?"
-msgstr "Cài đặt những gói này mà không cần thẩm tra không?"
+msgstr "Cài đặt những gói này mà không cần thẩm tra?"
#: cmdline/apt-get.cc:1110
msgid "Some packages could not be authenticated"
@@ -582,7 +580,7 @@ msgstr "Một số gói không thể được xác thực"
#: cmdline/apt-get.cc:1119 cmdline/apt-get.cc:1280
msgid "There are problems and -y was used without --force-yes"
-msgstr "Gặp lá»—i và đã dùng tùy chá»n “-y†mà không có “--force-yesâ€"
+msgstr "Có lá»—i và đã dùng tùy chá»n “-y†mà không có “--force-yesâ€"
#: cmdline/apt-get.cc:1160
msgid "Internal error, InstallPackages was called with broken packages!"
@@ -620,7 +618,7 @@ msgstr "Cần phải lấy %sB từ kho chứa.\n"
#: cmdline/apt-get.cc:1237
#, c-format
msgid "After this operation, %sB of additional disk space will be used.\n"
-msgstr "Sau thao tác này, %sB dung lượng đĩa thêm sẽ được dùng thêm.\n"
+msgstr "Sau thao tác này, %sB dung lượng đĩa sẽ bị chiếm dụng.\n"
#. TRANSLATOR: The required space between number and unit is already included
#. in the replacement string, so %sB will be correctly translate in e.g. 1,5 MB
@@ -682,7 +680,7 @@ msgstr "Một số tập tin không tải vỠđược"
#: cmdline/apt-get.cc:1404 cmdline/apt-get.cc:2698
msgid "Download complete and in download only mode"
-msgstr "Má»›i tải vá» xong và trong chế Ä‘á»™ chỉ tải vá»"
+msgstr "Hoàn tất việc tải vá» và trong chế Ä‘á»™ chỉ tải vá»"
#: cmdline/apt-get.cc:1410
msgid ""
@@ -805,8 +803,8 @@ msgid ""
"Unmet dependencies. Try 'apt-get -f install' with no packages (or specify a "
"solution)."
msgstr ""
-"Gói còn phụ thuá»™c vào phần má»m chÆ°a có. Hãy cố chạy lệnh “apt-get -f "
-"install†mà không có gói nào (hoặc chỉ định cách thức giải quyết)."
+"ThÆ°a thá»a mãn quan hệ phụ thuá»™c. Hãy thá»­ chạy lệnh “apt-get -f install†mà "
+"không có gói nào (hoặc chỉ định cách thức giải quyết)."
#: cmdline/apt-get.cc:2002
msgid ""
@@ -816,9 +814,9 @@ msgid ""
"or been moved out of Incoming."
msgstr ""
"Không thể cài đặt má»™t số gói. Äiá»u đó có nghÄ©a là bạn đã yêu cầu\n"
-"má»™t trÆ°á»ng hợp không thể, hoặc nếu bạn sá»­ dụng bản phân phối\n"
+"má»™t trÆ°á»ng hợp không thể, hoặc nếu bạn Ä‘ang sá»­ dụng bản phân phối\n"
"chưa ổn định cái mà yêu cầu các gói mà nó còn chưa được tạo ra\n"
-"hoặc di chuyển chúng ra khá»i phần Incoming (Äến)."
+"hay chÆ°a được chuyển ra khá»i phần Incoming (Äến)."
#: cmdline/apt-get.cc:2023
msgid "Broken packages"
@@ -851,8 +849,8 @@ msgid ""
"This command is deprecated. Please use 'apt-mark auto' and 'apt-mark manual' "
"instead."
msgstr ""
-"Lệnh này đã bị loại bá». Xin hãy dùng lệnh 'apt-mark auto' và 'apt-mark "
-"manual' để thay thế."
+"Lệnh này đã lá»—i thá»i. Xin hãy dùng lệnh 'apt-mark auto' và 'apt-mark manual' "
+"để thay thế."
#: cmdline/apt-get.cc:2213
msgid "Calculating upgrade... "
@@ -899,7 +897,7 @@ msgid ""
"NOTICE: '%s' packaging is maintained in the '%s' version control system at:\n"
"%s\n"
msgstr ""
-"GHI CHÚ: sá»± đóng gói “%s†được bảo trì trong hệ thống Ä‘iá»u khiển phiên bản "
+"GHI CHÚ: việc đóng gói “%s†được bảo trì trong hệ thống quản lý mã nguồn "
"“%s†tại:\n"
"%s\n"
@@ -945,7 +943,7 @@ msgstr "Lấy mã nguồn %s\n"
#: cmdline/apt-get.cc:2693
msgid "Failed to fetch some archives."
-msgstr "Gặp lỗi lkhi lấy một số kho."
+msgstr "Gặp lỗi khi lấy một số kho."
#: cmdline/apt-get.cc:2724
#, c-format
@@ -1008,13 +1006,15 @@ msgstr ""
msgid ""
"%s dependency for %s cannot be satisfied because the package %s cannot be "
"found"
-msgstr "cách phụ thuá»™c %s cho %s không thể được thá»a vì không tìm thấy gá»i %s"
+msgstr ""
+"cách phụ thuá»™c %s cho %s không thể được thá»a mãn vì không tìm thấy gói %s"
#: cmdline/apt-get.cc:3081
#, c-format
msgid "Failed to satisfy %s dependency for %s: Installed package %s is too new"
msgstr ""
-"Việc cố thá»a cách phụ thuá»™c %s cho %s bị lá»—i vì gói đã cài đặt %s là quá má»›i"
+"Việc cố thá»a mãn quan hệ phụ thuá»™c %s cho %s bị lá»—i vì gói đã cài đặt %s là "
+"quá mới"
#: cmdline/apt-get.cc:3120
#, c-format
@@ -1111,22 +1111,23 @@ msgstr ""
"remove: gỡ bá»\n"
"source: nguồn\n"
"\n"
-"apt-get là một giao diện dòng lệnh đơn giản để tải vỠvà cài đặt gói phần "
-"má»m.\n"
+"apt-get là một giao diện dòng lệnh đơn giản dùng để tải vỠvà cài đặt gói "
+"phần má»m.\n"
"Những lệnh được dùng thÆ°á»ng nhất là update (cập nhật) và install (cài đặt).\n"
"\n"
"Lệnh:\n"
" update - Lấy danh sách gói mới (cập nhật cơ sở dữ liệu)\n"
-" upgrade - Nâng cấp\n"
+" upgrade - Nâng cấp lên phiên bản mới hơn\n"
" install - Cài đặt gói mới (gói có dạng libc6 không phải libc6.deb)\n"
" remove - Gỡ bá» gói phần má»m\n"
" autoremove - Tự động gỡ bỠtất cả các gói không dùng\n"
" purge - Gỡ bỠvà tẩy xóa gói\n"
" source - Tải vỠkho nguồn\n"
" build-dep - Äịnh cấu hình quan hệ phụ thuá»™c khi xây dụng, cho gói nguồn\n"
-" dist-upgrade - Nâng cấp bản phân phối hãy xem apt-get(8)\n"
+" dist-upgrade - Nâng cấp hệ Ä‘iá»u hành lên phiên bản má»›i hÆ¡n, hãy xem apt-"
+"get(8)\n"
" dselect-upgrade - Cho phép chá»n dselect\n"
-" clean - Xóa các tập tin kho đã tải vá» (dá»n dẹp thÆ° mục lÆ°u)\n"
+" clean - Xóa các tập tin kho đã tải vá» (dá»n dẹp thÆ° mục lÆ°u trữ)\n"
" autoclean - Xóa các tập tin kho cũ đã tải vỠ(tự động làm sạch)\n"
" check - Kiểm tra xem có quan hệ phụ thuộc bị sai không\n"
" changelog - Tải vỠvà hiển thị các thay đổi cho gói đã cho\n"
@@ -1299,7 +1300,7 @@ msgid ""
"cannot be used to add new CD-ROMs"
msgstr ""
"Hãy sử dụng lệnh “apt-cdrom†để làm cho APT chấp nhận đĩa CD này. Không thể "
-"sử dụng lệnh “apt-get update†(lấy cập nhật) để thêm đĩa CD mới."
+"sử dụng lệnh “apt-get update†(cập nhật cơ sở dữ liệu) để thêm đĩa CD mới."
#: methods/cdrom.cc:222
msgid "Wrong CD-ROM"
@@ -1564,8 +1565,8 @@ msgid ""
"Clearsigned file isn't valid, got '%s' (does the network require "
"authentication?)"
msgstr ""
-"Tập tin Clearsigned không hợp lệ, nhận được '%s' (mạng yêu cầu xác nhận "
-"phải không?)"
+"Tập tin Clearsigned không hợp lệ, nhận được '%s' (mạng yêu cầu xác nhận phải "
+"không?)"
#: methods/gpgv.cc:183
msgid "Unknown error executing gpgv"
@@ -1588,7 +1589,7 @@ msgstr "Các tập tin trống rỗng không phải là kho lưu hợp lệ"
#: methods/http.cc:394
msgid "Waiting for headers"
-msgstr "Äang đợi những phần đầu..."
+msgstr "Äang đợi phần đầu dữ liệu..."
#: methods/http.cc:544
msgid "Bad header line"
@@ -1601,7 +1602,7 @@ msgstr "Máy phục vụ HTTP đã gá»­i má»™t dòng đầu trả lá»i không h
#: methods/http.cc:606
msgid "The HTTP server sent an invalid Content-Length header"
msgstr ""
-"Máy phục vụ HTTP đã gửi một dòng đầu Content-Length (độ dài nội dụng) không "
+"Máy phục vụ HTTP đã gửi một dòng đầu Content-Length (độ dài nội dung) không "
"hợp lệ"
#: methods/http.cc:621
@@ -1612,7 +1613,7 @@ msgstr ""
#: methods/http.cc:623
msgid "This HTTP server has broken range support"
-msgstr "Máy phục vụ HTTP đã ngắt cách hỗ trợ phạm vi"
+msgstr "Máy phục vụ HTTP không hỗ trợ tải một phần tập tin"
#: methods/http.cc:647
msgid "Unknown date format"
@@ -1628,7 +1629,7 @@ msgstr "Kết nối đã quá giá»"
#: methods/http.cc:854
msgid "Error writing to output file"
-msgstr "Gặp lỗi khi ghi vào tập tin xuất"
+msgstr "Gặp lỗi khi ghi vào tập tin đầu ra"
#: methods/http.cc:885
msgid "Error writing to file"
@@ -1782,8 +1783,8 @@ msgid ""
msgstr ""
"Cách dùng: apt-extracttemplates tập_tin1 [tập_tin2 ...]\n"
"\n"
-"[extract: rút;\n"
-"templates: những biểu mẫu]\n"
+"[extract: rút trích;\n"
+"templates: mẫu]\n"
"\n"
"apt-extracttemplates là một công cụ rút thông tin kiểu cấu hình\n"
"\tvà biểu mẫu Ä‘á»u từ gói Debian\n"
@@ -2156,7 +2157,7 @@ msgstr ""
#: cmdline/apt-sortpkgs.cc:89
msgid "Unknown package record!"
-msgstr "Không rõ bản ghi gói!"
+msgstr "Không hiểu bản ghi gói!"
#: cmdline/apt-sortpkgs.cc:153
msgid ""
@@ -2211,16 +2212,16 @@ msgstr "Chữ ký kho không hợp lệ"
#: apt-inst/contrib/arfile.cc:82
msgid "Error reading archive member header"
-msgstr "Gặp lá»—i khi Ä‘á»c phần đầu bá»™ phận kho"
+msgstr "Gặp lá»—i khi Ä‘á»c phần đầu thành viên kho"
#: apt-inst/contrib/arfile.cc:94
#, c-format
msgid "Invalid archive member header %s"
-msgstr "Phần đầu bộ phận kho lưu không hợp lệ %s"
+msgstr "Phần đầu thành viên kho lưu không hợp lệ %s"
#: apt-inst/contrib/arfile.cc:106
msgid "Invalid archive member header"
-msgstr "Phần đầu bộ phận kho không hợp lê"
+msgstr "Phần đầu thành viên kho không hợp lê"
#: apt-inst/contrib/arfile.cc:135
msgid "Archive is too short"
@@ -2927,7 +2928,7 @@ msgstr ""
#: apt-pkg/algorithms.cc:1240
msgid "Unable to correct problems, you have held broken packages."
-msgstr "Không thể sá»­a vấn Ä‘á», bạn đã giữ lại má»™t số gói bị ngắt."
+msgstr "Không thể sá»­a trục trặc này, bạn đã giữ lại má»™t số gói bị há»ng."
#: apt-pkg/algorithms.cc:1592 apt-pkg/algorithms.cc:1594
msgid ""
@@ -2967,12 +2968,12 @@ msgstr "Äang tải tập tin %li trong tổng số %li"
#: apt-pkg/acquire-worker.cc:112
#, c-format
msgid "The method driver %s could not be found."
-msgstr "Không tìm thấy trình Ä‘iá»u khiển phÆ°Æ¡ng pháp %s."
+msgstr "Không tìm thấy trình Ä‘iá»u khiển phÆ°Æ¡ng thức %s."
#: apt-pkg/acquire-worker.cc:161
#, c-format
msgid "Method %s did not start correctly"
-msgstr "Phương pháp %s đã không bắt đầu cho đúng."
+msgstr "Phương thức %s đã không khởi chạy đúng đắn."
#: apt-pkg/acquire-worker.cc:447
#, c-format
@@ -3000,12 +3001,13 @@ msgstr ""
#: apt-pkg/cachefile.cc:87
msgid "The package lists or status file could not be parsed or opened."
-msgstr "Không thể phân tích hay mở danh sách gói hay tâp tin trạng thái."
+msgstr "Không thể phân tích hay mở danh sách gói hay tập tin trạng thái."
#: apt-pkg/cachefile.cc:91
msgid "You may want to run apt-get update to correct these problems"
msgstr ""
-"Bạn nên chạy lệnh “apt-get update†(lấy cập nhật) để sửa các vấn đỠnày"
+"Bạn nên lấy cơ sở dữ liệu mới bằng lệnh “apt-get update†để sửa các vấn đỠ"
+"này"
#: apt-pkg/cachefile.cc:109
msgid "The list of sources could not be read."
@@ -3052,7 +3054,7 @@ msgstr "Bá»™ nhá»› tạm có hệ thống Ä‘iá»u khiển phiên bản không tÆ
#: apt-pkg/pkgcachegen.cc:563
#, c-format
msgid "Error occurred while processing %s (%s%d)"
-msgstr "Gặp lỗi khi xử lý %s (%s%d)"
+msgstr "Có lỗi phát sinh khi xử lý %s (%s%d)"
#: apt-pkg/pkgcachegen.cc:251
msgid "Wow, you exceeded the number of package names this APT is capable of."
diff --git a/test/integration/Packages-bug-723705-tagfile-truncates-fields b/test/integration/Packages-bug-723705-tagfile-truncates-fields
new file mode 100644
index 000000000..c42b85072
--- /dev/null
+++ b/test/integration/Packages-bug-723705-tagfile-truncates-fields
@@ -0,0 +1,167 @@
+Package: cdebconf-gtk-udeb
+Source: cdebconf
+Version: 0.185
+Installed-Size: 92
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Description: Gtk+ frontend for Debian Configuration Management System
+Description-md5: 75d036e0a245499123544e2254b92e9c
+Section: debian-installer
+Priority: optional
+Filename: pool/main/c/cdebconf/cdebconf-gtk-udeb_0.185_amd64.udeb
+Size: 27278
+MD5sum: a1bbbc1d4fb8e0615b5621abac021924
+SHA1: b1a7ab55a90f61e5337847d02ff1d12d73559def
+SHA256: cd79f3205304a7932b3309c4df9898c9a53929bc651912659858e087ebe1c18a
+
+Package: cdebconf-newt-udeb
+Source: cdebconf
+Version: 0.185
+Installed-Size: 58
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Description: Newt frontend for Debian Configuration Management System
+Description-md5: e080be5e38cb8c57bca2f3effe9ee030
+Section: debian-installer
+Priority: optional
+Filename: pool/main/c/cdebconf/cdebconf-newt-udeb_0.185_amd64.udeb
+Size: 19192
+MD5sum: de27807f56dae2f2403b3322d5fe6bd2
+SHA1: 57883e223d46a9f25966f9b986e6a3bc2f67d8ef
+SHA256: 5f8b9c3a5430f2ec879484a7736582b152d76cc8ba9bc19328268f3635759a1b
+
+Package: cdebconf-udeb
+Source: cdebconf
+Version: 0.185
+Installed-Size: 245
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Provides: debconf-2.0
+Description: Debian Configuration Management System (C-implementation)
+Description-md5: 9f3579e9d9f86ac89e667a8707d3cbd3
+Section: debian-installer
+Priority: standard
+Filename: pool/main/c/cdebconf/cdebconf-udeb_0.185_amd64.udeb
+Size: 77376
+MD5sum: e3883706fdbf54c2e5ea959c92b2d37f
+SHA1: 0232f1bdf1531db628516ed3a46a27466b267fdc
+SHA256: 96345575417a3e4df8a2cadaa55784ec8f6c042defb1e2fc002d941b6116ceab
+
+Package: cdebconf-gtk-terminal
+Source: cdebconf-terminal
+Version: 0.22
+Installed-Size: 64
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Provides: cdebconf-terminal
+Depends: cdebconf-gtk-udeb, libc6-udeb (>= 2.17), libglib2.0-udeb (>= 2.36.4), libgtk2.0-0-udeb (>= 2.24.0), libvte9-udeb (>= 1:0.28.0), cdebconf-udeb, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal, cdebconf-gtk-terminal
+Description: cdebconf gtk plugin displaying a terminal
+Description-md5: 18c4446758aec003eb8cd0a43419f1aa
+Section: debian-installer
+Priority: extra
+Filename: pool/main/c/cdebconf-terminal/cdebconf-gtk-terminal_0.22_amd64.udeb
+Size: 14734
+MD5sum: f9c3a7354560cb88e0396e2b7ba54363
+SHA1: 9c1c93328e758bfd9de2752466b271aaf38c8177
+SHA256: ca749853fc3b93db1d08ccdc6b46de27633de52bc5b880fa65275897ebcaaf69
+
+Package: cdebconf-newt-terminal
+Source: cdebconf-terminal
+Version: 0.22
+Installed-Size: 43
+Maintainer: Debian Install System Team <debian-boot@lists.debian.org>
+Architecture: amd64
+Provides: cdebconf-terminal
+Depends: cdebconf-newt-udeb (>= 0.146), libc6-udeb (>= 2.17), libnewt0.52
+Description: cdebconf newt plugin to provide a clean terminal
+Description-md5: 4109a053022081b573d864d84d6eb16d
+Section: debian-installer
+Priority: extra
+Filename: pool/main/c/cdebconf-terminal/cdebconf-newt-terminal_0.22_amd64.udeb
+Size: 4538
+MD5sum: 20db6152fce5081fcbf49c7c08f21246
+SHA1: fa2a40f777a2f48b9634866bc780fb059e60b2fe
+SHA256: c4d99ef27285f0c9090005313165627e56e0972e687af7e68c2b1d1538e2ae09
+
+Package: libc6-udeb
+Source: eglibc (2.17-92)
+Version: 2.17-92+b1
+Installed-Size: 3126
+Maintainer: GNU Libc Maintainers <debian-glibc@lists.debian.org>
+Architecture: amd64
+Provides: glibc-2.17-1, libc-udeb, libc6
+Description: Embedded GNU C Library: Shared libraries - udeb
+Description-md5: 9552ce73b7b3fb466e3d89fe8db9a563
+Section: debian-installer
+Priority: extra
+Filename: pool/main/e/eglibc/libc6-udeb_2.17-92+b1_amd64.udeb
+Size: 1056000
+MD5sum: 7fd7032eeeecf7f76eff79a0543fbd72
+SHA1: 724b6a81b8fbc9d4d2bb43d656c08de73f7ada25
+SHA256: 137d4c001bbfde8161315c36e6cb8653ae2c50a8d6b6d2d27396c492d91a1723
+
+Package: libglib2.0-udeb
+Source: glib2.0
+Version: 2.36.4-1
+Installed-Size: 10070
+Maintainer: Debian GNOME Maintainers <pkg-gnome-maintainers@lists.alioth.debian.org>
+Architecture: amd64
+Description: GLib library of C routines - minimal runtime
+Description-md5: 0244040042870a89aa49f037cce3f1e9
+Section: debian-installer
+Priority: optional
+Filename: pool/main/g/glib2.0/libglib2.0-udeb_2.36.4-1_amd64.udeb
+Size: 1714604
+MD5sum: 72da029f1bbb36057d874f1f82a5d00a
+SHA1: 32bce78a052ef19a620f43ecbe12404fa570c0f1
+SHA256: 8edbc7cb872c0a82705913563f93f9eec5750881e4378c5a48770cde840cd6eb
+
+Package: libgtk2.0-0-udeb
+Source: gtk+2.0
+Version: 2.24.20-1
+Installed-Size: 5035
+Maintainer: Debian GNOME Maintainers <pkg-gnome-maintainers@lists.alioth.debian.org>
+Architecture: amd64
+Provides: gtk2.0-binver-2.10.0
+Description: GTK+ graphical user interface library - minimal runtime
+Description-md5: 32e5112b80c02578837cff4f65dfec84
+Section: debian-installer
+Priority: extra
+Filename: pool/main/g/gtk+2.0/libgtk2.0-0-udeb_2.24.20-1_amd64.udeb
+Size: 1643046
+MD5sum: 25513478eb2e02e5766c0eea0b411ca9
+SHA1: 9274f05bfa930a3406403441ce061bade04e2064
+SHA256: d5f611f48928ae02f759105cf8cff467cde1cb44df56ad31067168b46a80f8bc
+
+Package: libvte9-udeb
+Source: vte
+Version: 1:0.28.2-5
+Installed-Size: 628
+Maintainer: Debian GNOME Maintainers <pkg-gnome-maintainers@lists.alioth.debian.org>
+Architecture: amd64
+Description: Terminal emulator widget for GTK+ 2.0 - minimal runtime
+Description-md5: e7993385c30bae6e96c8cb87795a513c
+Section: debian-installer
+Priority: extra
+Filename: pool/main/v/vte/libvte9-udeb_0.28.2-5_amd64.udeb
+Size: 216968
+MD5sum: 7da7201effaf5ced19abd9d0b45aa2c6
+SHA1: a424cf779e7614d79740c422b6342de04fed3646
+SHA256: 4963033cbda5a8ba7eb8ebf1debae34463b8e63b821259860cfb51c1ab99562d
+
+Package: zlib1g-udeb
+Source: zlib
+Version: 1:1.2.8.dfsg-1
+Installed-Size: 115
+Maintainer: Mark Brown <broonie@debian.org>
+Architecture: amd64
+Description: compression library - runtime for Debian installer
+Description-md5: 9cab974e3eab657c53bc17611b894c7a
+Section: debian-installer
+Priority: optional
+Filename: pool/main/z/zlib/zlib1g-udeb_1.2.8.dfsg-1_amd64.udeb
+Size: 45270
+MD5sum: c02884420f79a3ae4569cf67782f3e74
+SHA1: 7cd1a7c8be4e086de733a0ce76f87d42b8b2173b
+SHA256: 61641ee2b5e185232108333438b72bec71ef549fe0e0df1b2b3afa37174e53a7
+
diff --git a/test/integration/apt.pem b/test/integration/apt.pem
new file mode 100644
index 000000000..f48df054d
--- /dev/null
+++ b/test/integration/apt.pem
@@ -0,0 +1,49 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCt4R1Q2oYF6utL
+19GBhnlHW8L2BI7PRFWge/ZpqIZWsaFcb30FV86Z6aXXZmgfEJ2814ZZYD1IKeCe
+JsJpns7B4vYe1v64r995ZNBQAAKIYjICkKZOBgOphV+ChBfrCctVXgfLbMP6iBdH
+J02wHzSCCdZm0sdVl9tB5l/OyJU8Mb4KB3btBhfZfY2M6lU+FOjcXs1LOduUrv7K
+fZ+DEalvVGkomLtHtD0qb2vkqFrTjVCkziUVWhhxFFflt08oQ01Clxpl+uv7rOQo
+jtkJ1LrMuv7iPfaZ/z3qLiFxZYG1BCGEwTOKCtJo6bgFzXiN3q7Q5FFlmv851x2J
+Dn8C7Qm7AgMBAAECggEAE3q6vAofJZ6Ryadd8zLLd3ESQFl2XkX7icUZb/DPS/sO
+ZrqeuPCDVr7UM3NnisNjyHoktPKRKvp2DYGuGgMOiq4QgJf5ZVten8zpgWze28SU
+cbEe0HLgCifE8Ww2+b/ZJbEpEmMW+YQxh2khzO9SBJdxi4dliXM/vvw+E35pKZsB
+s6glrz6VQAxxa9fY4fLnB2DafHy+pUvRVw8gC6PCM9jXN9tMYAqztsJu7aaanNyT
+HX2UDWa8hxVx6t5UQZuxvst9N+RcEwmVCR2qlfZt/VRBRibBm62crEKbTD00mNHQ
+4AIDn3g6Y3SXpDlgtNpjLyBL3fODPIwqwGdblaSKkQKBgQDYXecu0Eda7kbR5ciW
+IAn8XOxsBIkkh8YVl2gRiiajRVoeiYBHaW9TyuQiaWrftiDQxB/N4G2focTXy/7O
+VJn6e/SUoO/ZGRw2GbTxLUQptgvFsejYCcW9XpC8MCwE/y2swiY7JM0WR8cV2nCk
+a/Cls6f1LjL13aFO0PAorEcahQKBgQDNuth6EHZVwfDgUuqhRw4HIIpfsfiA3UOd
+b5k/NsfQIev1YUqnfucgInNPDq2Jf8eTQw3TKaszo2DCjDffCsEgM09Tym143Bd6
+AsMuqAStsE3IEC7pnmh95l29/7mh4OuG5cp5JUx0Pi5PkuJ6ywA8P1rM1MB9Zf52
+NGJCo1pnPwKBgQCx/n4i+uDYo1DLd/dN2UmdvGwaaJjR3ohMVuQcGcSzaGg82u0W
+0lvtWOnYjVSIeXIBjHaFjW1hd1lSFdWms96AO9z3MHZf6NJWh0tdZNnAXqzMlBFz
+OIbdxJ/Y0OBFtA9FIesFmL7G54GWLr+f49Ry3Jr9jmYJ8au0BRqsux07aQKBgC4q
+CT2KyCMCO/z6XjAGc71hres/UlYIUI3ZZvfqYPfxRLNxO4FOVqq9UEajMomyJKSE
+3WtO5F3YAXRmZnskPKXvHZPdzqbaLGJykD298h7PewSzrPM7WpM1yD9ETPFoOTGy
+CrcYiYlkEpxEh5GqT8k1JjjkXLVG18zKgGoXocedAoGAQyU2DCNfxwzIJfFHKZEG
+zpni72cR68Tu3AhW/38vMR2ZPca4KzXrUA52T+j7vkQC38LHm/mzNXNP7Vya0PJ3
+WoYOcLtg2uFPh0P/35ArEzuNooLsvulgg1jsamPbF8KAvJZKZHr30hlC/JGYSBbV
+bnkzJTShsKzHIUiLtQ8Ja+E=
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIJAJ39xapQo0vLMA0GCSqGSIb3DQEBBQUAMFMxCzAJBgNV
+BAYTAkRFMRMwEQYDVQQIDApTb21lLVN0YXRlMRswGQYDVQQKDBJBUFQgVGVzdGNh
+c2VzIEdtYkgxEjAQBgNVBAMMCWxvY2FsaG9zdDAgFw0xMzA5MTYwODQ4MzVaGA80
+NzUxMDgxMzA4NDgzNVowUzELMAkGA1UEBhMCREUxEzARBgNVBAgMClNvbWUtU3Rh
+dGUxGzAZBgNVBAoMEkFQVCBUZXN0Y2FzZXMgR21iSDESMBAGA1UEAwwJbG9jYWxo
+b3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAreEdUNqGBerrS9fR
+gYZ5R1vC9gSOz0RVoHv2aaiGVrGhXG99BVfOmeml12ZoHxCdvNeGWWA9SCngnibC
+aZ7OweL2Htb+uK/feWTQUAACiGIyApCmTgYDqYVfgoQX6wnLVV4Hy2zD+ogXRydN
+sB80ggnWZtLHVZfbQeZfzsiVPDG+Cgd27QYX2X2NjOpVPhTo3F7NSznblK7+yn2f
+gxGpb1RpKJi7R7Q9Km9r5Kha041QpM4lFVoYcRRX5bdPKENNQpcaZfrr+6zkKI7Z
+CdS6zLr+4j32mf896i4hcWWBtQQhhMEzigrSaOm4Bc14jd6u0ORRZZr/OdcdiQ5/
+Au0JuwIDAQABo1AwTjAdBgNVHQ4EFgQUhd26E7ykEYRTDbgMzkYtFtENhSkwHwYD
+VR0jBBgwFoAUhd26E7ykEYRTDbgMzkYtFtENhSkwDAYDVR0TBAUwAwEB/zANBgkq
+hkiG9w0BAQUFAAOCAQEAWcyMKi0Vc4beGV7w4Qft0/2P68jjMlQRdgkz+gGXbMVr
+//KhqR3PbgFmHHpUsZ718AHeerNNdfFzOUptiAiOqH2muyAGdeWCxJ8KcU0sic8x
+/h3TOzMYfEozhgMSJp9YW1z655uHcb15S7jb4zZwXwGyQzxwXT35SKj2mCqSbjIb
+G987DGI+MtyoGRXhIwnBEsGTI1ck3NoeXBJ/tS/Ma8gUUC2xldMSprtHjeUHvZV2
+iz/HTqGlMLGW96AVeZiFNiC1fJ6pvref2XW5MkkvQm8tOi2cSrwJc9CgnCpCxkLp
+liRsbwAduwkA26XzEomMR7yyYS5pm0Eu0cO9X39FKQ==
+-----END CERTIFICATE-----
diff --git a/test/integration/framework b/test/integration/framework
index 54d35fef8..d899bb574 100644
--- a/test/integration/framework
+++ b/test/integration/framework
@@ -194,6 +194,7 @@ setupenvironment() {
echo 'quiet::NoUpdate "true";' >> aptconfig.conf
export LC_ALL=C
export PATH="${PATH}:/usr/local/sbin:/usr/sbin:/sbin"
+ configcompression '.' 'gz' #'bz2' 'lzma' 'xz'
msgdone "info"
}
@@ -215,14 +216,13 @@ getarchitectures() {
}
configarchitecture() {
- local CONFFILE=rootdir/etc/apt/apt.conf.d/01multiarch.conf
- rm -f $CONFFILE
- echo "APT::Architecture \"$(getarchitecture $1)\";" > $CONFFILE
- shift
- while [ -n "$1" ]; do
- echo "APT::Architectures:: \"$(getarchitecture $1)\";" >> $CONFFILE
- shift
- done
+ {
+ echo "APT::Architecture \"$(getarchitecture $1)\";"
+ while [ -n "$1" ]; do
+ echo "APT::Architectures:: \"$(getarchitecture $1)\";"
+ shift
+ done
+ } >rootdir/etc/apt/apt.conf.d/01multiarch.conf
configdpkg
}
@@ -235,12 +235,19 @@ configdpkg() {
echo -n > rootdir/var/lib/dpkg/status
fi
fi
+ rm -f rootdir/etc/apt/apt.conf.d/00foreigndpkg
if $(which dpkg) --assert-multi-arch >/dev/null 2>&1; then
local ARCHS="$(getarchitectures)"
if echo "$ARCHS" | grep -E -q '[^ ]+ [^ ]+'; then
DPKGARCH="$(dpkg --print-architecture)"
for ARCH in ${ARCHS}; do
- if [ "${ARCH}" != "${DPKGARCH}" ]; then dpkg --add-architecture ${ARCH}; fi
+ if [ "${ARCH}" != "${DPKGARCH}" ]; then
+ if ! dpkg --add-architecture ${ARCH} >/dev/null 2>&1; then
+ # old-style option used e.g. in Ubuntu-P – and, as it seems, on travis
+ echo "DPKG::options:: \"--foreign-architecture\";" >> rootdir/etc/apt/apt.conf.d/00foreigndpkg
+ echo "DPKG::options:: \"${ARCH}\";" >> rootdir/etc/apt/apt.conf.d/00foreigndpkg
+ fi
+ fi
done
if [ "0" = "$(dpkg -l dpkg 2> /dev/null | grep '^i' | wc -l)" ]; then
# dpkg doesn't really check the version as long as it is fully installed,
@@ -251,13 +258,31 @@ configdpkg() {
fi
}
+configcompression() {
+ while [ -n "$1" ]; do
+ case "$1" in
+ '.') echo ".\t.\tcat";;
+ 'gz') echo "gzip\tgz\tgzip";;
+ 'bz2') echo "bzip2\tbz2\tbzip2";;
+ 'lzma') echo "lzma\tlzma\txz --format=lzma";;
+ 'xz') echo "xz\txz\txz";;
+ *) echo "$1\t$1\t$1";;
+ esac
+ shift
+ done > ${TMPWORKINGDIRECTORY}/rootdir/etc/testcase-compressor.conf
+}
+
setupsimplenativepackage() {
local NAME="$1"
local ARCH="$2"
local VERSION="$3"
local RELEASE="${4:-unstable}"
local DEPENDENCIES="$5"
- local DESCRIPTION="$6"
+ local DESCRIPTION="${6:-"Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
+ If you find such a package installed on your system,
+ something went horribly wrong! They are autogenerated
+ and used only by testcases and serve no other purpose…"}"
+
local SECTION="${7:-others}"
local DISTSECTION
if [ "$SECTION" = "$(echo "$SECTION" | cut -d'/' -f 2)" ]; then
@@ -289,14 +314,8 @@ Package: $NAME" > debian/control
echo "Architecture: any" >> debian/control
fi
test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> debian/control
- if [ -z "$DESCRIPTION" ]; then
- echo "Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
- If you find such a package installed on your system,
- YOU did something horribly wrong! They are autogenerated
- und used only by testcases for APT and surf no other propose…" >> debian/control
- else
- echo "Description: $DESCRIPTION" >> debian/control
- fi
+ echo "Description: $DESCRIPTION" >> debian/control
+
test -e debian/compat || echo "7" > debian/compat
test -e debian/source/format || echo "3.0 (native)" > debian/source/format
test -e debian/rules || cp /usr/share/doc/debhelper/examples/rules.tiny debian/rules
@@ -309,7 +328,11 @@ buildsimplenativepackage() {
local VERSION="$3"
local RELEASE="${4:-unstable}"
local DEPENDENCIES="$5"
- local DESCRIPTION="$6"
+ local DESCRIPTION="${6:-"Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
+ If you find such a package installed on your system,
+ something went horribly wrong! They are autogenerated
+ and used only by testcases and serve no other purpose…"}"
+
local SECTION="${7:-others}"
local PRIORITY="${8:-optional}"
local DISTSECTION
@@ -349,14 +372,7 @@ Package: $NAME" >> ${BUILDDIR}/debian/control
fi
local DEPS="$(echo "$DEPENDENCIES" | grep -v '^Build-')"
test -z "$DEPS" || echo "$DEPS" >> ${BUILDDIR}/debian/control
- if [ -z "$DESCRIPTION" ]; then
- echo "Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
- If you find such a package installed on your system,
- YOU did something horribly wrong! They are autogenerated
- und used only by testcases for APT and surf no other propose…" >> ${BUILDDIR}/debian/control
- else
- echo "Description: $DESCRIPTION" >> ${BUILDDIR}/debian/control
- fi
+ echo "Description: $DESCRIPTION" >> ${BUILDDIR}/debian/control
echo '3.0 (native)' > ${BUILDDIR}/debian/source/format
(cd ${BUILDDIR}/..; dpkg-source -b ${NAME}-${VERSION} 2>&1) | sed -n 's#^dpkg-source: info: building [^ ]\+ in ##p' \
@@ -421,6 +437,8 @@ buildaptarchive() {
}
createaptftparchiveconfig() {
+ local COMPRESSORS="$(cut -d' ' -f 1 ${TMPWORKINGDIRECTORY}/rootdir/etc/testcase-compressor.conf | tr '\n' ' ')"
+ COMPRESSORS="${COMPRESSORS%* }"
local ARCHS="$(find pool/ -name '*.deb' | grep -oE '_[a-z0-9-]+\.deb$' | sort | uniq | sed -e '/^_all.deb$/ d' -e 's#^_\([a-z0-9-]*\)\.deb$#\1#' | tr '\n' ' ')"
if [ -z "$ARCHS" ]; then
# the pool is empty, so we will operate on faked packages - let us use the configured archs
@@ -438,10 +456,10 @@ createaptftparchiveconfig() {
echo -n '";
};
Default {
- Packages::Compress ". gzip bzip2 lzma xz";
- Sources::Compress ". gzip bzip2 lzma xz";
- Contents::Compress ". gzip bzip2 lzma xz";
- Translation::Compress ". gzip bzip2 lzma xz";
+ Packages::Compress "'"$COMPRESSORS"'";
+ Sources::Compress "'"$COMPRESSORS"'";
+ Contents::Compress "'"$COMPRESSORS"'";
+ Translation::Compress "'"$COMPRESSORS"'";
LongDescription "false";
};
TreeDefault {
@@ -503,7 +521,10 @@ insertpackage() {
local VERSION="$4"
local DEPENDENCIES="$5"
local PRIORITY="${6:-optional}"
- local DESCRIPTION="${7}"
+ local DESCRIPTION="${7:-"Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
+ If you find such a package installed on your system,
+ something went horribly wrong! They are autogenerated
+ and used only by testcases and serve no other purpose…"}"
local ARCHS=""
for arch in $(echo "$ARCH" | sed -e 's#,#\n#g' | sed -e "s#^native\$#$(getarchitecture 'native')#"); do
if [ "$arch" = 'all' -o "$arch" = 'none' ]; then
@@ -525,15 +546,7 @@ Maintainer: Joe Sixpack <joe@example.org>" >> $FILE
echo "Version: $VERSION
Filename: pool/main/${NAME}/${NAME}_${VERSION}_${arch}.deb" >> $FILE
test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> $FILE
- echo -n 'Description: ' >> $FILE
- if [ -z "$DESCRIPTION" ]; then
- echo "an autogenerated dummy ${NAME}=${VERSION}/${RELEASE}
- If you find such a package installed on your system,
- YOU did something horribly wrong! They are autogenerated
- und used only by testcases for APT and surf no other propose…" >> $FILE
- else
- echo "$DESCRIPTION" >> $FILE
- fi
+ echo "Description: $DESCRIPTION" >> $FILE
echo >> $FILE
done
done
@@ -568,6 +581,11 @@ insertinstalledpackage() {
local DEPENDENCIES="$4"
local PRIORITY="${5:-optional}"
local STATUS="${6:-install ok installed}"
+ local DESCRIPTION="${7:-"Description: an autogenerated dummy ${NAME}=${VERSION}/installed
+ If you find such a package installed on your system,
+ something went horribly wrong! They are autogenerated
+ and used only by testcases and serve no other purpose…"}"
+
local FILE='rootdir/var/lib/dpkg/status'
local INFO='rootdir/var/lib/dpkg/info'
for arch in $(echo "$ARCH" | sed -e 's#,#\n#g' | sed -e "s#^native\$#$(getarchitecture 'native')#"); do
@@ -580,11 +598,8 @@ Maintainer: Joe Sixpack <joe@example.org>
Version: $VERSION" >> $FILE
test "$arch" = 'none' || echo "Architecture: $arch" >> $FILE
test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> $FILE
- echo "Description: an autogenerated dummy ${NAME}=${VERSION}/installed
- If you find such a package installed on your system,
- YOU did something horribly wrong! They are autogenerated
- und used only by testcases for APT and surf no other propose…
-" >> $FILE
+ echo "Description: $DESCRIPTION" >> $FILE
+ echo >> $FILE
if [ "$(dpkg-query -W --showformat='${Multi-Arch}')" = 'same' ]; then
echo -n > ${INFO}/${NAME}:${arch}.list
else
@@ -611,18 +626,27 @@ buildaptarchivefromfiles() {
msginfo "Build APT archive for ${CCMD}$(basename $0)${CINFO} based on prebuild files…"
find aptarchive -name 'Packages' -o -name 'Sources' | while read line; do
msgninfo "\t${line} file… "
- cat ${line} | gzip > ${line}.gz
- cat ${line} | bzip2 > ${line}.bz2
- cat ${line} | xz --format=lzma > ${line}.lzma
- cat ${line} | xz > ${line}.xz
- if [ -n "$1" ]; then
- touch -d "$1" ${line}.gz ${line}.bz2 ${line}.lzma ${line}.xz
- fi
+ compressfile "$line" "$1"
msgdone "info"
done
generatereleasefiles "$@"
}
+compressfile() {
+ cat ${TMPWORKINGDIRECTORY}/rootdir/etc/testcase-compressor.conf | while read compressor extension command; do
+ if [ "$compressor" = '.' ]; then
+ if [ -n "$2" ]; then
+ touch -d "$2" "$1"
+ fi
+ continue
+ fi
+ cat "$1" | $command > "${1}.${extension}"
+ if [ -n "$2" ]; then
+ touch -d "$2" "${1}.${extension}"
+ fi
+ done
+}
+
# can be overridden by testcases for their pleasure
getcodenamefromsuite() { echo -n "$1"; }
getreleaseversionfromsuite() { true; }
@@ -763,6 +787,13 @@ signreleasefiles() {
msgdone "info"
}
+rewritesourceslist() {
+ local APTARCHIVE="file://$(readlink -f "${TMPWORKINGDIRECTORY}/aptarchive")"
+ for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do
+ sed -i $LIST -e "s#$APTARCHIVE#${1}#" -e "s#http://localhost:8080/#${1}#" -e "s#http://localhost:4433/#${1}#"
+ done
+}
+
changetowebserver() {
local LOG='/dev/null'
if test -x ${BUILDDIRECTORY}/aptwebserver; then
@@ -774,31 +805,32 @@ changetowebserver() {
fi
addtrap "kill $PID;"
cd - > /dev/null
- elif [ $# -gt 0 ]; then
- msgdie 'Need the aptwebserver when passing arguments for the webserver'
- elif which weborf > /dev/null; then
- weborf -xb aptarchive/ >$LOG 2>&1 &
- addtrap "kill $!;"
- elif which gatling > /dev/null; then
- cd aptarchive
- gatling -p 8080 -F -S >$LOG 2>&1 &
- addtrap "kill $!;"
- cd - > /dev/null
- elif which lighttpd > /dev/null; then
- echo "server.document-root = \"$(readlink -f ./aptarchive)\"
-server.port = 8080
-server.stat-cache-engine = \"disable\"" > lighttpd.conf
- lighttpd -t -f lighttpd.conf >/dev/null || msgdie 'Can not change to webserver: our lighttpd config is invalid'
- lighttpd -D -f lighttpd.conf >$LOG 2>&1 &
- addtrap "kill $!;"
else
msgdie 'You have to build aptwebserver or install a webserver'
fi
- local APTARCHIVE="file://$(readlink -f ./aptarchive)"
- for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do
- sed -i $LIST -e "s#$APTARCHIVE#http://localhost:8080/#"
- done
- return 0
+ if [ "$1" != '--no-rewrite' ]; then
+ rewritesourceslist 'http://localhost:8080/'
+ fi
+}
+
+changetohttpswebserver() {
+ if ! which stunnel4 >/dev/null; then
+ msgdie 'You need to install stunnel4 for https testcases'
+ fi
+ if [ ! -e "${TMPWORKINGDIRECTORY}/aptarchive/aptwebserver.pid" ]; then
+ changetowebserver --no-rewrite
+ fi
+ echo "pid = ${TMPWORKINGDIRECTORY}/aptarchive/stunnel.pid
+cert = ${TESTDIRECTORY}/apt.pem
+
+[https]
+accept = 4433
+connect = 8080
+" > ${TMPWORKINGDIRECTORY}/stunnel.conf
+ stunnel4 "${TMPWORKINGDIRECTORY}/stunnel.conf"
+ local PID="$(cat ${TMPWORKINGDIRECTORY}/aptarchive/stunnel.pid)"
+ addtrap 'prefix' "kill ${PID};"
+ rewritesourceslist 'https://localhost:4433/'
}
changetocdrom() {
@@ -816,6 +848,46 @@ changetocdrom() {
find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list' -delete
}
+downloadfile() {
+ PROTO="$(echo "$1" | cut -d':' -f 1)"
+ local DOWNLOG="${TMPWORKINGDIRECTORY}/download.log"
+ rm -f "$DOWNLOG"
+ touch "$DOWNLOG"
+ {
+ echo "601 Configuration
+Config-Item: Acquire::https::CaInfo=${TESTDIR}/apt.pem
+Config-Item: Debug::Acquire::${PROTO}=1
+
+600 Acquire URI
+URI: $1
+Filename: ${2}
+"
+ # simple worker keeping stdin open until we are done (201) or error (400)
+ # and requesting new URIs on try-again/redirect responses in between
+ { tail -n 999 -f "$DOWNLOG" & echo "TAILPID: $!"; } | while read f1 f2; do
+ if [ "$f1" = 'TAILPID:' ]; then
+ TAILPID="$f2"
+ elif [ "$f1" = 'New-URI:' ]; then
+ echo "600 Acquire URI
+URI: $f2
+Filename: ${2}
+"
+ elif [ "$f1" = '201' ] || [ "$f1" = '400' ]; then
+ # tail would only die on next read – which never happens
+ test -z "$TAILPID" || kill -s HUP "$TAILPID"
+ break
+ fi
+ done
+ } | LD_LIBRARY_PATH=${BUILDDIRECTORY} ${BUILDDIRECTORY}/methods/${PROTO} 2>&1 | tee "$DOWNLOG"
+ rm "$DOWNLOG"
+ # the download was successful only if the file exists
+ if [ -e "$2" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
checkdiff() {
local DIFFTEXT="$($(which diff) -u $* | sed -e '/^---/ d' -e '/^+++/ d' -e '/^@@/ d')"
if [ -n "$DIFFTEXT" ]; then
@@ -861,7 +933,7 @@ testequalor2() {
echo "$2" > $COMPAREFILE2
shift 2
msgtest "Test for equality OR of" "$*"
- $* >$COMPAREAGAINST 2>&1
+ $* >$COMPAREAGAINST 2>&1 || true
(checkdiff $COMPAREFILE1 $COMPAREAGAINST 1> /dev/null ||
checkdiff $COMPAREFILE2 $COMPAREAGAINST 1> /dev/null) && msgpass ||
( echo "\n${CINFO}Diff against OR 1${CNORMAL}" "$(checkdiff $COMPAREFILE1 $COMPAREAGAINST)" \
diff --git a/test/integration/test-bug-601016-description-translation b/test/integration/skip-bug-601016-description-translation
index 33c209e9d..33c209e9d 100755
--- a/test/integration/test-bug-601016-description-translation
+++ b/test/integration/skip-bug-601016-description-translation
diff --git a/test/integration/status-bug-723705-tagfile-truncates-fields b/test/integration/status-bug-723705-tagfile-truncates-fields
new file mode 100644
index 000000000..fe18506c8
--- /dev/null
+++ b/test/integration/status-bug-723705-tagfile-truncates-fields
@@ -0,0 +1,62 @@
+Package: libc6
+Status: install ok installed
+Priority: required
+Section: libs
+Installed-Size: 10164
+Maintainer: GNU Libc Maintainers <debian-glibc@lists.debian.org>
+Architecture: amd64
+Multi-Arch: same
+Source: eglibc (2.17-92)
+Version: 2.17-92+b1
+Replaces: libc6-amd64
+Provides: glibc-2.17-1
+Suggests: glibc-doc, debconf | debconf-2.0, locales
+Breaks: locales (<< 2.17), locales-all (<< 2.17), lsb-core (<= 3.2-27), nscd (<< 2.17)
+Conflicts: prelink (<= 0.0.20090311-1), tzdata (<< 2007k-1), tzdata-etch
+Conffiles:
+ /etc/ld.so.conf.d/x86_64-linux-gnu.conf 593ad12389ab2b6f952e7ede67b8fbbf
+Description: Embedded GNU C Library: Shared libraries
+ Contains the standard libraries that are used by nearly all programs on
+ the system. This package includes shared versions of the standard C library
+ and the standard math library, as well as many others.
+Homepage: http://www.eglibc.org
+
+Package: libnewt0.52
+Status: install ok installed
+Priority: important
+Section: libs
+Installed-Size: 820
+Maintainer: Alastair McKinstry <mckinstry@debian.org>
+Architecture: amd64
+Multi-Arch: same
+Source: newt
+Version: 0.52.15-3
+Recommends: libfribidi0
+Conffiles:
+ /etc/newt/palette.original d41d8cd98f00b204e9800998ecf8427e
+Description: Not Erik's Windowing Toolkit - text mode windowing with slang
+ Newt is a windowing toolkit for text mode built from the slang library.
+ It allows color text mode applications to easily use stackable windows,
+ push buttons, check boxes, radio buttons, lists, entry fields, labels,
+ and displayable text. Scrollbars are supported, and forms may be nested
+ to provide extra functionality. This package contains the shared library
+ for programs that have been built with newt.
+Homepage: https://fedorahosted.org/newt/
+
+Package: libgcc1
+Status: install ok installed
+Priority: required
+Section: libs
+Installed-Size: 128
+Maintainer: Debian GCC Maintainers <debian-gcc@lists.debian.org>
+Architecture: amd64
+Multi-Arch: same
+Source: gcc-4.8 (4.8.1-10)
+Version: 1:4.8.1-10
+Breaks: gcc-4.1, gcc-4.3 (<< 4.3.6-1), gcc-4.4 (<< 4.4.6-4), gcc-4.5 (<< 4.5.3-2)
+Description: GCC support library
+ Shared version of the support library, a library of internal subroutines
+ that GCC uses to overcome shortcomings of particular machines, or
+ special needs for some languages.
+Homepage: http://gcc.gnu.org/
+
diff --git a/test/integration/test-apt-cdrom b/test/integration/test-apt-cdrom
index 85c3a2fee..cc3483f9b 100755
--- a/test/integration/test-apt-cdrom
+++ b/test/integration/test-apt-cdrom
@@ -18,10 +18,7 @@ echo 'Description-de: automatisch generiertes Testpaket testing=0.8.15/stable
Diese Pakete sind nur für das testen von APT gedacht,
sie erfüllen keinen Zweck auf einem normalen System…
' >> Translation-de
-cat Translation-de | gzip > Translation-de.gz
-cat Translation-de | bzip2 > Translation-de.bz2
-cat Translation-de | xz --format=lzma > Translation-de.lzma
-cat Translation-de | xz > Translation-de.xz
+compressfile Translation-de
rm Translation-en Translation-de
cd - > /dev/null
addtrap 'prefix' "chmod -R +w $PWD/rootdir/media/cdrom/dists/;"
diff --git a/test/integration/test-apt-get-autoremove b/test/integration/test-apt-get-autoremove
index dc30cde34..68ea1c574 100755
--- a/test/integration/test-apt-get-autoremove
+++ b/test/integration/test-apt-get-autoremove
@@ -49,3 +49,23 @@ Install: unrelated:i386 (1), debhelper:i386 (8.0.0), po-debconf:i386 (1.0.16, au
Remove: debhelper:i386 (8.0.0)
Remove: po-debconf:i386 (1.0.16)'
+
+testsuccess aptget install debhelper -y
+testdpkginstalled 'unrelated' 'debhelper' 'po-debconf'
+testsuccess aptmark auto debhelper
+
+testmarkedauto 'debhelper' 'po-debconf'
+testequal 'Reading package lists...
+Building dependency tree...
+Reading state information...
+The following packages will be REMOVED:
+ debhelper po-debconf
+0 upgraded, 0 newly installed, 2 to remove and 0 not upgraded.
+Remv debhelper [8.0.0]
+Remv po-debconf [1.0.16]' aptget autoremove -s
+
+testsuccess aptmark hold debhelper
+testequal 'Reading package lists...
+Building dependency tree...
+Reading state information...
+0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.' aptget autoremove -s
diff --git a/test/integration/test-apt-get-download b/test/integration/test-apt-get-download
index 420b2e380..6eac079f3 100755
--- a/test/integration/test-apt-get-download
+++ b/test/integration/test-apt-get-download
@@ -20,13 +20,14 @@ testdownload() {
fi
msgtest "Test download of package file $1 with" "$APT"
aptget -qq download ${APT} && test -f $1 && msgpass || msgfail
+ rm $1
}
testdownload apt_1.0_all.deb apt stable
testdownload apt_2.0_all.deb apt
DEBFILE="$(readlink -f aptarchive)/pool/apt_2.0_all.deb"
-testequal "'file://${DEBFILE}' apt_2.0_all.deb $(stat -c%s $DEBFILE) sha512:$(sha512sum $DEBFILE | cut -d' ' -f 1)" aptget download apt --print-uris
+testequal "'file://${DEBFILE}' apt_2.0_all.deb $(stat -c%s $DEBFILE) SHA512:$(sha512sum $DEBFILE | cut -d' ' -f 1)" aptget download apt --print-uris
# deb:677887
testequal "E: Can't find a source to download version '1.0' of 'vrms:i386'" aptget download vrms
diff --git a/test/integration/test-apt-get-upgrade b/test/integration/test-apt-get-upgrade
new file mode 100755
index 000000000..23446299c
--- /dev/null
+++ b/test/integration/test-apt-get-upgrade
@@ -0,0 +1,78 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture "i386"
+
+# simple case
+insertpackage 'stable' 'upgrade-simple' 'all' '1.0'
+insertpackage 'unstable' 'upgrade-simple' 'all' '2.0'
+insertinstalledpackage 'upgrade-simple' 'all' '1.0'
+
+# upgrade with a new dependency
+insertpackage 'stable' 'upgrade-with-new-dep' 'all' '1.0'
+insertpackage 'unstable' 'upgrade-with-new-dep' 'all' '2.0' 'Depends: new-dep'
+insertpackage 'stable' 'new-dep' 'all' '1.0'
+insertinstalledpackage 'upgrade-with-new-dep' 'all' '1.0'
+
+# upgrade with conflict and a new pkg with higher priority than conflict
+insertpackage 'stable' 'upgrade-with-conflict' 'all' '1.0'
+insertpackage 'unstable' 'upgrade-with-conflict' 'all' '2.0' 'Conflicts: conflicting-dep' 'standard'
+insertpackage 'stable' 'conflicting-dep' 'all' '1.0'
+insertinstalledpackage 'upgrade-with-conflict' 'all' '1.0'
+insertinstalledpackage 'conflicting-dep' 'all' '1.0'
+
+
+setupaptarchive
+
+# Test if normal upgrade works as expected
+testequal 'Reading package lists...
+Building dependency tree...
+The following packages have been kept back:
+ upgrade-with-conflict upgrade-with-new-dep
+The following packages will be upgraded:
+ upgrade-simple
+1 upgraded, 0 newly installed, 0 to remove and 2 not upgraded.
+Inst upgrade-simple [1.0] (2.0 unstable [all])
+Conf upgrade-simple (2.0 unstable [all])' aptget -s upgrade
+
+# Test if apt-get upgrade --with-new-pkgs works
+testequal 'Reading package lists...
+Building dependency tree...
+The following NEW packages will be installed:
+ new-dep
+The following packages have been kept back:
+ upgrade-with-conflict
+The following packages will be upgraded:
+ upgrade-simple upgrade-with-new-dep
+2 upgraded, 1 newly installed, 0 to remove and 1 not upgraded.
+Inst new-dep (1.0 stable [all])
+Inst upgrade-simple [1.0] (2.0 unstable [all])
+Inst upgrade-with-new-dep [1.0] (2.0 unstable [all])
+Conf new-dep (1.0 stable [all])
+Conf upgrade-simple (2.0 unstable [all])
+Conf upgrade-with-new-dep (2.0 unstable [all])' aptget -s upgrade --with-new-pkgs
+
+# Test if apt-get dist-upgrade works
+testequal 'Reading package lists...
+Building dependency tree...
+The following packages will be REMOVED:
+ conflicting-dep
+The following NEW packages will be installed:
+ new-dep
+The following packages will be upgraded:
+ upgrade-simple upgrade-with-conflict upgrade-with-new-dep
+3 upgraded, 1 newly installed, 1 to remove and 0 not upgraded.
+Remv conflicting-dep [1.0]
+Inst upgrade-with-conflict [1.0] (2.0 unstable [all])
+Inst new-dep (1.0 stable [all])
+Inst upgrade-simple [1.0] (2.0 unstable [all])
+Inst upgrade-with-new-dep [1.0] (2.0 unstable [all])
+Conf upgrade-with-conflict (2.0 unstable [all])
+Conf new-dep (1.0 stable [all])
+Conf upgrade-simple (2.0 unstable [all])
+Conf upgrade-with-new-dep (2.0 unstable [all])' aptget -s dist-upgrade
+
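The three simulations above pin down the behavioural difference between the upgrade flavours; roughly, as an illustrative recap rather than part of the test:

    aptget -s upgrade                   # upgrades only, never installs new or removes packages
    aptget -s upgrade --with-new-pkgs   # may additionally install new packages (new-dep), still never removes
    aptget -s dist-upgrade              # may also remove packages (conflicting-dep) to complete the upgrade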
diff --git a/test/integration/test-bug-254770-segfault-if-cache-not-buildable b/test/integration/test-bug-254770-segfault-if-cache-not-buildable
index 8fa337ccc..59102ddc9 100755
--- a/test/integration/test-bug-254770-segfault-if-cache-not-buildable
+++ b/test/integration/test-bug-254770-segfault-if-cache-not-buildable
@@ -18,7 +18,7 @@ testsegfault() {
msgpass
else
echo
- echo $TEST
+ echo "$TEST"
msgfail
fi
}
diff --git a/test/integration/test-bug-596498-trusted-unsigned-repo b/test/integration/test-bug-596498-trusted-unsigned-repo
index 5c643c40e..06c9c8285 100755
--- a/test/integration/test-bug-596498-trusted-unsigned-repo
+++ b/test/integration/test-bug-596498-trusted-unsigned-repo
@@ -21,6 +21,9 @@ DEBFILE='rootdir/etc/apt/sources.list.d/apt-test-unstable-deb.list'
testequal "$PKGTEXT
Download complete and in download only mode" aptget install cool --assume-no -d
+testequal "$PKGTEXT
+Download complete and in download only mode" aptget install cool --assume-no -d --allow-unauthenticated
+
sed -i -e 's#deb#deb [trusted=no]#' $DEBFILE
aptgetupdate
@@ -40,6 +43,12 @@ WARNING: The following packages cannot be authenticated!
Install these packages without verification? [y/N] N
E: Some packages could not be authenticated" aptget install cool --assume-no -d
+testequal "$PKGTEXT
+WARNING: The following packages cannot be authenticated!
+ cool
+Authentication warning overridden.
+Download complete and in download only mode" aptget install cool --assume-no -d --allow-unauthenticated
+
sed -i -e 's#deb#deb [trusted=yes]#' $DEBFILE
aptgetupdate
diff --git a/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted b/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted
new file mode 100755
index 000000000..633c197c0
--- /dev/null
+++ b/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted
@@ -0,0 +1,63 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'i386'
+
+buildsimplenativepackage 'cool' 'i386' '1.0' 'unstable'
+
+setupaptarchive --no-update
+
+testfileexists() {
+ msgtest 'Test for existence of file' "$1"
+ test -e "$1" && msgpass || msgfail
+ rm -f "$1"
+}
+
+testfilemissing() {
+ msgtest 'Test for non-existence of file' "$1"
+ test -e "$1" && msgfail || msgpass
+ rm -f "$1"
+}
+
+testrun() {
+ rm -rf rootdir/var/lib/apt
+ testsuccess aptget update
+
+ if [ "$1" = 'trusted' ]; then
+ testsuccess aptget download cool
+ testfileexists 'cool_1.0_i386.deb'
+
+ testsuccess aptget download cool --allow-unauthenticated
+ testfileexists 'cool_1.0_i386.deb'
+ else
+ testfailure aptget download cool
+ testfilemissing 'cool_1.0_i386.deb'
+
+ testsuccess aptget download cool --allow-unauthenticated
+ testfileexists 'cool_1.0_i386.deb'
+ fi
+
+ mv aptarchive/pool/cool_1.0_i386.deb aptarchive/pool/cool_1.0_i386.deb.bak
+ echo 'this is not a good package' > aptarchive/pool/cool_1.0_i386.deb
+ testfailure aptget download cool
+ testfilemissing cool_1.0_i386.deb
+
+ testfailure aptget download cool --allow-unauthenticated # unauthenticated doesn't mean unchecked
+ testfilemissing cool_1.0_i386.deb
+
+ rm -f aptarchive/pool/cool_1.0_i386.deb
+ mv aptarchive/pool/cool_1.0_i386.deb.bak aptarchive/pool/cool_1.0_i386.deb
+ testsuccess aptget download cool --allow-unauthenticated
+ testfileexists 'cool_1.0_i386.deb'
+}
+
+testrun 'trusted'
+
+find aptarchive/ \( -name 'Release.gpg' -o -name 'InRelease' \) -delete
+testrun 'untrusted'
+
+changetowebserver
+testrun 'untrusted'
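In short, the testcase above asserts the following behaviour of --allow-unauthenticated (illustrative recap of the calls made inside testrun):

    aptget download cool                           # fails once Release.gpg/InRelease are gone
    aptget download cool --allow-unauthenticated   # succeeds, but a tampered .deb still fails the hashsum check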
diff --git a/test/integration/test-bug-633350-do-not-kill-last-char-in-Release b/test/integration/test-bug-633350-do-not-kill-last-char-in-Release
index 2aae7cfcc..988f8c9d0 100755
--- a/test/integration/test-bug-633350-do-not-kill-last-char-in-Release
+++ b/test/integration/test-bug-633350-do-not-kill-last-char-in-Release
@@ -8,7 +8,7 @@ configarchitecture 'amd64'
insertpackage 'unstable' 'cool' 'amd64' '1.0'
-setupaptarchive 2> /dev/null
+setupaptarchive --no-update
echo 'NotAutomatic: yes' >> aptarchive/dists/unstable/Release
diff --git a/test/integration/test-bug-679371-apt-get-autoclean-multiarch b/test/integration/test-bug-679371-apt-get-autoclean-multiarch
index b62d437aa..3de7d69f9 100755
--- a/test/integration/test-bug-679371-apt-get-autoclean-multiarch
+++ b/test/integration/test-bug-679371-apt-get-autoclean-multiarch
@@ -17,7 +17,10 @@ changetowebserver
testsuccess aptget update
testsuccess aptget install pkgall pkgnative pkgforeign -y
-testdpkginstalled pkgall pkgnative pkgforeign
+# if we work with an old dpkg, pkgforeign will be listed differently,
+# so test with aptcache for install status instead
+testdpkginstalled pkgall pkgnative
+testsuccess aptcache show pkgforeign/installed
testequal 'Reading package lists...
Building dependency tree...
diff --git a/test/integration/test-bug-686346-package-missing-architecture b/test/integration/test-bug-686346-package-missing-architecture
index 3b02811ca..dc51861ab 100755
--- a/test/integration/test-bug-686346-package-missing-architecture
+++ b/test/integration/test-bug-686346-package-missing-architecture
@@ -73,7 +73,7 @@ insertinstalledpackage 'pkgb' 'none' '1'
insertinstalledpackage 'pkgf' 'none' '1' 'Conflicts: pkgb'
insertinstalledpackage 'pkgg' 'amd64' '1' 'Conflicts: pkgb'
insertinstalledpackage 'pkgb' 'amd64' '2'
-testequal "Reading package lists...
+testequalor2 "Reading package lists...
Building dependency tree...
Reading state information...
You might want to run 'apt-get -f install' to correct these.
@@ -84,6 +84,17 @@ The following packages have unmet dependencies:
Conflicts: pkgb but 2 is installed
pkgg : Conflicts: pkgb but 2 is installed
Conflicts: pkgb:none but 1 is installed
+E: Unmet dependencies. Try using -f." "Reading package lists...
+Building dependency tree...
+Reading state information...
+You might want to run 'apt-get -f install' to correct these.
+The following packages have unmet dependencies:
+ pkgb : Conflicts: pkgb:none but 1 is installed
+ pkgb:none : Conflicts: pkgb but 2 is installed
+ pkgf:none : Conflicts: pkgb but 2 is installed
+ Conflicts: pkgb:none but 1 is installed
+ pkgg : Conflicts: pkgb but 2 is installed
+ Conflicts: pkgb:none but 1 is installed
E: Unmet dependencies. Try using -f." aptget check
# check that dependencies are generated for none-packages
diff --git a/test/integration/test-bug-689582-100-char-long-path-names b/test/integration/test-bug-689582-100-char-long-path-names
new file mode 100755
index 000000000..1b4b172b6
--- /dev/null
+++ b/test/integration/test-bug-689582-100-char-long-path-names
@@ -0,0 +1,35 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+mkdir aptarchive/testpkg
+cd aptarchive/testpkg
+
+for i in $(seq 98 102); do
+ touch "$(printf "%0${i}d" "$i")"
+done
+tar zcf data.tar.gz 00*
+
+echo 'Package: testpkg
+Version: 1-1
+Architecture: all
+Maintainer: Joe Sixpack <joe@example.org>
+Description: Package for test
+Section: debug
+Priority: extra' > control
+tar zcf control.tar.gz control
+
+echo '2.0' > debian-binary
+ar cr ../testpkg.deb debian-binary control.tar.gz data.tar.gz
+
+cd - > /dev/null
+
+testequal '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000102 testpkg
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000101 testpkg
+0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100 testpkg
+000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000099 testpkg
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000098 testpkg' aptftparchive contents aptarchive/
diff --git a/test/integration/test-bug-722207-print-uris-even-if-very-quiet b/test/integration/test-bug-722207-print-uris-even-if-very-quiet
new file mode 100755
index 000000000..f2d95da19
--- /dev/null
+++ b/test/integration/test-bug-722207-print-uris-even-if-very-quiet
@@ -0,0 +1,30 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+
+setupenvironment
+configarchitecture 'amd64'
+
+insertinstalledpackage 'apt' 'all' '1'
+insertpackage 'unstable' 'apt' 'all' '2'
+insertsource 'unstable' 'apt' 'all' '2'
+insertsource 'unstable' 'apt2' 'all' '1'
+
+setupaptarchive
+
+APTARCHIVE=$(readlink -f ./aptarchive)
+
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget upgrade -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget dist-upgrade -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget install apt -qq --print-uris
+testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget download apt -qq --print-uris
+testequal "'file://${APTARCHIVE}/apt_2.dsc' apt_2.dsc 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e
+'file://${APTARCHIVE}/apt_2.tar.gz' apt_2.tar.gz 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e" aptget source apt -qq --print-uris
+testequal "'http://packages.debian.org/changelogs/pool/main/apt/apt_2/changelog'" aptget changelog apt -qq --print-uris
+
+testequal "'file://${APTARCHIVE}/apt_2.dsc' apt_2.dsc 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e
+'file://${APTARCHIVE}/apt_2.tar.gz' apt_2.tar.gz 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e
+'file://${APTARCHIVE}/apt2_1.dsc' apt2_1.dsc 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e
+'file://${APTARCHIVE}/apt2_1.tar.gz' apt2_1.tar.gz 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e" aptget source apt apt2 -qq --print-uris
diff --git a/test/integration/test-bug-723586-any-stripped-in-single-arch b/test/integration/test-bug-723586-any-stripped-in-single-arch
new file mode 100755
index 000000000..392b88e9f
--- /dev/null
+++ b/test/integration/test-bug-723586-any-stripped-in-single-arch
@@ -0,0 +1,54 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+insertinstalledpackage 'python3' 'all' '3.2.3-6'
+
+insertpackage 'unstable' 'python3' 'amd64' '3.3.2-16' 'Multi-Arch: allowed'
+insertpackage 'stable' 'python3-gnupg' 'all' '0.3.5-2' 'Depends: python3:any (>= 3.2.3-3~)'
+
+insertpackage 'unstable' 'python-mips' 'amd64' '3' 'Depends: python3:mips'
+
+setupaptarchive
+
+INSTALLLOG='Reading package lists...
+Building dependency tree...
+The following extra packages will be installed:
+ python3
+The following NEW packages will be installed:
+ python3-gnupg
+The following packages will be upgraded:
+ python3
+1 upgraded, 1 newly installed, 0 to remove and 0 not upgraded.
+Inst python3 [3.2.3-6] (3.3.2-16 unstable [amd64])
+Inst python3-gnupg (0.3.5-2 stable [all])
+Conf python3 (3.3.2-16 unstable [amd64])
+Conf python3-gnupg (0.3.5-2 stable [all])'
+
+FAILLOG='Reading package lists...
+Building dependency tree...
+Some packages could not be installed. This may mean that you have
+requested an impossible situation or if you are using the unstable
+distribution that some required packages have not yet been created
+or been moved out of Incoming.
+The following information may help to resolve the situation:
+
+The following packages have unmet dependencies:
+ python-mips : Depends: python3:mips but it is not installable
+E: Unable to correct problems, you have held broken packages.'
+
+testequal "$INSTALLLOG" aptget install python3-gnupg -s
+aptcache showpkg python3 > showpkg.log
+testequal "$FAILLOG" aptget install python-mips -s
+
+# same test, but this time in a multi-arch environment
+configarchitecture 'amd64' 'armhf'
+rm rootdir/var/cache/apt/*.bin
+
+testequal "$INSTALLLOG" aptget install python3-gnupg -s
+testequal "$(sed 's#3.3.2-16 - python3#3.3.2-16 - python3:any:armhf python3#' showpkg.log)" aptcache showpkg python3
+testequal "$FAILLOG" aptget install python-mips -s
diff --git a/test/integration/test-bug-723705-tagfile-truncates-fields b/test/integration/test-bug-723705-tagfile-truncates-fields
new file mode 100755
index 000000000..3180e7fc9
--- /dev/null
+++ b/test/integration/test-bug-723705-tagfile-truncates-fields
@@ -0,0 +1,33 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+setupaptarchive
+
+aptget install --print-uris -y cdebconf-newt-terminal cdebconf-gtk-terminal 2>&1 | sed 's#file:///tmp/tmp.[^/]\+#file:///tmp#g' > filename.log
+
+testfileequal filename.log "Reading package lists...
+Building dependency tree...
+The following extra packages will be installed:
+ cdebconf-gtk-udeb cdebconf-newt-udeb cdebconf-udeb libc6-udeb
+ libglib2.0-udeb libgtk2.0-0-udeb libvte9-udeb
+The following NEW packages will be installed:
+ cdebconf-gtk-terminal cdebconf-gtk-udeb cdebconf-newt-terminal
+ cdebconf-newt-udeb cdebconf-udeb libc6-udeb libglib2.0-udeb libgtk2.0-0-udeb
+ libvte9-udeb
+0 upgraded, 9 newly installed, 0 to remove and 0 not upgraded.
+Need to get 0 B/4774 kB of archives.
+After this operation, 19.8 MB of additional disk space will be used.
+'file:///tmp/aptarchive/pool/main/c/cdebconf/cdebconf-udeb_0.185_amd64.udeb' cdebconf-udeb_0.185_amd64.udeb 77376 MD5Sum:e3883706fdbf54c2e5ea959c92b2d37f
+'file:///tmp/aptarchive/pool/main/c/cdebconf/cdebconf-gtk-udeb_0.185_amd64.udeb' cdebconf-gtk-udeb_0.185_amd64.udeb 27278 MD5Sum:a1bbbc1d4fb8e0615b5621abac021924
+'file:///tmp/aptarchive/pool/main/c/cdebconf/cdebconf-newt-udeb_0.185_amd64.udeb' cdebconf-newt-udeb_0.185_amd64.udeb 19192 MD5Sum:de27807f56dae2f2403b3322d5fe6bd2
+'file:///tmp/aptarchive/pool/main/g/glib2.0/libglib2.0-udeb_2.36.4-1_amd64.udeb' libglib2.0-udeb_2.36.4-1_amd64.udeb 1714604 MD5Sum:72da029f1bbb36057d874f1f82a5d00a
+'file:///tmp/aptarchive/pool/main/e/eglibc/libc6-udeb_2.17-92+b1_amd64.udeb' libc6-udeb_2.17-92+b1_amd64.udeb 1056000 MD5Sum:7fd7032eeeecf7f76eff79a0543fbd72
+'file:///tmp/aptarchive/pool/main/g/gtk+2.0/libgtk2.0-0-udeb_2.24.20-1_amd64.udeb' libgtk2.0-0-udeb_2.24.20-1_amd64.udeb 1643046 MD5Sum:25513478eb2e02e5766c0eea0b411ca9
+'file:///tmp/aptarchive/pool/main/v/vte/libvte9-udeb_0.28.2-5_amd64.udeb' libvte9-udeb_1%3a0.28.2-5_amd64.udeb 216968 MD5Sum:7da7201effaf5ced19abd9d0b45aa2c6
+'file:///tmp/aptarchive/pool/main/c/cdebconf-terminal/cdebconf-gtk-terminal_0.22_amd64.udeb' cdebconf-gtk-terminal_0.22_amd64.udeb 14734 MD5Sum:f9c3a7354560cb88e0396e2b7ba54363
+'file:///tmp/aptarchive/pool/main/c/cdebconf-terminal/cdebconf-newt-terminal_0.22_amd64.udeb' cdebconf-newt-terminal_0.22_amd64.udeb 4538 MD5Sum:20db6152fce5081fcbf49c7c08f21246"
diff --git a/test/integration/test-hashsum-verification b/test/integration/test-hashsum-verification
index 99ea8bffa..e77efb46e 100755
--- a/test/integration/test-hashsum-verification
+++ b/test/integration/test-hashsum-verification
@@ -21,9 +21,7 @@ prepare() {
done
cp $1 aptarchive/Packages
find aptarchive -name 'Release' -delete
- cat aptarchive/Packages | gzip > aptarchive/Packages.gz
- cat aptarchive/Packages | bzip2 > aptarchive/Packages.bz2
- cat aptarchive/Packages | xz --format=lzma > aptarchive/Packages.lzma
+ compressfile aptarchive/Packages
# create Release file with incorrect checksums
cat > aptarchive/Release <<EOF
Date: Fri, 05 Aug 2011 09:22:08 UTC
diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support
new file mode 100755
index 000000000..8d1c51ae0
--- /dev/null
+++ b/test/integration/test-partial-file-support
@@ -0,0 +1,107 @@
+#!/bin/sh
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+changetowebserver
+
+copysource() {
+ dd if="$1" bs=1 count="$2" of="$3" 2>/dev/null
+ touch -d "$(stat --format '%y' "${TESTFILE}")" "$3"
+}
+
+testdownloadfile() {
+ local DOWNLOG='download-testfile.log'
+ rm -f "$DOWNLOG"
+ msgtest "Testing download of file $2 with" "$1"
+ if ! downloadfile "$2" "$3" > "$DOWNLOG"; then
+ cat "$DOWNLOG"
+ msgfail
+ else
+ msgpass
+ fi
+ cat "$DOWNLOG" | while read field hash; do
+ local EXPECTED
+ case "$field" in
+ 'MD5Sum-Hash:') EXPECTED="$(md5sum "$TESTFILE" | cut -d' ' -f 1)";;
+ 'SHA1-Hash:') EXPECTED="$(sha1sum "$TESTFILE" | cut -d' ' -f 1)";;
+ 'SHA256-Hash:') EXPECTED="$(sha256sum "$TESTFILE" | cut -d' ' -f 1)";;
+ 'SHA512-Hash:') EXPECTED="$(sha512sum "$TESTFILE" | cut -d' ' -f 1)";;
+ *) continue;;
+ esac
+ if [ "$4" = '=' ]; then
+ msgtest 'Test downloaded file for correct' "$field"
+ else
+ msgtest 'Test downloaded file does not match in' "$field"
+ fi
+ if [ "$EXPECTED" "$4" "$hash" ]; then
+ msgpass
+ else
+ cat "$DOWNLOG"
+ msgfail "expected: $EXPECTED ; got: $hash"
+ fi
+ done
+}
+
+testwebserverlaststatuscode() {
+ STATUS="$(mktemp)"
+ addtrap "rm $STATUS;"
+ msgtest 'Test last status code from the webserver was' "$1"
+ downloadfile "http://localhost:8080/_config/find/aptwebserver::last-status-code" "$STATUS" >/dev/null
+ if [ "$(cat "$STATUS")" = "$1" ]; then
+ msgpass
+ else
+ cat download-testfile.log
+ msgfail "Status was $(cat "$STATUS")"
+ fi
+}
+
+
+TESTFILE='aptarchive/testfile'
+cp -a ${TESTDIR}/framework $TESTFILE
+
+testrun() {
+ downloadfile "$1/_config/set/aptwebserver::support::range/true" '/dev/null' >/dev/null
+ testwebserverlaststatuscode '200'
+
+ copysource $TESTFILE 0 ./testfile
+ testdownloadfile 'no data' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '200'
+
+ copysource $TESTFILE 20 ./testfile
+ testdownloadfile 'valid partial data' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '206'
+
+ copysource /dev/zero 20 ./testfile
+ testdownloadfile 'invalid partial data' "${1}/testfile" './testfile' '!='
+ testwebserverlaststatuscode '206'
+
+ copysource $TESTFILE 1M ./testfile
+ testdownloadfile 'completely downloaded file' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '416'
+
+ copysource /dev/zero 1M ./testfile
+ testdownloadfile 'too-big partial file' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '200'
+
+ copysource /dev/zero 20 ./testfile
+ touch ./testfile
+ testdownloadfile 'old data' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '200'
+
+ downloadfile "$1/_config/set/aptwebserver::support::range/false" '/dev/null' >/dev/null
+ testwebserverlaststatuscode '200'
+
+ copysource $TESTFILE 20 ./testfile
+ testdownloadfile 'no server support' "${1}/testfile" './testfile' '='
+ testwebserverlaststatuscode '200'
+}
+
+testrun 'http://localhost:8080'
+
+changetohttpswebserver
+
+testrun 'https://localhost:4433'
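On the wire, the partial-file support exercised above amounts to a ranged request from the http/https method; a rough sketch of the cases, based on the aptwebserver changes later in this diff (header values are illustrative):

    # valid partial data:     Range: bytes=20- plus a matching If-Range date
    #                         -> 206 with 'Content-Range: bytes 20-<size-1>/<size>'
    # already complete file:  requested start equals the file size
    #                         -> 416 with 'Content-Range: bytes */<size>'
    # range support disabled or stale If-Range
    #                         -> plain 200 with the whole file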
diff --git a/test/integration/test-pdiff-usage b/test/integration/test-pdiff-usage
index 994489ee5..ac0563b7f 100755
--- a/test/integration/test-pdiff-usage
+++ b/test/integration/test-pdiff-usage
@@ -19,9 +19,7 @@ testequal "$(cat ${PKGFILE})
" aptcache show apt oldstuff
cp ${PKGFILE}-new aptarchive/Packages
-cat aptarchive/Packages | gzip > aptarchive/Packages.gz
-cat aptarchive/Packages | bzip2 > aptarchive/Packages.bz2
-cat aptarchive/Packages | xz --format=lzma > aptarchive/Packages.lzma
+compressfile 'aptarchive/Packages'
rm -rf aptarchive/Packages.diff
mkdir -p aptarchive/Packages.diff
PATCHFILE="aptarchive/Packages.diff/$(date +%Y-%m-%d-%H%M.%S)"
diff --git a/test/integration/test-releasefile-verification b/test/integration/test-releasefile-verification
index daba3919b..9d34a521a 100755
--- a/test/integration/test-releasefile-verification
+++ b/test/integration/test-releasefile-verification
@@ -11,20 +11,24 @@ buildaptarchive
setupflataptarchive
changetowebserver
+downloadfile "http://localhost:8080/_config/set/aptwebserver::support::range/false" '/dev/null' >/dev/null
+
prepare() {
local DATE="${2:-now}"
- if [ "$DATE" = 'now' -a "$1" = "${PKGFILE}-new" ]; then
- DATE='now + 6 days'
+ if [ "$DATE" = 'now' ]; then
+ if [ "$1" = "${PKGFILE}-new" ]; then
+ DATE='now - 1 day'
+ else
+ DATE='now - 7 day'
+ fi
fi
for release in $(find rootdir/var/lib/apt/lists 2> /dev/null); do
- touch -d 'now - 6 hours' $release
+ touch -d 'now - 1 year' $release
done
aptget clean
cp $1 aptarchive/Packages
find aptarchive -name 'Release' -delete
- cat aptarchive/Packages | gzip > aptarchive/Packages.gz
- cat aptarchive/Packages | bzip2 > aptarchive/Packages.bz2
- cat aptarchive/Packages | xz --format=lzma > aptarchive/Packages.lzma
+ compressfile 'aptarchive/Packages' "$DATE"
generatereleasefiles "$DATE"
}
@@ -87,13 +91,34 @@ touch aptarchive/apt.deb
PKGFILE="${TESTDIR}/$(echo "$(basename $0)" | sed 's#^test-#Packages-#')"
+updatesuccess() {
+ local LOG='update.log'
+ if aptget update >$LOG 2>&1 && ! grep -q -E '^(W|E): ' $LOG; then
+ msgpass
+ else
+ cat $LOG
+ msgfail
+ fi
+}
+
+updatefailure() {
+ local LOG='update.log'
+ aptget update >$LOG 2>&1 || true
+ if grep -q -E "$1" $LOG; then
+ msgpass
+ else
+ cat $LOG
+ msgfail
+ fi
+}
+
runtest() {
prepare ${PKGFILE}
rm -rf rootdir/var/lib/apt/lists
signreleasefiles 'Joe Sixpack'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Cold archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE})
" aptcache show apt
installaptold
@@ -102,7 +127,7 @@ runtest() {
signreleasefiles 'Joe Sixpack'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Good warm archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
installaptnew
@@ -113,7 +138,7 @@ runtest() {
signreleasefiles 'Rex Expired'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Cold archive signed by' 'Rex Expired'
- aptget update 2>&1 | grep -E '^W: .* KEYEXPIRED' > /dev/null && msgpass || msgfail
+ updatefailure '^W: .* KEYEXPIRED'
testequal "$(cat ${PKGFILE})
" aptcache show apt
failaptold
@@ -124,7 +149,7 @@ runtest() {
signreleasefiles 'Marvin Paranoid'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Cold archive signed by' 'Marvin Paranoid'
- aptget update 2>&1 | grep -E '^W: .* NO_PUBKEY' > /dev/null && msgpass || msgfail
+ updatefailure '^W: .* NO_PUBKEY'
testequal "$(cat ${PKGFILE})
" aptcache show apt
failaptold
@@ -138,7 +163,7 @@ runtest() {
signreleasefiles 'Joe Sixpack'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Bad warm archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
installaptnew
@@ -149,7 +174,7 @@ runtest() {
signreleasefiles 'Joe Sixpack'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Cold archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE})
" aptcache show apt
installaptold
@@ -158,7 +183,7 @@ runtest() {
signreleasefiles 'Marvin Paranoid'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Good warm archive signed by' 'Marvin Paranoid'
- aptget update 2>&1 | grep -E '^W: .* NO_PUBKEY' > /dev/null && msgpass || msgfail
+ updatefailure '^W: .* NO_PUBKEY'
testequal "$(cat ${PKGFILE})
" aptcache show apt
installaptold
@@ -168,7 +193,7 @@ runtest() {
signreleasefiles 'Rex Expired'
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Good warm archive signed by' 'Rex Expired'
- aptget update 2>&1 | grep -E '^W: .* KEYEXPIRED' > /dev/null && msgpass || msgfail
+ updatefailure '^W: .* KEYEXPIRED'
testequal "$(cat ${PKGFILE})
" aptcache show apt
installaptold
@@ -178,7 +203,7 @@ runtest() {
signreleasefiles
find aptarchive/ -name "$DELETEFILE" -delete
msgtest 'Good warm archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
installaptnew
@@ -189,7 +214,7 @@ runtest2() {
rm -rf rootdir/var/lib/apt/lists
signreleasefiles 'Joe Sixpack'
msgtest 'Cold archive signed by' 'Joe Sixpack'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
# New .deb but now an unsigned archive. For example MITM to circumvent
# package verification.
@@ -197,7 +222,7 @@ runtest2() {
find aptarchive/ -name InRelease -delete
find aptarchive/ -name Release.gpg -delete
msgtest 'Warm archive signed by' 'nobody'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
failaptnew
@@ -205,7 +230,7 @@ runtest2() {
# Unsigned archive from the beginning must also be detected.
rm -rf rootdir/var/lib/apt/lists
msgtest 'Cold archive signed by' 'nobody'
- aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass
+ updatesuccess
testequal "$(cat ${PKGFILE}-new)
" aptcache show apt
failaptnew
diff --git a/test/integration/test-ubuntu-bug-859188-multiarch-reinstall b/test/integration/test-ubuntu-bug-859188-multiarch-reinstall
index 0fdf97485..be86f2e91 100755
--- a/test/integration/test-ubuntu-bug-859188-multiarch-reinstall
+++ b/test/integration/test-ubuntu-bug-859188-multiarch-reinstall
@@ -8,14 +8,13 @@ configarchitecture 'amd64' 'i386' 'armel'
buildsimplenativepackage 'libsame' 'amd64,i386,armel' '1.0' 'unstable' 'Multi-Arch: same'
-# FIXME: hack around dpkg's current inability to handle multiarch, a clean install would be better…
-insertinstalledpackage 'libsame' 'amd64,i386' '1.0' 'Multi-Arch: same'
-sed -e 's#/installed#/unstable#' -e 's#Installed-Size: 42#Installed-Size: 1#' -i rootdir/var/lib/dpkg/status
-
setupaptarchive
+testsuccess aptget install libsame libsame:i386
+
REINSTALL='Reading package lists...
Building dependency tree...
+Reading state information...
0 upgraded, 0 newly installed, 2 reinstalled, 0 to remove and 0 not upgraded.
Inst libsame [1.0] (1.0 unstable [amd64])
Inst libsame:i386 [1.0] (1.0 unstable [i386])
diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc
index fde95fec9..4dae342dd 100644
--- a/test/interactive-helper/aptwebserver.cc
+++ b/test/interactive-helper/aptwebserver.cc
@@ -100,8 +100,13 @@ bool sendHead(int const client, int const httpcode, std::list<std::string> &head
std::string response("HTTP/1.1 ");
response.append(httpcodeToStr(httpcode));
headers.push_front(response);
+ _config->Set("APTWebserver::Last-Status-Code", httpcode);
- headers.push_back("Server: APT webserver");
+ std::stringstream buffer;
+ _config->Dump(buffer, "aptwebserver::response-header", "%t: %v%n", false);
+ std::vector<std::string> addheaders = VectorizeString(buffer.str(), '\n');
+ for (std::vector<std::string>::const_iterator h = addheaders.begin(); h != addheaders.end(); ++h)
+ headers.push_back(*h);
std::string date("Date: ");
date.append(TimeRFC1123(time(NULL)));
@@ -156,15 +161,29 @@ void sendError(int const client, int const httpcode, std::string const &request,
std::string response("<html><head><title>");
response.append(httpcodeToStr(httpcode)).append("</title></head>");
response.append("<body><h1>").append(httpcodeToStr(httpcode)).append("</h1>");
- if (error.empty() == false)
- response.append("<p><em>Error</em>: ").append(error).append("</p>");
- response.append("This error is a result of the request: <pre>");
+ if (httpcode != 200)
+ {
+ if (error.empty() == false)
+ response.append("<p><em>Error</em>: ").append(error).append("</p>");
+ response.append("This error is a result of the request: <pre>");
+ }
+ else
+ {
+ if (error.empty() == false)
+ response.append("<p><em>Success</em>: ").append(error).append("</p>");
+ response.append("The successfully executed operation was requested by: <pre>");
+ }
response.append(request).append("</pre></body></html>");
addDataHeaders(headers, response);
sendHead(client, httpcode, headers);
if (content == true)
sendData(client, response);
}
+void sendSuccess(int const client, std::string const &request,
+ bool content, std::string const &error = "")
+{
+ sendError(client, 200, request, content, error);
+}
/*}}}*/
void sendRedirect(int const client, int const httpcode, std::string const &uri,/*{{{*/
std::string const &request, bool content)
@@ -365,6 +384,49 @@ bool parseFirstLine(int const client, std::string const &request, /*{{{*/
return true;
}
/*}}}*/
+bool handleOnTheFlyReconfiguration(int const client, std::string const &request, std::vector<std::string> const &parts)/*{{{*/
+{
+ size_t const pcount = parts.size();
+ if (pcount == 4 && parts[1] == "set")
+ {
+ _config->Set(parts[2], parts[3]);
+ sendSuccess(client, request, true, "Option '" + parts[2] + "' was set to '" + parts[3] + "'!");
+ return true;
+ }
+ else if (pcount == 4 && parts[1] == "find")
+ {
+ std::list<std::string> headers;
+ std::string response = _config->Find(parts[2], parts[3]);
+ addDataHeaders(headers, response);
+ sendHead(client, 200, headers);
+ sendData(client, response);
+ return true;
+ }
+ else if (pcount == 3 && parts[1] == "find")
+ {
+ std::list<std::string> headers;
+ if (_config->Exists(parts[2]) == true)
+ {
+ std::string response = _config->Find(parts[2]);
+ addDataHeaders(headers, response);
+ sendHead(client, 200, headers);
+ sendData(client, response);
+ return true;
+ }
+ sendError(client, 404, request, "Requested Configuration option doesn't exist.");
+ return false;
+ }
+ else if (pcount == 3 && parts[1] == "clear")
+ {
+ _config->Clear(parts[2]);
+ sendSuccess(client, request, true, "Option '" + parts[2] + "' was cleared.");
+ return true;
+ }
+
+ sendError(client, 400, request, true, "Unknown on-the-fly configuration request");
+ return false;
+}
+ /*}}}*/
int main(int const argc, const char * argv[])
{
CommandLine::Args Args[] = {
@@ -455,6 +517,9 @@ int main(int const argc, const char * argv[])
listen(sock, 1);
/*}}}*/
+ _config->CndSet("aptwebserver::response-header::Server", "APT webserver");
+ _config->CndSet("aptwebserver::response-header::Accept-Ranges", "bytes");
+
std::vector<std::string> messages;
int client;
while ((client = accept(sock, NULL, NULL)) != -1)
@@ -475,6 +540,17 @@ int main(int const argc, const char * argv[])
if (parseFirstLine(client, *m, filename, sendContent, closeConnection) == false)
continue;
+ // special webserver command request
+ if (filename.length() > 1 && filename[0] == '_')
+ {
+ std::vector<std::string> parts = VectorizeString(filename, '/');
+ if (parts[0] == "_config")
+ {
+ handleOnTheFlyReconfiguration(client, *m, parts);
+ continue;
+ }
+ }
+
// string replacements in the requested filename
::Configuration::Item const *Replaces = _config->Tree("aptwebserver::redirect::replace");
if (Replaces != NULL)
@@ -532,6 +608,60 @@ int main(int const argc, const char * argv[])
}
}
+ if (_config->FindB("aptwebserver::support::range", true) == true)
+ condition = LookupTag(*m, "Range", "");
+ else
+ condition.clear();
+ if (condition.empty() == false && strncmp(condition.c_str(), "bytes=", 6) == 0)
+ {
+ time_t cache;
+ std::string ifrange;
+ if (_config->FindB("aptwebserver::support::if-range", true) == true)
+ ifrange = LookupTag(*m, "If-Range", "");
+ bool validrange = (ifrange.empty() == true ||
+ (RFC1123StrToTime(ifrange.c_str(), cache) == true &&
+ cache <= data.ModificationTime()));
+
+ // FIXME: support multiple byte-ranges (APT clients do not do this)
+ if (condition.find(',') == std::string::npos)
+ {
+ size_t start = 6;
+ unsigned long long filestart = strtoull(condition.c_str() + start, NULL, 10);
+ // FIXME: no support for last-byte-pos being not the end of the file (APT clients do not do this)
+ size_t dash = condition.find('-') + 1;
+ unsigned long long fileend = strtoull(condition.c_str() + dash, NULL, 10);
+ unsigned long long filesize = data.FileSize();
+ if ((fileend == 0 || (fileend == filesize && fileend >= filestart)) &&
+ validrange == true)
+ {
+ if (filesize > filestart)
+ {
+ data.Skip(filestart);
+ std::ostringstream contentlength;
+ contentlength << "Content-Length: " << (filesize - filestart);
+ headers.push_back(contentlength.str());
+ std::ostringstream contentrange;
+ contentrange << "Content-Range: bytes " << filestart << "-"
+ << filesize - 1 << "/" << filesize;
+ headers.push_back(contentrange.str());
+ sendHead(client, 206, headers);
+ if (sendContent == true)
+ sendFile(client, data);
+ continue;
+ }
+ else
+ {
+ headers.push_back("Content-Length: 0");
+ std::ostringstream contentrange;
+ contentrange << "Content-Range: bytes */" << filesize;
+ headers.push_back(contentrange.str());
+ sendHead(client, 416, headers);
+ continue;
+ }
+ }
+ }
+ }
+
addFileHeaders(headers, data);
sendHead(client, 200, headers);
if (sendContent == true)
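The new _config namespace lets testcases reconfigure the running webserver without restarting it; the integration tests above drive it through downloadfile, e.g. (modelled on the calls appearing in this diff, with $STATUS being a temporary file as in testwebserverlaststatuscode):

    downloadfile 'http://localhost:8080/_config/set/aptwebserver::support::range/false' '/dev/null' >/dev/null
    downloadfile 'http://localhost:8080/_config/find/aptwebserver::last-status-code' "$STATUS" >/dev/null
    # '/_config/clear/<option>' resets an option again; malformed requests are answered with 400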
diff --git a/test/libapt/hashsums_test.cc b/test/libapt/hashsums_test.cc
index e2d0aec5b..3da89052b 100644
--- a/test/libapt/hashsums_test.cc
+++ b/test/libapt/hashsums_test.cc
@@ -3,6 +3,7 @@
#include <apt-pkg/sha2.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/hashes.h>
+#include <apt-pkg/fileutl.h>
#include <iostream>
#include <stdio.h>
@@ -108,55 +109,54 @@ int main(int argc, char** argv)
Test<SHA512Summation>("The quick brown fox jumps over the lazy dog.", "91ea1245f20d46ae9a037a989f54f1f790f0a47607eeb8a14d12890cea77a1bb"
"c6c7ed9cf205e67b7f2b8fd4c7dfd3a7a8617e45f3c463d481c7e586c39ac1ed");
- FILE* fd = fopen(argv[1], "r");
- if (fd == NULL) {
+ FileFd fd(argv[1], FileFd::ReadOnly);
+ if (fd.IsOpen() == false) {
std::cerr << "Can't open file for 1. testing: " << argv[1] << std::endl;
return 1;
}
{
Hashes hashes;
- hashes.AddFD(fileno(fd));
+ hashes.AddFD(fd.Fd());
equals(argv[2], hashes.MD5.Result().Value());
equals(argv[3], hashes.SHA1.Result().Value());
equals(argv[4], hashes.SHA256.Result().Value());
equals(argv[5], hashes.SHA512.Result().Value());
}
- fseek(fd, 0L, SEEK_END);
- unsigned long sz = ftell(fd);
- fseek(fd, 0L, SEEK_SET);
+ unsigned long sz = fd.FileSize();
+ fd.Seek(0);
{
Hashes hashes;
- hashes.AddFD(fileno(fd), sz);
+ hashes.AddFD(fd.Fd(), sz);
equals(argv[2], hashes.MD5.Result().Value());
equals(argv[3], hashes.SHA1.Result().Value());
equals(argv[4], hashes.SHA256.Result().Value());
equals(argv[5], hashes.SHA512.Result().Value());
}
- fseek(fd, 0L, SEEK_SET);
+ fd.Seek(0);
{
MD5Summation md5;
- md5.AddFD(fileno(fd));
+ md5.AddFD(fd.Fd());
equals(argv[2], md5.Result().Value());
}
- fseek(fd, 0L, SEEK_SET);
+ fd.Seek(0);
{
SHA1Summation sha1;
- sha1.AddFD(fileno(fd));
+ sha1.AddFD(fd.Fd());
equals(argv[3], sha1.Result().Value());
}
- fseek(fd, 0L, SEEK_SET);
+ fd.Seek(0);
{
SHA256Summation sha2;
- sha2.AddFD(fileno(fd));
+ sha2.AddFD(fd.Fd());
equals(argv[4], sha2.Result().Value());
}
- fseek(fd, 0L, SEEK_SET);
+ fd.Seek(0);
{
SHA512Summation sha2;
- sha2.AddFD(fileno(fd));
+ sha2.AddFD(fd.Fd());
equals(argv[5], sha2.Result().Value());
}
- fclose(fd);
+ fd.Close();
// test HashString code
{
diff --git a/test/libapt/strutil_test.cc b/test/libapt/strutil_test.cc
index bfe0d7222..110a20d27 100644
--- a/test/libapt/strutil_test.cc
+++ b/test/libapt/strutil_test.cc
@@ -42,5 +42,32 @@ int main(int argc,char *argv[])
output = DeEscapeString(input);
equals(output, expected);
+ // Split
+ input = "status: libnet1:amd64: unpacked";
+ vector<std::string> result = StringSplit(input, ": ");
+ equals(result[0], "status");
+ equals(result[1], "libnet1:amd64");
+ equals(result[2], "unpacked");
+ equals(result.size(), 3);
+
+ input = "status: libnet1:amd64: unpacked";
+ result = StringSplit(input, "xxx");
+ equals(result[0], input);
+ equals(result.size(), 1);
+
+ input = "status: libnet1:amd64: unpacked";
+ result = StringSplit(input, "");
+ equals(result.size(), 0);
+
+ input = "x:y:z";
+ result = StringSplit(input, ":", 2);
+ equals(result.size(), 2);
+ equals(result[0], "x");
+ equals(result[1], "y:z");
+
+ input = "abc";
+ result = StringSplit(input, "");
+ equals(result.size(), 0);
+
return 0;
}