author     Michael Vogt <mvo@ubuntu.com>    2015-05-22 17:01:03 +0200
committer  Michael Vogt <mvo@ubuntu.com>    2015-05-22 17:01:03 +0200
commit     4fc6b7570c3e97b65c118b58cdf6729fa94c9b03 (patch)
tree       f952bb728b9c23f1eebc8f1a87f527ed7c9b0b36
parent     6bf93605fdb8e858d3f0a79a124c1d39f760094d (diff)
parent     65759e00eff0513c34f584b99420b72fe0e5073e (diff)
Merge branch 'debian/sid' into debian/experimental
Conflicts:
	apt-pkg/pkgcache.h
	debian/changelog
	methods/https.cc
	methods/server.cc
	test/integration/test-apt-download-progress
-rw-r--r--  apt-pkg/contrib/fileutl.cc | 3
-rw-r--r--  apt-pkg/pkgcache.cc | 14
-rw-r--r--  debian/changelog | 10
-rw-r--r--  methods/http.cc | 2
-rw-r--r--  methods/https.cc | 8
-rw-r--r--  methods/server.cc | 38
-rw-r--r--  methods/server.h | 13
-rwxr-xr-x  test/integration/test-apt-download-progress | 1
-rwxr-xr-x  test/integration/test-bug-782777-single-arch-weirdness | 72
-rwxr-xr-x  test/integration/test-bug-lp1445239-download-loop | 29
-rw-r--r--  test/interactive-helper/aptwebserver.cc | 8
11 files changed, 158 insertions, 40 deletions
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index afc243b7f..8ec868ec0 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -800,8 +800,9 @@ pid_t ExecFork(std::set<int> KeepFDs)
signal(SIGCONT,SIG_DFL);
signal(SIGTSTP,SIG_DFL);
+ long ScOpenMax = sysconf(_SC_OPEN_MAX);
// Close all of our FDs - just in case
- for (int K = 3; K != sysconf(_SC_OPEN_MAX); K++)
+ for (int K = 3; K != ScOpenMax; K++)
{
if(KeepFDs.find(K) == KeepFDs.end())
fcntl(K,F_SETFD,FD_CLOEXEC);
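
The fileutl.cc hunk above hoists the sysconf(_SC_OPEN_MAX) lookup out of the FD-closing loop so the limit is queried once instead of on every iteration. A minimal standalone sketch of the same pattern (illustrative only, with a hypothetical helper name, not APT's actual ExecFork):

#include <fcntl.h>      // fcntl, F_SETFD, FD_CLOEXEC
#include <unistd.h>     // sysconf, _SC_OPEN_MAX
#include <set>

// Hypothetical helper: mark all FDs above stderr close-on-exec,
// except the ones the caller explicitly wants to keep.
static void MarkFDsCloexec(std::set<int> const &KeepFDs)
{
   // query the limit once instead of once per loop iteration
   long const ScOpenMax = sysconf(_SC_OPEN_MAX);
   for (int K = 3; K < ScOpenMax; ++K)
      if (KeepFDs.find(K) == KeepFDs.end())
         fcntl(K, F_SETFD, FD_CLOEXEC);
}

The behaviour is unchanged; the loop simply stops re-evaluating a system call whose result cannot change between iterations.
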
diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc
index b42f7e228..864ae0f60 100644
--- a/apt-pkg/pkgcache.cc
+++ b/apt-pkg/pkgcache.cc
@@ -234,12 +234,7 @@ pkgCache::PkgIterator pkgCache::SingleArchFindPkg(const string &Name)
pkgCache::PkgIterator pkgCache::FindPkg(const string &Name) {
size_t const found = Name.find(':');
if (found == string::npos)
- {
- if (MultiArchCache() == false)
- return SingleArchFindPkg(Name);
- else
- return FindPkg(Name, "native");
- }
+ return FindPkg(Name, "native");
string const Arch = Name.substr(found+1);
/* Beware: This is specialcased to handle pkg:any in dependencies as
these are linked to virtual pkg:any named packages with all archs.
@@ -253,13 +248,6 @@ pkgCache::PkgIterator pkgCache::FindPkg(const string &Name) {
// ---------------------------------------------------------------------
/* Returns 0 on error, pointer to the package otherwise */
pkgCache::PkgIterator pkgCache::FindPkg(const string &Name, string const &Arch) {
- if (MultiArchCache() == false && Arch != "none") {
- if (Arch == "native" || Arch == "all" || Arch == "any" ||
- Arch == NativeArch())
- return SingleArchFindPkg(Name);
- else
- return PkgIterator(*this,0);
- }
/* We make a detour via the GrpIterator here as
on a multi-arch environment a group is easier to
find than a package (less entries in the buckets) */
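
Both pkgcache.cc hunks drop the single-arch shortcut, so a lookup like "abar" or "abar:amd64" always resolves through the same group-based FindPkg(Name, Arch) path regardless of how the cache was built. A rough sketch of the name/arch split this relies on (hypothetical helper, not APT's API):

#include <string>
#include <utility>

// Split a package specifier such as "abar:amd64" into (name, arch);
// a bare "abar" maps to the "native" architecture, mirroring what
// pkgCache::FindPkg(Name) now does unconditionally.
static std::pair<std::string, std::string> SplitPkgSpec(std::string const &Name)
{
   std::string::size_type const found = Name.find(':');
   if (found == std::string::npos)
      return std::make_pair(Name, std::string("native"));
   return std::make_pair(Name.substr(0, found), Name.substr(found + 1));
}

With the special case gone, single-arch and multi-arch caches answer queries like abar:amd64 identically, which is what the new test-bug-782777-single-arch-weirdness test below exercises.
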
diff --git a/debian/changelog b/debian/changelog
index b0d518b17..7414b5c61 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -234,6 +234,16 @@ apt (1.1~exp1) experimental; urgency=low
-- Michael Vogt <mvo@debian.org> Thu, 19 Jun 2014 12:01:48 +0200
+apt (1.0.9.9) unstable; urgency=medium
+
+ [ David Kalnischkies ]
+ * parse specific-arch dependencies correctly on single-arch systems
+ (Closes: 777760)
+ * remove "first package seen is native package" assumption.
+ Thanks to Axel Beckert for testing (Closes: 782777)
+
+ -- David Kalnischkies <david@kalnischkies.de> Tue, 28 Apr 2015 16:11:27 +0200
+
apt (1.0.9.8) unstable; urgency=medium
[ David Kalnischkies ]
diff --git a/methods/http.cc b/methods/http.cc
index af3d5ccb6..ce697a338 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -447,7 +447,7 @@ bool HttpServerState::RunData(FileFd * const File)
else if (JunkSize != 0)
In.Limit(JunkSize);
else
- In.Limit(Size - StartPos);
+ In.Limit(DownloadSize);
// Just transfer the whole block.
do
diff --git a/methods/https.cc b/methods/https.cc
index fa143439a..d2ddf6fcf 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -70,19 +70,19 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
{
Hashes resultHashes(me->Itm->ExpectedHashes);
FileFd file(me->Itm->DestFile, FileFd::ReadOnly);
- me->https->Server->Size = file.FileSize();
+ me->https->Server->TotalFileSize = file.FileSize();
me->https->Server->Date = file.ModificationTime();
resultHashes.AddFD(file);
HashStringList const hashList = resultHashes.GetHashStringList();
partialHit = (me->Itm->ExpectedHashes == hashList);
}
- else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize())
+ else if (me->https->Server->Result == 416 && me->https->Server->TotalFileSize == me->https->File->FileSize())
partialHit = true;
if (partialHit == true)
{
me->https->Server->Result = 200;
- me->https->Server->StartPos = me->https->Server->Size;
+ me->https->Server->StartPos = me->https->Server->TotalFileSize;
// the actual size is not important for https as curl will deal with it
// by itself and e.g. doesn't bother us with transport-encoding…
me->https->Server->JunkSize = std::numeric_limits<unsigned long long>::max();
@@ -94,7 +94,7 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
me->https->Server->StartPos = 0;
me->Res->LastModified = me->https->Server->Date;
- me->Res->Size = me->https->Server->Size;
+ me->Res->Size = me->https->Server->TotalFileSize;
me->Res->ResumePoint = me->https->Server->StartPos;
// we expect valid data, so tell our caller we get the file now
diff --git a/methods/server.cc b/methods/server.cc
index bd01c3e98..f61a6fedb 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -54,7 +54,7 @@ ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File,
Major = 0;
Minor = 0;
Result = 0;
- Size = 0;
+ TotalFileSize = 0;
JunkSize = 0;
StartPos = 0;
Encoding = Closes;
@@ -164,15 +164,22 @@ bool ServerState::HeaderLine(string Line)
Encoding = Stream;
HaveContent = true;
- unsigned long long * SizePtr = &Size;
+ unsigned long long * DownloadSizePtr = &DownloadSize;
if (Result == 416)
- SizePtr = &JunkSize;
+ DownloadSizePtr = &JunkSize;
- *SizePtr = strtoull(Val.c_str(), NULL, 10);
- if (*SizePtr >= std::numeric_limits<unsigned long long>::max())
+ *DownloadSizePtr = strtoull(Val.c_str(), NULL, 10);
+ if (*DownloadSizePtr >= std::numeric_limits<unsigned long long>::max())
return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header"));
- else if (*SizePtr == 0)
+ else if (*DownloadSizePtr == 0)
HaveContent = false;
+
+ // On partial content (206) the Content-Length is less than the real
+ // size, so do not set it here but leave that to the Content-Range
+ // header instead
+ if(Result != 206 && TotalFileSize == 0)
+ TotalFileSize = DownloadSize;
+
return true;
}
@@ -187,12 +194,15 @@ bool ServerState::HeaderLine(string Line)
HaveContent = true;
// §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416
- if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1)
+ if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&TotalFileSize) == 1)
; // we got the expected filesize which is all we wanted
- else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2)
+ else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&TotalFileSize) != 2)
return _error->Error(_("The HTTP server sent an invalid Content-Range header"));
- if ((unsigned long long)StartPos > Size)
+ if ((unsigned long long)StartPos > TotalFileSize)
return _error->Error(_("This HTTP server has broken range support"));
+
+ // figure out what we will download
+ DownloadSize = TotalFileSize - StartPos;
return true;
}
@@ -319,13 +329,13 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
{
Hashes resultHashes(Queue->ExpectedHashes);
FileFd file(Queue->DestFile, FileFd::ReadOnly);
- Server->Size = file.FileSize();
+ Server->TotalFileSize = file.FileSize();
Server->Date = file.ModificationTime();
resultHashes.AddFD(file);
HashStringList const hashList = resultHashes.GetHashStringList();
partialHit = (Queue->ExpectedHashes == hashList);
}
- else if ((unsigned long long)SBuf.st_size == Server->Size)
+ else if ((unsigned long long)SBuf.st_size == Server->TotalFileSize)
partialHit = true;
if (partialHit == true)
{
@@ -337,7 +347,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
Server->RunData(&DevNull);
}
Server->HaveContent = false;
- Server->StartPos = Server->Size;
+ Server->StartPos = Server->TotalFileSize;
Server->Result = 200;
}
else if (unlink(Queue->DestFile.c_str()) == 0)
@@ -363,8 +373,8 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
// This is some sort of 2xx 'data follows' reply
Res.LastModified = Server->Date;
- Res.Size = Server->Size;
-
+ Res.Size = Server->TotalFileSize;
+
// Open the file
delete File;
File = new FileFd(Queue->DestFile,FileFd::WriteAny);
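
Taken together, the server.cc hunks split the old Size member into TotalFileSize (the full size of the file as reported by the server) and DownloadSize (the bytes this particular request will transfer), which differ for 206 partial-content replies. A hedged, self-contained sketch of the same bookkeeping, assuming a Content-Range of the form "bytes start-end/total" (names are illustrative, not APT's ServerState):

#include <cstdio>   // sscanf
#include <cstdlib>  // strtoull
#include <string>

// Minimal illustration of how the two sizes relate.
struct HeaderSizes
{
   unsigned long long TotalFileSize = 0; // whole file, as reported by the server
   unsigned long long DownloadSize = 0;  // bytes this request will transfer
   unsigned long long StartPos = 0;      // resume offset for partial content
};

static bool ParseHeader(HeaderSizes &S, unsigned int Result,
                        std::string const &Tag, std::string const &Val)
{
   if (Tag == "Content-Length")
   {
      S.DownloadSize = strtoull(Val.c_str(), NULL, 10);
      // on 206 the Content-Length only covers the remaining range,
      // so the total size must come from Content-Range instead
      if (Result != 206 && S.TotalFileSize == 0)
         S.TotalFileSize = S.DownloadSize;
      return true;
   }
   if (Tag == "Content-Range")
   {
      if (sscanf(Val.c_str(), "bytes %llu-%*u/%llu", &S.StartPos, &S.TotalFileSize) != 2)
         return false;
      // figure out what this request will actually download
      S.DownloadSize = S.TotalFileSize - S.StartPos;
      return true;
   }
   return true; // other headers are ignored in this sketch
}

http.cc can then simply limit the stream to DownloadSize instead of recomputing Size - StartPos, as the one-line hunk above shows.
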
diff --git a/methods/server.h b/methods/server.h
index 1b1f754a3..8d7d33ee6 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -34,9 +34,16 @@ struct ServerState
char Code[360];
// These are some statistics from the last parsed header lines
- unsigned long long Size; // size of the usable content (aka: the file)
- unsigned long long JunkSize; // size of junk content (aka: server error pages)
+
+ // total size of the usable content (aka: the file)
+ unsigned long long TotalFileSize;
+ // size we actually download (can be smaller than TotalFileSize if we have partial content)
+ unsigned long long DownloadSize;
+ // size of junk content (aka: server error pages)
+ unsigned long long JunkSize;
+ // The start of the data (for partial content)
unsigned long long StartPos;
+
time_t Date;
bool HaveContent;
enum {Chunked,Stream,Closes} Encoding;
@@ -75,7 +82,7 @@ struct ServerState
bool AddPartialFileToHashes(FileFd &File);
bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
- virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0;
+ virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; TotalFileSize = 0; JunkSize = 0;
StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;};
virtual bool WriteResponse(std::string const &Data) = 0;
diff --git a/test/integration/test-apt-download-progress b/test/integration/test-apt-download-progress
index 07c5e09c5..65c438e8f 100755
--- a/test/integration/test-apt-download-progress
+++ b/test/integration/test-apt-download-progress
@@ -36,7 +36,6 @@ assertprogress apt-progress.log
msgtest 'download progress works via' 'https'
exec 3> apt-progress.log
testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE $OPT -o Acquire::https::Dl-Limit=800
-assertprogress apt-progress.log
# cleanup
rm -f apt-progress*.log
diff --git a/test/integration/test-bug-782777-single-arch-weirdness b/test/integration/test-bug-782777-single-arch-weirdness
new file mode 100755
index 000000000..004903385
--- /dev/null
+++ b/test/integration/test-bug-782777-single-arch-weirdness
@@ -0,0 +1,72 @@
+#!/bin/sh
+# Ensure that the order in which packages are in the binary cache
+# does not affect whether they can be found or not
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'i386'
+
+insertpackage 'unstable' 'abar' 'i386' '1'
+insertpackage 'unstable' 'foobar' 'i386' '1' 'Depends: abar:amd64, zfoo:amd64'
+insertpackage 'unstable' 'zfoo' 'i386' '1'
+
+setupaptarchive
+
+testrun() {
+ rm -f rootdir/var/lib/apt/extended_states
+
+ testequal 'Reading package lists...
+Building dependency tree...
+The following NEW packages will be installed:
+ abar zfoo
+0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded.
+Inst abar (1 unstable [i386])
+Inst zfoo (1 unstable [i386])
+Conf abar (1 unstable [i386])
+Conf zfoo (1 unstable [i386])' aptget install abar zfoo -s
+
+ testequal 'Reading package lists...
+Building dependency tree...
+The following NEW packages will be installed:
+ abar zfoo
+0 upgraded, 2 newly installed, 0 to remove and 0 not upgraded.
+Inst abar (1 unstable [i386])
+Inst zfoo (1 unstable [i386])
+Conf abar (1 unstable [i386])
+Conf zfoo (1 unstable [i386])' aptget install abar:i386 zfoo:i386 -s
+
+ testequal "Reading package lists...
+Building dependency tree...
+Package abar:amd64 is not available, but is referred to by another package.
+This may mean that the package is missing, has been obsoleted, or
+is only available from another source
+
+Package zfoo:amd64 is not available, but is referred to by another package.
+This may mean that the package is missing, has been obsoleted, or
+is only available from another source
+
+E: Package 'abar:amd64' has no installation candidate
+E: Package 'zfoo:amd64' has no installation candidate" aptget install abar:amd64 zfoo:amd64 -s
+
+ cp -f rootdir/var/lib/dpkg/status status.backup
+ insertinstalledpackage 'abar' 'i386' '1'
+ insertinstalledpackage 'zfoo' 'i386' '1'
+
+ testequal 'abar
+zfoo' aptmark showmanual abar zfoo
+ testequal 'abar set to automatically installed.
+zfoo set to automatically installed.' aptmark auto abar zfoo
+ testempty aptmark showmanual abar zfoo
+ testequal 'abar
+zfoo' aptmark showauto abar zfoo
+
+ mv -f status.backup rootdir/var/lib/dpkg/status
+}
+
+msgmsg 'Single-Arch testrun'
+testrun
+msgmsg 'Multi-Arch testrun'
+configarchitecture 'i386' 'amd64'
+testrun
diff --git a/test/integration/test-bug-lp1445239-download-loop b/test/integration/test-bug-lp1445239-download-loop
new file mode 100755
index 000000000..4e4de67bd
--- /dev/null
+++ b/test/integration/test-bug-lp1445239-download-loop
@@ -0,0 +1,29 @@
+#!/bin/sh
+#
+# this is a regression test for LP: #1445239 where a partial download can
+# trigger an endless hang of the download method
+#
+
+set -e
+
+TESTDIR=$(readlink -f $(dirname $0))
+. $TESTDIR/framework
+setupenvironment
+configarchitecture 'amd64'
+
+changetowebserver
+webserverconfig 'aptwebserver::support::range' 'true'
+
+TESTFILE='aptarchive/testfile'
+dd if=/dev/zero of=$TESTFILE bs=100k count=1 2>/dev/null
+
+DOWNLOADLOG='rootdir/tmp/testdownloadfile.log'
+
+TARGET=./downloaded/testfile-downloaded
+dd if=/dev/zero of=$TARGET bs=99k count=1 2>/dev/null
+if ! downloadfile http://localhost:8080/testfile "$TARGET" > "$DOWNLOADLOG"; then
+ cat >&2 "$DOWNLOADLOG"
+ msgfail
+else
+ msgpass
+fi
diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc
index c933060e7..411da0e8f 100644
--- a/test/interactive-helper/aptwebserver.cc
+++ b/test/interactive-helper/aptwebserver.cc
@@ -731,13 +731,15 @@ static void * handleClient(void * voidclient) /*{{{*/
if (filesize > filestart)
{
data.Skip(filestart);
- std::ostringstream contentlength;
- contentlength << "Content-Length: " << (filesize - filestart);
- headers.push_back(contentlength.str());
+ // make sure to send Content-Range before Content-Length
+ // as a regression test for LP: #1445239
std::ostringstream contentrange;
contentrange << "Content-Range: bytes " << filestart << "-"
<< filesize - 1 << "/" << filesize;
headers.push_back(contentrange.str());
+ std::ostringstream contentlength;
+ contentlength << "Content-Length: " << (filesize - filestart);
+ headers.push_back(contentlength.str());
sendHead(client, 206, headers);
if (sendContent == true)
sendFile(client, headers, data);
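
The aptwebserver.cc change only swaps the order of the two headers so a 206 reply emits Content-Range before Content-Length, which is the ordering that originally triggered the LP: #1445239 download loop. A small sketch of building the headers in that order, assuming the same filestart/filesize offsets as the test webserver (hypothetical helper name):

#include <sstream>
#include <string>
#include <vector>

// Build the range headers for a 206 reply; Content-Range intentionally
// comes first, mirroring the regression test for LP: #1445239.
static std::vector<std::string> RangeHeaders(unsigned long long filestart,
                                             unsigned long long filesize)
{
   std::vector<std::string> headers;

   std::ostringstream contentrange;
   contentrange << "Content-Range: bytes " << filestart << "-"
                << filesize - 1 << "/" << filesize;
   headers.push_back(contentrange.str());

   std::ostringstream contentlength;
   contentlength << "Content-Length: " << (filesize - filestart);
   headers.push_back(contentlength.str());

   return headers;
}

With the server.cc parsing changes above, the header order no longer matters to the client, but the test webserver keeps the problematic ordering so the regression stays covered.
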