From 746ae151ddf8500cab23bc43f646742f3bcafa6d Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 30 Sep 2013 09:08:33 +0200 Subject: APT has no bugs Okay, maybe it does have a "few", but the DDTP issues mentioned in this file are long since gone, so lets just drop the file and look at the PTS instead: http://bugs.debian.org/cgi-bin/pkgreport.cgi?src=apt Git-Dch: Ignore --- BUGS | 9 --------- 1 file changed, 9 deletions(-) delete mode 100644 BUGS diff --git a/BUGS b/BUGS deleted file mode 100644 index a7b6b1114..000000000 --- a/BUGS +++ /dev/null @@ -1,9 +0,0 @@ - -DDTP problems: --------------- -- apt-get update clean the /var/lib/apt/lists dir - from all Translation-$index that are not in the current - enviroment or Translations apt variable -- there needs to be a list of locales (pt, sv, en) that need - both language and country code to get the right file - (is in the code in indexfile::LanguageCode(), just a bit ugly -- cgit v1.2.3 From 6beca0eb8bfc105ba25e140e8ca4e07e78d097e7 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sun, 15 Sep 2013 22:54:04 +0200 Subject: access _config via GET requests in the webserver Git-Dch: Ignore --- test/interactive-helper/aptwebserver.cc | 74 +++++++++++++++++++++++++++++++-- 1 file changed, 71 insertions(+), 3 deletions(-) diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index fde95fec9..7134b37bc 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -156,14 +156,28 @@ void sendError(int const client, int const httpcode, std::string const &request, std::string response(""); response.append(httpcodeToStr(httpcode)).append(""); response.append("

").append(httpcodeToStr(httpcode)).append("

"); - if (error.empty() == false) - response.append("

Error: ").append(error).append("

"); - response.append("This error is a result of the request:
");
+   if (httpcode != 200)
+   {
+      if (error.empty() == false)
+	 response.append("

Error: ").append(error).append("

"); + response.append("This error is a result of the request:
");
+   }
+   else
+   {
+      if (error.empty() == false)
+	 response.append("

Success: ").append(error).append("

"); + response.append("The successfully executed operation was requested by:
");
+   }
    response.append(request).append("
"); addDataHeaders(headers, response); sendHead(client, httpcode, headers); if (content == true) sendData(client, response); +} +void sendSuccess(int const client, std::string const &request, + bool content, std::string const &error = "") +{ + sendError(client, 200, request, content, error); } /*}}}*/ void sendRedirect(int const client, int const httpcode, std::string const &uri,/*{{{*/ @@ -365,6 +379,49 @@ bool parseFirstLine(int const client, std::string const &request, /*{{{*/ return true; } /*}}}*/ +bool handleOnTheFlyReconfiguration(int const client, std::string const &request, std::vector const &parts)/*{{{*/ +{ + size_t const pcount = parts.size(); + if (pcount == 4 && parts[1] == "set") + { + _config->Set(parts[2], parts[3]); + sendSuccess(client, request, true, "Option '" + parts[2] + "' was set to '" + parts[3] + "'!"); + return true; + } + else if (pcount == 4 && parts[1] == "find") + { + std::list headers; + std::string response = _config->Find(parts[2], parts[3]); + addDataHeaders(headers, response); + sendHead(client, 200, headers); + sendData(client, response); + return true; + } + else if (pcount == 3 && parts[1] == "find") + { + std::list headers; + if (_config->Exists(parts[2]) == true) + { + std::string response = _config->Find(parts[2]); + addDataHeaders(headers, response); + sendHead(client, 200, headers); + sendData(client, response); + return true; + } + sendError(client, 404, request, "Requested Configuration option doesn't exist."); + return false; + } + else if (pcount == 3 && parts[1] == "clear") + { + _config->Clear(parts[2]); + sendSuccess(client, request, true, "Option '" + parts[2] + "' was cleared."); + return true; + } + + sendError(client, 400, request, true, "Unknown on-the-fly configuration request"); + return false; +} + /*}}}*/ int main(int const argc, const char * argv[]) { CommandLine::Args Args[] = { @@ -475,6 +532,17 @@ int main(int const argc, const char * argv[]) if (parseFirstLine(client, *m, filename, sendContent, closeConnection) == false) continue; + // special webserver command request + if (filename.length() > 1 && filename[0] == '_') + { + std::vector parts = VectorizeString(filename, '/'); + if (parts[0] == "_config") + { + handleOnTheFlyReconfiguration(client, *m, parts); + continue; + } + } + // string replacements in the requested filename ::Configuration::Item const *Replaces = _config->Tree("aptwebserver::redirect::replace"); if (Replaces != NULL) -- cgit v1.2.3 From 14c84d021d82335255275f1eabf3a856bde4df07 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 16 Sep 2013 00:02:21 +0200 Subject: add Range and If-Range support in the webserver Git-Dch: Ignore --- test/interactive-helper/aptwebserver.cc | 64 ++++++++++++++++++++++++++++++++- 1 file changed, 63 insertions(+), 1 deletion(-) diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index 7134b37bc..4dae342dd 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -100,8 +100,13 @@ bool sendHead(int const client, int const httpcode, std::list &head std::string response("HTTP/1.1 "); response.append(httpcodeToStr(httpcode)); headers.push_front(response); + _config->Set("APTWebserver::Last-Status-Code", httpcode); - headers.push_back("Server: APT webserver"); + std::stringstream buffer; + _config->Dump(buffer, "aptwebserver::response-header", "%t: %v%n", false); + std::vector addheaders = VectorizeString(buffer.str(), '\n'); + for (std::vector::const_iterator h = addheaders.begin(); h 
!= addheaders.end(); ++h) + headers.push_back(*h); std::string date("Date: "); date.append(TimeRFC1123(time(NULL))); @@ -512,6 +517,9 @@ int main(int const argc, const char * argv[]) listen(sock, 1); /*}}}*/ + _config->CndSet("aptwebserver::response-header::Server", "APT webserver"); + _config->CndSet("aptwebserver::response-header::Accept-Ranges", "bytes"); + std::vector messages; int client; while ((client = accept(sock, NULL, NULL)) != -1) @@ -600,6 +608,60 @@ int main(int const argc, const char * argv[]) } } + if (_config->FindB("aptwebserver::support::range", true) == true) + condition = LookupTag(*m, "Range", ""); + else + condition.clear(); + if (condition.empty() == false && strncmp(condition.c_str(), "bytes=", 6) == 0) + { + time_t cache; + std::string ifrange; + if (_config->FindB("aptwebserver::support::if-range", true) == true) + ifrange = LookupTag(*m, "If-Range", ""); + bool validrange = (ifrange.empty() == true || + (RFC1123StrToTime(ifrange.c_str(), cache) == true && + cache <= data.ModificationTime())); + + // FIXME: support multiple byte-ranges (APT clients do not do this) + if (condition.find(',') == std::string::npos) + { + size_t start = 6; + unsigned long long filestart = strtoull(condition.c_str() + start, NULL, 10); + // FIXME: no support for last-byte-pos being not the end of the file (APT clients do not do this) + size_t dash = condition.find('-') + 1; + unsigned long long fileend = strtoull(condition.c_str() + dash, NULL, 10); + unsigned long long filesize = data.FileSize(); + if ((fileend == 0 || (fileend == filesize && fileend >= filestart)) && + validrange == true) + { + if (filesize > filestart) + { + data.Skip(filestart); + std::ostringstream contentlength; + contentlength << "Content-Length: " << (filesize - filestart); + headers.push_back(contentlength.str()); + std::ostringstream contentrange; + contentrange << "Content-Range: bytes " << filestart << "-" + << filesize - 1 << "/" << filesize; + headers.push_back(contentrange.str()); + sendHead(client, 206, headers); + if (sendContent == true) + sendFile(client, data); + continue; + } + else + { + headers.push_back("Content-Length: 0"); + std::ostringstream contentrange; + contentrange << "Content-Range: bytes */" << filesize; + headers.push_back(contentrange.str()); + sendHead(client, 416, headers); + continue; + } + } + } + } + addFileHeaders(headers, data); sendHead(client, 200, headers); if (sendContent == true) -- cgit v1.2.3 From 331e8396ee5a4f2e7d276eddc54749b2a13dd789 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 16 Sep 2013 23:21:11 +0200 Subject: retry without partial data after a 416 response If we get a 416 from the server it means the Range we asked for is above the real filesize of the file on the server. Mostly this happens if the server isn't supporting If-Range, but regardless of how we end up with the partial data, the data is invalid so we discard it and retry with a fresh plate and hope for the best. Old behavior was to consider 416 an error and retry with a different compression until we ran out of compression and requested the uncompressed file (which doesn't exist on most mirrors) with an accept line which server answered with "406 Not Acceptable". 
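For illustration only (not part of the patch): the exchange that now triggers the retry looks roughly like this, assuming a hypothetical file that shrank on the mirror to 1024 bytes while apt still holds a 2048 byte partial download whose mtime the server ignores:

  GET /Packages.gz HTTP/1.1
  Range: bytes=2048-
  If-Range: Sun, 15 Sep 2013 20:00:00 GMT

  HTTP/1.1 416 Requested Range Not Satisfiable
  Content-Range: bytes */1024
  Content-Length: 0

Since §14.16 only guarantees the total size ("bytes */1024") in that reply, the stale partial file is unlinked and the same URI is queued again, this time without a Range header.
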
Closes: 710924 --- methods/http.cc | 19 ++++++++- test/integration/test-releasefile-verification | 59 +++++++++++++++++++------- 2 files changed, 60 insertions(+), 18 deletions(-) diff --git a/methods/http.cc b/methods/http.cc index 278ddb290..2a8b7f0fc 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -604,6 +604,8 @@ bool ServerState::HeaderLine(string Line) Size = strtoull(Val.c_str(), NULL, 10); if (Size >= std::numeric_limits::max()) return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header")); + else if (Size == 0) + HaveContent = false; return true; } @@ -616,8 +618,14 @@ bool ServerState::HeaderLine(string Line) if (stringcasecmp(Tag,"Content-Range:") == 0) { HaveContent = true; - - if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2) + + // §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416 + if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1) + { + StartPos = 1; // ignore Content-Length, it would override Size + HaveContent = false; + } + else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2) return _error->Error(_("The HTTP server sent an invalid Content-Range header")); if ((unsigned long long)StartPos > Size) return _error->Error(_("This HTTP server has broken range support")); @@ -995,6 +1003,13 @@ HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv) } /* else pass through for error message */ } + // retry after an invalid range response without partial data + else if (Srv->Result == 416 && FileExists(Queue->DestFile) == true && + unlink(Queue->DestFile.c_str()) == 0) + { + NextURI = Queue->Uri; + return TRY_AGAIN_OR_REDIRECT; + } /* We have a reply we dont handle. This should indicate a perm server failure */ diff --git a/test/integration/test-releasefile-verification b/test/integration/test-releasefile-verification index a9f4b9775..9d34a521a 100755 --- a/test/integration/test-releasefile-verification +++ b/test/integration/test-releasefile-verification @@ -11,18 +11,24 @@ buildaptarchive setupflataptarchive changetowebserver +downloadfile "http://localhost:8080/_config/set/aptwebserver::support::range/false" '/dev/null' >/dev/null + prepare() { local DATE="${2:-now}" - if [ "$DATE" = 'now' -a "$1" = "${PKGFILE}-new" ]; then - DATE='now + 6 days' + if [ "$DATE" = 'now' ]; then + if [ "$1" = "${PKGFILE}-new" ]; then + DATE='now - 1 day' + else + DATE='now - 7 day' + fi fi for release in $(find rootdir/var/lib/apt/lists 2> /dev/null); do - touch -d 'now - 6 hours' $release + touch -d 'now - 1 year' $release done aptget clean cp $1 aptarchive/Packages find aptarchive -name 'Release' -delete - compressfile 'aptarchive/Packages' + compressfile 'aptarchive/Packages' "$DATE" generatereleasefiles "$DATE" } @@ -85,13 +91,34 @@ touch aptarchive/apt.deb PKGFILE="${TESTDIR}/$(echo "$(basename $0)" | sed 's#^test-#Packages-#')" +updatesuccess() { + local LOG='update.log' + if aptget update >$LOG 2>&1 || grep -q -E '^(W|E): ' $LOG; then + msgpass + else + cat $LOG + msgfail + fi +} + +updatefailure() { + local LOG='update.log' + aptget update >$LOG 2>&1 || true + if grep -q -E "$1" $LOG; then + msgpass + else + cat $LOG + msgfail + fi +} + runtest() { prepare ${PKGFILE} rm -rf rootdir/var/lib/apt/lists signreleasefiles 'Joe Sixpack' find aptarchive/ -name "$DELETEFILE" -delete msgtest 'Cold archive signed by' 'Joe Sixpack' - aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass + updatesuccess testequal "$(cat ${PKGFILE}) " aptcache show apt installaptold @@ 
-100,7 +127,7 @@ runtest() { signreleasefiles 'Joe Sixpack' find aptarchive/ -name "$DELETEFILE" -delete msgtest 'Good warm archive signed by' 'Joe Sixpack' - aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass + updatesuccess testequal "$(cat ${PKGFILE}-new) " aptcache show apt installaptnew @@ -111,7 +138,7 @@ runtest() { signreleasefiles 'Rex Expired' find aptarchive/ -name "$DELETEFILE" -delete msgtest 'Cold archive signed by' 'Rex Expired' - aptget update 2>&1 | grep -E '^W: .* KEYEXPIRED' > /dev/null && msgpass || msgfail + updatefailure '^W: .* KEYEXPIRED' testequal "$(cat ${PKGFILE}) " aptcache show apt failaptold @@ -122,7 +149,7 @@ runtest() { signreleasefiles 'Marvin Paranoid' find aptarchive/ -name "$DELETEFILE" -delete msgtest 'Cold archive signed by' 'Marvin Paranoid' - aptget update 2>&1 | grep -E '^W: .* NO_PUBKEY' > /dev/null && msgpass || msgfail + updatefailure '^W: .* NO_PUBKEY' testequal "$(cat ${PKGFILE}) " aptcache show apt failaptold @@ -136,7 +163,7 @@ runtest() { signreleasefiles 'Joe Sixpack' find aptarchive/ -name "$DELETEFILE" -delete msgtest 'Bad warm archive signed by' 'Joe Sixpack' - aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass + updatesuccess testequal "$(cat ${PKGFILE}-new) " aptcache show apt installaptnew @@ -147,7 +174,7 @@ runtest() { signreleasefiles 'Joe Sixpack' find aptarchive/ -name "$DELETEFILE" -delete msgtest 'Cold archive signed by' 'Joe Sixpack' - aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass + updatesuccess testequal "$(cat ${PKGFILE}) " aptcache show apt installaptold @@ -156,7 +183,7 @@ runtest() { signreleasefiles 'Marvin Paranoid' find aptarchive/ -name "$DELETEFILE" -delete msgtest 'Good warm archive signed by' 'Marvin Paranoid' - aptget update 2>&1 | grep -E '^W: .* NO_PUBKEY' > /dev/null && msgpass || msgfail + updatefailure '^W: .* NO_PUBKEY' testequal "$(cat ${PKGFILE}) " aptcache show apt installaptold @@ -166,7 +193,7 @@ runtest() { signreleasefiles 'Rex Expired' find aptarchive/ -name "$DELETEFILE" -delete msgtest 'Good warm archive signed by' 'Rex Expired' - aptget update 2>&1 | grep -E '^W: .* KEYEXPIRED' > /dev/null && msgpass || msgfail + updatefailure '^W: .* KEYEXPIRED' testequal "$(cat ${PKGFILE}) " aptcache show apt installaptold @@ -176,7 +203,7 @@ runtest() { signreleasefiles find aptarchive/ -name "$DELETEFILE" -delete msgtest 'Good warm archive signed by' 'Joe Sixpack' - aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass + updatesuccess testequal "$(cat ${PKGFILE}-new) " aptcache show apt installaptnew @@ -187,7 +214,7 @@ runtest2() { rm -rf rootdir/var/lib/apt/lists signreleasefiles 'Joe Sixpack' msgtest 'Cold archive signed by' 'Joe Sixpack' - aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass + updatesuccess # New .deb but now an unsigned archive. For example MITM to circumvent # package verification. @@ -195,7 +222,7 @@ runtest2() { find aptarchive/ -name InRelease -delete find aptarchive/ -name Release.gpg -delete msgtest 'Warm archive signed by' 'nobody' - aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass + updatesuccess testequal "$(cat ${PKGFILE}-new) " aptcache show apt failaptnew @@ -203,7 +230,7 @@ runtest2() { # Unsigned archive from the beginning must also be detected. 
rm -rf rootdir/var/lib/apt/lists msgtest 'Cold archive signed by' 'nobody' - aptget update 2>&1 | grep -E '^(W|E): ' > /dev/null && msgfail || msgpass + updatesuccess testequal "$(cat ${PKGFILE}-new) " aptcache show apt failaptnew -- cgit v1.2.3 From 78c72d0ce22e00b194251445aae306df357d5c1a Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Tue, 17 Sep 2013 00:41:58 +0200 Subject: replace "filesize - 1" trick in http with proper 416 handling Our http client requests the "filesize - 1" for the small edgecase of handling a file which was completely downloaded, but not yet moved to the correct place as we get 416 errors in that case, but as we can handle 416 returns now we just special-case the situation of requesting the exact filesize and handle it as a 200 without content instead. --- methods/http.cc | 27 +++++++++++++++++++++------ 1 file changed, 21 insertions(+), 6 deletions(-) diff --git a/methods/http.cc b/methods/http.cc index 2a8b7f0fc..5f0dc03d9 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -750,7 +750,7 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out) if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0) { // In this case we send an if-range query with a range header - sprintf(Buf,"Range: bytes=%lli-\r\nIf-Range: %s\r\n",(long long)SBuf.st_size - 1, + sprintf(Buf,"Range: bytes=%lli-\r\nIf-Range: %s\r\n",(long long)SBuf.st_size, TimeRFC1123(SBuf.st_mtime).c_str()); Req += Buf; } @@ -1004,11 +1004,24 @@ HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv) /* else pass through for error message */ } // retry after an invalid range response without partial data - else if (Srv->Result == 416 && FileExists(Queue->DestFile) == true && - unlink(Queue->DestFile.c_str()) == 0) + else if (Srv->Result == 416) { - NextURI = Queue->Uri; - return TRY_AGAIN_OR_REDIRECT; + struct stat SBuf; + if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0) + { + if ((unsigned long long)SBuf.st_size == Srv->Size) + { + // the file is completely downloaded, but was not moved + Srv->StartPos = Srv->Size; + Srv->Result = 200; + Srv->HaveContent = false; + } + else if (unlink(Queue->DestFile.c_str()) == 0) + { + NextURI = Queue->Uri; + return TRY_AGAIN_OR_REDIRECT; + } + } } /* We have a reply we dont handle. This should indicate a perm server @@ -1246,7 +1259,9 @@ int HttpMethod::Loop() URIStart(Res); // Run the data - bool Result = Server->RunData(); + bool Result = true; + if (Server->HaveContent) + Result = Server->RunData(); /* If the server is sending back sizeless responses then fill in the size now */ -- cgit v1.2.3 From 7330f4df8b31e66f6557bf49c9c90ad9a73ff459 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Tue, 17 Sep 2013 22:35:44 +0200 Subject: refactor http client implementation No effective behavior change, just shuffling big junks of code between methods and classes to split them into those strongly related to our client implementation and those implementing HTTP. The idea is to get HTTPS to a point in which most of the implementation can be shared even though the client implementations itself is completely different. This isn't anywhere near yet though, but it should beenough to reuse at least a few lines from http in https now. 
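To make the intended layering easier to see before diving into the large diff, here is a minimal standalone sketch; the class and method names are the ones introduced below, but the bodies are stand-ins, not apt's real implementation:

#include <iostream>

// server.{h,cc}: protocol-agnostic logic meant to be shared by http and https
struct ServerState
{
   // transport-specific hook every client implementation has to provide
   virtual bool LoadNextResponse(bool const ToFile) = 0;
   // shared logic (header parsing etc.) builds on top of that hook
   bool RunHeaders() { return LoadNextResponse(false); }
   virtual ~ServerState() {}
};

// http.{h,cc}: the socket-based client only keeps its transport details
struct HttpServerState : public ServerState
{
   virtual bool LoadNextResponse(bool const)
   {
      std::cout << "read the next response from the http socket" << std::endl;
      return true;
   }
};

int main()
{
   HttpServerState http;
   return http.RunHeaders() ? 0 : 1;
}
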
Git-Dch: Ignore --- methods/http.cc | 991 +++++++++++------------------------------------------- methods/http.h | 136 +++----- methods/makefile | 4 +- methods/server.cc | 665 ++++++++++++++++++++++++++++++++++++ methods/server.h | 142 ++++++++ 5 files changed, 1049 insertions(+), 889 deletions(-) create mode 100644 methods/server.cc create mode 100644 methods/server.h diff --git a/methods/http.cc b/methods/http.cc index 5f0dc03d9..d2f084b04 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -58,15 +58,6 @@ /*}}}*/ using namespace std; -string HttpMethod::FailFile; -int HttpMethod::FailFd = -1; -time_t HttpMethod::FailTime = 0; -unsigned long PipelineDepth = 0; -unsigned long TimeOut = 120; -bool AllowRedirect = false; -bool Debug = false; -URI Proxy; - unsigned long long CircleBuf::BwReadLimit=0; unsigned long long CircleBuf::BwTickReadData=0; struct timeval CircleBuf::BwReadTick={0,0}; @@ -296,20 +287,17 @@ CircleBuf::~CircleBuf() delete Hash; } -// ServerState::ServerState - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* */ -ServerState::ServerState(URI Srv,HttpMethod *Owner) : Owner(Owner), - In(64*1024), Out(4*1024), - ServerName(Srv) +// HttpServerState::HttpServerState - Constructor /*{{{*/ +HttpServerState::HttpServerState(URI Srv,HttpMethod *Owner) : ServerState(Srv, Owner), In(64*1024), Out(4*1024) { + TimeOut = _config->FindI("Acquire::http::Timeout",TimeOut); Reset(); } /*}}}*/ -// ServerState::Open - Open a connection to the server /*{{{*/ +// HttpServerState::Open - Open a connection to the server /*{{{*/ // --------------------------------------------------------------------- /* This opens a connection to the server. */ -bool ServerState::Open() +bool HttpServerState::Open() { // Use the already open connection if possible. if (ServerFd != -1) @@ -373,72 +361,18 @@ bool ServerState::Open() return true; } /*}}}*/ -// ServerState::Close - Close a connection to the server /*{{{*/ +// HttpServerState::Close - Close a connection to the server /*{{{*/ // --------------------------------------------------------------------- /* */ -bool ServerState::Close() +bool HttpServerState::Close() { close(ServerFd); ServerFd = -1; return true; } /*}}}*/ -// ServerState::RunHeaders - Get the headers before the data /*{{{*/ -// --------------------------------------------------------------------- -/* Returns 0 if things are OK, 1 if an IO error occurred and 2 if a header - parse error occurred */ -ServerState::RunHeadersResult ServerState::RunHeaders() -{ - State = Header; - - Owner->Status(_("Waiting for headers")); - - Major = 0; - Minor = 0; - Result = 0; - Size = 0; - StartPos = 0; - Encoding = Closes; - HaveContent = false; - time(&Date); - - do - { - string Data; - if (In.WriteTillEl(Data) == false) - continue; - - if (Debug == true) - clog << Data; - - for (string::const_iterator I = Data.begin(); I < Data.end(); ++I) - { - string::const_iterator J = I; - for (; J != Data.end() && *J != '\n' && *J != '\r'; ++J); - if (HeaderLine(string(I,J)) == false) - return RUN_HEADERS_PARSE_ERROR; - I = J; - } - - // 100 Continue is a Nop... - if (Result == 100) - continue; - - // Tidy up the connection persistance state. 
- if (Encoding == Closes && HaveContent == true) - Persistent = false; - - return RUN_HEADERS_OK; - } - while (Owner->Go(false,this) == true); - - return RUN_HEADERS_IO_ERROR; -} - /*}}}*/ -// ServerState::RunData - Transfer the data from the socket /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool ServerState::RunData() +// HttpServerState::RunData - Transfer the data from the socket /*{{{*/ +bool HttpServerState::RunData(FileFd * const File) { State = Data; @@ -456,7 +390,7 @@ bool ServerState::RunData() if (In.WriteTillEl(Data,true) == true) break; } - while ((Last = Owner->Go(false,this)) == true); + while ((Last = Go(false, File)) == true); if (Last == false) return false; @@ -474,7 +408,7 @@ bool ServerState::RunData() if (In.WriteTillEl(Data,true) == true && Data.length() <= 2) break; } - while ((Last = Owner->Go(false,this)) == true); + while ((Last = Go(false, File)) == true); if (Last == false) return false; return !_error->PendingError(); @@ -482,7 +416,7 @@ bool ServerState::RunData() // Transfer the block In.Limit(Len); - while (Owner->Go(true,this) == true) + while (Go(true, File) == true) if (In.IsLimit() == true) break; @@ -498,7 +432,7 @@ bool ServerState::RunData() if (In.WriteTillEl(Data,true) == true) break; } - while ((Last = Owner->Go(false,this)) == true); + while ((Last = Go(false, File)) == true); if (Last == false) return false; } @@ -521,147 +455,217 @@ bool ServerState::RunData() In.Limit(-1); return !_error->PendingError(); } - while (Owner->Go(true,this) == true); + while (Go(true, File) == true); } - return Owner->Flush(this) && !_error->PendingError(); + return Owner->Flush() && !_error->PendingError(); } /*}}}*/ -// ServerState::HeaderLine - Process a header line /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool ServerState::HeaderLine(string Line) +bool HttpServerState::ReadHeaderLines(std::string &Data) /*{{{*/ { - if (Line.empty() == true) - return true; + return In.WriteTillEl(Data); +} + /*}}}*/ +bool HttpServerState::LoadNextResponse(bool const ToFile, FileFd * const File)/*{{{*/ +{ + return Go(ToFile, File); +} + /*}}}*/ +bool HttpServerState::WriteResponse(const std::string &Data) /*{{{*/ +{ + return Out.Read(Data); +} + /*}}}*/ +bool HttpServerState::IsOpen() /*{{{*/ +{ + return (ServerFd != -1); +} + /*}}}*/ +bool HttpServerState::InitHashes(FileFd &File) /*{{{*/ +{ + delete In.Hash; + In.Hash = new Hashes; - string::size_type Pos = Line.find(' '); - if (Pos == string::npos || Pos+1 > Line.length()) + // Set the expected size and read file for the hashes + if (StartPos >= 0) { - // Blah, some servers use "connection:closes", evil. - Pos = Line.find(':'); - if (Pos == string::npos || Pos + 2 > Line.length()) - return _error->Error(_("Bad header line")); - Pos++; + File.Truncate(StartPos); + + return In.Hash->AddFD(File, StartPos); } + return true; +} + /*}}}*/ +Hashes * HttpServerState::GetHashes() /*{{{*/ +{ + return In.Hash; +} + /*}}}*/ +// HttpServerState::Die - The server has closed the connection. /*{{{*/ +bool HttpServerState::Die(FileFd &File) +{ + unsigned int LErrno = errno; - // Parse off any trailing spaces between the : and the next word. 
- string::size_type Pos2 = Pos; - while (Pos2 < Line.length() && isspace(Line[Pos2]) != 0) - Pos2++; - - string Tag = string(Line,0,Pos); - string Val = string(Line,Pos2); - - if (stringcasecmp(Tag.c_str(),Tag.c_str()+4,"HTTP") == 0) + // Dump the buffer to the file + if (State == ServerState::Data) { - // Evil servers return no version - if (Line[4] == '/') - { - int const elements = sscanf(Line.c_str(),"HTTP/%3u.%3u %3u%359[^\n]",&Major,&Minor,&Result,Code); - if (elements == 3) - { - Code[0] = '\0'; - if (Debug == true) - clog << "HTTP server doesn't give Reason-Phrase for " << Result << std::endl; - } - else if (elements != 4) - return _error->Error(_("The HTTP server sent an invalid reply header")); - } - else + // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking + // can't be set + if (File.Name() != "/dev/null") + SetNonBlock(File.Fd(),false); + while (In.WriteSpace() == true) { - Major = 0; - Minor = 9; - if (sscanf(Line.c_str(),"HTTP %3u%359[^\n]",&Result,Code) != 2) - return _error->Error(_("The HTTP server sent an invalid reply header")); - } + if (In.Write(File.Fd()) == false) + return _error->Errno("write",_("Error writing to the file")); - /* Check the HTTP response header to get the default persistance - state. */ - if (Major < 1) - Persistent = false; - else - { - if (Major == 1 && Minor == 0) - Persistent = false; - else - Persistent = true; + // Done + if (In.IsLimit() == true) + return true; } + } - return true; - } - - if (stringcasecmp(Tag,"Content-Length:") == 0) + // See if this is because the server finished the data stream + if (In.IsLimit() == false && State != HttpServerState::Header && + Encoding != HttpServerState::Closes) { - if (Encoding == Closes) - Encoding = Stream; - HaveContent = true; - - // The length is already set from the Content-Range header - if (StartPos != 0) - return true; - - Size = strtoull(Val.c_str(), NULL, 10); - if (Size >= std::numeric_limits::max()) - return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header")); - else if (Size == 0) - HaveContent = false; - return true; + Close(); + if (LErrno == 0) + return _error->Error(_("Error reading from server. Remote end closed connection")); + errno = LErrno; + return _error->Errno("read",_("Error reading from server")); } - - if (stringcasecmp(Tag,"Content-Type:") == 0) + else { - HaveContent = true; + In.Limit(-1); + + // Nothing left in the buffer + if (In.WriteSpace() == false) + return false; + + // We may have got multiple responses back in one packet.. 
+ Close(); return true; } - - if (stringcasecmp(Tag,"Content-Range:") == 0) - { - HaveContent = true; - // §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416 - if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1) + return false; +} + /*}}}*/ +// HttpServerState::Flush - Dump the buffer into the file /*{{{*/ +// --------------------------------------------------------------------- +/* This takes the current input buffer from the Server FD and writes it + into the file */ +bool HttpServerState::Flush(FileFd * const File) +{ + if (File != NULL) + { + // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking + // can't be set + if (File->Name() != "/dev/null") + SetNonBlock(File->Fd(),false); + if (In.WriteSpace() == false) + return true; + + while (In.WriteSpace() == true) { - StartPos = 1; // ignore Content-Length, it would override Size - HaveContent = false; + if (In.Write(File->Fd()) == false) + return _error->Errno("write",_("Error writing to file")); + if (In.IsLimit() == true) + return true; } - else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2) - return _error->Error(_("The HTTP server sent an invalid Content-Range header")); - if ((unsigned long long)StartPos > Size) - return _error->Error(_("This HTTP server has broken range support")); - return true; + + if (In.IsLimit() == true || Encoding == ServerState::Closes) + return true; } + return false; +} + /*}}}*/ +// HttpServerState::Go - Run a single loop /*{{{*/ +// --------------------------------------------------------------------- +/* This runs the select loop over the server FDs, Output file FDs and + stdin. */ +bool HttpServerState::Go(bool ToFile, FileFd * const File) +{ + // Server has closed the connection + if (ServerFd == -1 && (In.WriteSpace() == false || + ToFile == false)) + return false; + + fd_set rfds,wfds; + FD_ZERO(&rfds); + FD_ZERO(&wfds); + + /* Add the server. 
We only send more requests if the connection will + be persisting */ + if (Out.WriteSpace() == true && ServerFd != -1 + && Persistent == true) + FD_SET(ServerFd,&wfds); + if (In.ReadSpace() == true && ServerFd != -1) + FD_SET(ServerFd,&rfds); - if (stringcasecmp(Tag,"Transfer-Encoding:") == 0) + // Add the file + int FileFD = -1; + if (File != NULL) + FileFD = File->Fd(); + + if (In.WriteSpace() == true && ToFile == true && FileFD != -1) + FD_SET(FileFD,&wfds); + + // Add stdin + if (_config->FindB("Acquire::http::DependOnSTDIN", true) == true) + FD_SET(STDIN_FILENO,&rfds); + + // Figure out the max fd + int MaxFd = FileFD; + if (MaxFd < ServerFd) + MaxFd = ServerFd; + + // Select + struct timeval tv; + tv.tv_sec = TimeOut; + tv.tv_usec = 0; + int Res = 0; + if ((Res = select(MaxFd+1,&rfds,&wfds,0,&tv)) < 0) { - HaveContent = true; - if (stringcasecmp(Val,"chunked") == 0) - Encoding = Chunked; - return true; + if (errno == EINTR) + return true; + return _error->Errno("select",_("Select failed")); } - - if (stringcasecmp(Tag,"Connection:") == 0) + + if (Res == 0) { - if (stringcasecmp(Val,"close") == 0) - Persistent = false; - if (stringcasecmp(Val,"keep-alive") == 0) - Persistent = true; - return true; + _error->Error(_("Connection timed out")); + return Die(*File); } - if (stringcasecmp(Tag,"Last-Modified:") == 0) + // Handle server IO + if (ServerFd != -1 && FD_ISSET(ServerFd,&rfds)) { - if (RFC1123StrToTime(Val.c_str(), Date) == false) - return _error->Error(_("Unknown date format")); - return true; + errno = 0; + if (In.Read(ServerFd) == false) + return Die(*File); + } + + if (ServerFd != -1 && FD_ISSET(ServerFd,&wfds)) + { + errno = 0; + if (Out.Write(ServerFd) == false) + return Die(*File); } - if (stringcasecmp(Tag,"Location:") == 0) + // Send data to the file + if (FileFD != -1 && FD_ISSET(FileFD,&wfds)) { - Location = Val; - return true; + if (In.Write(FileFD) == false) + return _error->Errno("write",_("Error writing to output file")); } + // Handle commands from APT + if (FD_ISSET(STDIN_FILENO,&rfds)) + { + if (Owner->Run(true) != -1) + exit(100); + } + return true; } /*}}}*/ @@ -669,7 +673,7 @@ bool ServerState::HeaderLine(string Line) // HttpMethod::SendReq - Send the HTTP request /*{{{*/ // --------------------------------------------------------------------- /* This places the http request in the outbound buffer */ -void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out) +void HttpMethod::SendReq(FetchItem *Itm) { URI Uri = Itm->Uri; @@ -695,7 +699,7 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out) but while its a must for all servers to accept absolute URIs, it is assumed clients will sent an absolute path for non-proxies */ std::string requesturi; - if (Proxy.empty() == true || Proxy.Host.empty()) + if (Server->Proxy.empty() == true || Server->Proxy.Host.empty()) requesturi = Uri.Path; else requesturi = Itm->Uri; @@ -763,9 +767,9 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out) } } - if (Proxy.User.empty() == false || Proxy.Password.empty() == false) + if (Server->Proxy.User.empty() == false || Server->Proxy.Password.empty() == false) Req += string("Proxy-Authorization: Basic ") + - Base64Encode(Proxy.User + ":" + Proxy.Password) + "\r\n"; + Base64Encode(Server->Proxy.User + ":" + Server->Proxy.Password) + "\r\n"; maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc")); if (Uri.User.empty() == false || Uri.Password.empty() == false) @@ -779,360 +783,18 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out) if (Debug == true) cerr << Req << endl; - 
Out.Read(Req); -} - /*}}}*/ -// HttpMethod::Go - Run a single loop /*{{{*/ -// --------------------------------------------------------------------- -/* This runs the select loop over the server FDs, Output file FDs and - stdin. */ -bool HttpMethod::Go(bool ToFile,ServerState *Srv) -{ - // Server has closed the connection - if (Srv->ServerFd == -1 && (Srv->In.WriteSpace() == false || - ToFile == false)) - return false; - - fd_set rfds,wfds; - FD_ZERO(&rfds); - FD_ZERO(&wfds); - - /* Add the server. We only send more requests if the connection will - be persisting */ - if (Srv->Out.WriteSpace() == true && Srv->ServerFd != -1 - && Srv->Persistent == true) - FD_SET(Srv->ServerFd,&wfds); - if (Srv->In.ReadSpace() == true && Srv->ServerFd != -1) - FD_SET(Srv->ServerFd,&rfds); - - // Add the file - int FileFD = -1; - if (File != 0) - FileFD = File->Fd(); - - if (Srv->In.WriteSpace() == true && ToFile == true && FileFD != -1) - FD_SET(FileFD,&wfds); - - // Add stdin - if (_config->FindB("Acquire::http::DependOnSTDIN", true) == true) - FD_SET(STDIN_FILENO,&rfds); - - // Figure out the max fd - int MaxFd = FileFD; - if (MaxFd < Srv->ServerFd) - MaxFd = Srv->ServerFd; - - // Select - struct timeval tv; - tv.tv_sec = TimeOut; - tv.tv_usec = 0; - int Res = 0; - if ((Res = select(MaxFd+1,&rfds,&wfds,0,&tv)) < 0) - { - if (errno == EINTR) - return true; - return _error->Errno("select",_("Select failed")); - } - - if (Res == 0) - { - _error->Error(_("Connection timed out")); - return ServerDie(Srv); - } - - // Handle server IO - if (Srv->ServerFd != -1 && FD_ISSET(Srv->ServerFd,&rfds)) - { - errno = 0; - if (Srv->In.Read(Srv->ServerFd) == false) - return ServerDie(Srv); - } - - if (Srv->ServerFd != -1 && FD_ISSET(Srv->ServerFd,&wfds)) - { - errno = 0; - if (Srv->Out.Write(Srv->ServerFd) == false) - return ServerDie(Srv); - } - - // Send data to the file - if (FileFD != -1 && FD_ISSET(FileFD,&wfds)) - { - if (Srv->In.Write(FileFD) == false) - return _error->Errno("write",_("Error writing to output file")); - } - - // Handle commands from APT - if (FD_ISSET(STDIN_FILENO,&rfds)) - { - if (Run(true) != -1) - exit(100); - } - - return true; -} - /*}}}*/ -// HttpMethod::Flush - Dump the buffer into the file /*{{{*/ -// --------------------------------------------------------------------- -/* This takes the current input buffer from the Server FD and writes it - into the file */ -bool HttpMethod::Flush(ServerState *Srv) -{ - if (File != 0) - { - // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking - // can't be set - if (File->Name() != "/dev/null") - SetNonBlock(File->Fd(),false); - if (Srv->In.WriteSpace() == false) - return true; - - while (Srv->In.WriteSpace() == true) - { - if (Srv->In.Write(File->Fd()) == false) - return _error->Errno("write",_("Error writing to file")); - if (Srv->In.IsLimit() == true) - return true; - } - - if (Srv->In.IsLimit() == true || Srv->Encoding == ServerState::Closes) - return true; - } - return false; + Server->WriteResponse(Req); } /*}}}*/ -// HttpMethod::ServerDie - The server has closed the connection. 
/*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool HttpMethod::ServerDie(ServerState *Srv) -{ - unsigned int LErrno = errno; - - // Dump the buffer to the file - if (Srv->State == ServerState::Data) - { - // on GNU/kFreeBSD, apt dies on /dev/null because non-blocking - // can't be set - if (File->Name() != "/dev/null") - SetNonBlock(File->Fd(),false); - while (Srv->In.WriteSpace() == true) - { - if (Srv->In.Write(File->Fd()) == false) - return _error->Errno("write",_("Error writing to the file")); - - // Done - if (Srv->In.IsLimit() == true) - return true; - } - } - - // See if this is because the server finished the data stream - if (Srv->In.IsLimit() == false && Srv->State != ServerState::Header && - Srv->Encoding != ServerState::Closes) - { - Srv->Close(); - if (LErrno == 0) - return _error->Error(_("Error reading from server. Remote end closed connection")); - errno = LErrno; - return _error->Errno("read",_("Error reading from server")); - } - else - { - Srv->In.Limit(-1); - - // Nothing left in the buffer - if (Srv->In.WriteSpace() == false) - return false; - - // We may have got multiple responses back in one packet.. - Srv->Close(); - return true; - } - - return false; -} - /*}}}*/ -// HttpMethod::DealWithHeaders - Handle the retrieved header data /*{{{*/ -// --------------------------------------------------------------------- -/* We look at the header data we got back from the server and decide what - to do. Returns DealWithHeadersResult (see http.h for details). - */ -HttpMethod::DealWithHeadersResult -HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv) -{ - // Not Modified - if (Srv->Result == 304) - { - unlink(Queue->DestFile.c_str()); - Res.IMSHit = true; - Res.LastModified = Queue->LastModified; - return IMS_HIT; - } - - /* Redirect - * - * Note that it is only OK for us to treat all redirection the same - * because we *always* use GET, not other HTTP methods. There are - * three redirection codes for which it is not appropriate that we - * redirect. Pass on those codes so the error handling kicks in. - */ - if (AllowRedirect - && (Srv->Result > 300 && Srv->Result < 400) - && (Srv->Result != 300 // Multiple Choices - && Srv->Result != 304 // Not Modified - && Srv->Result != 306)) // (Not part of HTTP/1.1, reserved) - { - if (Srv->Location.empty() == true); - else if (Srv->Location[0] == '/' && Queue->Uri.empty() == false) - { - URI Uri = Queue->Uri; - if (Uri.Host.empty() == false) - NextURI = URI::SiteOnly(Uri); - else - NextURI.clear(); - NextURI.append(DeQuoteString(Srv->Location)); - return TRY_AGAIN_OR_REDIRECT; - } - else - { - NextURI = DeQuoteString(Srv->Location); - URI tmpURI = NextURI; - // Do not allow a redirection to switch protocol - if (tmpURI.Access == "http") - return TRY_AGAIN_OR_REDIRECT; - } - /* else pass through for error message */ - } - // retry after an invalid range response without partial data - else if (Srv->Result == 416) - { - struct stat SBuf; - if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0) - { - if ((unsigned long long)SBuf.st_size == Srv->Size) - { - // the file is completely downloaded, but was not moved - Srv->StartPos = Srv->Size; - Srv->Result = 200; - Srv->HaveContent = false; - } - else if (unlink(Queue->DestFile.c_str()) == 0) - { - NextURI = Queue->Uri; - return TRY_AGAIN_OR_REDIRECT; - } - } - } - - /* We have a reply we dont handle. 
This should indicate a perm server - failure */ - if (Srv->Result < 200 || Srv->Result >= 300) - { - char err[255]; - snprintf(err,sizeof(err)-1,"HttpError%i",Srv->Result); - SetFailReason(err); - _error->Error("%u %s",Srv->Result,Srv->Code); - if (Srv->HaveContent == true) - return ERROR_WITH_CONTENT_PAGE; - return ERROR_UNRECOVERABLE; - } - - // This is some sort of 2xx 'data follows' reply - Res.LastModified = Srv->Date; - Res.Size = Srv->Size; - - // Open the file - delete File; - File = new FileFd(Queue->DestFile,FileFd::WriteAny); - if (_error->PendingError() == true) - return ERROR_NOT_FROM_SERVER; - - FailFile = Queue->DestFile; - FailFile.c_str(); // Make sure we dont do a malloc in the signal handler - FailFd = File->Fd(); - FailTime = Srv->Date; - - delete Srv->In.Hash; - Srv->In.Hash = new Hashes; - - // Set the expected size and read file for the hashes - if (Srv->StartPos >= 0) - { - Res.ResumePoint = Srv->StartPos; - File->Truncate(Srv->StartPos); - - if (Srv->In.Hash->AddFD(*File,Srv->StartPos) == false) - { - _error->Errno("read",_("Problem hashing file")); - return ERROR_NOT_FROM_SERVER; - } - } - - SetNonBlock(File->Fd(),true); - return FILE_IS_OPEN; -} - /*}}}*/ -// HttpMethod::SigTerm - Handle a fatal signal /*{{{*/ -// --------------------------------------------------------------------- -/* This closes and timestamps the open file. This is neccessary to get - resume behavoir on user abort */ -void HttpMethod::SigTerm(int) -{ - if (FailFd == -1) - _exit(100); - close(FailFd); - - // Timestamp - struct utimbuf UBuf; - UBuf.actime = FailTime; - UBuf.modtime = FailTime; - utime(FailFile.c_str(),&UBuf); - - _exit(100); -} - /*}}}*/ -// HttpMethod::Fetch - Fetch an item /*{{{*/ -// --------------------------------------------------------------------- -/* This adds an item to the pipeline. We keep the pipeline at a fixed - depth. 
*/ -bool HttpMethod::Fetch(FetchItem *) -{ - if (Server == 0) - return true; - - // Queue the requests - int Depth = -1; - for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth; - I = I->Next, Depth++) - { - // If pipelining is disabled, we only queue 1 request - if (Server->Pipeline == false && Depth >= 0) - break; - - // Make sure we stick with the same server - if (Server->Comp(I->Uri) == false) - break; - if (QueueBack == I) - { - QueueBack = I->Next; - SendReq(I,Server->Out); - continue; - } - } - - return true; -}; - /*}}}*/ // HttpMethod::Configuration - Handle a configuration message /*{{{*/ // --------------------------------------------------------------------- /* We stash the desired pipeline depth */ bool HttpMethod::Configuration(string Message) { - if (pkgAcqMethod::Configuration(Message) == false) + if (ServerMethod::Configuration(Message) == false) return false; - + AllowRedirect = _config->FindB("Acquire::http::AllowRedirect",true); - TimeOut = _config->FindI("Acquire::http::Timeout",TimeOut); PipelineDepth = _config->FindI("Acquire::http::Pipeline-Depth", PipelineDepth); Debug = _config->FindB("Debug::Acquire::http",false); @@ -1144,260 +806,6 @@ bool HttpMethod::Configuration(string Message) return true; } /*}}}*/ -// HttpMethod::Loop - Main loop /*{{{*/ -// --------------------------------------------------------------------- -/* */ -int HttpMethod::Loop() -{ - typedef vector StringVector; - typedef vector::iterator StringVectorIterator; - map Redirected; - - signal(SIGTERM,SigTerm); - signal(SIGINT,SigTerm); - - Server = 0; - - int FailCounter = 0; - while (1) - { - // We have no commands, wait for some to arrive - if (Queue == 0) - { - if (WaitFd(STDIN_FILENO) == false) - return 0; - } - - /* Run messages, we can accept 0 (no message) if we didn't - do a WaitFd above.. Otherwise the FD is closed. */ - int Result = Run(true); - if (Result != -1 && (Result != 0 || Queue == 0)) - { - if(FailReason.empty() == false || - _config->FindB("Acquire::http::DependOnSTDIN", true) == true) - return 100; - else - return 0; - } - - if (Queue == 0) - continue; - - // Connect to the server - if (Server == 0 || Server->Comp(Queue->Uri) == false) - { - delete Server; - Server = new ServerState(Queue->Uri,this); - } - /* If the server has explicitly said this is the last connection - then we pre-emptively shut down the pipeline and tear down - the connection. This will speed up HTTP/1.0 servers a tad - since we don't have to wait for the close sequence to - complete */ - if (Server->Persistent == false) - Server->Close(); - - // Reset the pipeline - if (Server->ServerFd == -1) - QueueBack = Queue; - - // Connnect to the host - if (Server->Open() == false) - { - Fail(true); - delete Server; - Server = 0; - continue; - } - - // Fill the pipeline. - Fetch(0); - - // Fetch the next URL header data from the server. - switch (Server->RunHeaders()) - { - case ServerState::RUN_HEADERS_OK: - break; - - // The header data is bad - case ServerState::RUN_HEADERS_PARSE_ERROR: - { - _error->Error(_("Bad header data")); - Fail(true); - RotateDNS(); - continue; - } - - // The server closed a connection during the header get.. - default: - case ServerState::RUN_HEADERS_IO_ERROR: - { - FailCounter++; - _error->Discard(); - Server->Close(); - Server->Pipeline = false; - - if (FailCounter >= 2) - { - Fail(_("Connection failed"),true); - FailCounter = 0; - } - - RotateDNS(); - continue; - } - }; - - // Decide what to do. 
- FetchResult Res; - Res.Filename = Queue->DestFile; - switch (DealWithHeaders(Res,Server)) - { - // Ok, the file is Open - case FILE_IS_OPEN: - { - URIStart(Res); - - // Run the data - bool Result = true; - if (Server->HaveContent) - Result = Server->RunData(); - - /* If the server is sending back sizeless responses then fill in - the size now */ - if (Res.Size == 0) - Res.Size = File->Size(); - - // Close the file, destroy the FD object and timestamp it - FailFd = -1; - delete File; - File = 0; - - // Timestamp - struct utimbuf UBuf; - time(&UBuf.actime); - UBuf.actime = Server->Date; - UBuf.modtime = Server->Date; - utime(Queue->DestFile.c_str(),&UBuf); - - // Send status to APT - if (Result == true) - { - Res.TakeHashes(*Server->In.Hash); - URIDone(Res); - } - else - { - if (Server->ServerFd == -1) - { - FailCounter++; - _error->Discard(); - Server->Close(); - - if (FailCounter >= 2) - { - Fail(_("Connection failed"),true); - FailCounter = 0; - } - - QueueBack = Queue; - } - else - Fail(true); - } - break; - } - - // IMS hit - case IMS_HIT: - { - URIDone(Res); - break; - } - - // Hard server error, not found or something - case ERROR_UNRECOVERABLE: - { - Fail(); - break; - } - - // Hard internal error, kill the connection and fail - case ERROR_NOT_FROM_SERVER: - { - delete File; - File = 0; - - Fail(); - RotateDNS(); - Server->Close(); - break; - } - - // We need to flush the data, the header is like a 404 w/ error text - case ERROR_WITH_CONTENT_PAGE: - { - Fail(); - - // Send to content to dev/null - File = new FileFd("/dev/null",FileFd::WriteExists); - Server->RunData(); - delete File; - File = 0; - break; - } - - // Try again with a new URL - case TRY_AGAIN_OR_REDIRECT: - { - // Clear rest of response if there is content - if (Server->HaveContent) - { - File = new FileFd("/dev/null",FileFd::WriteExists); - Server->RunData(); - delete File; - File = 0; - } - - /* Detect redirect loops. No more redirects are allowed - after the same URI is seen twice in a queue item. 
*/ - StringVector &R = Redirected[Queue->DestFile]; - bool StopRedirects = false; - if (R.empty() == true) - R.push_back(Queue->Uri); - else if (R[0] == "STOP" || R.size() > 10) - StopRedirects = true; - else - { - for (StringVectorIterator I = R.begin(); I != R.end(); ++I) - if (Queue->Uri == *I) - { - R[0] = "STOP"; - break; - } - - R.push_back(Queue->Uri); - } - - if (StopRedirects == false) - Redirect(NextURI); - else - Fail(); - - break; - } - - default: - Fail(_("Internal error")); - break; - } - - FailCounter = 0; - } - - return 0; -} - /*}}}*/ // HttpMethod::AutoDetectProxy - auto detect proxy /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -1450,5 +858,8 @@ bool HttpMethod::AutoDetectProxy() return true; } /*}}}*/ - - +ServerState * HttpMethod::CreateServerState(URI uri) /*{{{*/ +{ + return new HttpServerState(uri, this); +} + /*}}}*/ diff --git a/methods/http.h b/methods/http.h index 7446119cd..112ce171d 100644 --- a/methods/http.h +++ b/methods/http.h @@ -15,6 +15,8 @@ #include +#include "server.h" + using std::cout; using std::endl; @@ -31,7 +33,7 @@ class CircleBuf unsigned long long StrPos; unsigned long long MaxGet; struct timeval Start; - + static unsigned long long BwReadLimit; static unsigned long long BwTickReadData; static struct timeval BwReadTick; @@ -54,21 +56,20 @@ class CircleBuf return Sz; } void FillOut(); - + public: - Hashes *Hash; - + // Read data in bool Read(int Fd); bool Read(std::string Data); - + // Write data out bool Write(int Fd); bool WriteTillEl(std::string &Data,bool Single = false); - + // Control the write limit - void Limit(long long Max) {if (Max == -1) MaxGet = 0-1; else MaxGet = OutP + Max;} + void Limit(long long Max) {if (Max == -1) MaxGet = 0-1; else MaxGet = OutP + Max;} bool IsLimit() const {return MaxGet == OutP;}; void Print() const {cout << MaxGet << ',' << OutP << endl;}; @@ -84,114 +85,55 @@ class CircleBuf ~CircleBuf(); }; -struct ServerState +struct HttpServerState: public ServerState { - // This is the last parsed Header Line - unsigned int Major; - unsigned int Minor; - unsigned int Result; - char Code[360]; - - // These are some statistics from the last parsed header lines - unsigned long long Size; - signed long long StartPos; - time_t Date; - bool HaveContent; - enum {Chunked,Stream,Closes} Encoding; - enum {Header, Data} State; - bool Persistent; - std::string Location; - - // This is a Persistent attribute of the server itself. - bool Pipeline; - - HttpMethod *Owner; - // This is the connection itself. 
Output is data FROM the server CircleBuf In; CircleBuf Out; int ServerFd; - URI ServerName; - - bool HeaderLine(std::string Line); - bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; - void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; - StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; - State = Header; Persistent = false; ServerFd = -1; - Pipeline = true;}; - - /** \brief Result of the header acquire */ - enum RunHeadersResult { - /** \brief Header ok */ - RUN_HEADERS_OK, - /** \brief IO error while retrieving */ - RUN_HEADERS_IO_ERROR, - /** \brief Parse error after retrieving */ - RUN_HEADERS_PARSE_ERROR, - }; - /** \brief Get the headers before the data */ - RunHeadersResult RunHeaders(); - /** \brief Transfer the data from the socket */ - bool RunData(); - - bool Open(); - bool Close(); - - ServerState(URI Srv,HttpMethod *Owner); - ~ServerState() {Close();}; + + protected: + virtual bool ReadHeaderLines(std::string &Data); + virtual bool LoadNextResponse(bool const ToFile, FileFd * const File); + virtual bool WriteResponse(std::string const &Data); + + public: + virtual void Reset() { ServerState::Reset(); ServerFd = -1; }; + + virtual bool RunData(FileFd * const File); + + virtual bool Open(); + virtual bool IsOpen(); + virtual bool Close(); + virtual bool InitHashes(FileFd &File); + virtual Hashes * GetHashes(); + virtual bool Die(FileFd &File); + virtual bool Flush(FileFd * const File); + virtual bool Go(bool ToFile, FileFd * const File); + + HttpServerState(URI Srv, HttpMethod *Owner); + virtual ~HttpServerState() {Close();}; }; -class HttpMethod : public pkgAcqMethod +class HttpMethod : public ServerMethod { - void SendReq(FetchItem *Itm,CircleBuf &Out); - bool Go(bool ToFile,ServerState *Srv); - bool Flush(ServerState *Srv); - bool ServerDie(ServerState *Srv); - - /** \brief Result of the header parsing */ - enum DealWithHeadersResult { - /** \brief The file is open and ready */ - FILE_IS_OPEN, - /** \brief We got a IMS hit, the file has not changed */ - IMS_HIT, - /** \brief The server reported a unrecoverable error */ - ERROR_UNRECOVERABLE, - /** \brief The server reported a error with a error content page */ - ERROR_WITH_CONTENT_PAGE, - /** \brief An error on the client side */ - ERROR_NOT_FROM_SERVER, - /** \brief A redirect or retry request */ - TRY_AGAIN_OR_REDIRECT - }; - /** \brief Handle the retrieved header data */ - DealWithHeadersResult DealWithHeaders(FetchResult &Res,ServerState *Srv); + public: + virtual void SendReq(FetchItem *Itm); /** \brief Try to AutoDetect the proxy */ bool AutoDetectProxy(); virtual bool Configuration(std::string Message); - - // In the event of a fatal signal this file will be closed and timestamped. 
- static std::string FailFile; - static int FailFd; - static time_t FailTime; - static void SigTerm(int); + + virtual ServerState * CreateServerState(URI uri); protected: - virtual bool Fetch(FetchItem *); - - std::string NextURI; std::string AutoDetectProxyCmd; public: - friend struct ServerState; - - FileFd *File; - ServerState *Server; - - int Loop(); - - HttpMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig) + friend struct HttpServerState; + + HttpMethod() : ServerMethod("1.2",Pipeline | SendConfig) { File = 0; Server = 0; diff --git a/methods/makefile b/methods/makefile index 294c55d23..f8098de74 100644 --- a/methods/makefile +++ b/methods/makefile @@ -48,7 +48,7 @@ include $(PROGRAM_H) PROGRAM=http SLIBS = -lapt-pkg $(SOCKETLIBS) $(INTLLIBS) LIB_MAKES = apt-pkg/makefile -SOURCE = http.cc http_main.cc rfc2553emu.cc connect.cc +SOURCE = http.cc http_main.cc rfc2553emu.cc connect.cc server.cc include $(PROGRAM_H) # The https method @@ -83,7 +83,7 @@ include $(PROGRAM_H) PROGRAM=mirror SLIBS = -lapt-pkg $(SOCKETLIBS) LIB_MAKES = apt-pkg/makefile -SOURCE = mirror.cc http.cc rfc2553emu.cc connect.cc +SOURCE = mirror.cc http.cc rfc2553emu.cc connect.cc server.cc include $(PROGRAM_H) # SSH method symlink diff --git a/methods/server.cc b/methods/server.cc new file mode 100644 index 000000000..a2128441c --- /dev/null +++ b/methods/server.cc @@ -0,0 +1,665 @@ +// -*- mode: cpp; mode: fold -*- +// Description /*{{{*/ +/* ###################################################################### + + HTTP and HTTPS share a lot of common code and these classes are + exactly the dumping ground for this common code + + ##################################################################### */ + /*}}}*/ +// Include Files /*{{{*/ +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +// Internet stuff +#include + +#include "config.h" +#include "connect.h" +#include "rfc2553emu.h" +#include "http.h" + +#include + /*}}}*/ +using namespace std; + +string ServerMethod::FailFile; +int ServerMethod::FailFd = -1; +time_t ServerMethod::FailTime = 0; + +// ServerState::RunHeaders - Get the headers before the data /*{{{*/ +// --------------------------------------------------------------------- +/* Returns 0 if things are OK, 1 if an IO error occurred and 2 if a header + parse error occurred */ +ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File) +{ + State = Header; + + Owner->Status(_("Waiting for headers")); + + Major = 0; + Minor = 0; + Result = 0; + Size = 0; + StartPos = 0; + Encoding = Closes; + HaveContent = false; + time(&Date); + + do + { + string Data; + if (ReadHeaderLines(Data) == false) + continue; + + if (Owner->Debug == true) + clog << Data; + + for (string::const_iterator I = Data.begin(); I < Data.end(); ++I) + { + string::const_iterator J = I; + for (; J != Data.end() && *J != '\n' && *J != '\r'; ++J); + if (HeaderLine(string(I,J)) == false) + return RUN_HEADERS_PARSE_ERROR; + I = J; + } + + // 100 Continue is a Nop... + if (Result == 100) + continue; + + // Tidy up the connection persistance state. 
+ if (Encoding == Closes && HaveContent == true) + Persistent = false; + + return RUN_HEADERS_OK; + } + while (LoadNextResponse(false, File) == true); + + return RUN_HEADERS_IO_ERROR; +} + /*}}}*/ +// ServerState::HeaderLine - Process a header line /*{{{*/ +// --------------------------------------------------------------------- +/* */ +bool ServerState::HeaderLine(string Line) +{ + if (Line.empty() == true) + return true; + + string::size_type Pos = Line.find(' '); + if (Pos == string::npos || Pos+1 > Line.length()) + { + // Blah, some servers use "connection:closes", evil. + Pos = Line.find(':'); + if (Pos == string::npos || Pos + 2 > Line.length()) + return _error->Error(_("Bad header line")); + Pos++; + } + + // Parse off any trailing spaces between the : and the next word. + string::size_type Pos2 = Pos; + while (Pos2 < Line.length() && isspace(Line[Pos2]) != 0) + Pos2++; + + string Tag = string(Line,0,Pos); + string Val = string(Line,Pos2); + + if (stringcasecmp(Tag.c_str(),Tag.c_str()+4,"HTTP") == 0) + { + // Evil servers return no version + if (Line[4] == '/') + { + int const elements = sscanf(Line.c_str(),"HTTP/%3u.%3u %3u%359[^\n]",&Major,&Minor,&Result,Code); + if (elements == 3) + { + Code[0] = '\0'; + if (Owner->Debug == true) + clog << "HTTP server doesn't give Reason-Phrase for " << Result << std::endl; + } + else if (elements != 4) + return _error->Error(_("The HTTP server sent an invalid reply header")); + } + else + { + Major = 0; + Minor = 9; + if (sscanf(Line.c_str(),"HTTP %3u%359[^\n]",&Result,Code) != 2) + return _error->Error(_("The HTTP server sent an invalid reply header")); + } + + /* Check the HTTP response header to get the default persistance + state. */ + if (Major < 1) + Persistent = false; + else + { + if (Major == 1 && Minor == 0) + Persistent = false; + else + Persistent = true; + } + + return true; + } + + if (stringcasecmp(Tag,"Content-Length:") == 0) + { + if (Encoding == Closes) + Encoding = Stream; + HaveContent = true; + + // The length is already set from the Content-Range header + if (StartPos != 0) + return true; + + Size = strtoull(Val.c_str(), NULL, 10); + if (Size >= std::numeric_limits::max()) + return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header")); + else if (Size == 0) + HaveContent = false; + return true; + } + + if (stringcasecmp(Tag,"Content-Type:") == 0) + { + HaveContent = true; + return true; + } + + if (stringcasecmp(Tag,"Content-Range:") == 0) + { + HaveContent = true; + + // §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416 + if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1) + { + StartPos = 1; // ignore Content-Length, it would override Size + HaveContent = false; + } + else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2) + return _error->Error(_("The HTTP server sent an invalid Content-Range header")); + if ((unsigned long long)StartPos > Size) + return _error->Error(_("This HTTP server has broken range support")); + return true; + } + + if (stringcasecmp(Tag,"Transfer-Encoding:") == 0) + { + HaveContent = true; + if (stringcasecmp(Val,"chunked") == 0) + Encoding = Chunked; + return true; + } + + if (stringcasecmp(Tag,"Connection:") == 0) + { + if (stringcasecmp(Val,"close") == 0) + Persistent = false; + if (stringcasecmp(Val,"keep-alive") == 0) + Persistent = true; + return true; + } + + if (stringcasecmp(Tag,"Last-Modified:") == 0) + { + if (RFC1123StrToTime(Val.c_str(), Date) == false) + return _error->Error(_("Unknown date 
format")); + return true; + } + + if (stringcasecmp(Tag,"Location:") == 0) + { + Location = Val; + return true; + } + + return true; +} + /*}}}*/ +// ServerState::ServerState - Constructor /*{{{*/ +ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOut(120), Owner(Owner) +{ + Reset(); +} + /*}}}*/ + +bool ServerMethod::Configuration(string Message) /*{{{*/ +{ + return pkgAcqMethod::Configuration(Message); +} + /*}}}*/ + +// ServerMethod::DealWithHeaders - Handle the retrieved header data /*{{{*/ +// --------------------------------------------------------------------- +/* We look at the header data we got back from the server and decide what + to do. Returns DealWithHeadersResult (see http.h for details). + */ +ServerMethod::DealWithHeadersResult +ServerMethod::DealWithHeaders(FetchResult &Res) +{ + // Not Modified + if (Server->Result == 304) + { + unlink(Queue->DestFile.c_str()); + Res.IMSHit = true; + Res.LastModified = Queue->LastModified; + return IMS_HIT; + } + + /* Redirect + * + * Note that it is only OK for us to treat all redirection the same + * because we *always* use GET, not other HTTP methods. There are + * three redirection codes for which it is not appropriate that we + * redirect. Pass on those codes so the error handling kicks in. + */ + if (AllowRedirect + && (Server->Result > 300 && Server->Result < 400) + && (Server->Result != 300 // Multiple Choices + && Server->Result != 304 // Not Modified + && Server->Result != 306)) // (Not part of HTTP/1.1, reserved) + { + if (Server->Location.empty() == true); + else if (Server->Location[0] == '/' && Queue->Uri.empty() == false) + { + URI Uri = Queue->Uri; + if (Uri.Host.empty() == false) + NextURI = URI::SiteOnly(Uri); + else + NextURI.clear(); + NextURI.append(DeQuoteString(Server->Location)); + return TRY_AGAIN_OR_REDIRECT; + } + else + { + NextURI = DeQuoteString(Server->Location); + URI tmpURI = NextURI; + // Do not allow a redirection to switch protocol + if (tmpURI.Access == "http") + return TRY_AGAIN_OR_REDIRECT; + } + /* else pass through for error message */ + } + // retry after an invalid range response without partial data + else if (Server->Result == 416) + { + struct stat SBuf; + if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0) + { + if ((unsigned long long)SBuf.st_size == Server->Size) + { + // the file is completely downloaded, but was not moved + Server->StartPos = Server->Size; + Server->Result = 200; + Server->HaveContent = false; + } + else if (unlink(Queue->DestFile.c_str()) == 0) + { + NextURI = Queue->Uri; + return TRY_AGAIN_OR_REDIRECT; + } + } + } + + /* We have a reply we dont handle. 
This should indicate a perm server + failure */ + if (Server->Result < 200 || Server->Result >= 300) + { + char err[255]; + snprintf(err,sizeof(err)-1,"HttpError%i",Server->Result); + SetFailReason(err); + _error->Error("%u %s",Server->Result,Server->Code); + if (Server->HaveContent == true) + return ERROR_WITH_CONTENT_PAGE; + return ERROR_UNRECOVERABLE; + } + + // This is some sort of 2xx 'data follows' reply + Res.LastModified = Server->Date; + Res.Size = Server->Size; + + // Open the file + delete File; + File = new FileFd(Queue->DestFile,FileFd::WriteAny); + if (_error->PendingError() == true) + return ERROR_NOT_FROM_SERVER; + + FailFile = Queue->DestFile; + FailFile.c_str(); // Make sure we dont do a malloc in the signal handler + FailFd = File->Fd(); + FailTime = Server->Date; + + if (Server->InitHashes(*File) == false) + { + _error->Errno("read",_("Problem hashing file")); + return ERROR_NOT_FROM_SERVER; + } + if (Server->StartPos > 0) + Res.ResumePoint = Server->StartPos; + + SetNonBlock(File->Fd(),true); + return FILE_IS_OPEN; +} + /*}}}*/ +// ServerMethod::SigTerm - Handle a fatal signal /*{{{*/ +// --------------------------------------------------------------------- +/* This closes and timestamps the open file. This is neccessary to get + resume behavoir on user abort */ +void ServerMethod::SigTerm(int) +{ + if (FailFd == -1) + _exit(100); + close(FailFd); + + // Timestamp + struct utimbuf UBuf; + UBuf.actime = FailTime; + UBuf.modtime = FailTime; + utime(FailFile.c_str(),&UBuf); + + _exit(100); +} + /*}}}*/ +// ServerMethod::Fetch - Fetch an item /*{{{*/ +// --------------------------------------------------------------------- +/* This adds an item to the pipeline. We keep the pipeline at a fixed + depth. */ +bool ServerMethod::Fetch(FetchItem *) +{ + if (Server == 0) + return true; + + // Queue the requests + int Depth = -1; + for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth; + I = I->Next, Depth++) + { + // If pipelining is disabled, we only queue 1 request + if (Server->Pipeline == false && Depth >= 0) + break; + + // Make sure we stick with the same server + if (Server->Comp(I->Uri) == false) + break; + if (QueueBack == I) + { + QueueBack = I->Next; + SendReq(I); + continue; + } + } + + return true; +}; + /*}}}*/ +// ServerMethod::Loop - Main loop /*{{{*/ +int ServerMethod::Loop() +{ + typedef vector StringVector; + typedef vector::iterator StringVectorIterator; + map Redirected; + + signal(SIGTERM,SigTerm); + signal(SIGINT,SigTerm); + + Server = 0; + + int FailCounter = 0; + while (1) + { + // We have no commands, wait for some to arrive + if (Queue == 0) + { + if (WaitFd(STDIN_FILENO) == false) + return 0; + } + + /* Run messages, we can accept 0 (no message) if we didn't + do a WaitFd above.. Otherwise the FD is closed. */ + int Result = Run(true); + if (Result != -1 && (Result != 0 || Queue == 0)) + { + if(FailReason.empty() == false || + _config->FindB("Acquire::http::DependOnSTDIN", true) == true) + return 100; + else + return 0; + } + + if (Queue == 0) + continue; + + // Connect to the server + if (Server == 0 || Server->Comp(Queue->Uri) == false) + { + delete Server; + Server = CreateServerState(Queue->Uri); + } + /* If the server has explicitly said this is the last connection + then we pre-emptively shut down the pipeline and tear down + the connection. 
This will speed up HTTP/1.0 servers a tad + since we don't have to wait for the close sequence to + complete */ + if (Server->Persistent == false) + Server->Close(); + + // Reset the pipeline + if (Server->IsOpen() == false) + QueueBack = Queue; + + // Connnect to the host + if (Server->Open() == false) + { + Fail(true); + delete Server; + Server = 0; + continue; + } + + // Fill the pipeline. + Fetch(0); + + // Fetch the next URL header data from the server. + switch (Server->RunHeaders(File)) + { + case ServerState::RUN_HEADERS_OK: + break; + + // The header data is bad + case ServerState::RUN_HEADERS_PARSE_ERROR: + { + _error->Error(_("Bad header data")); + Fail(true); + RotateDNS(); + continue; + } + + // The server closed a connection during the header get.. + default: + case ServerState::RUN_HEADERS_IO_ERROR: + { + FailCounter++; + _error->Discard(); + Server->Close(); + Server->Pipeline = false; + + if (FailCounter >= 2) + { + Fail(_("Connection failed"),true); + FailCounter = 0; + } + + RotateDNS(); + continue; + } + }; + + // Decide what to do. + FetchResult Res; + Res.Filename = Queue->DestFile; + switch (DealWithHeaders(Res)) + { + // Ok, the file is Open + case FILE_IS_OPEN: + { + URIStart(Res); + + // Run the data + bool Result = true; + if (Server->HaveContent) + Result = Server->RunData(File); + + /* If the server is sending back sizeless responses then fill in + the size now */ + if (Res.Size == 0) + Res.Size = File->Size(); + + // Close the file, destroy the FD object and timestamp it + FailFd = -1; + delete File; + File = 0; + + // Timestamp + struct utimbuf UBuf; + time(&UBuf.actime); + UBuf.actime = Server->Date; + UBuf.modtime = Server->Date; + utime(Queue->DestFile.c_str(),&UBuf); + + // Send status to APT + if (Result == true) + { + Res.TakeHashes(*Server->GetHashes()); + URIDone(Res); + } + else + { + if (Server->IsOpen() == false) + { + FailCounter++; + _error->Discard(); + Server->Close(); + + if (FailCounter >= 2) + { + Fail(_("Connection failed"),true); + FailCounter = 0; + } + + QueueBack = Queue; + } + else + Fail(true); + } + break; + } + + // IMS hit + case IMS_HIT: + { + URIDone(Res); + break; + } + + // Hard server error, not found or something + case ERROR_UNRECOVERABLE: + { + Fail(); + break; + } + + // Hard internal error, kill the connection and fail + case ERROR_NOT_FROM_SERVER: + { + delete File; + File = 0; + + Fail(); + RotateDNS(); + Server->Close(); + break; + } + + // We need to flush the data, the header is like a 404 w/ error text + case ERROR_WITH_CONTENT_PAGE: + { + Fail(); + + // Send to content to dev/null + File = new FileFd("/dev/null",FileFd::WriteExists); + Server->RunData(File); + delete File; + File = 0; + break; + } + + // Try again with a new URL + case TRY_AGAIN_OR_REDIRECT: + { + // Clear rest of response if there is content + if (Server->HaveContent) + { + File = new FileFd("/dev/null",FileFd::WriteExists); + Server->RunData(File); + delete File; + File = 0; + } + + /* Detect redirect loops. No more redirects are allowed + after the same URI is seen twice in a queue item. 
*/ + StringVector &R = Redirected[Queue->DestFile]; + bool StopRedirects = false; + if (R.empty() == true) + R.push_back(Queue->Uri); + else if (R[0] == "STOP" || R.size() > 10) + StopRedirects = true; + else + { + for (StringVectorIterator I = R.begin(); I != R.end(); ++I) + if (Queue->Uri == *I) + { + R[0] = "STOP"; + break; + } + + R.push_back(Queue->Uri); + } + + if (StopRedirects == false) + Redirect(NextURI); + else + Fail(); + + break; + } + + default: + Fail(_("Internal error")); + break; + } + + FailCounter = 0; + } + + return 0; +} + /*}}}*/ diff --git a/methods/server.h b/methods/server.h new file mode 100644 index 000000000..2d43b332f --- /dev/null +++ b/methods/server.h @@ -0,0 +1,142 @@ +// -*- mode: cpp; mode: fold -*- +// Description /*{{{*/ +/* ###################################################################### + + Classes dealing with the abstraction of talking to a end via a text + protocol like HTTP (which is used by the http and https methods) + + ##################################################################### */ + /*}}}*/ + +#ifndef APT_SERVER_H +#define APT_SERVER_H + +#include + +#include + +using std::cout; +using std::endl; + +class Hashes; +class ServerMethod; +class FileFd; + +struct ServerState +{ + // This is the last parsed Header Line + unsigned int Major; + unsigned int Minor; + unsigned int Result; + char Code[360]; + + // These are some statistics from the last parsed header lines + unsigned long long Size; + signed long long StartPos; + time_t Date; + bool HaveContent; + enum {Chunked,Stream,Closes} Encoding; + enum {Header, Data} State; + bool Persistent; + std::string Location; + + // This is a Persistent attribute of the server itself. + bool Pipeline; + URI ServerName; + URI Proxy; + unsigned long TimeOut; + + protected: + ServerMethod *Owner; + + bool HeaderLine(std::string Line); + virtual bool ReadHeaderLines(std::string &Data) = 0; + virtual bool LoadNextResponse(bool const ToFile, FileFd * const File) = 0; + + public: + /** \brief Result of the header acquire */ + enum RunHeadersResult { + /** \brief Header ok */ + RUN_HEADERS_OK, + /** \brief IO error while retrieving */ + RUN_HEADERS_IO_ERROR, + /** \brief Parse error after retrieving */ + RUN_HEADERS_PARSE_ERROR, + }; + /** \brief Get the headers before the data */ + RunHeadersResult RunHeaders(FileFd * const File); + + bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; + virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; + StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; + State = Header; Persistent = false; Pipeline = true;}; + virtual bool WriteResponse(std::string const &Data) = 0; + + /** \brief Transfer the data from the socket */ + virtual bool RunData(FileFd * const File) = 0; + + virtual bool Open() = 0; + virtual bool IsOpen() = 0; + virtual bool Close() = 0; + virtual bool InitHashes(FileFd &File) = 0; + virtual Hashes * GetHashes() = 0; + virtual bool Die(FileFd &File) = 0; + virtual bool Flush(FileFd * const File) = 0; + virtual bool Go(bool ToFile, FileFd * const File) = 0; + + ServerState(URI Srv, ServerMethod *Owner); + virtual ~ServerState() {}; +}; + +class ServerMethod : public pkgAcqMethod +{ + protected: + virtual bool Fetch(FetchItem *); + + ServerState *Server; + std::string NextURI; + FileFd *File; + + unsigned long PipelineDepth; + bool AllowRedirect; + + public: + bool Debug; + + /** \brief Result of the header parsing */ + enum DealWithHeadersResult { + /** 
\brief The file is open and ready */ + FILE_IS_OPEN, + /** \brief We got a IMS hit, the file has not changed */ + IMS_HIT, + /** \brief The server reported a unrecoverable error */ + ERROR_UNRECOVERABLE, + /** \brief The server reported a error with a error content page */ + ERROR_WITH_CONTENT_PAGE, + /** \brief An error on the client side */ + ERROR_NOT_FROM_SERVER, + /** \brief A redirect or retry request */ + TRY_AGAIN_OR_REDIRECT + }; + /** \brief Handle the retrieved header data */ + DealWithHeadersResult DealWithHeaders(FetchResult &Res); + + // In the event of a fatal signal this file will be closed and timestamped. + static std::string FailFile; + static int FailFd; + static time_t FailTime; + static void SigTerm(int); + + virtual bool Configuration(std::string Message); + virtual bool Flush() { return Server->Flush(File); }; + + int Loop(); + + virtual void SendReq(FetchItem *Itm) = 0; + virtual ServerState * CreateServerState(URI uri) = 0; + + ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), PipelineDepth(0), AllowRedirect(false), Debug(false) {}; + virtual ~ServerMethod() {}; +}; + +#endif -- cgit v1.2.3 From 85050e764482197aad5daeeafd95ff6bf680afcb Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 30 Sep 2013 13:42:33 +0200 Subject: fix partial (206 and 416) support in https As lengthy discussed in lp:1157943 partial https support was utterly broken as a 206 response was handled as an (unhandled) error. This is the first part of fixing it by supporting a 206 response and starting to deal with 416. --- methods/https.cc | 100 +++++++++++++++++++++++++++++++++++++------------------ 1 file changed, 67 insertions(+), 33 deletions(-) diff --git a/methods/https.cc b/methods/https.cc index 84ce2d68f..4f00842ba 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -121,7 +121,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm) struct stat SBuf; struct curl_slist *headers=NULL; char curl_errorstr[CURL_ERROR_SIZE]; - long curl_responsecode; URI Uri = Itm->Uri; string remotehost = Uri.Host; @@ -277,7 +276,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0) { char Buf[1000]; - sprintf(Buf, "Range: bytes=%li-", (long) SBuf.st_size - 1); + sprintf(Buf, "Range: bytes=%li-", (long) SBuf.st_size); headers = curl_slist_append(headers, Buf); sprintf(Buf, "If-Range: %s", TimeRFC1123(SBuf.st_mtime).c_str()); headers = curl_slist_append(headers, Buf); @@ -291,75 +290,110 @@ bool HttpsMethod::Fetch(FetchItem *Itm) // go for it - if the file exists, append on it File = new FileFd(Itm->DestFile, FileFd::WriteAny); if (File->Size() > 0) - File->Seek(File->Size() - 1); - + File->Seek(File->Size()); + // keep apt updated Res.Filename = Itm->DestFile; // get it! 
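   // The request above may carry Range and If-Range headers, so the response
   // can come back in several flavours, distinguished after the transfer by
   // the response code:
   //  * 206 - the server sends only the missing tail of the file
   //  * 200 - the If-Range condition failed (or ranges are unsupported) and
   //          the complete file is sent again
   //  * 304 (or CURLINFO_CONDITION_UNMET) - our copy is already up to date
   //  * 416 - the requested range is not satisfiable, e.g. the file was
   //          already completely downloaded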
CURLcode success = curl_easy_perform(curl); + long curl_responsecode; curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &curl_responsecode); - long curl_servdate; - curl_easy_getinfo(curl, CURLINFO_FILETIME, &curl_servdate); - // If the server returns 200 OK but the If-Modified-Since condition is not // met, CURLINFO_CONDITION_UNMET will be set to 1 long curl_condition_unmet = 0; curl_easy_getinfo(curl, CURLINFO_CONDITION_UNMET, &curl_condition_unmet); File->Close(); + curl_slist_free_all(headers); // cleanup - if(success != 0 || (curl_responsecode != 200 && curl_responsecode != 304)) + if (success != 0) { _error->Error("%s", curl_errorstr); - // unlink, no need keep 401/404 page content in partial/ unlink(File->Name().c_str()); - Fail(); + return false; + } + + // server says file not modified + if (curl_responsecode == 304 || curl_condition_unmet == 1) + { + unlink(File->Name().c_str()); + Res.IMSHit = true; + Res.LastModified = Itm->LastModified; + Res.Size = 0; + URIDone(Res); return true; } - // Timestamp - struct utimbuf UBuf; - if (curl_servdate != -1) { - UBuf.actime = curl_servdate; - UBuf.modtime = curl_servdate; - utime(File->Name().c_str(),&UBuf); + if (curl_responsecode != 200 && // OK + curl_responsecode != 206 && // Partial + curl_responsecode != 416) // invalid Range + { + char err[255]; + snprintf(err, sizeof(err) - 1, "HttpError%ld", curl_responsecode); + SetFailReason(err); + _error->Error("%s", err); + // unlink, no need keep 401/404 page content in partial/ + unlink(File->Name().c_str()); + return false; + } + + struct stat resultStat; + if (unlikely(stat(File->Name().c_str(), &resultStat) != 0)) + { + _error->Errno("stat", "Unable to access file %s", File->Name().c_str()); + return false; + } + Res.Size = resultStat.st_size; + + // invalid range-request + if (curl_responsecode == 416) + { + unlink(File->Name().c_str()); + Res.Size = 0; + delete File; + Redirect(Itm->Uri); + return true; } // check the downloaded result - struct stat Buf; - if (stat(File->Name().c_str(),&Buf) == 0) + if (curl_responsecode == 304 || curl_condition_unmet) { - Res.Filename = File->Name(); - Res.LastModified = Buf.st_mtime; - Res.IMSHit = false; - if (curl_responsecode == 304 || curl_condition_unmet) - { - unlink(File->Name().c_str()); - Res.IMSHit = true; - Res.LastModified = Itm->LastModified; - Res.Size = 0; - URIDone(Res); - return true; - } - Res.Size = Buf.st_size; + unlink(File->Name().c_str()); + Res.IMSHit = true; + Res.LastModified = Itm->LastModified; + Res.Size = 0; + URIDone(Res); + return true; } + Res.IMSHit = false; + + // Timestamp + curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified); + if (Res.LastModified != -1) + { + struct utimbuf UBuf; + UBuf.actime = Res.LastModified; + UBuf.modtime = Res.LastModified; + utime(File->Name().c_str(),&UBuf); + } + else + Res.LastModified = resultStat.st_mtime; // take hashes Hashes Hash; FileFd Fd(Res.Filename, FileFd::ReadOnly); Hash.AddFD(Fd); Res.TakeHashes(Hash); - + // keep apt updated URIDone(Res); // cleanup Res.Size = 0; delete File; - curl_slist_free_all(headers); return true; }; -- cgit v1.2.3 From fd46d30571eb240ec3aae792e7a56061ede50524 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 30 Sep 2013 16:41:16 +0200 Subject: handle complete responses to https range requests Servers might respond with a complete file either because they don't support Ranges at all or the If-Range condition isn't statisfied, so we have to parse the headers curl gets ourself to seek or truncate the file we have so far. 
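(A rough sketch of the mechanism this relies on, with illustrative names rather
than the actual implementation: libcurl hands every response header line to a
callback registered via CURLOPT_HEADERFUNCTION, and the pointer set with
CURLOPT_WRITEHEADER is passed through as userdata, so the status line and
Content-Range can be inspected - and the partial file seeked or truncated -
before any body data is written.)

    // minimal sketch only, not APT's actual parse_header()
    #include <curl/curl.h>
    #include <cstdio>
    #include <string>

    struct ResponseInfo { long Status; unsigned long long StartPos; };

    static size_t header_cb(char *buffer, size_t size, size_t nitems, void *userdata)
    {
       ResponseInfo *Info = static_cast<ResponseInfo *>(userdata);
       std::string const line(buffer, size * nitems);
       if (line.compare(0, 5, "HTTP/") == 0)
          sscanf(line.c_str(), "HTTP/%*u.%*u %ld", &Info->Status);
       else if (line.compare(0, 14, "Content-Range:") == 0)
          sscanf(line.c_str(), "Content-Range: bytes %llu-", &Info->StartPos);
       else if (line == "\r\n")
       {
          // end of headers: this is the point to seek or truncate the partial file
       }
       return size * nitems; // returning anything else aborts the transfer
    }

    // registration, given an initialised CURL handle and ResponseInfo info = { 0, 0 }:
    //    curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, header_cb);
    //    curl_easy_setopt(curl, CURLOPT_WRITEHEADER, &info);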
This also finially adds the testcase testing a bunch of partial situations for both, http and https - which is now all green. Closes: 617643, 667699 LP: 1157943 --- methods/http.cc | 5 ++ methods/http.h | 1 + methods/https.cc | 76 +++++++++++++------- methods/https.h | 29 ++++++++ methods/makefile | 2 +- methods/server.h | 4 +- test/integration/apt.pem | 49 +++++++++++++ test/integration/framework | 92 +++++++++++++++++++------ test/integration/test-partial-file-support | 107 +++++++++++++++++++++++++++++ 9 files changed, 318 insertions(+), 47 deletions(-) create mode 100644 test/integration/apt.pem create mode 100755 test/integration/test-partial-file-support diff --git a/methods/http.cc b/methods/http.cc index d2f084b04..71a02e53a 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -863,3 +863,8 @@ ServerState * HttpMethod::CreateServerState(URI uri) /*{{{*/ return new HttpServerState(uri, this); } /*}}}*/ +void HttpMethod::RotateDNS() /*{{{*/ +{ + ::RotateDNS(); +} + /*}}}*/ diff --git a/methods/http.h b/methods/http.h index 112ce171d..02c04e8ae 100644 --- a/methods/http.h +++ b/methods/http.h @@ -126,6 +126,7 @@ class HttpMethod : public ServerMethod virtual bool Configuration(std::string Message); virtual ServerState * CreateServerState(URI uri); + virtual void RotateDNS(); protected: std::string AutoDetectProxyCmd; diff --git a/methods/https.cc b/methods/https.cc index 4f00842ba..2a562434b 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -36,6 +36,41 @@ /*}}}*/ using namespace std; +size_t +HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) +{ + size_t len = size * nmemb; + HttpsMethod *me = (HttpsMethod *)userp; + std::string line((char*) buffer, len); + for (--len; len > 0; --len) + if (isspace(line[len]) == 0) + { + ++len; + break; + } + line.erase(len); + + if (line.empty() == true) + { + if (me->Server->Result != 416 && me->Server->StartPos != 0) + ; + else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize()) + { + me->Server->Result = 200; + me->Server->StartPos = me->Server->Size; + } + else + me->Server->StartPos = 0; + + me->File->Truncate(me->Server->StartPos); + me->File->Seek(me->Server->StartPos); + } + else if (me->Server->HeaderLine(line) == false) + return 0; + + return size*nmemb; +} + size_t HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) { @@ -59,6 +94,14 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double dlnow, return 0; } +// HttpsServerState::HttpsServerState - Constructor /*{{{*/ +HttpsServerState::HttpsServerState(URI Srv,HttpsMethod *Owner) : ServerState(Srv, NULL) +{ + TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut); + Reset(); +} + /*}}}*/ + void HttpsMethod::SetupProxy() /*{{{*/ { URI ServerName = Queue->Uri; @@ -136,6 +179,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm) // callbacks curl_easy_setopt(curl, CURLOPT_URL, static_cast(Uri).c_str()); + curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header); + curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this); curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data); curl_easy_setopt(curl, CURLOPT_WRITEDATA, this); curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback); @@ -289,16 +334,13 @@ bool HttpsMethod::Fetch(FetchItem *Itm) // go for it - if the file exists, append on it File = new FileFd(Itm->DestFile, FileFd::WriteAny); - if (File->Size() > 0) - File->Seek(File->Size()); + Server = new HttpsServerState(Itm->Uri, this); // keep apt updated Res.Filename = 
Itm->DestFile; // get it! CURLcode success = curl_easy_perform(curl); - long curl_responsecode; - curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &curl_responsecode); // If the server returns 200 OK but the If-Modified-Since condition is not // met, CURLINFO_CONDITION_UNMET will be set to 1 @@ -317,7 +359,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) } // server says file not modified - if (curl_responsecode == 304 || curl_condition_unmet == 1) + if (Server->Result == 304 || curl_condition_unmet == 1) { unlink(File->Name().c_str()); Res.IMSHit = true; @@ -326,13 +368,14 @@ bool HttpsMethod::Fetch(FetchItem *Itm) URIDone(Res); return true; } + Res.IMSHit = false; - if (curl_responsecode != 200 && // OK - curl_responsecode != 206 && // Partial - curl_responsecode != 416) // invalid Range + if (Server->Result != 200 && // OK + Server->Result != 206 && // Partial + Server->Result != 416) // invalid Range { char err[255]; - snprintf(err, sizeof(err) - 1, "HttpError%ld", curl_responsecode); + snprintf(err, sizeof(err) - 1, "HttpError%i", Server->Result); SetFailReason(err); _error->Error("%s", err); // unlink, no need keep 401/404 page content in partial/ @@ -349,7 +392,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) Res.Size = resultStat.st_size; // invalid range-request - if (curl_responsecode == 416) + if (Server->Result == 416) { unlink(File->Name().c_str()); Res.Size = 0; @@ -358,18 +401,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm) return true; } - // check the downloaded result - if (curl_responsecode == 304 || curl_condition_unmet) - { - unlink(File->Name().c_str()); - Res.IMSHit = true; - Res.LastModified = Itm->LastModified; - Res.Size = 0; - URIDone(Res); - return true; - } - Res.IMSHit = false; - // Timestamp curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified); if (Res.LastModified != -1) @@ -408,4 +439,3 @@ int main() return Mth.Run(); } - diff --git a/methods/https.h b/methods/https.h index 293e288e0..8632d6d02 100644 --- a/methods/https.h +++ b/methods/https.h @@ -14,24 +14,53 @@ #include #include +#include "server.h" + using std::cout; using std::endl; class HttpsMethod; class FileFd; +class HttpsServerState : public ServerState +{ + protected: + virtual bool ReadHeaderLines(std::string &Data) { return false; } + virtual bool LoadNextResponse(bool const ToFile, FileFd * const File) { return false; } + + public: + virtual bool WriteResponse(std::string const &Data) { return false; } + + /** \brief Transfer the data from the socket */ + virtual bool RunData(FileFd * const File) { return false; } + + virtual bool Open() { return false; } + virtual bool IsOpen() { return false; } + virtual bool Close() { return false; } + virtual bool InitHashes(FileFd &File) { return false; } + virtual Hashes * GetHashes() { return NULL; } + virtual bool Die(FileFd &File) { return false; } + virtual bool Flush(FileFd * const File) { return false; } + virtual bool Go(bool ToFile, FileFd * const File) { return false; } + + HttpsServerState(URI Srv, HttpsMethod *Owner); + virtual ~HttpsServerState() {Close();}; +}; + class HttpsMethod : public pkgAcqMethod { // minimum speed in bytes/se that triggers download timeout handling static const int DL_MIN_SPEED = 10; virtual bool Fetch(FetchItem *); + static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp); static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp); static int progress_callback(void *clientp, double dltotal, double dlnow, double ultotal, double ulnow); void SetupProxy(); CURL *curl; 
FetchResult Res; + HttpsServerState *Server; public: FileFd *File; diff --git a/methods/makefile b/methods/makefile index f8098de74..6b7781294 100644 --- a/methods/makefile +++ b/methods/makefile @@ -55,7 +55,7 @@ include $(PROGRAM_H) PROGRAM=https SLIBS = -lapt-pkg -lcurl $(INTLLIBS) LIB_MAKES = apt-pkg/makefile -SOURCE = https.cc +SOURCE = https.cc server.cc include $(PROGRAM_H) # The ftp method diff --git a/methods/server.h b/methods/server.h index 2d43b332f..4dc6a1f2f 100644 --- a/methods/server.h +++ b/methods/server.h @@ -49,11 +49,12 @@ struct ServerState protected: ServerMethod *Owner; - bool HeaderLine(std::string Line); virtual bool ReadHeaderLines(std::string &Data) = 0; virtual bool LoadNextResponse(bool const ToFile, FileFd * const File) = 0; public: + bool HeaderLine(std::string Line); + /** \brief Result of the header acquire */ enum RunHeadersResult { /** \brief Header ok */ @@ -134,6 +135,7 @@ class ServerMethod : public pkgAcqMethod virtual void SendReq(FetchItem *Itm) = 0; virtual ServerState * CreateServerState(URI uri) = 0; + virtual void RotateDNS() = 0; ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), PipelineDepth(0), AllowRedirect(false), Debug(false) {}; virtual ~ServerMethod() {}; diff --git a/test/integration/apt.pem b/test/integration/apt.pem new file mode 100644 index 000000000..f48df054d --- /dev/null +++ b/test/integration/apt.pem @@ -0,0 +1,49 @@ +-----BEGIN PRIVATE KEY----- +MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCt4R1Q2oYF6utL +19GBhnlHW8L2BI7PRFWge/ZpqIZWsaFcb30FV86Z6aXXZmgfEJ2814ZZYD1IKeCe +JsJpns7B4vYe1v64r995ZNBQAAKIYjICkKZOBgOphV+ChBfrCctVXgfLbMP6iBdH +J02wHzSCCdZm0sdVl9tB5l/OyJU8Mb4KB3btBhfZfY2M6lU+FOjcXs1LOduUrv7K +fZ+DEalvVGkomLtHtD0qb2vkqFrTjVCkziUVWhhxFFflt08oQ01Clxpl+uv7rOQo +jtkJ1LrMuv7iPfaZ/z3qLiFxZYG1BCGEwTOKCtJo6bgFzXiN3q7Q5FFlmv851x2J +Dn8C7Qm7AgMBAAECggEAE3q6vAofJZ6Ryadd8zLLd3ESQFl2XkX7icUZb/DPS/sO +ZrqeuPCDVr7UM3NnisNjyHoktPKRKvp2DYGuGgMOiq4QgJf5ZVten8zpgWze28SU +cbEe0HLgCifE8Ww2+b/ZJbEpEmMW+YQxh2khzO9SBJdxi4dliXM/vvw+E35pKZsB +s6glrz6VQAxxa9fY4fLnB2DafHy+pUvRVw8gC6PCM9jXN9tMYAqztsJu7aaanNyT +HX2UDWa8hxVx6t5UQZuxvst9N+RcEwmVCR2qlfZt/VRBRibBm62crEKbTD00mNHQ +4AIDn3g6Y3SXpDlgtNpjLyBL3fODPIwqwGdblaSKkQKBgQDYXecu0Eda7kbR5ciW +IAn8XOxsBIkkh8YVl2gRiiajRVoeiYBHaW9TyuQiaWrftiDQxB/N4G2focTXy/7O +VJn6e/SUoO/ZGRw2GbTxLUQptgvFsejYCcW9XpC8MCwE/y2swiY7JM0WR8cV2nCk +a/Cls6f1LjL13aFO0PAorEcahQKBgQDNuth6EHZVwfDgUuqhRw4HIIpfsfiA3UOd +b5k/NsfQIev1YUqnfucgInNPDq2Jf8eTQw3TKaszo2DCjDffCsEgM09Tym143Bd6 +AsMuqAStsE3IEC7pnmh95l29/7mh4OuG5cp5JUx0Pi5PkuJ6ywA8P1rM1MB9Zf52 +NGJCo1pnPwKBgQCx/n4i+uDYo1DLd/dN2UmdvGwaaJjR3ohMVuQcGcSzaGg82u0W +0lvtWOnYjVSIeXIBjHaFjW1hd1lSFdWms96AO9z3MHZf6NJWh0tdZNnAXqzMlBFz +OIbdxJ/Y0OBFtA9FIesFmL7G54GWLr+f49Ry3Jr9jmYJ8au0BRqsux07aQKBgC4q +CT2KyCMCO/z6XjAGc71hres/UlYIUI3ZZvfqYPfxRLNxO4FOVqq9UEajMomyJKSE +3WtO5F3YAXRmZnskPKXvHZPdzqbaLGJykD298h7PewSzrPM7WpM1yD9ETPFoOTGy +CrcYiYlkEpxEh5GqT8k1JjjkXLVG18zKgGoXocedAoGAQyU2DCNfxwzIJfFHKZEG +zpni72cR68Tu3AhW/38vMR2ZPca4KzXrUA52T+j7vkQC38LHm/mzNXNP7Vya0PJ3 +WoYOcLtg2uFPh0P/35ArEzuNooLsvulgg1jsamPbF8KAvJZKZHr30hlC/JGYSBbV +bnkzJTShsKzHIUiLtQ8Ja+E= +-----END PRIVATE KEY----- +-----BEGIN CERTIFICATE----- +MIIDezCCAmOgAwIBAgIJAJ39xapQo0vLMA0GCSqGSIb3DQEBBQUAMFMxCzAJBgNV +BAYTAkRFMRMwEQYDVQQIDApTb21lLVN0YXRlMRswGQYDVQQKDBJBUFQgVGVzdGNh +c2VzIEdtYkgxEjAQBgNVBAMMCWxvY2FsaG9zdDAgFw0xMzA5MTYwODQ4MzVaGA80 +NzUxMDgxMzA4NDgzNVowUzELMAkGA1UEBhMCREUxEzARBgNVBAgMClNvbWUtU3Rh +dGUxGzAZBgNVBAoMEkFQVCBUZXN0Y2FzZXMgR21iSDESMBAGA1UEAwwJbG9jYWxo 
+b3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAreEdUNqGBerrS9fR +gYZ5R1vC9gSOz0RVoHv2aaiGVrGhXG99BVfOmeml12ZoHxCdvNeGWWA9SCngnibC +aZ7OweL2Htb+uK/feWTQUAACiGIyApCmTgYDqYVfgoQX6wnLVV4Hy2zD+ogXRydN +sB80ggnWZtLHVZfbQeZfzsiVPDG+Cgd27QYX2X2NjOpVPhTo3F7NSznblK7+yn2f +gxGpb1RpKJi7R7Q9Km9r5Kha041QpM4lFVoYcRRX5bdPKENNQpcaZfrr+6zkKI7Z +CdS6zLr+4j32mf896i4hcWWBtQQhhMEzigrSaOm4Bc14jd6u0ORRZZr/OdcdiQ5/ +Au0JuwIDAQABo1AwTjAdBgNVHQ4EFgQUhd26E7ykEYRTDbgMzkYtFtENhSkwHwYD +VR0jBBgwFoAUhd26E7ykEYRTDbgMzkYtFtENhSkwDAYDVR0TBAUwAwEB/zANBgkq +hkiG9w0BAQUFAAOCAQEAWcyMKi0Vc4beGV7w4Qft0/2P68jjMlQRdgkz+gGXbMVr +//KhqR3PbgFmHHpUsZ718AHeerNNdfFzOUptiAiOqH2muyAGdeWCxJ8KcU0sic8x +/h3TOzMYfEozhgMSJp9YW1z655uHcb15S7jb4zZwXwGyQzxwXT35SKj2mCqSbjIb +G987DGI+MtyoGRXhIwnBEsGTI1ck3NoeXBJ/tS/Ma8gUUC2xldMSprtHjeUHvZV2 +iz/HTqGlMLGW96AVeZiFNiC1fJ6pvref2XW5MkkvQm8tOi2cSrwJc9CgnCpCxkLp +liRsbwAduwkA26XzEomMR7yyYS5pm0Eu0cO9X39FKQ== +-----END CERTIFICATE----- diff --git a/test/integration/framework b/test/integration/framework index 4003d932c..a2bb871cc 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -795,6 +795,13 @@ signreleasefiles() { msgdone "info" } +rewritesourceslist() { + local APTARCHIVE="file://$(readlink -f "${TMPWORKINGDIRECTORY}/aptarchive")" + for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do + sed -i $LIST -e "s#$APTARCHIVE#${1}#" -e "s#http://localhost:8080/#${1}#" -e "s#http://localhost:4433/#${1}#" + done +} + changetowebserver() { local LOG='/dev/null' if test -x ${BUILDDIRECTORY}/aptwebserver; then @@ -806,31 +813,32 @@ changetowebserver() { fi addtrap "kill $PID;" cd - > /dev/null - elif [ $# -gt 0 ]; then - msgdie 'Need the aptwebserver when passing arguments for the webserver' - elif which weborf > /dev/null; then - weborf -xb aptarchive/ >$LOG 2>&1 & - addtrap "kill $!;" - elif which gatling > /dev/null; then - cd aptarchive - gatling -p 8080 -F -S >$LOG 2>&1 & - addtrap "kill $!;" - cd - > /dev/null - elif which lighttpd > /dev/null; then - echo "server.document-root = \"$(readlink -f ./aptarchive)\" -server.port = 8080 -server.stat-cache-engine = \"disable\"" > lighttpd.conf - lighttpd -t -f lighttpd.conf >/dev/null || msgdie 'Can not change to webserver: our lighttpd config is invalid' - lighttpd -D -f lighttpd.conf >$LOG 2>&1 & - addtrap "kill $!;" else msgdie 'You have to build aptwerbserver or install a webserver' fi - local APTARCHIVE="file://$(readlink -f ./aptarchive)" - for LIST in $(find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list'); do - sed -i $LIST -e "s#$APTARCHIVE#http://localhost:8080/#" - done - return 0 + if [ "$1" != '--no-rewrite' ]; then + rewritesourceslist 'http://localhost:8080/' + fi +} + +changetohttpswebserver() { + if ! which stunnel4 >/dev/null; then + msgdie 'You need to install stunnel4 for https testcases' + fi + if [ ! 
-e "${TMPWORKINGDIRECTORY}/aptarchive/aptwebserver.pid" ]; then + changetowebserver --no-rewrite + fi + echo "pid = ${TMPWORKINGDIRECTORY}/aptarchive/stunnel.pid +cert = ${TESTDIRECTORY}/apt.pem + +[https] +accept = 4433 +connect = 8080 +" > ${TMPWORKINGDIRECTORY}/stunnel.conf + stunnel4 "${TMPWORKINGDIRECTORY}/stunnel.conf" + local PID="$(cat ${TMPWORKINGDIRECTORY}/aptarchive/stunnel.pid)" + addtrap 'prefix' "kill ${PID};" + rewritesourceslist 'https://localhost:4433/' } changetocdrom() { @@ -848,6 +856,46 @@ changetocdrom() { find rootdir/etc/apt/sources.list.d/ -name 'apt-test-*.list' -delete } +downloadfile() { + PROTO="$(echo "$1" | cut -d':' -f 1)" + local DOWNLOG="${TMPWORKINGDIRECTORY}/download.log" + rm -f "$DOWNLOG" + touch "$DOWNLOG" + { + echo "601 Configuration +Config-Item: Acquire::https::CaInfo=${TESTDIR}/apt.pem +Config-Item: Debug::Acquire::${PROTO}=1 + +600 Acquire URI +URI: $1 +Filename: ${2} +" + # simple worker keeping stdin open until we are done (201) or error (400) + # and requesting new URIs on try-agains/redirects inbetween + { tail -n 999 -f "$DOWNLOG" & echo "TAILPID: $!"; } | while read f1 f2; do + if [ "$f1" = 'TAILPID:' ]; then + TAILPID="$f2" + elif [ "$f1" = 'New-URI:' ]; then + echo "600 Acquire URI +URI: $f2 +Filename: ${2} +" + elif [ "$f1" = '201' ] || [ "$f1" = '400' ]; then + # tail would only die on next read – which never happens + test -z "$TAILPID" || kill -s HUP "$TAILPID" + break + fi + done + } | LD_LIBRARY_PATH=${BUILDDIRECTORY} ${BUILDDIRECTORY}/methods/${PROTO} 2>&1 | tee "$DOWNLOG" + rm "$DOWNLOG" + # only if the file exists the download was successful + if [ -e "$2" ]; then + return 0 + else + return 1 + fi +} + checkdiff() { local DIFFTEXT="$($(which diff) -u $* | sed -e '/^---/ d' -e '/^+++/ d' -e '/^@@/ d')" if [ -n "$DIFFTEXT" ]; then diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support new file mode 100755 index 000000000..8d1c51ae0 --- /dev/null +++ b/test/integration/test-partial-file-support @@ -0,0 +1,107 @@ +#!/bin/sh +set -e + +TESTDIR=$(readlink -f $(dirname $0)) +. $TESTDIR/framework +setupenvironment +configarchitecture 'amd64' + +changetowebserver + +copysource() { + dd if="$1" bs=1 count="$2" of="$3" 2>/dev/null + touch -d "$(stat --format '%y' "${TESTFILE}")" "$3" +} + +testdownloadfile() { + local DOWNLOG='download-testfile.log' + rm -f "$DOWNLOG" + msgtest "Testing download of file $2 with" "$1" + if ! 
downloadfile "$2" "$3" > "$DOWNLOG"; then + cat "$DOWNLOG" + msgfail + else + msgpass + fi + cat "$DOWNLOG" | while read field hash; do + local EXPECTED + case "$field" in + 'MD5Sum-Hash:') EXPECTED="$(md5sum "$TESTFILE" | cut -d' ' -f 1)";; + 'SHA1-Hash:') EXPECTED="$(sha1sum "$TESTFILE" | cut -d' ' -f 1)";; + 'SHA256-Hash:') EXPECTED="$(sha256sum "$TESTFILE" | cut -d' ' -f 1)";; + 'SHA512-Hash:') EXPECTED="$(sha512sum "$TESTFILE" | cut -d' ' -f 1)";; + *) continue;; + esac + if [ "$4" = '=' ]; then + msgtest 'Test downloaded file for correct' "$field" + else + msgtest 'Test downloaded file does not match in' "$field" + fi + if [ "$EXPECTED" "$4" "$hash" ]; then + msgpass + else + cat "$DOWNLOG" + msgfail "expected: $EXPECTED ; got: $hash" + fi + done +} + +testwebserverlaststatuscode() { + STATUS="$(mktemp)" + addtrap "rm $STATUS;" + msgtest 'Test last status code from the webserver was' "$1" + downloadfile "http://localhost:8080/_config/find/aptwebserver::last-status-code" "$STATUS" >/dev/null + if [ "$(cat "$STATUS")" = "$1" ]; then + msgpass + else + cat download-testfile.log + msgfail "Status was $(cat "$STATUS")" + fi +} + + +TESTFILE='aptarchive/testfile' +cp -a ${TESTDIR}/framework $TESTFILE + +testrun() { + downloadfile "$1/_config/set/aptwebserver::support::range/true" '/dev/null' >/dev/null + testwebserverlaststatuscode '200' + + copysource $TESTFILE 0 ./testfile + testdownloadfile 'no data' "${1}/testfile" './testfile' '=' + testwebserverlaststatuscode '200' + + copysource $TESTFILE 20 ./testfile + testdownloadfile 'valid partial data' "${1}/testfile" './testfile' '=' + testwebserverlaststatuscode '206' + + copysource /dev/zero 20 ./testfile + testdownloadfile 'invalid partial data' "${1}/testfile" './testfile' '!=' + testwebserverlaststatuscode '206' + + copysource $TESTFILE 1M ./testfile + testdownloadfile 'completely downloaded file' "${1}/testfile" './testfile' '=' + testwebserverlaststatuscode '416' + + copysource /dev/zero 1M ./testfile + testdownloadfile 'too-big partial file' "${1}/testfile" './testfile' '=' + testwebserverlaststatuscode '200' + + copysource /dev/zero 20 ./testfile + touch ./testfile + testdownloadfile 'old data' "${1}/testfile" './testfile' '=' + testwebserverlaststatuscode '200' + + downloadfile "$1/_config/set/aptwebserver::support::range/false" '/dev/null' >/dev/null + testwebserverlaststatuscode '200' + + copysource $TESTFILE 20 ./testfile + testdownloadfile 'no server support' "${1}/testfile" './testfile' '=' + testwebserverlaststatuscode '200' +} + +testrun 'http://localhost:8080' + +changetohttpswebserver + +testrun 'https://localhost:4433' -- cgit v1.2.3 From 8fa042ca39dcb39d544f015f4a924c5dbc10ad2c Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 30 Sep 2013 18:51:40 +0200 Subject: don't consider holds for autoremoval MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We can't remove packages which are held back by the user with a hold, so marking them (or its dependencies) as garbage will lead our autoremover into madness – and given that the package is important enough that the user has held it back it can't be garbage (at least at the moment), so even if a front-end wants to use the info just for information display its a good idea to not consider it garbage for them. 
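(For illustration only - a minimal sketch against the public libapt-pkg API,
not code from this change - of how a front-end could list the packages the
resolver considers autoremovable; after this fix a package put on hold is kept
out of that set, so no separate hold check is needed:)

    #include <apt-pkg/cachefile.h>
    #include <apt-pkg/configuration.h>
    #include <apt-pkg/depcache.h>
    #include <apt-pkg/init.h>
    #include <apt-pkg/pkgsystem.h>
    #include <iostream>

    int main()
    {
       pkgInitConfig(*_config);
       pkgInitSystem(*_config, _system);

       pkgCacheFile CacheFile;
       pkgDepCache * const DepCache = CacheFile.GetDepCache();
       if (DepCache == NULL)
          return 1;
       DepCache->MarkAndSweep();

       pkgCache * const Cache = CacheFile.GetPkgCache();
       for (pkgCache::PkgIterator P = Cache->PkgBegin(); P.end() == false; ++P)
          // held packages now survive the sweep, so they never show up here
          if ((*DepCache)[P].Garbage == true && P->CurrentVer != 0)
             std::cout << P.FullName(true) << std::endl;
       return 0;
    }

    // build with something like: g++ list-garbage.cc -lapt-pkg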
Closes: 724995 --- apt-pkg/depcache.cc | 7 ++++--- apt-pkg/depcache.h | 2 +- test/integration/test-apt-get-autoremove | 20 ++++++++++++++++++++ 3 files changed, 25 insertions(+), 4 deletions(-) diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc index 978a893f7..a06789cdf 100644 --- a/apt-pkg/depcache.cc +++ b/apt-pkg/depcache.cc @@ -896,6 +896,7 @@ char const* PrintMode(char const mode) case pkgDepCache::ModeInstall: return "Install"; case pkgDepCache::ModeKeep: return "Keep"; case pkgDepCache::ModeDelete: return "Delete"; + case pkgDepCache::ModeGarbage: return "Garbage"; default: return "UNKNOWN"; } } @@ -1726,8 +1727,6 @@ bool pkgDepCache::MarkRequired(InRootSetFunc &userFunc) follow_recommends = MarkFollowsRecommends(); follow_suggests = MarkFollowsSuggests(); - - // do the mark part, this is the core bit of the algorithm for(PkgIterator p = PkgBegin(); !p.end(); ++p) { @@ -1738,7 +1737,9 @@ bool pkgDepCache::MarkRequired(InRootSetFunc &userFunc) // be nice even then a required package violates the policy (#583517) // and do the full mark process also for required packages (p.CurrentVer().end() != true && - p.CurrentVer()->Priority == pkgCache::State::Required)) + p.CurrentVer()->Priority == pkgCache::State::Required) || + // packages which can't be changed (like holds) can't be garbage + (IsModeChangeOk(ModeGarbage, p, 0, false) == false)) { // the package is installed (and set to keep) if(PkgState[p->ID].Keep() && !p.CurrentVer().end()) diff --git a/apt-pkg/depcache.h b/apt-pkg/depcache.h index d9c95349b..61c9aa559 100644 --- a/apt-pkg/depcache.h +++ b/apt-pkg/depcache.h @@ -128,7 +128,7 @@ class pkgDepCache : protected pkgCache::Namespace enum InternalFlags {AutoKept = (1 << 0), Purge = (1 << 1), ReInstall = (1 << 2), Protected = (1 << 3)}; enum VersionTypes {NowVersion, InstallVersion, CandidateVersion}; - enum ModeList {ModeDelete = 0, ModeKeep = 1, ModeInstall = 2}; + enum ModeList {ModeDelete = 0, ModeKeep = 1, ModeInstall = 2, ModeGarbage = 3}; /** \brief Represents an active action group. * diff --git a/test/integration/test-apt-get-autoremove b/test/integration/test-apt-get-autoremove index dc30cde34..68ea1c574 100755 --- a/test/integration/test-apt-get-autoremove +++ b/test/integration/test-apt-get-autoremove @@ -49,3 +49,23 @@ Install: unrelated:i386 (1), debhelper:i386 (8.0.0), po-debconf:i386 (1.0.16, au Remove: debhelper:i386 (8.0.0) Remove: po-debconf:i386 (1.0.16)' + +testsuccess aptget install debhelper -y +testdpkginstalled 'unrelated' 'debhelper' 'po-debconf' +testsuccess aptmark auto debhelper + +testmarkedauto 'debhelper' 'po-debconf' +testequal 'Reading package lists... +Building dependency tree... +Reading state information... +The following packages will be REMOVED: + debhelper po-debconf +0 upgraded, 0 newly installed, 2 to remove and 0 not upgraded. +Remv debhelper [8.0.0] +Remv po-debconf [1.0.16]' aptget autoremove -s + +testsuccess aptmark hold debhelper +testequal 'Reading package lists... +Building dependency tree... +Reading state information... +0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.' 
aptget autoremove -s -- cgit v1.2.3 From 18908589a096719eb4ce76123596865093d6ff9d Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Wed, 2 Oct 2013 13:34:59 +0200 Subject: tests: fix some problems travis encounters Git-Dch: Ignore --- test/integration/framework | 82 ++++++------- .../skip-bug-601016-description-translation | 133 +++++++++++++++++++++ .../test-bug-601016-description-translation | 133 --------------------- ...est-bug-633350-do-not-kill-last-char-in-Release | 2 +- .../test-bug-679371-apt-get-autoclean-multiarch | 5 +- .../test-bug-686346-package-missing-architecture | 13 +- 6 files changed, 187 insertions(+), 181 deletions(-) create mode 100755 test/integration/skip-bug-601016-description-translation delete mode 100755 test/integration/test-bug-601016-description-translation diff --git a/test/integration/framework b/test/integration/framework index a2bb871cc..d899bb574 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -216,14 +216,13 @@ getarchitectures() { } configarchitecture() { - local CONFFILE=rootdir/etc/apt/apt.conf.d/01multiarch.conf - rm -f $CONFFILE - echo "APT::Architecture \"$(getarchitecture $1)\";" > $CONFFILE - shift - while [ -n "$1" ]; do - echo "APT::Architectures:: \"$(getarchitecture $1)\";" >> $CONFFILE - shift - done + { + echo "APT::Architecture \"$(getarchitecture $1)\";" + while [ -n "$1" ]; do + echo "APT::Architectures:: \"$(getarchitecture $1)\";" + shift + done + } >rootdir/etc/apt/apt.conf.d/01multiarch.conf configdpkg } @@ -236,16 +235,17 @@ configdpkg() { echo -n > rootdir/var/lib/dpkg/status fi fi + rm -f rootdir/etc/apt/apt.conf.d/00foreigndpkg if $(which dpkg) --assert-multi-arch >/dev/null 2>&1; then local ARCHS="$(getarchitectures)" if echo "$ARCHS" | grep -E -q '[^ ]+ [^ ]+'; then DPKGARCH="$(dpkg --print-architecture)" for ARCH in ${ARCHS}; do if [ "${ARCH}" != "${DPKGARCH}" ]; then - if ! dpkg --add-architecture ${ARCH}; then + if ! dpkg --add-architecture ${ARCH} >/dev/null 2>&1; then # old-style used e.g. in Ubuntu-P – and as it seems travis - echo "DPKG::options:: \"--foreign-architecture\";" >> aptconfig.conf - echo "DPKG::options:: \"${ARCH}\";" >> aptconfig.conf + echo "DPKG::options:: \"--foreign-architecture\";" >> rootdir/etc/apt/apt.conf.d/00foreigndpkg + echo "DPKG::options:: \"${ARCH}\";" >> rootdir/etc/apt/apt.conf.d/00foreigndpkg fi fi done @@ -278,7 +278,11 @@ setupsimplenativepackage() { local VERSION="$3" local RELEASE="${4:-unstable}" local DEPENDENCIES="$5" - local DESCRIPTION="$6" + local DESCRIPTION="${6:-"Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE} + If you find such a package installed on your system, + something went horribly wrong! They are autogenerated + und used only by testcases and surf no other propose…"}" + local SECTION="${7:-others}" local DISTSECTION if [ "$SECTION" = "$(echo "$SECTION" | cut -d'/' -f 2)" ]; then @@ -310,14 +314,8 @@ Package: $NAME" > debian/control echo "Architecture: any" >> debian/control fi test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> debian/control - if [ -z "$DESCRIPTION" ]; then - echo "Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE} - If you find such a package installed on your system, - YOU did something horribly wrong! 
They are autogenerated - und used only by testcases for APT and surf no other propose…" >> debian/control - else - echo "Description: $DESCRIPTION" >> debian/control - fi + echo "Description: $DESCRIPTION" >> debian/control + test -e debian/compat || echo "7" > debian/compat test -e debian/source/format || echo "3.0 (native)" > debian/source/format test -e debian/rules || cp /usr/share/doc/debhelper/examples/rules.tiny debian/rules @@ -330,7 +328,11 @@ buildsimplenativepackage() { local VERSION="$3" local RELEASE="${4:-unstable}" local DEPENDENCIES="$5" - local DESCRIPTION="$6" + local DESCRIPTION="${6:-"Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE} + If you find such a package installed on your system, + something went horribly wrong! They are autogenerated + und used only by testcases and surf no other propose…"}" + local SECTION="${7:-others}" local PRIORITY="${8:-optional}" local DISTSECTION @@ -370,14 +372,7 @@ Package: $NAME" >> ${BUILDDIR}/debian/control fi local DEPS="$(echo "$DEPENDENCIES" | grep -v '^Build-')" test -z "$DEPS" || echo "$DEPS" >> ${BUILDDIR}/debian/control - if [ -z "$DESCRIPTION" ]; then - echo "Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE} - If you find such a package installed on your system, - YOU did something horribly wrong! They are autogenerated - und used only by testcases for APT and surf no other propose…" >> ${BUILDDIR}/debian/control - else - echo "Description: $DESCRIPTION" >> ${BUILDDIR}/debian/control - fi + echo "Description: $DESCRIPTION" >> ${BUILDDIR}/debian/control echo '3.0 (native)' > ${BUILDDIR}/debian/source/format (cd ${BUILDDIR}/..; dpkg-source -b ${NAME}-${VERSION} 2>&1) | sed -n 's#^dpkg-source: info: building [^ ]\+ in ##p' \ @@ -526,7 +521,10 @@ insertpackage() { local VERSION="$4" local DEPENDENCIES="$5" local PRIORITY="${6:-optional}" - local DESCRIPTION="${7}" + local DESCRIPTION="${7:-"Description: an autogenerated dummy ${NAME}=${VERSION}/${RELEASE} + If you find such a package installed on your system, + something went horribly wrong! They are autogenerated + und used only by testcases and surf no other propose…"}" local ARCHS="" for arch in $(echo "$ARCH" | sed -e 's#,#\n#g' | sed -e "s#^native\$#$(getarchitecture 'native')#"); do if [ "$arch" = 'all' -o "$arch" = 'none' ]; then @@ -548,15 +546,7 @@ Maintainer: Joe Sixpack " >> $FILE echo "Version: $VERSION Filename: pool/main/${NAME}/${NAME}_${VERSION}_${arch}.deb" >> $FILE test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> $FILE - echo -n 'Description: ' >> $FILE - if [ -z "$DESCRIPTION" ]; then - echo "an autogenerated dummy ${NAME}=${VERSION}/${RELEASE} - If you find such a package installed on your system, - YOU did something horribly wrong! They are autogenerated - und used only by testcases for APT and surf no other propose…" >> $FILE - else - echo "$DESCRIPTION" >> $FILE - fi + echo "Description: $DESCRIPTION" >> $FILE echo >> $FILE done done @@ -591,6 +581,11 @@ insertinstalledpackage() { local DEPENDENCIES="$4" local PRIORITY="${5:-optional}" local STATUS="${6:-install ok installed}" + local DESCRIPTION="${7:-"Description: an autogenerated dummy ${NAME}=${VERSION}/installed + If you find such a package installed on your system, + something went horribly wrong! 
They are autogenerated + und used only by testcases and surf no other propose…"}" + local FILE='rootdir/var/lib/dpkg/status' local INFO='rootdir/var/lib/dpkg/info' for arch in $(echo "$ARCH" | sed -e 's#,#\n#g' | sed -e "s#^native\$#$(getarchitecture 'native')#"); do @@ -603,11 +598,8 @@ Maintainer: Joe Sixpack Version: $VERSION" >> $FILE test "$arch" = 'none' || echo "Architecture: $arch" >> $FILE test -z "$DEPENDENCIES" || echo "$DEPENDENCIES" >> $FILE - echo "Description: an autogenerated dummy ${NAME}=${VERSION}/installed - If you find such a package installed on your system, - YOU did something horribly wrong! They are autogenerated - und used only by testcases for APT and surf no other propose… -" >> $FILE + echo "Description: $DESCRIPTION" >> $FILE + echo >> $FILE if [ "$(dpkg-query -W --showformat='${Multi-Arch}')" = 'same' ]; then echo -n > ${INFO}/${NAME}:${arch}.list else @@ -941,7 +933,7 @@ testequalor2() { echo "$2" > $COMPAREFILE2 shift 2 msgtest "Test for equality OR of" "$*" - $* >$COMPAREAGAINST 2>&1 + $* >$COMPAREAGAINST 2>&1 || true (checkdiff $COMPAREFILE1 $COMPAREAGAINST 1> /dev/null || checkdiff $COMPAREFILE2 $COMPAREAGAINST 1> /dev/null) && msgpass || ( echo "\n${CINFO}Diff against OR 1${CNORMAL}" "$(checkdiff $COMPAREFILE1 $COMPAREAGAINST)" \ diff --git a/test/integration/skip-bug-601016-description-translation b/test/integration/skip-bug-601016-description-translation new file mode 100755 index 000000000..33c209e9d --- /dev/null +++ b/test/integration/skip-bug-601016-description-translation @@ -0,0 +1,133 @@ +#!/bin/sh +set -e + +TESTDIR=$(readlink -f $(dirname $0)) +. $TESTDIR/framework +setupenvironment +configarchitecture 'i386' 'amd64' + +# we need a valid locale here, otherwise the language configuration +# will be overridden by LC_ALL=C +LOCALE="$(echo "$LANG" | cut -d'_' -f 1)" +MD5Sum='Description-md5: d41ee493aa9fcc6cbc9ce4eb7069959c' + +PACKAGESTANZA='Package: apt +Priority: important +Section: admin +Installed-Size: 5984 +Maintainer: APT Development Team +Architecture: i386 +Version: 0.8.7 +Filename: pool/main/a/apt/apt_0.8.7_i386.deb +Size: 2140230 +MD5sum: 74769bfbcef9ebc4fa74f7a5271b9c08' + +PACKAGESTANZA2='Package: apt +Priority: important +Section: admin +Installed-Size: 5984 +Maintainer: APT Development Team +Architecture: amd64 +Version: 0.8.7 +Filename: pool/main/a/apt/apt_0.8.7_amd64.deb +Size: 2210342 +MD5sum: 4a869bfbdef9ebc9fa74f7a5271e8d1a' + +echo "$PACKAGESTANZA +Description: Advanced front-end for dpkg +$MD5Sum + +$PACKAGESTANZA2 +Description: Advanced front-end for dpkg +$MD5Sum" > aptarchive/Packages + +echo "Package: apt +Description-${LOCALE}: Mächtige Oberfläche für dpkg + Das Paket bietet dem Nutzer technisch führende Methoden für den Zugriff + auf den dpkg-Paketmanager. Es beinhaltet das apt-get-Werkzeug und die + APT-Dselect-Methode. Beides sind einfache und sicherere Wege, + um Pakete zu installieren und Upgrades durchzuführen. 
+$MD5Sum" | bzip2 > aptarchive/${LOCALE}.bz2 + +# the $LOCALE translation file will not be included as it is a flat archive it came from and therefore +# its name can not be guessed correctly… (in non-flat archives the files are called Translation-*) +echo 'APT::Cache::Generate "false";' > rootdir/etc/apt/apt.conf.d/00nogenerate + +NOLONGSTANZA="$PACKAGESTANZA +Description: Advanced front-end for dpkg +$MD5Sum +" + +ENGLISHSTANZA="$PACKAGESTANZA +Description: Advanced front-end for dpkg +$MD5Sum +" + +LOCALESTANZA="$PACKAGESTANZA +Description-${LOCALE}: Mächtige Oberfläche für dpkg + Das Paket bietet dem Nutzer technisch führende Methoden für den Zugriff + auf den dpkg-Paketmanager. Es beinhaltet das apt-get-Werkzeug und die + APT-Dselect-Methode. Beides sind einfache und sicherere Wege, + um Pakete zu installieren und Upgrades durchzuführen. +$MD5Sum +" +LOCALESTANZA2="$PACKAGESTANZA2 +Description-${LOCALE}: Mächtige Oberfläche für dpkg + Das Paket bietet dem Nutzer technisch führende Methoden für den Zugriff + auf den dpkg-Paketmanager. Es beinhaltet das apt-get-Werkzeug und die + APT-Dselect-Methode. Beides sind einfache und sicherere Wege, + um Pakete zu installieren und Upgrades durchzuführen. +$MD5Sum +" + +testrun() { + echo "Acquire::Languages { \"${LOCALE}\"; \"en\"; };" > rootdir/etc/apt/apt.conf.d/00languages + export LC_ALL="" + rm -rf rootdir/var/lib/apt/lists + setupaptarchive + testequal "$LOCALESTANZA" aptcache show apt -o Test=File-${LOCALE} + testequal "$LOCALESTANZA" aptcache show apt:i386 -o Test=File-${LOCALE} + testequal "$LOCALESTANZA2" aptcache show apt:amd64 -o Test=File-${LOCALE} + testequal "$NOLONGSTANZA" aptcache show apt -o Acquire::Languages="ww" -o Test=File-${LOCALE} + testequal "$LOCALESTANZA" aptcache show apt -o Acquire::Languages::="ww" -o Test=File-${LOCALE} + LC_ALL=C testequal "$ENGLISHSTANZA" aptcache show apt -o Test=File-${LOCALE} + export LC_ALL="" + echo "Acquire::Languages { \"ww\"; \"${LOCALE}\"; \"en\"; };" > rootdir/etc/apt/apt.conf.d/00languages + testequal "$LOCALESTANZA" aptcache show apt -o Test=File-ww-${LOCALE} + echo "Acquire::Languages { \"ww\"; \"en\"; };" > rootdir/etc/apt/apt.conf.d/00languages + testequal "$ENGLISHSTANZA" aptcache show apt -o Test=File-ww +} + +testrun + +echo "$PACKAGESTANZA +Description: Advanced front-end for dpkg +$MD5Sum + +$PACKAGESTANZA2 +Description: Advanced front-end for dpkg +$MD5Sum" > aptarchive/Packages + +echo "Package: apt +Description-en: Advanced front-end for dpkg + This is Debian's next generation front-end for the dpkg package manager. + It provides the apt-get utility and APT dselect method that provides a + simpler, safer way to install and upgrade packages. +$MD5Sum" | bzip2 > aptarchive/en.bz2 + +ENGLISHSTANZA="$PACKAGESTANZA +Description-en: Advanced front-end for dpkg + This is Debian's next generation front-end for the dpkg package manager. + It provides the apt-get utility and APT dselect method that provides a + simpler, safer way to install and upgrade packages. +$MD5Sum +" +ENGLISHSTANZA2="$PACKAGESTANZA2 +Description-en: Advanced front-end for dpkg + This is Debian's next generation front-end for the dpkg package manager. + It provides the apt-get utility and APT dselect method that provides a + simpler, safer way to install and upgrade packages. 
+$MD5Sum +" + +testrun diff --git a/test/integration/test-bug-601016-description-translation b/test/integration/test-bug-601016-description-translation deleted file mode 100755 index 33c209e9d..000000000 --- a/test/integration/test-bug-601016-description-translation +++ /dev/null @@ -1,133 +0,0 @@ -#!/bin/sh -set -e - -TESTDIR=$(readlink -f $(dirname $0)) -. $TESTDIR/framework -setupenvironment -configarchitecture 'i386' 'amd64' - -# we need a valid locale here, otherwise the language configuration -# will be overridden by LC_ALL=C -LOCALE="$(echo "$LANG" | cut -d'_' -f 1)" -MD5Sum='Description-md5: d41ee493aa9fcc6cbc9ce4eb7069959c' - -PACKAGESTANZA='Package: apt -Priority: important -Section: admin -Installed-Size: 5984 -Maintainer: APT Development Team -Architecture: i386 -Version: 0.8.7 -Filename: pool/main/a/apt/apt_0.8.7_i386.deb -Size: 2140230 -MD5sum: 74769bfbcef9ebc4fa74f7a5271b9c08' - -PACKAGESTANZA2='Package: apt -Priority: important -Section: admin -Installed-Size: 5984 -Maintainer: APT Development Team -Architecture: amd64 -Version: 0.8.7 -Filename: pool/main/a/apt/apt_0.8.7_amd64.deb -Size: 2210342 -MD5sum: 4a869bfbdef9ebc9fa74f7a5271e8d1a' - -echo "$PACKAGESTANZA -Description: Advanced front-end for dpkg -$MD5Sum - -$PACKAGESTANZA2 -Description: Advanced front-end for dpkg -$MD5Sum" > aptarchive/Packages - -echo "Package: apt -Description-${LOCALE}: Mächtige Oberfläche für dpkg - Das Paket bietet dem Nutzer technisch führende Methoden für den Zugriff - auf den dpkg-Paketmanager. Es beinhaltet das apt-get-Werkzeug und die - APT-Dselect-Methode. Beides sind einfache und sicherere Wege, - um Pakete zu installieren und Upgrades durchzuführen. -$MD5Sum" | bzip2 > aptarchive/${LOCALE}.bz2 - -# the $LOCALE translation file will not be included as it is a flat archive it came from and therefore -# its name can not be guessed correctly… (in non-flat archives the files are called Translation-*) -echo 'APT::Cache::Generate "false";' > rootdir/etc/apt/apt.conf.d/00nogenerate - -NOLONGSTANZA="$PACKAGESTANZA -Description: Advanced front-end for dpkg -$MD5Sum -" - -ENGLISHSTANZA="$PACKAGESTANZA -Description: Advanced front-end for dpkg -$MD5Sum -" - -LOCALESTANZA="$PACKAGESTANZA -Description-${LOCALE}: Mächtige Oberfläche für dpkg - Das Paket bietet dem Nutzer technisch führende Methoden für den Zugriff - auf den dpkg-Paketmanager. Es beinhaltet das apt-get-Werkzeug und die - APT-Dselect-Methode. Beides sind einfache und sicherere Wege, - um Pakete zu installieren und Upgrades durchzuführen. -$MD5Sum -" -LOCALESTANZA2="$PACKAGESTANZA2 -Description-${LOCALE}: Mächtige Oberfläche für dpkg - Das Paket bietet dem Nutzer technisch führende Methoden für den Zugriff - auf den dpkg-Paketmanager. Es beinhaltet das apt-get-Werkzeug und die - APT-Dselect-Methode. Beides sind einfache und sicherere Wege, - um Pakete zu installieren und Upgrades durchzuführen. 
-$MD5Sum -" - -testrun() { - echo "Acquire::Languages { \"${LOCALE}\"; \"en\"; };" > rootdir/etc/apt/apt.conf.d/00languages - export LC_ALL="" - rm -rf rootdir/var/lib/apt/lists - setupaptarchive - testequal "$LOCALESTANZA" aptcache show apt -o Test=File-${LOCALE} - testequal "$LOCALESTANZA" aptcache show apt:i386 -o Test=File-${LOCALE} - testequal "$LOCALESTANZA2" aptcache show apt:amd64 -o Test=File-${LOCALE} - testequal "$NOLONGSTANZA" aptcache show apt -o Acquire::Languages="ww" -o Test=File-${LOCALE} - testequal "$LOCALESTANZA" aptcache show apt -o Acquire::Languages::="ww" -o Test=File-${LOCALE} - LC_ALL=C testequal "$ENGLISHSTANZA" aptcache show apt -o Test=File-${LOCALE} - export LC_ALL="" - echo "Acquire::Languages { \"ww\"; \"${LOCALE}\"; \"en\"; };" > rootdir/etc/apt/apt.conf.d/00languages - testequal "$LOCALESTANZA" aptcache show apt -o Test=File-ww-${LOCALE} - echo "Acquire::Languages { \"ww\"; \"en\"; };" > rootdir/etc/apt/apt.conf.d/00languages - testequal "$ENGLISHSTANZA" aptcache show apt -o Test=File-ww -} - -testrun - -echo "$PACKAGESTANZA -Description: Advanced front-end for dpkg -$MD5Sum - -$PACKAGESTANZA2 -Description: Advanced front-end for dpkg -$MD5Sum" > aptarchive/Packages - -echo "Package: apt -Description-en: Advanced front-end for dpkg - This is Debian's next generation front-end for the dpkg package manager. - It provides the apt-get utility and APT dselect method that provides a - simpler, safer way to install and upgrade packages. -$MD5Sum" | bzip2 > aptarchive/en.bz2 - -ENGLISHSTANZA="$PACKAGESTANZA -Description-en: Advanced front-end for dpkg - This is Debian's next generation front-end for the dpkg package manager. - It provides the apt-get utility and APT dselect method that provides a - simpler, safer way to install and upgrade packages. -$MD5Sum -" -ENGLISHSTANZA2="$PACKAGESTANZA2 -Description-en: Advanced front-end for dpkg - This is Debian's next generation front-end for the dpkg package manager. - It provides the apt-get utility and APT dselect method that provides a - simpler, safer way to install and upgrade packages. -$MD5Sum -" - -testrun diff --git a/test/integration/test-bug-633350-do-not-kill-last-char-in-Release b/test/integration/test-bug-633350-do-not-kill-last-char-in-Release index 2aae7cfcc..988f8c9d0 100755 --- a/test/integration/test-bug-633350-do-not-kill-last-char-in-Release +++ b/test/integration/test-bug-633350-do-not-kill-last-char-in-Release @@ -8,7 +8,7 @@ configarchitecture 'amd64' insertpackage 'unstable' 'cool' 'amd64' '1.0' -setupaptarchive 2> /dev/null +setupaptarchive --no-update echo 'NotAutomatic: yes' >> aptarchive/dists/unstable/Release diff --git a/test/integration/test-bug-679371-apt-get-autoclean-multiarch b/test/integration/test-bug-679371-apt-get-autoclean-multiarch index b62d437aa..3de7d69f9 100755 --- a/test/integration/test-bug-679371-apt-get-autoclean-multiarch +++ b/test/integration/test-bug-679371-apt-get-autoclean-multiarch @@ -17,7 +17,10 @@ changetowebserver testsuccess aptget update testsuccess aptget install pkgall pkgnative pkgforeign -y -testdpkginstalled pkgall pkgnative pkgforeign +# if we work with an old dpkg, pkgforeign will be listed differently, +# so test with aptcache for install status instead +testdpkginstalled pkgall pkgnative +testsuccess aptcache show pkgforeign/installed testequal 'Reading package lists... Building dependency tree... 
diff --git a/test/integration/test-bug-686346-package-missing-architecture b/test/integration/test-bug-686346-package-missing-architecture index 3b02811ca..dc51861ab 100755 --- a/test/integration/test-bug-686346-package-missing-architecture +++ b/test/integration/test-bug-686346-package-missing-architecture @@ -73,7 +73,7 @@ insertinstalledpackage 'pkgb' 'none' '1' insertinstalledpackage 'pkgf' 'none' '1' 'Conflicts: pkgb' insertinstalledpackage 'pkgg' 'amd64' '1' 'Conflicts: pkgb' insertinstalledpackage 'pkgb' 'amd64' '2' -testequal "Reading package lists... +testequalor2 "Reading package lists... Building dependency tree... Reading state information... You might want to run 'apt-get -f install' to correct these. @@ -84,6 +84,17 @@ The following packages have unmet dependencies: Conflicts: pkgb but 2 is installed pkgg : Conflicts: pkgb but 2 is installed Conflicts: pkgb:none but 1 is installed +E: Unmet dependencies. Try using -f." "Reading package lists... +Building dependency tree... +Reading state information... +You might want to run 'apt-get -f install' to correct these. +The following packages have unmet dependencies: + pkgb : Conflicts: pkgb:none but 1 is installed + pkgb:none : Conflicts: pkgb but 2 is installed + pkgf:none : Conflicts: pkgb but 2 is installed + Conflicts: pkgb:none but 1 is installed + pkgg : Conflicts: pkgb but 2 is installed + Conflicts: pkgb:none but 1 is installed E: Unmet dependencies. Try using -f." aptget check # check that dependencies are generated for none-packages -- cgit v1.2.3 From 3c8030a4977536e9d3a1954adc68082ae1c6d5a2 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Thu, 3 Oct 2013 01:12:18 +0200 Subject: refactor onError relabeling of DestFile as '.FAILED' This helps ensure three things: - each error is reported via ReportMirrorFailure - if DestFile doesn't exist, do not attempt rename - renames happen for every error The last one wasn't the case for Size mismatches, which isn't nice, but not a exploitable problem per-se as the file isn't picked up and remains in partial/ where the following download-try will at most take it for a partial request which fails the hashsum verification later on Git-Dch: Ignore --- apt-pkg/acquire-item.cc | 75 ++++++++++++++++++++++++++++--------------------- apt-pkg/acquire-item.h | 19 +++++++++++-- 2 files changed, 60 insertions(+), 34 deletions(-) diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index 222b78671..fcc7c7404 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -143,6 +143,32 @@ void pkgAcquire::Item::Rename(string From,string To) } } /*}}}*/ +bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/ +{ + if(FileExists(DestFile)) + Rename(DestFile, DestFile + ".FAILED"); + + switch (error) + { + case HashSumMismatch: + ErrorText = _("Hash Sum mismatch"); + Status = StatAuthError; + ReportMirrorFailure("HashChecksumFailure"); + break; + case SizeMismatch: + ErrorText = _("Size mismatch"); + Status = StatAuthError; + ReportMirrorFailure("SizeFailure"); + break; + case InvalidFormat: + ErrorText = _("Invalid file format"); + Status = StatError; + // do not report as usually its not the mirrors fault, but Portal/Proxy + break; + } + return false; +} + /*}}}*/ // Acquire::Item::ReportMirrorFailure /*{{{*/ // --------------------------------------------------------------------- void pkgAcquire::Item::ReportMirrorFailure(string FailCode) @@ -595,9 +621,7 @@ void pkgAcqIndexDiffs::Finish(bool allDone) if(!ExpectedHash.empty() && 
!ExpectedHash.VerifyFile(DestFile)) { - Status = StatAuthError; - ErrorText = _("MD5Sum mismatch"); - Rename(DestFile,DestFile + ".FAILED"); + RenameOnError(HashSumMismatch); Dequeue(); return; } @@ -866,10 +890,7 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash, if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash) { - Status = StatAuthError; - ErrorText = _("Hash Sum mismatch"); - Rename(DestFile,DestFile + ".FAILED"); - ReportMirrorFailure("HashChecksumFailure"); + RenameOnError(HashSumMismatch); return; } @@ -878,22 +899,18 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash, if (Verify == true) { FileFd fd(DestFile, FileFd::ReadOnly); - pkgTagSection sec; - pkgTagFile tag(&fd); - - // Only test for correctness if the file is not empty (empty is ok) - if (fd.Size() > 0) { - if (_error->PendingError() || !tag.Step(sec)) { - Status = StatError; - _error->DumpErrors(); - Rename(DestFile,DestFile + ".FAILED"); - return; - } else if (!sec.Exists("Package")) { - Status = StatError; - ErrorText = ("Encountered a section with no Package: header"); - Rename(DestFile,DestFile + ".FAILED"); - return; - } + // Only test for correctness if the file is not empty (empty is ok) + if (fd.FileSize() > 0) + { + pkgTagSection sec; + pkgTagFile tag(&fd); + + // all our current indexes have a field 'Package' in each section + if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false) + { + RenameOnError(InvalidFormat); + return; + } } } @@ -1907,18 +1924,14 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash, // Check the size if (Size != Version->Size) { - Status = StatError; - ErrorText = _("Size mismatch"); + RenameOnError(SizeMismatch); return; } // Check the hash if(ExpectedHash.toStr() != CalcHash) { - Status = StatError; - ErrorText = _("Hash Sum mismatch"); - if(FileExists(DestFile)) - Rename(DestFile,DestFile + ".FAILED"); + RenameOnError(HashSumMismatch); return; } @@ -2058,9 +2071,7 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash, // Check the hash if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash) { - Status = StatError; - ErrorText = _("Hash Sum mismatch"); - Rename(DestFile,DestFile + ".FAILED"); + RenameOnError(HashSumMismatch); return; } diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 10c855e63..6b4f73708 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -83,7 +83,7 @@ class pkgAcquire::Item : public WeakPointable * overwritten. */ void Rename(std::string From,std::string To); - + public: /** \brief The current status of this item. */ @@ -281,6 +281,21 @@ class pkgAcquire::Item : public WeakPointable * pkgAcquire::Remove. */ virtual ~Item(); + + protected: + + enum RenameOnErrorState { + HashSumMismatch, + SizeMismatch, + InvalidFormat + }; + + /** \brief Rename failed file and set error + * + * \param state respresenting the error we encountered + * \param errorMsg a message describing the error + */ + bool RenameOnError(RenameOnErrorState const state); }; /*}}}*/ /** \brief Information about an index patch (aka diff). */ /*{{{*/ @@ -982,7 +997,7 @@ class pkgAcqArchive : public pkgAcquire::Item * * \param Version The package version to download. * - * \param StoreFilename A location in which the actual filename of + * \param[out] StoreFilename A location in which the actual filename of * the package should be stored. 
It will be set to a guessed * basename in the constructor, and filled in with a fully * qualified filename once the download finishes. -- cgit v1.2.3 From 866893a619e00966ae6b1549c4bfce92d6c17db1 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Thu, 3 Oct 2013 14:45:41 +0200 Subject: put fetch errors in 'source' on our errorstack refactor the fetching process so that it looks more like the others we have in the hope that we can reuse code in the future. This is a soft interface change as 'source' previously printed errors directly on stderr, while it will now push it onto our usual error stack. --- apt-private/makefile | 2 +- apt-private/private-download.cc | 96 +++++++++++++++++++++++++++++++++++++++++ apt-private/private-download.h | 9 ++++ apt-private/private-install.cc | 72 +++---------------------------- cmdline/apt-get.cc | 28 +++--------- 5 files changed, 117 insertions(+), 90 deletions(-) create mode 100644 apt-private/private-download.cc create mode 100644 apt-private/private-download.h diff --git a/apt-private/makefile b/apt-private/makefile index 8feb1ce6c..1d179f0b2 100644 --- a/apt-private/makefile +++ b/apt-private/makefile @@ -17,7 +17,7 @@ MAJOR=0.0 MINOR=0 SLIBS=$(PTHREADLIB) -lapt-pkg -PRIVATES=list install output cachefile cacheset update upgrade cmndline moo search show main +PRIVATES=list install download output cachefile cacheset update upgrade cmndline moo search show main SOURCE += $(foreach private, $(PRIVATES), private-$(private).cc) HEADERS += $(foreach private, $(PRIVATES), private-$(private).h) diff --git a/apt-private/private-download.cc b/apt-private/private-download.cc new file mode 100644 index 000000000..f02991cde --- /dev/null +++ b/apt-private/private-download.cc @@ -0,0 +1,96 @@ +// Include Files /*{{{*/ +#include + +#include +#include +#include +#include +#include + +#include "private-output.h" + +#include + +#include +#include +#include + +#include + /*}}}*/ + +// CheckAuth - check if each download comes form a trusted source /*{{{*/ +bool CheckAuth(pkgAcquire& Fetcher, bool const PromptUser) +{ + std::string UntrustedList; + for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I < Fetcher.ItemsEnd(); ++I) + if (!(*I)->IsTrusted()) + UntrustedList += std::string((*I)->ShortDesc()) + " "; + + if (UntrustedList == "") + return true; + + ShowList(c2out,_("WARNING: The following packages cannot be authenticated!"),UntrustedList,""); + + if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true) + { + c2out << _("Authentication warning overridden.\n"); + return true; + } + + if (PromptUser == false) + return _error->Error(_("Some packages could not be authenticated")); + + if (_config->FindI("quiet",0) < 2 + && _config->FindB("APT::Get::Assume-Yes",false) == false) + { + c2out << _("Install these packages without verification?") << std::flush; + if (!YnPrompt(false)) + return _error->Error(_("Some packages could not be authenticated")); + + return true; + } + else if (_config->FindB("APT::Get::Force-Yes",false) == true) + return true; + + return _error->Error(_("There are problems and -y was used without --force-yes")); +} + /*}}}*/ +bool AcquireRun(pkgAcquire &Fetcher, int const PulseInterval, bool * const Failure, bool * const TransientNetworkFailure)/*{{{*/ +{ + pkgAcquire::RunResult res; + if(PulseInterval > 0) + res = Fetcher.Run(PulseInterval); + else + res = Fetcher.Run(); + + if (res == pkgAcquire::Failed) + return false; + + for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); + I != Fetcher.ItemsEnd(); ++I) + { + + if 
((*I)->Status == pkgAcquire::Item::StatDone && + (*I)->Complete == true) + continue; + + if (TransientNetworkFailure != NULL && (*I)->Status == pkgAcquire::Item::StatIdle) + { + *TransientNetworkFailure = true; + continue; + } + + ::URI uri((*I)->DescURI()); + uri.User.clear(); + uri.Password.clear(); + std::string descUri = std::string(uri); + _error->Error(_("Failed to fetch %s %s\n"), descUri.c_str(), + (*I)->ErrorText.c_str()); + + if (Failure != NULL) + *Failure = true; + } + + return true; +} + /*}}}*/ diff --git a/apt-private/private-download.h b/apt-private/private-download.h new file mode 100644 index 000000000..b8cc8da1e --- /dev/null +++ b/apt-private/private-download.h @@ -0,0 +1,9 @@ +#ifndef APT_PRIVATE_DOWNLOAD_H +#define APT_PRIVATE_DOWNLOAD_H + +#include + +bool CheckAuth(pkgAcquire& Fetcher, bool const PromptUser); +bool AcquireRun(pkgAcquire &Fetcher, int const PulseInterval, bool * const Failure, bool * const TransientNetworkFailure); + +#endif diff --git a/apt-private/private-install.cc b/apt-private/private-install.cc index c07d060f3..9adad45af 100644 --- a/apt-private/private-install.cc +++ b/apt-private/private-install.cc @@ -42,6 +42,7 @@ #include #include "private-install.h" +#include "private-download.h" #include "private-cachefile.h" #include "private-output.h" #include "private-cacheset.h" @@ -50,50 +51,6 @@ #include /*}}}*/ -// CheckAuth - check if each download comes form a trusted source /*{{{*/ -// --------------------------------------------------------------------- -/* */ -static bool CheckAuth(pkgAcquire& Fetcher) -{ - std::string UntrustedList; - for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I < Fetcher.ItemsEnd(); ++I) - { - if (!(*I)->IsTrusted()) - { - UntrustedList += std::string((*I)->ShortDesc()) + " "; - } - } - - if (UntrustedList == "") - { - return true; - } - - ShowList(c2out,_("WARNING: The following packages cannot be authenticated!"),UntrustedList,""); - - if (_config->FindB("APT::Get::AllowUnauthenticated",false) == true) - { - c2out << _("Authentication warning overridden.\n"); - return true; - } - - if (_config->FindI("quiet",0) < 2 - && _config->FindB("APT::Get::Assume-Yes",false) == false) - { - c2out << _("Install these packages without verification?") << std::flush; - if (!YnPrompt(false)) - return _error->Error(_("Some packages could not be authenticated")); - - return true; - } - else if (_config->FindB("APT::Get::Force-Yes",false) == true) - { - return true; - } - - return _error->Error(_("There are problems and -y was used without --force-yes")); -} - /*}}}*/ // InstallPackages - Actually download and install the packages /*{{{*/ // --------------------------------------------------------------------- /* This displays the informative messages describing what is going to @@ -304,7 +261,7 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask, bool Safety) return true; } - if (!CheckAuth(Fetcher)) + if (!CheckAuth(Fetcher, true)) return false; /* Unlock the dpkg lock if we are not going to be doing an install @@ -336,29 +293,10 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask, bool Safety) I = Fetcher.ItemsBegin(); } } - - if (Fetcher.Run() == pkgAcquire::Failed) - return false; - - // Print out errors - bool Failed = false; - for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I) - { - if ((*I)->Status == pkgAcquire::Item::StatDone && - (*I)->Complete == true) - continue; - - if ((*I)->Status == pkgAcquire::Item::StatIdle) - { - Transient = true; - // Failed = true; - 
continue; - } - fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(), - (*I)->ErrorText.c_str()); - Failed = true; - } + bool Failed = false; + if (AcquireRun(Fetcher, 0, &Failed, &Transient) == false) + return false; /* If we are in no download mode and missing files and there were 'failures' then the user must specify -m. Furthermore, there diff --git a/cmdline/apt-get.cc b/cmdline/apt-get.cc index 8a30ac38d..a37f06741 100644 --- a/cmdline/apt-get.cc +++ b/cmdline/apt-get.cc @@ -50,7 +50,7 @@ #include #include - +#include #include #include #include @@ -62,9 +62,11 @@ #include #include +#include +#include + #include #include -#include #include #include #include @@ -76,7 +78,6 @@ #include #include #include -#include #include #include @@ -812,27 +813,10 @@ bool DoSource(CommandLine &CmdL) delete[] Dsc; return true; } - - // Run it - if (Fetcher.Run() == pkgAcquire::Failed) - { - delete[] Dsc; - return false; - } - // Print error messages + // Run it bool Failed = false; - for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I) - { - if ((*I)->Status == pkgAcquire::Item::StatDone && - (*I)->Complete == true) - continue; - - fprintf(stderr,_("Failed to fetch %s %s\n"),(*I)->DescURI().c_str(), - (*I)->ErrorText.c_str()); - Failed = true; - } - if (Failed == true) + if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true) { delete[] Dsc; return _error->Error(_("Failed to fetch some archives.")); -- cgit v1.2.3 From d57f6084aaa3972073114973d149ea2291b36682 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Thu, 3 Oct 2013 15:11:21 +0200 Subject: use pkgAcqArchive in 'download' for proper errors With a bit of trickery we can reuse the usual infrastructure we have in place to acquire deb files for the 'download' operation as well, which gains us authentification check & display, error messages, correct filenames and "downloads" from the root-owned archives. --- apt-pkg/acquire-item.cc | 5 +- cmdline/apt-get.cc | 80 ++++++++++------------ test/integration/test-apt-get-download | 3 +- ...17690-allow-unauthenticated-makes-all-untrusted | 29 +++++--- .../test-bug-722207-print-uris-even-if-very-quiet | 2 +- 5 files changed, 60 insertions(+), 59 deletions(-) diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index fcc7c7404..04505b35a 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -1768,9 +1768,8 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, // Select a source if (QueueNext() == false && _error->PendingError() == false) - _error->Error(_("I wasn't able to locate a file for the %s package. 
" - "This might mean you need to manually fix this package."), - Version.ParentPkg().Name()); + _error->Error(_("Can't find a source to download version '%s' of '%s'"), + Version.VerStr(), Version.ParentPkg().FullName(false).c_str()); } /*}}}*/ // AcqArchive::QueueNext - Queue the next file source /*{{{*/ diff --git a/cmdline/apt-get.cc b/cmdline/apt-get.cc index a37f06741..630a9489b 100644 --- a/cmdline/apt-get.cc +++ b/cmdline/apt-get.cc @@ -525,7 +525,7 @@ bool DoDownload(CommandLine &CmdL) CacheFile Cache; if (Cache.ReadOnlyOpen() == false) return false; - + APT::CacheSetHelper helper(c0out); APT::VersionList verset = APT::VersionList::FromCommandLine(Cache, CmdL.FileList + 1, APT::VersionList::CANDIDATE, helper); @@ -533,67 +533,57 @@ bool DoDownload(CommandLine &CmdL) if (verset.empty() == true) return false; + AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet", 0)); pkgAcquire Fetcher; - AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0)); - if (_config->FindB("APT::Get::Print-URIs") == false) - Fetcher.Setup(&Stat); + if (Fetcher.Setup(&Stat) == false) + return false; pkgRecords Recs(Cache); pkgSourceList *SrcList = Cache.GetSourceList(); - bool gotAll = true; - for (APT::VersionList::const_iterator Ver = verset.begin(); - Ver != verset.end(); - ++Ver) + // reuse the usual acquire methods for deb files, but don't drop them into + // the usual directories - keep everything in the current directory + std::vector storefile(verset.size()); + std::string const cwd = SafeGetCWD(); + _config->Set("Dir::Cache::Archives", cwd); + int i = 0; + for (APT::VersionList::const_iterator Ver = verset.begin(); + Ver != verset.end(); ++Ver, ++i) { - string descr; - // get the right version - pkgCache::PkgIterator Pkg = Ver.ParentPkg(); - pkgRecords::Parser &rec=Recs.Lookup(Ver.FileList()); - pkgCache::VerFileIterator Vf = Ver.FileList(); - if (Vf.end() == true) - { - _error->Error("Can not find VerFile for %s in version %s", Pkg.FullName().c_str(), Ver.VerStr()); - gotAll = false; - continue; - } - pkgCache::PkgFileIterator F = Vf.File(); - pkgIndexFile *index; - if(SrcList->FindIndex(F, index) == false) - { - _error->Error(_("Can't find a source to download version '%s' of '%s'"), Ver.VerStr(), Pkg.FullName().c_str()); - gotAll = false; - continue; - } - string uri = index->ArchiveURI(rec.FileName()); - strprintf(descr, _("Downloading %s %s"), Pkg.Name(), Ver.VerStr()); - // get the most appropriate hash - HashString hash; - if (rec.SHA512Hash() != "") - hash = HashString("sha512", rec.SHA512Hash()); - else if (rec.SHA256Hash() != "") - hash = HashString("sha256", rec.SHA256Hash()); - else if (rec.SHA1Hash() != "") - hash = HashString("sha1", rec.SHA1Hash()); - else if (rec.MD5Hash() != "") - hash = HashString("md5", rec.MD5Hash()); - // get the file - new pkgAcqFile(&Fetcher, uri, hash.toStr(), (*Ver)->Size, descr, Pkg.Name(), "."); + pkgAcquire::Item *I = new pkgAcqArchive(&Fetcher, SrcList, &Recs, *Ver, storefile[i]); + std::string const filename = cwd + flNotDir(storefile[i]); + storefile[i].assign(filename); + I->DestFile.assign(filename); } - if (gotAll == false) - return false; // Just print out the uris and exit if the --print-uris flag was used if (_config->FindB("APT::Get::Print-URIs") == true) { pkgAcquire::UriIterator I = Fetcher.UriBegin(); for (; I != Fetcher.UriEnd(); ++I) - cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' << + cout << '\'' << I->URI << "' " << flNotDir(I->Owner->DestFile) << ' ' << I->Owner->FileSize << ' ' << I->Owner->HashSum() << 
endl; return true; } - return (Fetcher.Run() == pkgAcquire::Continue); + if (_error->PendingError() == true || CheckAuth(Fetcher, false) == false) + return false; + + bool Failed = false; + if (AcquireRun(Fetcher, 0, &Failed, NULL) == false) + return false; + + // copy files in local sources to the current directory + for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin(); I != Fetcher.ItemsEnd(); ++I) + if ((*I)->Local == true && (*I)->Status == pkgAcquire::Item::StatDone) + { + std::string const filename = cwd + flNotDir((*I)->DestFile); + std::ifstream src((*I)->DestFile.c_str(), std::ios::binary); + std::ofstream dst(filename.c_str(), std::ios::binary); + dst << src.rdbuf(); + } + + return Failed == false; } /*}}}*/ // DoCheck - Perform the check operation /*{{{*/ diff --git a/test/integration/test-apt-get-download b/test/integration/test-apt-get-download index 420b2e380..6eac079f3 100755 --- a/test/integration/test-apt-get-download +++ b/test/integration/test-apt-get-download @@ -20,13 +20,14 @@ testdownload() { fi msgtest "Test download of package file $1 with" "$APT" aptget -qq download ${APT} && test -f $1 && msgpass || msgfail + rm $1 } testdownload apt_1.0_all.deb apt stable testdownload apt_2.0_all.deb apt DEBFILE="$(readlink -f aptarchive)/pool/apt_2.0_all.deb" -testequal "'file://${DEBFILE}' apt_2.0_all.deb $(stat -c%s $DEBFILE) sha512:$(sha512sum $DEBFILE | cut -d' ' -f 1)" aptget download apt --print-uris +testequal "'file://${DEBFILE}' apt_2.0_all.deb $(stat -c%s $DEBFILE) SHA512:$(sha512sum $DEBFILE | cut -d' ' -f 1)" aptget download apt --print-uris # deb:677887 testequal "E: Can't find a source to download version '1.0' of 'vrms:i386'" aptget download vrms diff --git a/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted b/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted index 1c2514938..633c197c0 100755 --- a/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted +++ b/test/integration/test-bug-617690-allow-unauthenticated-makes-all-untrusted @@ -26,17 +26,26 @@ testrun() { rm -rf rootdir/var/lib/apt testsuccess aptget update - testsuccess aptget download cool - testfileexists 'cool_1.0_i386.deb' + if [ "$1" = 'trusted' ]; then + testsuccess aptget download cool + testfileexists 'cool_1.0_i386.deb' + + testsuccess aptget download cool --allow-unauthenticated + testfileexists 'cool_1.0_i386.deb' + else + testfailure aptget download cool + testfilemissing 'cool_1.0_i386.deb' + + testsuccess aptget download cool --allow-unauthenticated + testfileexists 'cool_1.0_i386.deb' + fi mv aptarchive/pool/cool_1.0_i386.deb aptarchive/pool/cool_1.0_i386.deb.bak echo 'this is not a good package' > aptarchive/pool/cool_1.0_i386.deb - # FIXME: apt-get download should exit non-zero if download fails - aptget download cool + testfailure aptget download cool testfilemissing cool_1.0_i386.deb - # FIXME: apt-get download should exit non-zero if download fails - aptget download cool --allow-unauthenticated # unauthenticated doesn't mean unchecked + testfailure aptget download cool --allow-unauthenticated # unauthenticated doesn't mean unchecked testfilemissing cool_1.0_i386.deb rm -f aptarchive/pool/cool_1.0_i386.deb @@ -45,8 +54,10 @@ testrun() { testfileexists 'cool_1.0_i386.deb' } -testrun +testrun 'trusted' find aptarchive/ \( -name 'Release.gpg' -o -name 'InRelease' \) -delete -# FIXME: apt-get download should warn about untrusted downloads -testrun +testrun 'untrusted' + +changetowebserver +testrun 'untrusted' diff 
--git a/test/integration/test-bug-722207-print-uris-even-if-very-quiet b/test/integration/test-bug-722207-print-uris-even-if-very-quiet index 9524bab07..f2d95da19 100755 --- a/test/integration/test-bug-722207-print-uris-even-if-very-quiet +++ b/test/integration/test-bug-722207-print-uris-even-if-very-quiet @@ -19,7 +19,7 @@ APTARCHIVE=$(readlink -f ./aptarchive) testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget upgrade -qq --print-uris testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget dist-upgrade -qq --print-uris testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget install apt -qq --print-uris -testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 :" aptget download apt -qq --print-uris +testequal "'file://${APTARCHIVE}/pool/main/apt/apt_2_all.deb' apt_2_all.deb 0 MD5Sum:" aptget download apt -qq --print-uris testequal "'file://${APTARCHIVE}/apt_2.dsc' apt_2.dsc 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e 'file://${APTARCHIVE}/apt_2.tar.gz' apt_2.tar.gz 0 MD5Sum:d41d8cd98f00b204e9800998ecf8427e" aptget source apt -qq --print-uris testequal "'http://packages.debian.org/changelogs/pool/main/apt/apt_2/changelog'" aptget changelog apt -qq --print-uris -- cgit v1.2.3 From 342df712331004aa4907c9dbdf4b7728d087efb0 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Thu, 3 Oct 2013 21:34:52 +0200 Subject: fix lzma-support detection via xz binary Clear() only clears a config option, not removing it and an empty setting still exists. Hence we set the option instead to the xz path so that the later existance check can find a binary for the test --- apt-pkg/aptconfiguration.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc index 4f9b84e00..115d11616 100644 --- a/apt-pkg/aptconfiguration.cc +++ b/apt-pkg/aptconfiguration.cc @@ -453,7 +453,7 @@ void Configuration::setDefaultConfigurationForCompressors() { _config->CndSet("Dir::Bin::bzip2", "/bin/bzip2"); _config->CndSet("Dir::Bin::xz", "/usr/bin/xz"); if (FileExists(_config->FindFile("Dir::Bin::xz")) == true) { - _config->Clear("Dir::Bin::lzma"); + _config->Set("Dir::Bin::lzma", _config->FindFile("Dir::Bin::xz")); _config->Set("APT::Compressor::lzma::Binary", "xz"); if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) { _config->Set("APT::Compressor::lzma::CompressArg::", "--format=lzma"); -- cgit v1.2.3 From 3a7a206f40b8fda475e39566fc220fe8c5b59a17 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Thu, 3 Oct 2013 22:23:11 +0200 Subject: do not ++ on erased package pointers in autoremove Symptom: In an Ubuntu precise chroot (like on travis-ci) test-bug-613420-new-garbage-dependency segfaults in a std::set operator++ on an iterator we have erased previously (but not if run under gdb of course) --- apt-private/private-install.cc | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/apt-private/private-install.cc b/apt-private/private-install.cc index 9adad45af..643a6b370 100644 --- a/apt-private/private-install.cc +++ b/apt-private/private-install.cc @@ -438,15 +438,15 @@ bool DoAutomaticRemove(CacheFile &Cache) do { Changed = false; for (APT::PackageSet::const_iterator Pkg = tooMuch.begin(); - Pkg != tooMuch.end() && Changed == false; ++Pkg) + Pkg != tooMuch.end(); ++Pkg) { APT::PackageSet too; too.insert(*Pkg); for (pkgCache::PrvIterator Prv = 
Cache[Pkg].CandidateVerIter(Cache).ProvidesList(); Prv.end() == false; ++Prv) too.insert(Prv.ParentPkg()); - for (APT::PackageSet::const_iterator P = too.begin(); - P != too.end() && Changed == false; ++P) { + for (APT::PackageSet::const_iterator P = too.begin(); P != too.end(); ++P) + { for (pkgCache::DepIterator R = P.RevDependsList(); R.end() == false; ++R) { @@ -465,7 +465,11 @@ bool DoAutomaticRemove(CacheFile &Cache) Changed = true; break; } + if (Changed == true) + break; } + if (Changed == true) + break; } } while (Changed == true); } -- cgit v1.2.3 From df7d029c6d7ff15b81f7b439991bfaef19adcaf6 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Thu, 3 Oct 2013 22:49:58 +0200 Subject: test: use a multiarch capable dpkg rather than workaround The tests require nowadays a (somewhat) multiarch-capable dpkg, so replace the workaround as marked in the FIXME with a proper install as the workaround isn't working always correctly, letting the test fail. Git-Dch: Ignore --- test/integration/test-ubuntu-bug-859188-multiarch-reinstall | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/test/integration/test-ubuntu-bug-859188-multiarch-reinstall b/test/integration/test-ubuntu-bug-859188-multiarch-reinstall index 0fdf97485..be86f2e91 100755 --- a/test/integration/test-ubuntu-bug-859188-multiarch-reinstall +++ b/test/integration/test-ubuntu-bug-859188-multiarch-reinstall @@ -8,14 +8,13 @@ configarchitecture 'amd64' 'i386' 'armel' buildsimplenativepackage 'libsame' 'amd64,i386,armel' '1.0' 'unstable' 'Multi-Arch: same' -# FIXME: hack around dpkg's current inability to handle multiarch, a clean install would be better… -insertinstalledpackage 'libsame' 'amd64,i386' '1.0' 'Multi-Arch: same' -sed -e 's#/installed#/unstable#' -e 's#Installed-Size: 42#Installed-Size: 1#' -i rootdir/var/lib/dpkg/status - setupaptarchive +testsuccess aptget install libsame libsame:i386 + REINSTALL='Reading package lists... Building dependency tree... +Reading state information... 0 upgraded, 0 newly installed, 2 reinstalled, 0 to remove and 0 not upgraded. 
Inst libsame [1.0] (1.0 unstable [amd64]) Inst libsame:i386 [1.0] (1.0 unstable [i386]) -- cgit v1.2.3 From be297c7ab3c16edcbdd6afc699c73a58e545b599 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Thu, 3 Oct 2013 22:58:55 +0200 Subject: tests: install --no-install-recommends and stunnel4 for travis stunnel4 is required for https tests Git-Dch: Ignore --- .travis.yml | 2 +- test/integration/test-bug-254770-segfault-if-cache-not-buildable | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 56536837f..2d9194c28 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,5 @@ language: cpp before_install: - sudo apt-get update -q - - sudo apt-get install -q dpkg-dev debhelper libdb-dev gettext libcurl4-gnutls-dev zlib1g-dev libbz2-dev xsltproc docbook-xsl docbook-xml po4a autotools-dev autoconf automake doxygen debiandoc-sgml + - sudo apt-get install -q --no-install-recommends dpkg-dev debhelper libdb-dev gettext libcurl4-gnutls-dev zlib1g-dev libbz2-dev xsltproc docbook-xsl docbook-xml po4a autotools-dev autoconf automake doxygen debiandoc-sgml stunnel4 script: make && make test && test/integration/run-tests diff --git a/test/integration/test-bug-254770-segfault-if-cache-not-buildable b/test/integration/test-bug-254770-segfault-if-cache-not-buildable index 8fa337ccc..59102ddc9 100755 --- a/test/integration/test-bug-254770-segfault-if-cache-not-buildable +++ b/test/integration/test-bug-254770-segfault-if-cache-not-buildable @@ -18,7 +18,7 @@ testsegfault() { msgpass else echo - echo $TEST + echo "$TEST" msgfail fi } -- cgit v1.2.3
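[Editorial note, not part of the patch series above] A minimal sketch of why the last hunk quotes "$TEST" in test-bug-254770-segfault-if-cache-not-buildable: the variable presumably holds multi-line output captured from the command under test, and an unquoted expansion lets the shell's field splitting collapse it onto one space-separated line before echo prints it. Plain POSIX sh; the sample value is invented purely for illustration:

  TEST="$(printf 'Reading package lists...\nBuilding dependency tree...')"
  echo $TEST      # unquoted: field splitting joins both lines with a single space
  echo "$TEST"    # quoted: the embedded newline is preserved in the test log

Quoting also keeps glob characters in the captured output from being expanded, so the failure message printed by msgfail stays byte-for-byte what the command produced.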