From fa375493c5a4ed9c10d4e5257ac82c6e687862d3 Mon Sep 17 00:00:00 2001
From: Julian Andres Klode
Date: Mon, 10 Aug 2020 11:39:30 +0200
Subject: Do not retry on failure to fetch

While we fixed the infinite retrying earlier, we still have problems if we
retry in the middle of a transfer: we might end up resuming downloads that
are already done and read more than we should (removing the IsOpen() check
so that it always retries makes test-ubuntu-bug-1098738-apt-get-source-md5sum
fail with wrong file sizes).

I think the retrying was added to fix up pipelining mess-ups, but we have
better solutions now, so let's get rid of it until we have implemented this
properly.
---
 methods/basehttp.cc | 33 +++++++++++++--------------------
 1 file changed, 13 insertions(+), 20 deletions(-)

diff --git a/methods/basehttp.cc b/methods/basehttp.cc
index 5e29e0ce1..b8ab73155 100644
--- a/methods/basehttp.cc
+++ b/methods/basehttp.cc
@@ -770,31 +770,24 @@ int BaseHttpMethod::Loop()
       }
       else
       {
-         if (Server->IsOpen() == false && FailCounter < 1)
+         if (not Server->IsOpen())
          {
-            FailCounter++;
-            Server->Close();
-            _error->Discard();
-
             // Reset the pipeline
             QueueBack = Queue;
             Server->PipelineAnswersReceived = 0;
-            continue;
          }
-         else
-         {
-            Server->Close();
-            FailCounter = 0;
-            switch (Result)
-            {
-            case ResultState::TRANSIENT_ERROR:
-               Fail(true);
-               break;
-            case ResultState::FATAL_ERROR:
-            case ResultState::SUCCESSFUL:
-               Fail(false);
-               break;
-            }
+
+         Server->Close();
+         FailCounter = 0;
+         switch (Result)
+         {
+         case ResultState::TRANSIENT_ERROR:
+            Fail(true);
+            break;
+         case ResultState::FATAL_ERROR:
+         case ResultState::SUCCESSFUL:
+            Fail(false);
+            break;
          }
       }
       break;
-- 
cgit v1.2.3
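
Editorial note on the failure mode described in the commit message, not part of the
patch: the sketch below is a small self-contained C++ illustration, under the
assumption of a server (modelled by the hypothetical FetchFrom helper) whose response
does not line up with the resume offset. If the first attempt already delivered the
complete body and a retry then "resumes" from the current file size, the duplicate
bytes appended make the file larger than expected, which is the wrong-file-size
symptom mentioned for test-ubuntu-bug-1098738-apt-get-source-md5sum.

// Illustrative sketch only, not apt code: models a naive resume-on-retry
// against a response that restarts from byte 0, producing an oversized file.
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>

static const std::string Body = "0123456789";          // hypothetical remote body
static const std::size_t ExpectedSize = Body.size();   // size the item expects

// Hypothetical server: honours the requested offset only when honoursRange is true.
static std::string FetchFrom(std::size_t offset, bool honoursRange)
{
   return honoursRange ? Body.substr(std::min(offset, Body.size())) : Body;
}

int main()
{
   std::string file;                          // contents of the partial file on disk
   file += FetchFrom(0, true);                // first attempt completes fully

   // A retry now "resumes" at file.size() == ExpectedSize; if the response
   // does not match that offset, the whole body is appended a second time.
   file += FetchFrom(file.size(), false);

   std::cout << "expected " << ExpectedSize << " bytes, got "
             << file.size() << " bytes\n";    // 20 instead of 10: size/hash mismatch
   return file.size() == ExpectedSize ? 0 : 1;
}

With the patch applied, Loop() no longer attempts such a retry: it closes the
connection and fails the item as transient or fatal depending on Result, leaving
any retrying to be reimplemented properly later, as the commit message says.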