From 9127d7aecf01f2999a2589e4b0503288518b2927 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Mon, 5 Jan 2015 10:27:53 +0100 Subject: Fix missing URIStart() for https downloads Add an explicit ReceivedData to HttpsMethod that indicates when we got data from the connection so that we can send URIStart() to the parent. This is needed because URIStart got moved in f9b4f12d from the progress_callback to write_data() and it only checks for Res.Size. In the old code, if progress_callback is called by libcurl (and sets Res.Size) before write_data is called, then URIStart() is never sent. Making this an explicit ReceivedData variable fixes this issue. --- methods/https.h | 1 + 1 file changed, 1 insertion(+) (limited to 'methods/https.h') diff --git a/methods/https.h b/methods/https.h index faac8a3cd..411b71440 100644 --- a/methods/https.h +++ b/methods/https.h @@ -66,6 +66,7 @@ class HttpsMethod : public pkgAcqMethod CURL *curl; FetchResult Res; HttpsServerState *Server; + bool ReceivedData; public: FileFd *File; -- cgit v1.2.3-70-g09d2 From 905fba60a046646a26a56b4c5d4a5dc7d5906f0d Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 9 Mar 2015 01:54:46 +0100 Subject: derive more of https from http method Bug #778375 uncovered that https wasn't properly integrated in the class family tree of http as it was supposed to be, leading to a NULL pointer dereference. Fixing this 'properly' was deemed too much diff for practically no gain that late in the release, so commit 0c2dc43d4fe1d026650b5e2920a021557f9534a6 just fixed the symptom, while this commit fixes the cause and adds a test. --- methods/http.cc | 2 - methods/https.cc | 37 +++++---- methods/https.h | 16 ++-- methods/server.cc | 7 +- methods/server.h | 3 +- .../test-bug-778375-server-has-no-reason-phrase | 40 ++++++++++ test/interactive-helper/aptwebserver.cc | 88 +++++++++++----------- 7 files changed, 124 insertions(+), 69 deletions(-) create mode 100755 test/integration/test-bug-778375-server-has-no-reason-phrase (limited to 'methods/https.h') diff --git a/methods/http.cc b/methods/http.cc index ad1347d36..021b284d0 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -772,8 +772,6 @@ bool HttpMethod::Configuration(string Message) if (ServerMethod::Configuration(Message) == false) return false; - DropPrivsOrDie(); - AllowRedirect = _config->FindB("Acquire::http::AllowRedirect",true); PipelineDepth = _config->FindI("Acquire::http::Pipeline-Depth", PipelineDepth); diff --git a/methods/https.cc b/methods/https.cc index 37a8ff5fd..32de42e4b 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -37,16 +37,6 @@ /*}}}*/ using namespace std; -bool HttpsMethod::Configuration(std::string Message) -{ - if (pkgAcqMethod::Configuration(Message) == false) - return false; - - DropPrivsOrDie(); - - return true; -} - size_t HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { @@ -131,7 +121,7 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/, } // HttpsServerState::HttpsServerState - Constructor /*{{{*/ -HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * /*Owner*/) : ServerState(Srv, NULL) +HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner) { TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut); ReceivedData = false; Reset(); } /*}}}*/ @@ -335,13 +325,11 @@ bool HttpsMethod::Fetch(FetchItem *Itm) curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, timeout); // set redirect options and default to 10 redirects - bool const AllowRedirect = 
_config->FindB("Acquire::https::AllowRedirect", - _config->FindB("Acquire::http::AllowRedirect",true)); curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, AllowRedirect); curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 10); // debug - if(_config->FindB("Debug::Acquire::https", false)) + if (Debug == true) curl_easy_setopt(curl, CURLOPT_VERBOSE, true); // error handling @@ -378,7 +366,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) // go for it - if the file exists, append on it File = new FileFd(Itm->DestFile, FileFd::WriteAny); - Server = new HttpsServerState(Itm->Uri, this); + Server = CreateServerState(Itm->Uri); // keep apt updated Res.Filename = Itm->DestFile; @@ -473,6 +461,25 @@ bool HttpsMethod::Fetch(FetchItem *Itm) return true; } + /*}}}*/ +// HttpsMethod::Configuration - Handle a configuration message /*{{{*/ +bool HttpsMethod::Configuration(string Message) +{ + if (ServerMethod::Configuration(Message) == false) + return false; + + AllowRedirect = _config->FindB("Acquire::https::AllowRedirect", + _config->FindB("Acquire::http::AllowRedirect", true)); + Debug = _config->FindB("Debug::Acquire::https",false); + + return true; +} + /*}}}*/ +ServerState * HttpsMethod::CreateServerState(URI uri) /*{{{*/ +{ + return new HttpsServerState(uri, this); +} + /*}}}*/ int main() { diff --git a/methods/https.h b/methods/https.h index 6917a6ff6..433a84680 100644 --- a/methods/https.h +++ b/methods/https.h @@ -50,17 +50,14 @@ class HttpsServerState : public ServerState HttpsServerState(URI Srv, HttpsMethod *Owner); virtual ~HttpsServerState() {Close();}; - - bool ReceivedData; }; -class HttpsMethod : public pkgAcqMethod +class HttpsMethod : public ServerMethod { // minimum speed in bytes/se that triggers download timeout handling static const int DL_MIN_SPEED = 10; virtual bool Fetch(FetchItem *); - virtual bool Configuration(std::string Message); static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp); static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp); @@ -69,12 +66,19 @@ class HttpsMethod : public pkgAcqMethod void SetupProxy(); CURL *curl; FetchResult Res; - HttpsServerState *Server; + ServerState *Server; + + // Used by ServerMethods unused by https + virtual void SendReq(FetchItem *) { exit(42); } + virtual void RotateDNS() { exit(42); } public: FileFd *File; - HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), Server(NULL), File(NULL) + virtual bool Configuration(std::string Message); + virtual ServerState * CreateServerState(URI uri); + + HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL) { curl = curl_easy_init(); }; diff --git a/methods/server.cc b/methods/server.cc index c17f27f73..91ec824d1 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -238,7 +238,12 @@ ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOu bool ServerMethod::Configuration(string Message) /*{{{*/ { - return pkgAcqMethod::Configuration(Message); + if (pkgAcqMethod::Configuration(Message) == false) + return false; + + DropPrivsOrDie(); + + return true; } /*}}}*/ diff --git a/methods/server.h b/methods/server.h index b974ec89a..3b232dcac 100644 --- a/methods/server.h +++ b/methods/server.h @@ -37,6 +37,7 @@ struct ServerState unsigned long long Size; // size of the usable content (aka: the file) unsigned long long JunkSize; // size of junk content (aka: server error pages) unsigned long long StartPos; + bool ReceivedData; time_t Date; bool HaveContent; enum {Chunked,Stream,Closes} Encoding; @@ -75,7 +76,7 @@ struct 
ServerState bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; - StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; + StartPos = 0; ReceivedData = false; Encoding = Closes; time(&Date); HaveContent = false; State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;}; virtual bool WriteResponse(std::string const &Data) = 0; diff --git a/test/integration/test-bug-778375-server-has-no-reason-phrase b/test/integration/test-bug-778375-server-has-no-reason-phrase new file mode 100755 index 000000000..23481ef88 --- /dev/null +++ b/test/integration/test-bug-778375-server-has-no-reason-phrase @@ -0,0 +1,40 @@ +#!/bin/sh +set -e + +TESTDIR=$(readlink -f $(dirname $0)) +. $TESTDIR/framework + +setupenvironment +configarchitecture 'native' + +echo 'found' > aptarchive/working +changetohttpswebserver -o 'aptwebserver::redirect::replace::/redirectme/=/' \ + -o 'aptwebserver::httpcode::200=200' -o 'aptwebserver::httpcode::404=404' \ + -o 'aptwebserver::httpcode::301=301' + +testdownload() { + rm -f downfile + msgtest "download of a $1 via" "${3%%:*}" + $2 --nomsg downloadfile "$3" downfile + + cp rootdir/tmp/testsuccess.output download.log + #looking for "HTTP server doesn't give Reason-Phrase for 200" + testsuccess grep 'give Reason-Phrase for' download.log + + if [ "$2" = 'testsuccess' ]; then + testfileequal downfile 'found' + else + testfailure test -e downfile + fi +} + +runtest() { + testdownload 'file works' 'testsuccess' "$1/working" + testdownload 'file via redirect works' 'testsuccess' "$1/redirectme/working" + + testdownload 'non-existent file fails' 'testfailure' "$1/failing" + testdownload 'non-existent file via redirect fails' 'testfailure' "$1/redirectme/failing" +} + +runtest 'http://localhost:8080' +runtest 'https://localhost:4433' diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index 3403bbdd2..644629a33 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -27,58 +27,58 @@ #include #include -static char const * httpcodeToStr(int const httpcode) /*{{{*/ +static std::string httpcodeToStr(int const httpcode) /*{{{*/ { switch (httpcode) { // Informational 1xx - case 100: return "100 Continue"; - case 101: return "101 Switching Protocols"; + case 100: return _config->Find("aptwebserver::httpcode::100", "100 Continue"); + case 101: return _config->Find("aptwebserver::httpcode::101", "101 Switching Protocols"); // Successful 2xx - case 200: return "200 OK"; - case 201: return "201 Created"; - case 202: return "202 Accepted"; - case 203: return "203 Non-Authoritative Information"; - case 204: return "204 No Content"; - case 205: return "205 Reset Content"; - case 206: return "206 Partial Content"; + case 200: return _config->Find("aptwebserver::httpcode::200", "200 OK"); + case 201: return _config->Find("aptwebserver::httpcode::201", "201 Created"); + case 202: return _config->Find("aptwebserver::httpcode::202", "202 Accepted"); + case 203: return _config->Find("aptwebserver::httpcode::203", "203 Non-Authoritative Information"); + case 204: return _config->Find("aptwebserver::httpcode::204", "204 No Content"); + case 205: return _config->Find("aptwebserver::httpcode::205", "205 Reset Content"); + case 206: return _config->Find("aptwebserver::httpcode::206", "206 Partial Content"); // Redirections 3xx - case 300: return "300 
Multiple Choices"; - case 301: return "301 Moved Permanently"; - case 302: return "302 Found"; - case 303: return "303 See Other"; - case 304: return "304 Not Modified"; - case 305: return "304 Use Proxy"; - case 307: return "307 Temporary Redirect"; + case 300: return _config->Find("aptwebserver::httpcode::300", "300 Multiple Choices"); + case 301: return _config->Find("aptwebserver::httpcode::301", "301 Moved Permanently"); + case 302: return _config->Find("aptwebserver::httpcode::302", "302 Found"); + case 303: return _config->Find("aptwebserver::httpcode::303", "303 See Other"); + case 304: return _config->Find("aptwebserver::httpcode::304", "304 Not Modified"); + case 305: return _config->Find("aptwebserver::httpcode::305", "305 Use Proxy"); + case 307: return _config->Find("aptwebserver::httpcode::307", "307 Temporary Redirect"); // Client errors 4xx - case 400: return "400 Bad Request"; - case 401: return "401 Unauthorized"; - case 402: return "402 Payment Required"; - case 403: return "403 Forbidden"; - case 404: return "404 Not Found"; - case 405: return "405 Method Not Allowed"; - case 406: return "406 Not Acceptable"; - case 407: return "407 Proxy Authentication Required"; - case 408: return "408 Request Time-out"; - case 409: return "409 Conflict"; - case 410: return "410 Gone"; - case 411: return "411 Length Required"; - case 412: return "412 Precondition Failed"; - case 413: return "413 Request Entity Too Large"; - case 414: return "414 Request-URI Too Large"; - case 415: return "415 Unsupported Media Type"; - case 416: return "416 Requested range not satisfiable"; - case 417: return "417 Expectation Failed"; - case 418: return "418 I'm a teapot"; + case 400: return _config->Find("aptwebserver::httpcode::400", "400 Bad Request"); + case 401: return _config->Find("aptwebserver::httpcode::401", "401 Unauthorized"); + case 402: return _config->Find("aptwebserver::httpcode::402", "402 Payment Required"); + case 403: return _config->Find("aptwebserver::httpcode::403", "403 Forbidden"); + case 404: return _config->Find("aptwebserver::httpcode::404", "404 Not Found"); + case 405: return _config->Find("aptwebserver::httpcode::405", "405 Method Not Allowed"); + case 406: return _config->Find("aptwebserver::httpcode::406", "406 Not Acceptable"); + case 407: return _config->Find("aptwebserver::httpcode::407", "407 Proxy Authentication Required"); + case 408: return _config->Find("aptwebserver::httpcode::408", "408 Request Time-out"); + case 409: return _config->Find("aptwebserver::httpcode::409", "409 Conflict"); + case 410: return _config->Find("aptwebserver::httpcode::410", "410 Gone"); + case 411: return _config->Find("aptwebserver::httpcode::411", "411 Length Required"); + case 412: return _config->Find("aptwebserver::httpcode::412", "412 Precondition Failed"); + case 413: return _config->Find("aptwebserver::httpcode::413", "413 Request Entity Too Large"); + case 414: return _config->Find("aptwebserver::httpcode::414", "414 Request-URI Too Large"); + case 415: return _config->Find("aptwebserver::httpcode::415", "415 Unsupported Media Type"); + case 416: return _config->Find("aptwebserver::httpcode::416", "416 Requested range not satisfiable"); + case 417: return _config->Find("aptwebserver::httpcode::417", "417 Expectation Failed"); + case 418: return _config->Find("aptwebserver::httpcode::418", "418 I'm a teapot"); // Server error 5xx - case 500: return "500 Internal Server Error"; - case 501: return "501 Not Implemented"; - case 502: return "502 Bad Gateway"; - case 503: return "503 Service Unavailable"; - case 504: return "504 Gateway Time-out"; - case 505: return "505 HTTP Version not supported"; - } - return NULL; + case 500: return _config->Find("aptwebserver::httpcode::500", "500 Internal Server Error"); + case 501: return _config->Find("aptwebserver::httpcode::501", "501 Not Implemented"); + case 502: return _config->Find("aptwebserver::httpcode::502", "502 Bad Gateway"); + case 503: return _config->Find("aptwebserver::httpcode::503", "503 Service Unavailable"); + case 504: return _config->Find("aptwebserver::httpcode::504", "504 Gateway Time-out"); + case 505: return _config->Find("aptwebserver::httpcode::505", "505 HTTP Version not supported"); + } + return ""; } /*}}}*/ static bool chunkedTransferEncoding(std::list const &headers) { -- cgit v1.2.3-70-g09d2 From 27925d82dd0cbae74d48040363fe6f6c2bae5215 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Fri, 27 Mar 2015 15:53:43 +0100 Subject: improve https method queue progress reporting The worker expects that the methods tell it when they start or finish downloading a file. Various pieces of information are passed along in this report, including the (expected) filesize. https was using a "global" struct for reporting, which made it 'reuse' incorrect values in some cases, like a non-existent InRelease falling back to Release{,.gpg} resulting in a size-mismatch warning. By reducing the scope and redesigning how the values are set, we can fix this and related issues. Closes: 777565, 781509 Thanks: Robert Edmonds and Anders Kaseorg for initial patches --- methods/https.cc | 82 ++++++++++----------- methods/https.h | 2 +- methods/server.h | 3 +- test/integration/framework | 20 +++++- test/integration/test-apt-https-no-redirect | 15 ++-- test/integration/test-apt-update-expected-size | 86 +++++++++++++++-------- test/integration/test-bug-602412-dequote-redirect | 3 +- 7 files changed, 121 insertions(+), 90 deletions(-) (limited to 'methods/https.h') diff --git a/methods/https.cc b/methods/https.cc index c69e84d3a..70f6a1046 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -37,11 +37,17 @@ /*}}}*/ using namespace std; +struct APT_HIDDEN CURLUserPointer { + HttpsMethod * const https; + HttpsMethod::FetchResult * const Res; + CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res) : https(https), Res(Res) {} +}; + size_t HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { size_t len = size * nmemb; - HttpsMethod *me = (HttpsMethod *)userp; + CURLUserPointer *me = (CURLUserPointer *)userp; std::string line((char*) buffer, len); for (--len; len > 0; --len) if (isspace(line[len]) == 0) @@ -53,23 +59,33 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) if (line.empty() == true) { - if (me->Server->Result != 416 && me->Server->StartPos != 0) + if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0) ; - else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize()) + else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize()) { - me->Server->Result = 200; - me->Server->StartPos = me->Server->Size; + me->https->Server->Result = 200; + me->https->Server->StartPos = me->https->Server->Size; // the actual size is not important for https as curl will deal with it // by itself and e.g. 
doesn't bother us with transport-encoding… - me->Server->JunkSize = std::numeric_limits::max(); + me->https->Server->JunkSize = std::numeric_limits::max(); } else - me->Server->StartPos = 0; + me->https->Server->StartPos = 0; + + me->https->File->Truncate(me->https->Server->StartPos); + me->https->File->Seek(me->https->Server->StartPos); + + me->Res->LastModified = me->https->Server->Date; + me->Res->Size = me->https->Server->Size; + me->Res->ResumePoint = me->https->Server->StartPos; - me->File->Truncate(me->Server->StartPos); - me->File->Seek(me->Server->StartPos); + // we expect valid data, so tell our caller we get the file now + if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 && + me->https->Server->JunkSize == 0 && + me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint) + me->https->URIStart(*me->Res); } - else if (me->Server->HeaderLine(line) == false) + else if (me->https->Server->HeaderLine(line) == false) return 0; return size*nmemb; @@ -85,12 +101,6 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) if (me->Server->JunkSize != 0) return buffer_size; - if (me->Server->ReceivedData == false) - { - me->URIStart(me->Res); - me->Server->ReceivedData = true; - } - if(me->File->Write(buffer, buffer_size) != true) return 0; @@ -109,27 +119,15 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) return buffer_size; } -int -HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/, - double /*ultotal*/, double /*ulnow*/) -{ - HttpsMethod *me = (HttpsMethod *)clientp; - if(dltotal > 0 && me->Res.Size == 0) { - me->Res.Size = (unsigned long long)dltotal; - } - return 0; -} - // HttpsServerState::HttpsServerState - Constructor /*{{{*/ HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner) { TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut); - ReceivedData = false; Reset(); } /*}}}*/ -void HttpsMethod::SetupProxy() /*{{{*/ +void HttpsMethod::SetupProxy() /*{{{*/ { URI ServerName = Queue->Uri; @@ -207,16 +205,16 @@ bool HttpsMethod::Fetch(FetchItem *Itm) maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc")); + FetchResult Res; + CURLUserPointer userp(this, &Res); // callbacks curl_easy_setopt(curl, CURLOPT_URL, static_cast(Uri).c_str()); curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header); - curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this); + curl_easy_setopt(curl, CURLOPT_WRITEHEADER, &userp); curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data); curl_easy_setopt(curl, CURLOPT_WRITEDATA, this); - curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback); - curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this); // options - curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false); + curl_easy_setopt(curl, CURLOPT_NOPROGRESS, true); curl_easy_setopt(curl, CURLOPT_FILETIME, true); // only allow curl to handle https, not the other stuff it supports curl_easy_setopt(curl, CURLOPT_PROTOCOLS, CURLPROTO_HTTPS); @@ -414,24 +412,23 @@ bool HttpsMethod::Fetch(FetchItem *Itm) return false; } - struct stat resultStat; - if (unlikely(stat(File->Name().c_str(), &resultStat) != 0)) - { - _error->Errno("stat", "Unable to access file %s", File->Name().c_str()); - return false; - } - Res.Size = resultStat.st_size; - // invalid range-request if (Server->Result == 416) { unlink(File->Name().c_str()); - Res.Size = 0; delete File; Redirect(Itm->Uri); return true; } + struct stat resultStat; + if (unlikely(stat(File->Name().c_str(), &resultStat) 
!= 0)) + { + _error->Errno("stat", "Unable to access file %s", File->Name().c_str()); + return false; + } + Res.Size = resultStat.st_size; + // Timestamp curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified); if (Res.LastModified != -1) @@ -455,7 +452,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm) URIDone(Res); // cleanup - Res.Size = 0; delete File; return true; diff --git a/methods/https.h b/methods/https.h index 433a84680..4cc48fc34 100644 --- a/methods/https.h +++ b/methods/https.h @@ -65,7 +65,6 @@ class HttpsMethod : public ServerMethod double ultotal, double ulnow); void SetupProxy(); CURL *curl; - FetchResult Res; ServerState *Server; // Used by ServerMethods unused by https @@ -77,6 +76,7 @@ class HttpsMethod : public ServerMethod virtual bool Configuration(std::string Message); virtual ServerState * CreateServerState(URI uri); + using pkgAcqMethod::FetchResult; HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL) { diff --git a/methods/server.h b/methods/server.h index 3b232dcac..b974ec89a 100644 --- a/methods/server.h +++ b/methods/server.h @@ -37,7 +37,6 @@ struct ServerState unsigned long long Size; // size of the usable content (aka: the file) unsigned long long JunkSize; // size of junk content (aka: server error pages) unsigned long long StartPos; - bool ReceivedData; time_t Date; bool HaveContent; enum {Chunked,Stream,Closes} Encoding; @@ -76,7 +75,7 @@ struct ServerState bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; - StartPos = 0; ReceivedData = false; Encoding = Closes; time(&Date); HaveContent = false; + StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;}; virtual bool WriteResponse(std::string const &Data) = 0; diff --git a/test/integration/framework b/test/integration/framework index ec23e41e6..50c027a2c 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -1125,8 +1125,10 @@ acquire::cdrom::autodetect 0;" > rootdir/etc/apt/apt.conf.d/00cdrom downloadfile() { local PROTO="${1%%:*}" - apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ - download-file "$1" "$2" 2>&1 || true + if ! 
apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ + download-file "$1" "$2" 2>&1 ; then + return 1 + fi # only if the file exists the download was successful if [ -r "$2" ]; then return 0 @@ -1407,6 +1409,20 @@ testfailureequal() { testfileequal "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" "$CMP" } +testfailuremsg() { + local CMP="$1" + shift + testfailure "$@" + msgtest 'Check that the output of the previous failed command has expected' 'failures and warnings' + grep '^\(W\|E\):' "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" > "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailureequal.output" 2>&1 || true + if echo "$CMP" | checkdiff - "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailureequal.output"; then + msgpass + else + echo '### Complete output ###' + cat "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" + msgfail + fi +} testfilestats() { msgtest "Test that file $1 has $2 $3" "$4" diff --git a/test/integration/test-apt-https-no-redirect b/test/integration/test-apt-https-no-redirect index bc744d6f2..c91c78916 100755 --- a/test/integration/test-apt-https-no-redirect +++ b/test/integration/test-apt-https-no-redirect @@ -14,22 +14,15 @@ echo 'alright' > aptarchive/working changetohttpswebserver -o 'aptwebserver::redirect::replace::/redirectme/=http://localhost:8080/' msgtest 'download of a file works via' 'http' -downloadfile 'http://localhost:8080/working' httpfile >/dev/null 2>&1 && msgpass || msgfail +testsuccess --nomsg downloadfile 'http://localhost:8080/working' httpfile testfileequal httpfile 'alright' msgtest 'download of a file works via' 'https' -downloadfile 'https://localhost:4433/working' httpsfile >/dev/null 2>&1 && msgpass || msgfail +testsuccess --nomsg downloadfile 'https://localhost:4433/working' httpsfile testfileequal httpsfile 'alright' msgtest 'download of a file does not work if' 'https redirected to http' -downloadfile 'https://localhost:4433/redirectme/working' redirectfile >curloutput 2>&1 && msgfail || msgpass +testfailure --nomsg downloadfile 'https://localhost:4433/redirectme/working' redirectfile msgtest 'libcurl has forbidden access in last request to' 'http resource' -if grep -q -E -- 'Protocol "?http"? not supported or disabled in libcurl' curloutput; then - msgpass -else - cat curloutput - msgfail -fi - - +testsuccess --nomsg grep -q -E -- 'Protocol "?http"? 
not supported or disabled in libcurl' rootdir/tmp/testfailure.output diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size index 9711c293a..22de13ea5 100755 --- a/test/integration/test-apt-update-expected-size +++ b/test/integration/test-apt-update-expected-size @@ -10,35 +10,61 @@ configarchitecture "i386" insertpackage 'unstable' 'apt' 'all' '1.0' setupaptarchive --no-update +cp -a aptarchive/dists aptarchive/dists.good + +test_inreleasetoobig() { + # make InRelease really big to trigger fallback + dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null + touch -d '+1hour' aptarchive/dists/unstable/InRelease + testsuccess aptget update -o Apt::Get::List-Cleanup=0 -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 + msgtest 'Check that the max write warning is triggered' + cp rootdir/tmp/testsuccess.output update.output + testsuccess --nomsg grep -q 'Writing more data than expected' update.output + rm -f update.output + # ensure the failed InRelease file got renamed + testsuccess ls rootdir/var/lib/apt/lists/partial/*InRelease.FAILED +} + +test_packagestoobig() { + # append junk at the end of the Packages.gz/Packages + SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" + find aptarchive/dists -name 'Packages*' | while read pkg; do + echo "1234567890" >> "$pkg" + touch -d '+1hour' "$pkg" + done + NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" + testfailuremsg "W: Failed to fetch ${1}/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) +E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -o Debug::pkgAcquire::Worker=0 +} + +methodtest() { + msgmsg 'Test with' "$1" 'and clean start' + rm -rf rootdir/var/lib/apt/lists rootdir/var/lib/apt/lists.good + # normal update works fine + testsuccess aptget update + mv rootdir/var/lib/apt/lists rootdir/var/lib/apt/lists.good + + # starting fresh works + test_inreleasetoobig "$1" + rm -rf aptarchive/dists rootdir/var/lib/apt/lists + cp -a aptarchive/dists.good aptarchive/dists + test_packagestoobig "$1" + rm -rf aptarchive/dists rootdir/var/lib/apt/lists + cp -a aptarchive/dists.good aptarchive/dists + + msgmsg 'Test with' "$1" 'and existing old data' + cp -a rootdir/var/lib/apt/lists.good rootdir/var/lib/apt/lists + test_inreleasetoobig "$1" + rm -rf aptarchive/dists rootdir/var/lib/apt/lists + cp -a rootdir/var/lib/apt/lists.good rootdir/var/lib/apt/lists + cp -a aptarchive/dists.good aptarchive/dists + test_packagestoobig "$1" + rm -rf aptarchive/dists + cp -a aptarchive/dists.good aptarchive/dists +} + changetowebserver +methodtest 'http://localhost:8080' -# normal update works fine -testsuccess aptget update - -# make InRelease really big to trigger fallback -mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good -dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null -touch -d '+1hour' aptarchive/dists/unstable/InRelease -testsuccess aptget update -o Apt::Get::List-Cleanup=0 -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 -msgtest 'Check that the max write warning is triggered' -if grep -q "Writing more data than expected" rootdir/tmp/testsuccess.output; then - msgpass -else - cat rootdir/tmp/testsuccess.output - msgfail -fi -# ensure the failed InRelease file got renamed -testsuccess ls 
rootdir/var/lib/apt/lists/partial/*InRelease.FAILED -mv aptarchive/dists/unstable/InRelease.good aptarchive/dists/unstable/InRelease - - -# append junk at the end of the Packages.gz/Packages -SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" -find aptarchive -name 'Packages*' | while read pkg; do - echo "1234567890" >> "$pkg" -done -NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" -rm -f rootdir/var/lib/apt/lists/localhost* -testfailureequal "W: Failed to fetch http://localhost:8080/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) - -E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq +changetohttpswebserver +methodtest 'https://localhost:4433' diff --git a/test/integration/test-bug-602412-dequote-redirect b/test/integration/test-bug-602412-dequote-redirect index d3573a79a..384c8b113 100755 --- a/test/integration/test-bug-602412-dequote-redirect +++ b/test/integration/test-bug-602412-dequote-redirect @@ -8,7 +8,7 @@ configarchitecture 'amd64' buildsimplenativepackage 'unrelated' 'all' '0.5~squeeze1' 'unstable' -setupaptarchive +setupaptarchive --no-update changetowebserver -o aptwebserver::redirect::replace::/pool/=/newpool/ \ -o aptwebserver::redirect::replace::/dists/=/newdists/ mv aptarchive/pool aptarchive/newpool mv aptarchive/dists aptarchive/newdists testrun() { + msgmsg 'Test redirection works in method boundaries' "$1" msgtest 'Test redirection works in' 'apt-get update' testsuccess --nomsg aptget update -- cgit v1.2.3-70-g09d2 From 9224ce3d4d1ea0428a70e75134998e08aa45b1e6 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 30 Mar 2015 20:47:13 +0200 Subject: calculate only expected hashes in methods Methods get told which hashes are expected by the acquire system, which means we can use this list to restrict what we calculate in the methods, as any extra we calculate is wasted effort since we can't compare it with anything anyway. Adding support for a new hash algorithm is therefore 'free' now, and if an algorithm is no longer provided in a repository for a file, we automatically stop calculating it. In practice this results in a speed-up in Debian as we don't have SHA512 here (so far), so we practically stop calculating it.
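A minimal sketch of the resulting usage pattern in a method (mirroring the cdrom.cc and file.cc hunks below; not part of the commit itself):

   // Sketch: compute only the digests the repository actually publishes.
   // Itm->ExpectedHashes comes from the acquire system; if it lists only
   // SHA256, then MD5/SHA1/SHA512 become no-ops inside Hashes.
   Hashes Hash(Itm->ExpectedHashes);
   FileFd Fd(Res.Filename, FileFd::ReadOnly);
   Hash.AddFD(Fd);
   Res.TakeHashes(Hash);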
--- apt-pkg/contrib/hashes.cc | 63 +++++++++++++++++++++++++++++++++----------- apt-pkg/contrib/hashes.h | 21 ++++++++++++--- ftparchive/cachedb.cc | 4 +-- ftparchive/writer.cc | 4 +-- methods/cdrom.cc | 2 +- methods/copy.cc | 10 +++---- methods/file.cc | 2 +- methods/ftp.cc | 2 +- methods/gzip.cc | 2 +- methods/http.cc | 14 +++++----- methods/http.h | 2 +- methods/https.cc | 2 +- methods/https.h | 2 +- methods/rred.cc | 2 +- methods/rsh.cc | 2 +- methods/server.cc | 2 +- methods/server.h | 2 +- test/libapt/hashsums_test.cc | 24 +++++++++++++++++ 18 files changed, 116 insertions(+), 46 deletions(-) (limited to 'methods/https.h') diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc index 6e7080bc9..953465091 100644 --- a/apt-pkg/contrib/hashes.cc +++ b/apt-pkg/contrib/hashes.cc @@ -250,28 +250,34 @@ bool HashStringList::operator!=(HashStringList const &other) const class PrivateHashes { public: unsigned long long FileSize; + unsigned int CalcHashes; - PrivateHashes() : FileSize(0) {} + PrivateHashes(unsigned int const CalcHashes) : FileSize(0), CalcHashes(CalcHashes) {} }; /*}}}*/ // Hashes::Add* - Add the contents of data or FD /*{{{*/ -bool Hashes::Add(const unsigned char * const Data,unsigned long long const Size, unsigned int const Hashes) +bool Hashes::Add(const unsigned char * const Data, unsigned long long const Size) { bool Res = true; APT_IGNORE_DEPRECATED_PUSH - if ((Hashes & MD5SUM) == MD5SUM) + if ((d->CalcHashes & MD5SUM) == MD5SUM) Res &= MD5.Add(Data, Size); - if ((Hashes & SHA1SUM) == SHA1SUM) + if ((d->CalcHashes & SHA1SUM) == SHA1SUM) Res &= SHA1.Add(Data, Size); - if ((Hashes & SHA256SUM) == SHA256SUM) + if ((d->CalcHashes & SHA256SUM) == SHA256SUM) Res &= SHA256.Add(Data, Size); - if ((Hashes & SHA512SUM) == SHA512SUM) + if ((d->CalcHashes & SHA512SUM) == SHA512SUM) Res &= SHA512.Add(Data, Size); APT_IGNORE_DEPRECATED_POP d->FileSize += Size; return Res; } -bool Hashes::AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes) +bool Hashes::Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes) +{ + d->CalcHashes = Hashes; + return Add(Data, Size); +} +bool Hashes::AddFD(int const Fd,unsigned long long Size) { unsigned char Buf[64*64]; bool const ToEOF = (Size == UntilEOF); @@ -285,12 +291,17 @@ bool Hashes::AddFD(int const Fd,unsigned long long Size, unsigned int const Hash if (ToEOF && Res == 0) // EOF break; Size -= Res; - if (Add(Buf, Res, Hashes) == false) + if (Add(Buf, Res) == false) return false; } return true; } -bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes) +bool Hashes::AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes) +{ + d->CalcHashes = Hashes; + return AddFD(Fd, Size); +} +bool Hashes::AddFD(FileFd &Fd,unsigned long long Size) { unsigned char Buf[64*64]; bool const ToEOF = (Size == 0); @@ -309,20 +320,29 @@ bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes else if (a == 0) // EOF break; Size -= a; - if (Add(Buf, a, Hashes) == false) + if (Add(Buf, a) == false) return false; } return true; +} +bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes) +{ + d->CalcHashes = Hashes; + return AddFD(Fd, Size); } /*}}}*/ HashStringList Hashes::GetHashStringList() { HashStringList hashes; APT_IGNORE_DEPRECATED_PUSH - hashes.push_back(HashString("MD5Sum", MD5.Result().Value())); - hashes.push_back(HashString("SHA1", SHA1.Result().Value())); - hashes.push_back(HashString("SHA256", 
SHA256.Result().Value())); - hashes.push_back(HashString("SHA512", SHA512.Result().Value())); + if ((d->CalcHashes & MD5SUM) == MD5SUM) + hashes.push_back(HashString("MD5Sum", MD5.Result().Value())); + if ((d->CalcHashes & SHA1SUM) == SHA1SUM) + hashes.push_back(HashString("SHA1", SHA1.Result().Value())); + if ((d->CalcHashes & SHA256SUM) == SHA256SUM) + hashes.push_back(HashString("SHA256", SHA256.Result().Value())); + if ((d->CalcHashes & SHA512SUM) == SHA512SUM) + hashes.push_back(HashString("SHA512", SHA512.Result().Value())); APT_IGNORE_DEPRECATED_POP std::string SizeStr; strprintf(SizeStr, "%llu", d->FileSize); @@ -330,6 +350,19 @@ APT_IGNORE_DEPRECATED_POP return hashes; } APT_IGNORE_DEPRECATED_PUSH -Hashes::Hashes() { d = new PrivateHashes(); } +Hashes::Hashes() { d = new PrivateHashes(~0); } +Hashes::Hashes(unsigned int const Hashes) { d = new PrivateHashes(Hashes); } +Hashes::Hashes(HashStringList const &Hashes) { + unsigned int calcHashes = Hashes.usable() ? 0 : ~0; + if (Hashes.find("MD5Sum") != NULL) + calcHashes |= MD5SUM; + if (Hashes.find("SHA1") != NULL) + calcHashes |= SHA1SUM; + if (Hashes.find("SHA256") != NULL) + calcHashes |= SHA256SUM; + if (Hashes.find("SHA512") != NULL) + calcHashes |= SHA512SUM; + d = new PrivateHashes(calcHashes); +} Hashes::~Hashes() { delete d; } APT_IGNORE_DEPRECATED_POP diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h index 154862457..ac13c8ace 100644 --- a/apt-pkg/contrib/hashes.h +++ b/apt-pkg/contrib/hashes.h @@ -178,7 +178,8 @@ class Hashes static const int UntilEOF = 0; - bool Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes = ~0); + bool Add(const unsigned char * const Data, unsigned long long const Size); + APT_DEPRECATED bool Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes); inline bool Add(const char * const Data) {return Add((unsigned char const * const)Data,strlen(Data));}; inline bool Add(const unsigned char * const Beg,const unsigned char * const End) @@ -186,13 +187,24 @@ class Hashes enum SupportedHashes { MD5SUM = (1 << 0), SHA1SUM = (1 << 1), SHA256SUM = (1 << 2), SHA512SUM = (1 << 3) }; - bool AddFD(int const Fd,unsigned long long Size = 0, unsigned int const Hashes = ~0); - bool AddFD(FileFd &Fd,unsigned long long Size = 0, unsigned int const Hashes = ~0); + bool AddFD(int const Fd,unsigned long long Size = 0); + APT_DEPRECATED bool AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes); + bool AddFD(FileFd &Fd,unsigned long long Size = 0); + APT_DEPRECATED bool AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes); HashStringList GetHashStringList(); APT_IGNORE_DEPRECATED_PUSH + /** create a Hashes object to calculate all supported hashes + * + * If ALL is too much, you can limit which Hashes are calculated + * with the following other constructors which mention explicitly + * which hashes to generate. 
*/ Hashes(); + /** @param Hashes bitflag composed of #SupportedHashes */ + Hashes(unsigned int const Hashes); + /** @param Hashes is a list of hashes */ + Hashes(HashStringList const &Hashes); virtual ~Hashes(); APT_IGNORE_DEPRECATED_POP @@ -208,15 +220,16 @@ APT_IGNORE_DEPRECATED_POP } public: +APT_IGNORE_DEPRECATED_PUSH APT_DEPRECATED bool AddFD(int const Fd, unsigned long long Size, bool const addMD5, bool const addSHA1, bool const addSHA256, bool const addSHA512) { return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512)); }; - APT_DEPRECATED bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5, bool const addSHA1, bool const addSHA256, bool const addSHA512) { return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512)); }; +APT_IGNORE_DEPRECATED_POP }; #endif diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index 1dc268594..cc3527ea4 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -441,8 +441,8 @@ bool CacheDB::GetHashes(bool const GenOnly, unsigned int const DoHashes) if (OpenFile() == false) return false; - Hashes hashes; - if (Fd->Seek(0) == false || hashes.AddFD(*Fd, CurStat.FileSize, FlHashes) == false) + Hashes hashes(FlHashes); + if (Fd->Seek(0) == false || hashes.AddFD(*Fd, CurStat.FileSize) == false) return false; HashStringList hl = hashes.GetHashStringList(); diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index db68c21f0..593278590 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -1075,8 +1075,8 @@ bool ReleaseWriter::DoPackage(string FileName) CheckSums[NewFileName].size = fd.Size(); - Hashes hs; - hs.AddFD(fd, 0, DoHashes); + Hashes hs(DoHashes); + hs.AddFD(fd); CheckSums[NewFileName].Hashes = hs.GetHashStringList(); fd.Close(); diff --git a/methods/cdrom.cc b/methods/cdrom.cc index 74e2ecc6b..10cb29f66 100644 --- a/methods/cdrom.cc +++ b/methods/cdrom.cc @@ -266,7 +266,7 @@ bool CDROMMethod::Fetch(FetchItem *Itm) Res.LastModified = Buf.st_mtime; Res.Size = Buf.st_size; - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); FileFd Fd(Res.Filename, FileFd::ReadOnly); Hash.AddFD(Fd); Res.TakeHashes(Hash); diff --git a/methods/copy.cc b/methods/copy.cc index f3cd84215..a8e289df5 100644 --- a/methods/copy.cc +++ b/methods/copy.cc @@ -28,16 +28,16 @@ class CopyMethod : public pkgAcqMethod { virtual bool Fetch(FetchItem *Itm); - void CalculateHashes(FetchResult &Res); + void CalculateHashes(FetchItem const * const Itm, FetchResult &Res); public: CopyMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig) {}; }; -void CopyMethod::CalculateHashes(FetchResult &Res) +void CopyMethod::CalculateHashes(FetchItem const * const Itm, FetchResult &Res) { - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); FileFd::CompressMode CompressMode = FileFd::None; if (_config->FindB("Acquire::GzipIndexes", false) == true) CompressMode = FileFd::Extension; @@ -71,7 +71,7 @@ bool CopyMethod::Fetch(FetchItem *Itm) // just calc the hashes if the source and destination are identical if (File == Itm->DestFile) { - CalculateHashes(Res); + CalculateHashes(Itm, Res); URIDone(Res); return true; } @@ -104,7 +104,7 @@ bool CopyMethod::Fetch(FetchItem *Itm) if (utimes(Res.Filename.c_str(), times) != 0) return _error->Errno("utimes",_("Failed to set modification time")); - CalculateHashes(Res); + CalculateHashes(Itm, Res); URIDone(Res); return true; diff --git a/methods/file.cc b/methods/file.cc index 5d9d7b951..043ab04b8 100644 --- a/methods/file.cc +++ b/methods/file.cc @@ -87,7 +87,7 @@ bool 
FileMethod::Fetch(FetchItem *Itm) if (Res.Filename.empty() == true) return _error->Error(_("File not found")); - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); FileFd Fd(Res.Filename, FileFd::ReadOnly); Hash.AddFD(Fd); Res.TakeHashes(Hash); diff --git a/methods/ftp.cc b/methods/ftp.cc index 7764acf6a..92d8573f1 100644 --- a/methods/ftp.cc +++ b/methods/ftp.cc @@ -1064,7 +1064,7 @@ bool FtpMethod::Fetch(FetchItem *Itm) } // Open the file - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); { FileFd Fd(Itm->DestFile,FileFd::WriteAny); if (_error->PendingError() == true) diff --git a/methods/gzip.cc b/methods/gzip.cc index 387c05f2e..65519633c 100644 --- a/methods/gzip.cc +++ b/methods/gzip.cc @@ -91,7 +91,7 @@ bool GzipMethod::Fetch(FetchItem *Itm) return false; // Read data from source, generate checksums and write - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); bool Failed = false; while (1) { diff --git a/methods/http.cc b/methods/http.cc index 947002cc6..e4773b0e2 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -64,8 +64,8 @@ const unsigned int CircleBuf::BW_HZ=10; // CircleBuf::CircleBuf - Circular input buffer /*{{{*/ // --------------------------------------------------------------------- /* */ -CircleBuf::CircleBuf(unsigned long long Size) - : Size(Size), Hash(0), TotalWriten(0) +CircleBuf::CircleBuf(unsigned long long Size) + : Size(Size), Hash(NULL), TotalWriten(0) { Buf = new unsigned char[Size]; Reset(); @@ -84,10 +84,10 @@ void CircleBuf::Reset() TotalWriten = 0; MaxGet = (unsigned long long)-1; OutQueue = string(); - if (Hash != 0) + if (Hash != NULL) { delete Hash; - Hash = new Hashes; + Hash = NULL; } } /*}}}*/ @@ -222,7 +222,7 @@ bool CircleBuf::Write(int Fd) TotalWriten += Res; - if (Hash != 0) + if (Hash != NULL) Hash->Add(Buf + (OutP%Size),Res); OutP += Res; @@ -484,10 +484,10 @@ APT_PURE bool HttpServerState::IsOpen() /*{{{*/ return (ServerFd != -1); } /*}}}*/ -bool HttpServerState::InitHashes(FileFd &File) /*{{{*/ +bool HttpServerState::InitHashes(FileFd &File, HashStringList const &ExpectedHashes)/*{{{*/ { delete In.Hash; - In.Hash = new Hashes; + In.Hash = new Hashes(ExpectedHashes); // Set the expected size and read file for the hashes File.Truncate(StartPos); diff --git a/methods/http.h b/methods/http.h index 40a88a7be..6dc872659 100644 --- a/methods/http.h +++ b/methods/http.h @@ -111,7 +111,7 @@ struct HttpServerState: public ServerState virtual bool Open(); virtual bool IsOpen(); virtual bool Close(); - virtual bool InitHashes(FileFd &File); + virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes); virtual Hashes * GetHashes(); virtual bool Die(FileFd &File); virtual bool Flush(FileFd * const File); diff --git a/methods/https.cc b/methods/https.cc index 70f6a1046..81903b239 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -443,7 +443,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) Res.LastModified = resultStat.st_mtime; // take hashes - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); FileFd Fd(Res.Filename, FileFd::ReadOnly); Hash.AddFD(Fd); Res.TakeHashes(Hash); diff --git a/methods/https.h b/methods/https.h index 4cc48fc34..dc0ff3322 100644 --- a/methods/https.h +++ b/methods/https.h @@ -42,7 +42,7 @@ class HttpsServerState : public ServerState virtual bool Open() { return false; } virtual bool IsOpen() { return false; } virtual bool Close() { return false; } - virtual bool InitHashes(FileFd &/*File*/) { return false; } + virtual bool InitHashes(FileFd &/*File*/, HashStringList const &/*ExpectedHashes*/) { return false; } 
virtual Hashes * GetHashes() { return NULL; } virtual bool Die(FileFd &/*File*/) { return false; } virtual bool Flush(FileFd * const /*File*/) { return false; } diff --git a/methods/rred.cc b/methods/rred.cc index 774b58a40..554ac99b4 100644 --- a/methods/rred.cc +++ b/methods/rred.cc @@ -581,7 +581,7 @@ class RredMethod : public pkgAcqMethod { FILE *inp = fopen(Path.c_str(), "r"); FILE *out = fopen(Itm->DestFile.c_str(), "w"); - Hashes hash; + Hashes hash(Itm->ExpectedHashes); patch.apply_against_file(out, inp, &hash); diff --git a/methods/rsh.cc b/methods/rsh.cc index 0e949160b..52349c61c 100644 --- a/methods/rsh.cc +++ b/methods/rsh.cc @@ -477,7 +477,7 @@ bool RSHMethod::Fetch(FetchItem *Itm) } // Open the file - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); { FileFd Fd(Itm->DestFile,FileFd::WriteAny); if (_error->PendingError() == true) diff --git a/methods/server.cc b/methods/server.cc index 91ec824d1..e403f1071 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -357,7 +357,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res) FailFd = File->Fd(); FailTime = Server->Date; - if (Server->InitHashes(*File) == false) + if (Server->InitHashes(*File, Queue->ExpectedHashes) == false) { _error->Errno("read",_("Problem hashing file")); return ERROR_NOT_FROM_SERVER; diff --git a/methods/server.h b/methods/server.h index b974ec89a..45622dd34 100644 --- a/methods/server.h +++ b/methods/server.h @@ -85,7 +85,7 @@ struct ServerState virtual bool Open() = 0; virtual bool IsOpen() = 0; virtual bool Close() = 0; - virtual bool InitHashes(FileFd &File) = 0; + virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes) = 0; virtual Hashes * GetHashes() = 0; virtual bool Die(FileFd &File) = 0; virtual bool Flush(FileFd * const File) = 0; diff --git a/test/libapt/hashsums_test.cc b/test/libapt/hashsums_test.cc index a19a0befd..edcd8a11a 100644 --- a/test/libapt/hashsums_test.cc +++ b/test/libapt/hashsums_test.cc @@ -193,6 +193,30 @@ TEST(HashSumsTest, FileBased) EXPECT_EQ(FileSize, list.find("Checksum-FileSize")->HashValue()); } fd.Seek(0); + { + Hashes hashes(Hashes::MD5SUM | Hashes::SHA512SUM); + hashes.AddFD(fd); + HashStringList list = hashes.GetHashStringList(); + EXPECT_FALSE(list.empty()); + EXPECT_EQ(3, list.size()); + EXPECT_EQ(md5.Value(), list.find("MD5Sum")->HashValue()); + EXPECT_EQ(NULL, list.find("SHA1")); + EXPECT_EQ(NULL, list.find("SHA256")); + EXPECT_EQ(sha512.Value(), list.find("SHA512")->HashValue()); + EXPECT_EQ(FileSize, list.find("Checksum-FileSize")->HashValue()); + fd.Seek(0); + Hashes hashes2(list); + hashes2.AddFD(fd); + list = hashes2.GetHashStringList(); + EXPECT_FALSE(list.empty()); + EXPECT_EQ(3, list.size()); + EXPECT_EQ(md5.Value(), list.find("MD5Sum")->HashValue()); + EXPECT_EQ(NULL, list.find("SHA1")); + EXPECT_EQ(NULL, list.find("SHA256")); + EXPECT_EQ(sha512.Value(), list.find("SHA512")->HashValue()); + EXPECT_EQ(FileSize, list.find("Checksum-FileSize")->HashValue()); + } + fd.Seek(0); { MD5Summation MD5; MD5.AddFD(fd.Fd()); -- cgit v1.2.3-70-g09d2 From 34faa8f7ae2526f46cd1f84bb6962ad06d841e5e Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sat, 11 Apr 2015 10:23:52 +0200 Subject: calculate hashes while downloading in https MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We do this in HTTP already to give the CPU some exercise while the disk is heavily spinning (or flashing?) 
to store the data, avoiding the need to reread the entire file later on to calculate the hashes – which happens outside of the eyes of progress reporting, so you might end up with a bunch of https workers 'stuck' at 100% while they were busy calculating hashes. This is a bummer for everyone using apt as a connection speedtest, as the https method works slower now (not really, it just isn't reporting done too early anymore). --- methods/http.cc | 8 +++--- methods/http.h | 2 +- methods/https.cc | 38 ++++++++++++++++++++--------- methods/https.h | 6 +++-- methods/server.cc | 8 +++++- methods/server.h | 3 ++- test/integration/test-apt-download-progress | 6 +++-- 7 files changed, 47 insertions(+), 24 deletions(-) (limited to 'methods/https.h') diff --git a/methods/http.cc b/methods/http.cc index e4773b0e2..af3d5ccb6 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -484,16 +484,14 @@ APT_PURE bool HttpServerState::IsOpen() /*{{{*/ return (ServerFd != -1); } /*}}}*/ -bool HttpServerState::InitHashes(FileFd &File, HashStringList const &ExpectedHashes)/*{{{*/ +bool HttpServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/ { delete In.Hash; In.Hash = new Hashes(ExpectedHashes); - - // Set the expected size and read file for the hashes - File.Truncate(StartPos); - return In.Hash->AddFD(File, StartPos); + return true; } /*}}}*/ + APT_PURE Hashes * HttpServerState::GetHashes() /*{{{*/ { return In.Hash; diff --git a/methods/http.h b/methods/http.h index 6dc872659..e73871931 100644 --- a/methods/http.h +++ b/methods/http.h @@ -111,7 +111,7 @@ struct HttpServerState: public ServerState virtual bool Open(); virtual bool IsOpen(); virtual bool Close(); - virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes); + virtual bool InitHashes(HashStringList const &ExpectedHashes); virtual Hashes * GetHashes(); virtual bool Die(FileFd &File); virtual bool Flush(FileFd * const File); diff --git a/methods/https.cc b/methods/https.cc index 81903b239..c6b75d9ad 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -72,18 +72,18 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) else me->https->Server->StartPos = 0; - me->https->File->Truncate(me->https->Server->StartPos); - me->https->File->Seek(me->https->Server->StartPos); - me->Res->LastModified = me->https->Server->Date; me->Res->Size = me->https->Server->Size; me->Res->ResumePoint = me->https->Server->StartPos; // we expect valid data, so tell our caller we get the file now - if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 && - me->https->Server->JunkSize == 0 && - me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint) - me->https->URIStart(*me->Res); + if (me->https->Server->Result >= 200 && me->https->Server->Result < 300) + { + if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint) + me->https->URIStart(*me->Res); + if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false) + return 0; + } } else if (me->https->Server->HeaderLine(line) == false) return 0; @@ -116,16 +116,31 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) } } + if (me->Server->GetHashes()->Add((unsigned char const * const)buffer, buffer_size) == false) + return 0; + return buffer_size; } // HttpsServerState::HttpsServerState - Constructor /*{{{*/ -HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner) +HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * 
Owner) : ServerState(Srv, Owner), Hash(NULL) { TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut); Reset(); } /*}}}*/ +bool HttpsServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/ +{ + delete Hash; + Hash = new Hashes(ExpectedHashes); + return true; +} + /*}}}*/ +APT_PURE Hashes * HttpsServerState::GetHashes() /*{{{*/ +{ + return Hash; +} + /*}}}*/ void HttpsMethod::SetupProxy() /*{{{*/ { @@ -365,6 +380,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm) // go for it - if the file exists, append on it File = new FileFd(Itm->DestFile, FileFd::WriteAny); Server = CreateServerState(Itm->Uri); + if (Server->InitHashes(Itm->ExpectedHashes) == false) + return false; // keep apt updated Res.Filename = Itm->DestFile; @@ -443,10 +460,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) Res.LastModified = resultStat.st_mtime; // take hashes - Hashes Hash(Itm->ExpectedHashes); - FileFd Fd(Res.Filename, FileFd::ReadOnly); - Hash.AddFD(Fd); - Res.TakeHashes(Hash); + Res.TakeHashes(*(Server->GetHashes())); // keep apt updated URIDone(Res); diff --git a/methods/https.h b/methods/https.h index dc0ff3322..6e32e8d3d 100644 --- a/methods/https.h +++ b/methods/https.h @@ -29,6 +29,8 @@ class FileFd; class HttpsServerState : public ServerState { + Hashes * Hash; + protected: virtual bool ReadHeaderLines(std::string &/*Data*/) { return false; } virtual bool LoadNextResponse(bool const /*ToFile*/, FileFd * const /*File*/) { return false; } @@ -42,8 +44,8 @@ class HttpsServerState : public ServerState virtual bool Open() { return false; } virtual bool IsOpen() { return false; } virtual bool Close() { return false; } - virtual bool InitHashes(FileFd &/*File*/, HashStringList const &/*ExpectedHashes*/) { return false; } - virtual Hashes * GetHashes() { return NULL; } + virtual bool InitHashes(HashStringList const &ExpectedHashes); + virtual Hashes * GetHashes(); virtual bool Die(FileFd &/*File*/) { return false; } virtual bool Flush(FileFd * const /*File*/) { return false; } virtual bool Go(bool /*ToFile*/, FileFd * const /*File*/) { return false; } diff --git a/methods/server.cc b/methods/server.cc index e403f1071..2116926b0 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -235,6 +235,12 @@ ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOu Reset(); } /*}}}*/ +bool ServerState::AddPartialFileToHashes(FileFd &File) /*{{{*/ +{ + File.Truncate(StartPos); + return GetHashes()->AddFD(File, StartPos); +} + /*}}}*/ bool ServerMethod::Configuration(string Message) /*{{{*/ { @@ -357,7 +363,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res) FailFd = File->Fd(); FailTime = Server->Date; - if (Server->InitHashes(*File, Queue->ExpectedHashes) == false) + if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false) { _error->Errno("read",_("Problem hashing file")); return ERROR_NOT_FROM_SERVER; diff --git a/methods/server.h b/methods/server.h index 45622dd34..1b1f754a3 100644 --- a/methods/server.h +++ b/methods/server.h @@ -72,6 +72,7 @@ struct ServerState }; /** \brief Get the headers before the data */ RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri); + bool AddPartialFileToHashes(FileFd &File); bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; @@ -85,7 +86,7 @@ struct ServerState virtual bool Open() = 0; virtual bool IsOpen() = 0; virtual bool Close() = 0; - 
virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes) = 0; + virtual bool InitHashes(HashStringList const &ExpectedHashes) = 0; virtual Hashes * GetHashes() = 0; virtual bool Die(FileFd &File) = 0; virtual bool Flush(FileFd * const File) = 0; diff --git a/test/integration/test-apt-download-progress b/test/integration/test-apt-download-progress index b2c9effe6..07c5e09c5 100755 --- a/test/integration/test-apt-download-progress +++ b/test/integration/test-apt-download-progress @@ -26,14 +26,16 @@ assertprogress() { TESTFILE=testfile.big testsuccess dd if=/dev/zero of=./aptarchive/$TESTFILE bs=800k count=1 +OPT='-o APT::Status-Fd=3 -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 -o Debug::Acquire::https=1' + msgtest 'download progress works via' 'http' exec 3> apt-progress.log -testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE -o APT::Status-Fd=3 -o Acquire::http::Dl-Limit=800 +testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE $OPT -o Acquire::http::Dl-Limit=800 assertprogress apt-progress.log msgtest 'download progress works via' 'https' exec 3> apt-progress.log -testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE -o APT::Status-Fd=3 -o Acquire::https::Dl-Limit=800 +testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE $OPT -o Acquire::https::Dl-Limit=800 assertprogress apt-progress.log # cleanup -- cgit v1.2.3-70-g09d2 From dcbb364fc69e1108b3fea3adb12a7ba83d9af467 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Tue, 12 May 2015 00:30:16 +0200 Subject: detect 416 complete file in partial by expected hash If we have the expected hashes, we can use them to check whether the file in partial/ that we got a 416 for is the expected file. We previously detected this with a same-size check, but not every server sends a good Content-Range header with a 416 response. --- methods/https.cc | 37 +++++++++++++++++++++------ methods/https.h | 1 + methods/server.cc | 19 +++++++++++--- test/integration/framework | 2 +- test/integration/test-apt-update-transactions | 1 + test/integration/test-partial-file-support | 10 ++++++-- test/interactive-helper/aptwebserver.cc | 9 ++++--- 7 files changed, 62 insertions(+), 17 deletions(-) (limited to 'methods/https.h') diff --git a/methods/https.cc b/methods/https.cc index c6b75d9ad..712e9ee73 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -40,7 +40,9 @@ using namespace std; struct APT_HIDDEN CURLUserPointer { HttpsMethod * const https; HttpsMethod::FetchResult * const Res; - CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res) : https(https), Res(Res) {} + HttpsMethod::FetchItem const * const Itm; + CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res, + HttpsMethod::FetchItem const * const Itm) : https(https), Res(Res), Itm(Itm) {} }; size_t @@ -61,13 +63,32 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0) ; - else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize()) + else if (me->https->Server->Result == 416) { + bool partialHit = false; + if (me->Itm->ExpectedHashes.usable() == true) + { + Hashes resultHashes(me->Itm->ExpectedHashes); + FileFd file(me->Itm->DestFile, FileFd::ReadOnly); + me->https->Server->Size = file.FileSize(); + me->https->Server->Date = file.ModificationTime(); + resultHashes.AddFD(file); + HashStringList const hashList = resultHashes.GetHashStringList(); + partialHit = (me->Itm->ExpectedHashes == hashList); + } + else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize()) + partialHit = true; + + if (partialHit == true) + { + me->https->Server->Result = 200; + me->https->Server->StartPos = me->https->Server->Size; + // the actual size is not important for https as curl will deal with it + // by itself and e.g. 
diff --git a/methods/https.cc b/methods/https.cc
index c6b75d9ad..712e9ee73 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -40,7 +40,9 @@ using namespace std;
 struct APT_HIDDEN CURLUserPointer {
    HttpsMethod * const https;
    HttpsMethod::FetchResult * const Res;
-   CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res) : https(https), Res(Res) {}
+   HttpsMethod::FetchItem const * const Itm;
+   CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res,
+	 HttpsMethod::FetchItem const * const Itm) : https(https), Res(Res), Itm(Itm) {}
 };
 
 size_t
@@ -61,13 +63,32 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
 {
       if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0)
	 ;
-      else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize())
+      else if (me->https->Server->Result == 416)
       {
-	 me->https->Server->Result = 200;
-	 me->https->Server->StartPos = me->https->Server->Size;
-	 // the actual size is not important for https as curl will deal with it
-	 // by itself and e.g. doesn't bother us with transport-encoding…
-	 me->https->Server->JunkSize = std::numeric_limits<unsigned long long>::max();
+	 bool partialHit = false;
+	 if (me->Itm->ExpectedHashes.usable() == true)
+	 {
+	    Hashes resultHashes(me->Itm->ExpectedHashes);
+	    FileFd file(me->Itm->DestFile, FileFd::ReadOnly);
+	    me->https->Server->Size = file.FileSize();
+	    me->https->Server->Date = file.ModificationTime();
+	    resultHashes.AddFD(file);
+	    HashStringList const hashList = resultHashes.GetHashStringList();
+	    partialHit = (me->Itm->ExpectedHashes == hashList);
+	 }
+	 else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize())
+	    partialHit = true;
+
+	 if (partialHit == true)
+	 {
+	    me->https->Server->Result = 200;
+	    me->https->Server->StartPos = me->https->Server->Size;
+	    // the actual size is not important for https as curl will deal with it
+	    // by itself and e.g. doesn't bother us with transport-encoding…
+	    me->https->Server->JunkSize = std::numeric_limits<unsigned long long>::max();
+	 }
+	 else
+	    me->https->Server->StartPos = 0;
       }
       else
	 me->https->Server->StartPos = 0;
@@ -221,7 +242,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc"));
 
    FetchResult Res;
-   CURLUserPointer userp(this, &Res);
+   CURLUserPointer userp(this, &Res, Itm);
    // callbacks
    curl_easy_setopt(curl, CURLOPT_URL, static_cast<string>(Uri).c_str());
    curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header);
diff --git a/methods/https.h b/methods/https.h
index 6e32e8d3d..57fc292ee 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -79,6 +79,7 @@ class HttpsMethod : public ServerMethod
    virtual bool Configuration(std::string Message);
    virtual ServerState * CreateServerState(URI uri);
    using pkgAcqMethod::FetchResult;
+   using pkgAcqMethod::FetchItem;
 
    HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL)
    {
diff --git a/methods/server.cc b/methods/server.cc
index 2116926b0..bd01c3e98 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -269,7 +269,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
       Res.LastModified = Queue->LastModified;
       return IMS_HIT;
    }
-   
+
    /* Redirect
    *
    * Note that it is only OK for us to treat all redirection the same
@@ -314,7 +314,20 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
       struct stat SBuf;
       if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
       {
-	 if ((unsigned long long)SBuf.st_size == Server->Size)
+	 bool partialHit = false;
+	 if (Queue->ExpectedHashes.usable() == true)
+	 {
+	    Hashes resultHashes(Queue->ExpectedHashes);
+	    FileFd file(Queue->DestFile, FileFd::ReadOnly);
+	    Server->Size = file.FileSize();
+	    Server->Date = file.ModificationTime();
+	    resultHashes.AddFD(file);
+	    HashStringList const hashList = resultHashes.GetHashStringList();
+	    partialHit = (Queue->ExpectedHashes == hashList);
+	 }
+	 else if ((unsigned long long)SBuf.st_size == Server->Size)
+	    partialHit = true;
+	 if (partialHit == true)
	 {
	    // the file is completely downloaded, but was not moved
	    if (Server->HaveContent == true)
@@ -351,7 +364,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
    // This is some sort of 2xx 'data follows' reply
    Res.LastModified = Server->Date;
    Res.Size = Server->Size;
-   
+
    // Open the file
    delete File;
    File = new FileFd(Queue->DestFile,FileFd::WriteAny);
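The same check appears twice above because the curl-based https method does
not go through ServerMethod::DealWithHeaders: it parses response headers in
its own callback registered with libcurl. For context, a reduced sketch of
that callback contract; the struct and names here are illustrative (apt's
real callback is HttpsMethod::parse_header, and I am assuming the
CURLUserPointer is what gets handed in as userdata):

    #include <curl/curl.h>

    struct UserData { /* method, result and fetch-item pointers */ };

    // libcurl invokes this once per response-header line; returning
    // anything other than size*nmemb aborts the transfer
    static size_t header_cb(void *buffer, size_t size, size_t nmemb, void *userp)
    {
       UserData * const me = static_cast<UserData *>(userp);
       (void) me;      // inspect the status line / headers here
       (void) buffer;
       return size * nmemb;
    }

    // wiring, analogous to HttpsMethod::Fetch:
    //    curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, header_cb);
    //    curl_easy_setopt(curl, CURLOPT_HEADERDATA, &userdata);
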
PROTO="${1%%:*}" if ! apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ - download-file "$1" "$2" 2>&1 ; then + download-file "$1" "$2" "$3" 2>&1 ; then return 1 fi # only if the file exists the download was successful diff --git a/test/integration/test-apt-update-transactions b/test/integration/test-apt-update-transactions index f028ac0c7..67dd633f9 100755 --- a/test/integration/test-apt-update-transactions +++ b/test/integration/test-apt-update-transactions @@ -63,6 +63,7 @@ testsetup 'file' changetowebserver webserverconfig 'aptwebserver::support::modified-since' 'false' "$1" webserverconfig 'aptwebserver::support::last-modified' 'false' "$1" # curl is clever and sees hits here also +webserverconfig 'aptwebserver::support::range' 'false' "$1" testsetup 'http' diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support index 85046b3eb..c07af7bd0 100755 --- a/test/integration/test-partial-file-support +++ b/test/integration/test-partial-file-support @@ -17,8 +17,8 @@ DOWNLOADLOG='rootdir/tmp/testdownloadfile.log' testdownloadfile() { rm -f "$DOWNLOADLOG" - msgtest "Testing download of file $2 with" "$1" - if ! downloadfile "$2" "$3" > "$DOWNLOADLOG"; then + msgtest "Testing download of file $2 with" "$1 $5" + if ! downloadfile "$2" "$3" "$5" > "$DOWNLOADLOG"; then cat >&2 "$DOWNLOADLOG" msgfail else @@ -78,6 +78,12 @@ followuprequest() { testdownloadfile 'completely downloaded file' "${1}/testfile" "$DOWN" '=' testwebserverlaststatuscode '416' "$DOWNLOADLOG" + webserverconfig 'aptwebserver::support::content-range' 'false' + copysource $TESTFILE 1M $DOWN + testdownloadfile 'completely downloaded file' "${1}/testfile" "$DOWN" '=' "SHA1:$(sha1sum "$TESTFILE" | cut -d' ' -f 1)" + testwebserverlaststatuscode '416' "$DOWNLOADLOG" + webserverconfig 'aptwebserver::support::content-range' 'true' + copysource $TESTFILE 1M $DOWN copysource "${TESTFILE}2" 20 "${DOWN}2" msgtest 'Testing download of files with' 'completely downloaded file + partial file' diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index 6a411e24e..c933060e7 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -745,9 +745,12 @@ static void * handleClient(void * voidclient) /*{{{*/ } else { - std::ostringstream contentrange; - contentrange << "Content-Range: bytes */" << filesize; - headers.push_back(contentrange.str()); + if (_config->FindB("aptwebserver::support::content-range", true) == true) + { + std::ostringstream contentrange; + contentrange << "Content-Range: bytes */" << filesize; + headers.push_back(contentrange.str()); + } sendError(client, 416, *m, sendContent, "", headers); break; } -- cgit v1.2.3-70-g09d2