diff options
author | David Kalnischkies <david@kalnischkies.de> | 2015-03-27 15:53:43 +0100 |
---|---|---|
committer | David Kalnischkies <david@kalnischkies.de> | 2015-04-19 01:13:08 +0200 |
commit | 27925d82dd0cbae74d48040363fe6f6c2bae5215 (patch) | |
tree | dfeb2d006049eda1dfa2ae082eb717a515fbbfae | |
parent | bb948ef562862e5cc9fcfb3d7b5e41c70382adeb (diff) |
improve https method queue progress reporting
The worker expects that the methods tell it when they start or finish
downloading a file. Various information pieces are passed along in this
report including the (expected) filesize. https was using a "global"
struct for reporting which made it 'reuse' incorrect values in some
cases like a non-existent InRelease falling back to Release{,.gpg}
resulting in a size-mismatch warning. Reducing the scope and redesigning
the setting of the values we can fix this and related issues.
Closes: 777565, 781509
Thanks: Robert Edmonds and Anders Kaseorg for initial patches
-rw-r--r-- | methods/https.cc | 82 | ||||
-rw-r--r-- | methods/https.h | 2 | ||||
-rw-r--r-- | methods/server.h | 3 | ||||
-rw-r--r-- | test/integration/framework | 20 | ||||
-rwxr-xr-x | test/integration/test-apt-https-no-redirect | 15 | ||||
-rwxr-xr-x | test/integration/test-apt-update-expected-size | 86 | ||||
-rwxr-xr-x | test/integration/test-bug-602412-dequote-redirect | 3 |
7 files changed, 121 insertions, 90 deletions
diff --git a/methods/https.cc b/methods/https.cc index c69e84d3a..70f6a1046 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -37,11 +37,17 @@ /*}}}*/ using namespace std; +struct APT_HIDDEN CURLUserPointer { + HttpsMethod * const https; + HttpsMethod::FetchResult * const Res; + CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res) : https(https), Res(Res) {} +}; + size_t HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { size_t len = size * nmemb; - HttpsMethod *me = (HttpsMethod *)userp; + CURLUserPointer *me = (CURLUserPointer *)userp; std::string line((char*) buffer, len); for (--len; len > 0; --len) if (isspace(line[len]) == 0) @@ -53,23 +59,33 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) if (line.empty() == true) { - if (me->Server->Result != 416 && me->Server->StartPos != 0) + if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0) ; - else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize()) + else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize()) { - me->Server->Result = 200; - me->Server->StartPos = me->Server->Size; + me->https->Server->Result = 200; + me->https->Server->StartPos = me->https->Server->Size; // the actual size is not important for https as curl will deal with it // by itself and e.g. 
doesn't bother us with transport-encoding… - me->Server->JunkSize = std::numeric_limits<unsigned long long>::max(); + me->https->Server->JunkSize = std::numeric_limits<unsigned long long>::max(); } else - me->Server->StartPos = 0; + me->https->Server->StartPos = 0; + + me->https->File->Truncate(me->https->Server->StartPos); + me->https->File->Seek(me->https->Server->StartPos); + + me->Res->LastModified = me->https->Server->Date; + me->Res->Size = me->https->Server->Size; + me->Res->ResumePoint = me->https->Server->StartPos; - me->File->Truncate(me->Server->StartPos); - me->File->Seek(me->Server->StartPos); + // we expect valid data, so tell our caller we get the file now + if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 && + me->https->Server->JunkSize == 0 && + me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint) + me->https->URIStart(*me->Res); } - else if (me->Server->HeaderLine(line) == false) + else if (me->https->Server->HeaderLine(line) == false) return 0; return size*nmemb; @@ -85,12 +101,6 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) if (me->Server->JunkSize != 0) return buffer_size; - if (me->Server->ReceivedData == false) - { - me->URIStart(me->Res); - me->Server->ReceivedData = true; - } - if(me->File->Write(buffer, buffer_size) != true) return 0; @@ -109,27 +119,15 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) return buffer_size; } -int -HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/, - double /*ultotal*/, double /*ulnow*/) -{ - HttpsMethod *me = (HttpsMethod *)clientp; - if(dltotal > 0 && me->Res.Size == 0) { - me->Res.Size = (unsigned long long)dltotal; - } - return 0; -} - // HttpsServerState::HttpsServerState - Constructor /*{{{*/ HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner) { TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut); - ReceivedData = false; Reset(); } 
/*}}}*/ -void HttpsMethod::SetupProxy() /*{{{*/ +void HttpsMethod::SetupProxy() /*{{{*/ { URI ServerName = Queue->Uri; @@ -207,16 +205,16 @@ bool HttpsMethod::Fetch(FetchItem *Itm) maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc")); + FetchResult Res; + CURLUserPointer userp(this, &Res); // callbacks curl_easy_setopt(curl, CURLOPT_URL, static_cast<string>(Uri).c_str()); curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header); - curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this); + curl_easy_setopt(curl, CURLOPT_WRITEHEADER, &userp); curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data); curl_easy_setopt(curl, CURLOPT_WRITEDATA, this); - curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback); - curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this); // options - curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false); + curl_easy_setopt(curl, CURLOPT_NOPROGRESS, true); curl_easy_setopt(curl, CURLOPT_FILETIME, true); // only allow curl to handle https, not the other stuff it supports curl_easy_setopt(curl, CURLOPT_PROTOCOLS, CURLPROTO_HTTPS); @@ -414,24 +412,23 @@ bool HttpsMethod::Fetch(FetchItem *Itm) return false; } - struct stat resultStat; - if (unlikely(stat(File->Name().c_str(), &resultStat) != 0)) - { - _error->Errno("stat", "Unable to access file %s", File->Name().c_str()); - return false; - } - Res.Size = resultStat.st_size; - // invalid range-request if (Server->Result == 416) { unlink(File->Name().c_str()); - Res.Size = 0; delete File; Redirect(Itm->Uri); return true; } + struct stat resultStat; + if (unlikely(stat(File->Name().c_str(), &resultStat) != 0)) + { + _error->Errno("stat", "Unable to access file %s", File->Name().c_str()); + return false; + } + Res.Size = resultStat.st_size; + // Timestamp curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified); if (Res.LastModified != -1) @@ -455,7 +452,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm) URIDone(Res); // cleanup - Res.Size = 0; delete File; return true; diff --git 
a/methods/https.h b/methods/https.h index 433a84680..4cc48fc34 100644 --- a/methods/https.h +++ b/methods/https.h @@ -65,7 +65,6 @@ class HttpsMethod : public ServerMethod double ultotal, double ulnow); void SetupProxy(); CURL *curl; - FetchResult Res; ServerState *Server; // Used by ServerMethods unused by https @@ -77,6 +76,7 @@ class HttpsMethod : public ServerMethod virtual bool Configuration(std::string Message); virtual ServerState * CreateServerState(URI uri); + using pkgAcqMethod::FetchResult; HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL) { diff --git a/methods/server.h b/methods/server.h index 3b232dcac..b974ec89a 100644 --- a/methods/server.h +++ b/methods/server.h @@ -37,7 +37,6 @@ struct ServerState unsigned long long Size; // size of the usable content (aka: the file) unsigned long long JunkSize; // size of junk content (aka: server error pages) unsigned long long StartPos; - bool ReceivedData; time_t Date; bool HaveContent; enum {Chunked,Stream,Closes} Encoding; @@ -76,7 +75,7 @@ struct ServerState bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; - StartPos = 0; ReceivedData = false; Encoding = Closes; time(&Date); HaveContent = false; + StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;}; virtual bool WriteResponse(std::string const &Data) = 0; diff --git a/test/integration/framework b/test/integration/framework index ec23e41e6..50c027a2c 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -1125,8 +1125,10 @@ acquire::cdrom::autodetect 0;" > rootdir/etc/apt/apt.conf.d/00cdrom downloadfile() { local PROTO="${1%%:*}" - apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ - download-file "$1" "$2" 2>&1 || true + if ! 
apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ + download-file "$1" "$2" 2>&1 ; then + return 1 + fi # only if the file exists the download was successful if [ -r "$2" ]; then return 0 @@ -1407,6 +1409,20 @@ testfailureequal() { testfileequal "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" "$CMP" } +testfailuremsg() { + local CMP="$1" + shift + testfailure "$@" + msgtest 'Check that the output of the previous failed command has expected' 'failures and warnings' + grep '^\(W\|E\):' "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" > "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailureequal.output" 2>&1 || true + if echo "$CMP" | checkdiff - "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailureequal.output"; then + msgpass + else + echo '### Complete output ###' + cat "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" + msgfail + fi +} testfilestats() { msgtest "Test that file $1 has $2 $3" "$4" diff --git a/test/integration/test-apt-https-no-redirect b/test/integration/test-apt-https-no-redirect index bc744d6f2..c91c78916 100755 --- a/test/integration/test-apt-https-no-redirect +++ b/test/integration/test-apt-https-no-redirect @@ -14,22 +14,15 @@ echo 'alright' > aptarchive/working changetohttpswebserver -o 'aptwebserver::redirect::replace::/redirectme/=http://localhost:8080/' msgtest 'download of a file works via' 'http' -downloadfile 'http://localhost:8080/working' httpfile >/dev/null 2>&1 && msgpass || msgfail +testsuccess --nomsg downloadfile 'http://localhost:8080/working' httpfile testfileequal httpfile 'alright' msgtest 'download of a file works via' 'https' -downloadfile 'https://localhost:4433/working' httpsfile >/dev/null 2>&1 && msgpass || msgfail +testsuccess --nomsg downloadfile 'https://localhost:4433/working' httpsfile testfileequal httpsfile 'alright' msgtest 'download of a file does not work if' 'https redirected to http' -downloadfile 'https://localhost:4433/redirectme/working' redirectfile >curloutput 2>&1 && 
msgfail || msgpass +testfailure --nomsg downloadfile 'https://localhost:4433/redirectme/working' redirectfile msgtest 'libcurl has forbidden access in last request to' 'http resource' -if grep -q -E -- 'Protocol "?http"? not supported or disabled in libcurl' curloutput; then - msgpass -else - cat curloutput - msgfail -fi - - +testsuccess --nomsg grep -q -E -- 'Protocol "?http"? not supported or disabled in libcurl' rootdir/tmp/testfailure.output diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size index 9711c293a..22de13ea5 100755 --- a/test/integration/test-apt-update-expected-size +++ b/test/integration/test-apt-update-expected-size @@ -10,35 +10,61 @@ configarchitecture "i386" insertpackage 'unstable' 'apt' 'all' '1.0' setupaptarchive --no-update +cp -a aptarchive/dists aptarchive/dists.good + +test_inreleasetoobig() { + # make InRelease really big to trigger fallback + dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null + touch -d '+1hour' aptarchive/dists/unstable/InRelease + testsuccess aptget update -o Apt::Get::List-Cleanup=0 -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 + msgtest 'Check that the max write warning is triggered' + cp rootdir/tmp/testsuccess.output update.output + testsuccess --nomsg grep -q 'Writing more data than expected' update.output + rm -f update.output + # ensure the failed InRelease file got renamed + testsuccess ls rootdir/var/lib/apt/lists/partial/*InRelease.FAILED +} + +test_packagestoobig() { + # append junk at the end of the Packages.gz/Packages + SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" + find aptarchive/dists -name 'Packages*' | while read pkg; do + echo "1234567890" >> "$pkg" + touch -d '+1hour' "$pkg" + done + NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" + testfailuremsg "W: Failed to fetch 
${1}/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) +E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -o Debug::pkgAcquire::Worker=0 +} + +methodtest() { + msgmsg 'Test with' "$1" 'and clean start' + rm -rf rootdir/var/lib/apt/lists rootdir/var/lib/apt/lists.good + # normal update works fine + testsuccess aptget update + mv rootdir/var/lib/apt/lists rootdir/var/lib/apt/lists.good + + # starting fresh works + test_inreleasetoobig "$1" + rm -rf aptarchive/dists rootdir/var/lib/apt/lists + cp -a aptarchive/dists.good aptarchive/dists + test_packagestoobig "$1" + rm -rf aptarchive/dists rootdir/var/lib/apt/lists + cp -a aptarchive/dists.good aptarchive/dists + + msgmsg 'Test with' "$1" 'and existing old data' + cp -a rootdir/var/lib/apt/lists.good rootdir/var/lib/apt/lists + test_inreleasetoobig "$1" + rm -rf aptarchive/dists rootdir/var/lib/apt/lists + cp -a rootdir/var/lib/apt/lists.good rootdir/var/lib/apt/lists + cp -a aptarchive/dists.good aptarchive/dists + test_packagestoobig "$1" + rm -rf aptarchive/dists + cp -a aptarchive/dists.good aptarchive/dists +} + changetowebserver +methodtest 'http://localhost:8080' -# normal update works fine -testsuccess aptget update - -# make InRelease really big to trigger fallback -mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good -dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null -touch -d '+1hour' aptarchive/dists/unstable/InRelease -testsuccess aptget update -o Apt::Get::List-Cleanup=0 -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 -msgtest 'Check that the max write warning is triggered' -if grep -q "Writing more data than expected" rootdir/tmp/testsuccess.output; then - msgpass -else - cat rootdir/tmp/testsuccess.output - msgfail -fi -# ensure the failed InRelease file got renamed -testsuccess ls 
rootdir/var/lib/apt/lists/partial/*InRelease.FAILED -mv aptarchive/dists/unstable/InRelease.good aptarchive/dists/unstable/InRelease - - -# append junk at the end of the Packages.gz/Packages -SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" -find aptarchive -name 'Packages*' | while read pkg; do - echo "1234567890" >> "$pkg" -done -NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" -rm -f rootdir/var/lib/apt/lists/localhost* -testfailureequal "W: Failed to fetch http://localhost:8080/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) - -E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq +changetohttpswebserver +methodtest 'https://localhost:4433' diff --git a/test/integration/test-bug-602412-dequote-redirect b/test/integration/test-bug-602412-dequote-redirect index d3573a79a..384c8b113 100755 --- a/test/integration/test-bug-602412-dequote-redirect +++ b/test/integration/test-bug-602412-dequote-redirect @@ -8,7 +8,7 @@ configarchitecture 'amd64' buildsimplenativepackage 'unrelated' 'all' '0.5~squeeze1' 'unstable' -setupaptarchive +setupaptarchive --no-update changetowebserver -o aptwebserver::redirect::replace::/pool/=/newpool/ \ -o aptwebserver::redirect::replace::/dists/=/newdists/ @@ -16,6 +16,7 @@ mv aptarchive/pool aptarchive/newpool mv aptarchive/dists aptarchive/newdists testrun() { + msgmsg 'Test redirection works in method boundaries' "$1" msgtest 'Test redirection works in' 'apt-get update' testsuccess --nomsg aptget update |