From 9dd940ed722e8235c615e79b7eb688eb427e9a23 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Wed, 5 Nov 2014 18:33:07 +0100 Subject: Assert statement calls a function which may have desired side effects: 'pos_is_okay' It does not have any desired side effect, so we just mark it as const to properly advertise this fact to developers, compilers and linters alike. Reported-By: cppcheck Git-Dch: Ignore --- methods/rred.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'methods') diff --git a/methods/rred.cc b/methods/rred.cc index cabb3c456..774b58a40 100644 --- a/methods/rred.cc +++ b/methods/rred.cc @@ -150,11 +150,11 @@ class FileChanges { std::list<struct Change>::iterator where; size_t pos; // line number is as far left of iterator as possible - bool pos_is_okay(void) + bool pos_is_okay(void) const { #ifdef POSDEBUG size_t cpos = 0; - std::list<struct Change>::iterator x; + std::list<struct Change>::const_iterator x; for (x = changes.begin(); x != where; ++x) { assert(x != changes.end()); cpos += x->offset + x->add_cnt; -- cgit v1.2.3-70-g09d2 From bf6ac7ca615922c23d1f3cf1963efc5be9c23e32 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sun, 9 Nov 2014 15:57:43 +0100 Subject: use getline() instead of rolling our own We use it in other places already as well, and even though it is a fairly new addition to the POSIX family (2008), rolling our own here is really something which should be avoided in such an important method. Git-Dch: Ignore --- methods/gpgv.cc | 31 +++++-------------------------- 1 file changed, 5 insertions(+), 26 deletions(-) (limited to 'methods') diff --git a/methods/gpgv.cc b/methods/gpgv.cc index 488c16826..41f138be6 100644 --- a/methods/gpgv.cc +++ b/methods/gpgv.cc @@ -86,33 +86,12 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile, FILE *pipein = fdopen(fd[0], "r"); // Loop over the output of apt-key (which really is gnupg), and check the signatures. - size_t buffersize = 64; - char *buffer = (char *) malloc(buffersize); - size_t bufferoff = 0; + size_t buffersize = 0; + char *buffer = NULL; while (1) { - int c; - - // Read a line. Sigh. - while ((c = getc(pipein)) != EOF && c != '\n') - { - if (bufferoff == buffersize) - { - char* newBuffer = (char *) realloc(buffer, buffersize *= 2); - if (newBuffer == NULL) - { - free(buffer); - return "Couldn't allocate a buffer big enough for reading"; - } - buffer = newBuffer; - } - *(buffer+bufferoff) = c; - bufferoff++; - } - if (bufferoff == 0 && c == EOF) - break; - *(buffer+bufferoff) = '\0'; - bufferoff = 0; + if (getline(&buffer, &buffersize, pipein) == -1) + break; if (Debug == true) std::clog << "Read: " << buffer << std::endl; @@ -126,7 +105,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile, std::clog << "Got BADSIG! 
" << std::endl; BadSigners.push_back(string(buffer+sizeof(GNUPGPREFIX))); } - + if (strncmp(buffer, GNUPGNOPUBKEY, sizeof(GNUPGNOPUBKEY)-1) == 0) { if (Debug == true) -- cgit v1.2.3-70-g09d2 From ed793a19ec00b83254029509bc516e3ba911c75a Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sat, 29 Nov 2014 17:59:52 +0100 Subject: dispose http(s) 416 error page as non-content MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Real webservers (like apache) actually send an error page with a 416 response, but our client didn't expect it leaving the page on the socket to be parsed as response for the next request (http) or as file content (https), which isn't what we want at all… Symptom is a "Bad header line" as html usually doesn't parse that well to an http-header. This manifests itself e.g. if we have a complete file (or larger) in partial/ which isn't discarded by If-Range as the server doesn't support it (or it is just newer, think: mirror rotation). It is a sort-of regression of 78c72d0ce22e00b194251445aae306df357d5c1a, which removed the filesize - 1 trick, but this had its own problems… To properly test this our webserver gains the ability to reply with transfer-encoding: chunked as most real webservers will use it to send the dynamically generated error pages. Closes: 768797 --- cmdline/apt-helper.cc | 31 +++-- methods/http.cc | 2 + methods/https.cc | 15 ++- methods/server.cc | 26 ++-- methods/server.h | 5 +- test/integration/framework | 4 +- test/integration/test-partial-file-support | 62 ++++++++- test/interactive-helper/aptwebserver.cc | 193 ++++++++++++++++++----------- 8 files changed, 233 insertions(+), 105 deletions(-) (limited to 'methods') diff --git a/cmdline/apt-helper.cc b/cmdline/apt-helper.cc index 1b832f165..a05ae90a2 100644 --- a/cmdline/apt-helper.cc +++ b/cmdline/apt-helper.cc @@ -51,22 +51,33 @@ static bool DoDownloadFile(CommandLine &CmdL) AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0)); pkgAcquire Fetcher(&Stat); - std::string download_uri = CmdL.FileList[1]; - std::string targetfile = CmdL.FileList[2]; - std::string hash; - if (CmdL.FileSize() > 3) - hash = CmdL.FileList[3]; - // we use download_uri as descr and targetfile as short-descr - new pkgAcqFile(&Fetcher, download_uri, hash, 0, download_uri, targetfile, - "dest-dir-ignored", targetfile); + size_t fileind = 0; + std::vector targetfiles; + while (fileind + 2 <= CmdL.FileSize()) + { + std::string download_uri = CmdL.FileList[fileind + 1]; + std::string targetfile = CmdL.FileList[fileind + 2]; + std::string hash; + if (CmdL.FileSize() > fileind + 3) + hash = CmdL.FileList[fileind + 3]; + // we use download_uri as descr and targetfile as short-descr + new pkgAcqFile(&Fetcher, download_uri, hash, 0, download_uri, targetfile, + "dest-dir-ignored", targetfile); + targetfiles.push_back(targetfile); + fileind += 3; + } // Disable drop-privs if "_apt" can not write to the target dir CheckDropPrivsMustBeDisabled(Fetcher); bool Failed = false; - if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true || - FileExists(targetfile) == false) + if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true) return _error->Error(_("Download Failed")); + if (targetfiles.empty() == false) + for (std::vector::const_iterator f = targetfiles.begin(); f != targetfiles.end(); ++f) + if (FileExists(*f) == false) + return _error->Error(_("Download Failed")); + return true; } diff --git a/methods/http.cc b/methods/http.cc index a5de13511..ad1347d36 100644 --- 
a/methods/http.cc +++ b/methods/http.cc @@ -444,6 +444,8 @@ bool HttpServerState::RunData(FileFd * const File) loss of the connection means we are done */ if (Encoding == Closes) In.Limit(-1); + else if (JunkSize != 0) + In.Limit(JunkSize); else In.Limit(Size - StartPos); diff --git a/methods/https.cc b/methods/https.cc index 366148e19..23b3a10d4 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -69,6 +69,9 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { me->Server->Result = 200; me->Server->StartPos = me->Server->Size; + // the actual size is not important for https as curl will deal with it + // by itself and e.g. doesn't bother us with transport-encoding… + me->Server->JunkSize = std::numeric_limits::max(); } else me->Server->StartPos = 0; @@ -86,19 +89,25 @@ size_t HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) { HttpsMethod *me = (HttpsMethod *)userp; + size_t buffer_size = size * nmemb; + // we don't need to count the junk here, just drop anything we get as + // we don't always know how long it would be, e.g. in chunked encoding. + if (me->Server->JunkSize != 0) + return buffer_size; if (me->Res.Size == 0) me->URIStart(me->Res); - if(me->File->Write(buffer, size*nmemb) != true) + if(me->File->Write(buffer, buffer_size) != true) return false; if(me->Queue->MaximumSize > 0 && me->File->Tell() > me->Queue->MaximumSize) { me->SetFailReason("MaximumSizeExceeded"); - return _error->Error("Writing more data than expected (%llu > %llu)", + _error->Error("Writing more data than expected (%llu > %llu)", me->TotalWritten, me->Queue->MaximumSize); + return 0; } - return size*nmemb; + return buffer_size; } int diff --git a/methods/server.cc b/methods/server.cc index c4689ff12..9b3d39cf2 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -55,6 +55,7 @@ ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File, Minor = 0; Result = 0; Size = 0; + JunkSize = 0; StartPos = 0; Encoding = Closes; HaveContent = false; @@ -163,14 +164,14 @@ bool ServerState::HeaderLine(string Line) Encoding = Stream; HaveContent = true; - // The length is already set from the Content-Range header - if (StartPos != 0) - return true; + unsigned long long * SizePtr = &Size; + if (Result == 416) + SizePtr = &JunkSize; - Size = strtoull(Val.c_str(), NULL, 10); - if (Size >= std::numeric_limits::max()) + *SizePtr = strtoull(Val.c_str(), NULL, 10); + if (*SizePtr >= std::numeric_limits::max()) return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header")); - else if (Size == 0) + else if (*SizePtr == 0) HaveContent = false; return true; } @@ -187,10 +188,7 @@ bool ServerState::HeaderLine(string Line) // §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416 if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1) - { - StartPos = 1; // ignore Content-Length, it would override Size - HaveContent = false; - } + ; // we got the expected filesize which is all we wanted else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2) return _error->Error(_("The HTTP server sent an invalid Content-Range header")); if ((unsigned long long)StartPos > Size) @@ -308,9 +306,15 @@ ServerMethod::DealWithHeaders(FetchResult &Res) if ((unsigned long long)SBuf.st_size == Server->Size) { // the file is completely downloaded, but was not moved + if (Server->HaveContent == true) + { + // Send to error page to dev/null + FileFd DevNull("/dev/null",FileFd::WriteExists); + 
Server->RunData(&DevNull); + } + Server->HaveContent = false; Server->StartPos = Server->Size; Server->Result = 200; - Server->HaveContent = false; } else if (unlink(Queue->DestFile.c_str()) == 0) { diff --git a/methods/server.h b/methods/server.h index 7d5198478..b974ec89a 100644 --- a/methods/server.h +++ b/methods/server.h @@ -34,7 +34,8 @@ struct ServerState char Code[360]; // These are some statistics from the last parsed header lines - unsigned long long Size; + unsigned long long Size; // size of the usable content (aka: the file) + unsigned long long JunkSize; // size of junk content (aka: server error pages) unsigned long long StartPos; time_t Date; bool HaveContent; @@ -73,7 +74,7 @@ struct ServerState RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri); bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; - virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; + virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;}; virtual bool WriteResponse(std::string const &Data) = 0; diff --git a/test/integration/framework b/test/integration/framework index 930ab9367..f7f69f5d5 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -1119,8 +1119,8 @@ acquire::cdrom::autodetect 0;" > rootdir/etc/apt/apt.conf.d/00cdrom } downloadfile() { - local PROTO="$(echo "$1" | cut -d':' -f 1 )" - apthelper -o Debug::Acquire::${PROTO}=1 \ + local PROTO="${1%%:*}" + apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ download-file "$1" "$2" 2>&1 || true # only if the file exists the download was successful if [ -r "$2" ]; then diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support index 98b2f242a..b6b305d25 100755 --- a/test/integration/test-partial-file-support +++ b/test/integration/test-partial-file-support @@ -24,13 +24,25 @@ testdownloadfile() { else msgpass fi - cat "$DOWNLOADLOG" | while read field hash; do + sed -e '/^ <- / s#%20# #g' -e '/^ <- / s#%0a#\n#g' "$DOWNLOADLOG" | grep '^.*-Hash: ' > receivedhashes.log + testsuccess test -s receivedhashes.log + local HASHES_OK=0 + local HASHES_BAD=0 + while read field hash; do local EXPECTED case "$field" in 'MD5Sum-Hash:') EXPECTED="$(md5sum "$TESTFILE" | cut -d' ' -f 1)";; 'SHA1-Hash:') EXPECTED="$(sha1sum "$TESTFILE" | cut -d' ' -f 1)";; 'SHA256-Hash:') EXPECTED="$(sha256sum "$TESTFILE" | cut -d' ' -f 1)";; 'SHA512-Hash:') EXPECTED="$(sha512sum "$TESTFILE" | cut -d' ' -f 1)";; + 'Checksum-FileSize-Hash:') + #filesize is too weak to check for != + if [ "$4" = '=' ]; then + EXPECTED="$(stat -c '%s' "$TESTFILE")" + else + continue + fi + ;; *) continue;; esac if [ "$4" = '=' ]; then @@ -40,15 +52,41 @@ testdownloadfile() { fi if [ "$EXPECTED" "$4" "$hash" ]; then msgpass + HASHES_OK=$((HASHES_OK+1)); else - cat >&2 "$DOWNLOADLOG" msgfail "expected: $EXPECTED ; got: $hash" + HASHES_BAD=$((HASHES_BAD+1)); fi - done + done < receivedhashes.log + msgtest 'At least one good hash and no bad ones' + if [ $HASHES_OK -eq 0 ] || [ $HASHES_BAD -ne 0 ]; then + cat >&2 "$DOWNLOADLOG" + msgfail + else + msgpass + fi } TESTFILE='aptarchive/testfile' cp -a ${TESTDIR}/framework $TESTFILE +cp -a ${TESTDIR}/framework "${TESTFILE}2" + +followuprequest() { + local DOWN='./downloaded/testfile' + + copysource 
$TESTFILE 1M $DOWN + testdownloadfile 'completely downloaded file' "${1}/testfile" "$DOWN" '=' + testwebserverlaststatuscode '416' "$DOWNLOADLOG" + + copysource $TESTFILE 1M $DOWN + copysource "${TESTFILE}2" 20 "${DOWN}2" + msgtest 'Testing download of files with' 'completely downloaded file + partial file' + testsuccess --nomsg apthelper -o Debug::Acquire::${1%%:*}=1 -o Debug::pkgAcquire::Worker=1 \ + download-file "$1/testfile" "$DOWN" '' "$1/testfile2" "${DOWN}2" + testwebserverlaststatuscode '206' 'rootdir/tmp/testsuccess.output' + testsuccess diff -u "$TESTFILE" "${DOWN}" + testsuccess diff -u "${DOWN}" "${DOWN}2" +} testrun() { webserverconfig 'aptwebserver::support::range' 'true' @@ -66,9 +104,11 @@ testrun() { testdownloadfile 'invalid partial data' "${1}/testfile" "$DOWN" '!=' testwebserverlaststatuscode '206' "$DOWNLOADLOG" - copysource $TESTFILE 1M $DOWN - testdownloadfile 'completely downloaded file' "${1}/testfile" "$DOWN" '=' - testwebserverlaststatuscode '416' "$DOWNLOADLOG" + webserverconfig 'aptwebserver::closeOnError' 'false' + followuprequest "$1" + webserverconfig 'aptwebserver::closeOnError' 'true' + followuprequest "$1" + webserverconfig 'aptwebserver::closeOnError' 'false' copysource /dev/zero 1M $DOWN testdownloadfile 'too-big partial file' "${1}/testfile" "$DOWN" '=' @@ -86,8 +126,18 @@ testrun() { testwebserverlaststatuscode '200' "$DOWNLOADLOG" } +msgmsg 'http: Test with Content-Length' +webserverconfig 'aptwebserver::chunked-transfer-encoding' 'false' +testrun 'http://localhost:8080' +msgmsg 'http: Test with Transfer-Encoding: chunked' +webserverconfig 'aptwebserver::chunked-transfer-encoding' 'true' testrun 'http://localhost:8080' changetohttpswebserver +msgmsg 'https: Test with Content-Length' +webserverconfig 'aptwebserver::chunked-transfer-encoding' 'false' +testrun 'https://localhost:4433' +msgmsg 'https: Test with Transfer-Encoding: chunked' +webserverconfig 'aptwebserver::chunked-transfer-encoding' 'true' testrun 'https://localhost:4433' diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index 00004a524..ca6f88c58 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -19,6 +19,8 @@ #include #include #include + +#include #include #include #include @@ -79,12 +81,21 @@ static char const * httpcodeToStr(int const httpcode) /*{{{*/ return NULL; } /*}}}*/ +static bool chunkedTransferEncoding(std::list const &headers) { + if (std::find(headers.begin(), headers.end(), "Transfer-Encoding: chunked") != headers.end()) + return true; + if (_config->FindB("aptwebserver::chunked-transfer-encoding", false) == true) + return true; + return false; +} static void addFileHeaders(std::list &headers, FileFd &data)/*{{{*/ { - std::ostringstream contentlength; - contentlength << "Content-Length: " << data.FileSize(); - headers.push_back(contentlength.str()); - + if (chunkedTransferEncoding(headers) == false) + { + std::ostringstream contentlength; + contentlength << "Content-Length: " << data.FileSize(); + headers.push_back(contentlength.str()); + } std::string lastmodified("Last-Modified: "); lastmodified.append(TimeRFC1123(data.ModificationTime())); headers.push_back(lastmodified); @@ -92,9 +103,12 @@ static void addFileHeaders(std::list &headers, FileFd &data)/*{{{*/ /*}}}*/ static void addDataHeaders(std::list &headers, std::string &data)/*{{{*/ { - std::ostringstream contentlength; - contentlength << "Content-Length: " << data.size(); - headers.push_back(contentlength.str()); + if 
(chunkedTransferEncoding(headers) == false) + { + std::ostringstream contentlength; + contentlength << "Content-Length: " << data.size(); + headers.push_back(contentlength.str()); + } } /*}}}*/ static bool sendHead(int const client, int const httpcode, std::list &headers)/*{{{*/ @@ -114,6 +128,9 @@ static bool sendHead(int const client, int const httpcode, std::list>> RESPONSE to " << client << " >>>" << std::endl; bool Success = true; for (std::list::const_iterator h = headers.begin(); @@ -130,25 +147,55 @@ static bool sendHead(int const client, int const httpcode, std::list const &headers, FileFd &data)/*{{{*/ { bool Success = true; + bool const chunked = chunkedTransferEncoding(headers); char buffer[500]; unsigned long long actual = 0; while ((Success &= data.Read(buffer, sizeof(buffer), &actual)) == true) { if (actual == 0) break; - Success &= FileFd::Write(client, buffer, actual); + + if (chunked == true) + { + std::string size; + strprintf(size, "%llX\r\n", actual); + Success &= FileFd::Write(client, size.c_str(), size.size()); + Success &= FileFd::Write(client, buffer, actual); + Success &= FileFd::Write(client, "\r\n", strlen("\r\n")); + } + else + Success &= FileFd::Write(client, buffer, actual); + } + if (chunked == true) + { + char const * const finish = "0\r\n\r\n"; + Success &= FileFd::Write(client, finish, strlen(finish)); } if (Success == false) - std::cerr << "SENDFILE: READ/WRITE ERROR to " << client << std::endl; + std::cerr << "SENDFILE:" << (chunked ? " CHUNKED" : "") << " READ/WRITE ERROR to " << client << std::endl; return Success; } /*}}}*/ -static bool sendData(int const client, std::string const &data) /*{{{*/ +static bool sendData(int const client, std::list const &headers, std::string const &data)/*{{{*/ { - if (FileFd::Write(client, data.c_str(), data.size()) == false) + if (chunkedTransferEncoding(headers) == true) + { + unsigned long long const ullsize = data.length(); + std::string size; + strprintf(size, "%llX\r\n", ullsize); + char const * const finish = "\r\n0\r\n\r\n"; + if (FileFd::Write(client, size.c_str(), size.length()) == false || + FileFd::Write(client, data.c_str(), ullsize) == false || + FileFd::Write(client, finish, strlen(finish)) == false) + { + std::cerr << "SENDDATA: CHUNK WRITE ERROR to " << client << std::endl; + return false; + } + } + else if (FileFd::Write(client, data.c_str(), data.size()) == false) { std::cerr << "SENDDATA: WRITE ERROR to " << client << std::endl; return false; @@ -157,33 +204,38 @@ static bool sendData(int const client, std::string const &data) /*{{{*/ } /*}}}*/ static void sendError(int const client, int const httpcode, std::string const &request,/*{{{*/ - bool content, std::string const &error = "", std::list headers = std::list()) + bool const content, std::string const &error, std::list &headers) { std::string response(""); response.append(httpcodeToStr(httpcode)).append(""); response.append("
<body><h1>").append(httpcodeToStr(httpcode)).append("</h1>");
    if (httpcode != 200)
-   {
-      if (error.empty() == false)
-	 response.append("<p><em>Error</em>: ").append(error).append("</p>");
-      response.append("This error is a result of the request:<pre>");
-   }
+      response.append("<p><em>Error</em>: ");
+   else
+      response.append("<p><em>Success</em>: ");
+   if (error.empty() == false)
+      response.append(error);
+   else
+      response.append(httpcodeToStr(httpcode));
+   if (httpcode != 200)
+      response.append("</p>This error is a result of the request:<pre>");
    else
-   {
-      if (error.empty() == false)
-	 response.append("<p><em>Success</em>: ").append(error).append("</p>");
       response.append("The successfully executed operation was requested by:<pre>");
-   }
    response.append(request).append("</pre></body></html>
"); + if (httpcode != 200) + { + if (_config->FindB("aptwebserver::closeOnError", false) == true) + headers.push_back("Connection: close"); + } addDataHeaders(headers, response); sendHead(client, httpcode, headers); if (content == true) - sendData(client, response); + sendData(client, headers, response); } static void sendSuccess(int const client, std::string const &request, - bool content, std::string const &error = "") + bool const content, std::string const &error, std::list &headers) { - sendError(client, 200, request, content, error); + sendError(client, 200, request, content, error, headers); } /*}}}*/ static void sendRedirect(int const client, int const httpcode, std::string const &uri,/*{{{*/ @@ -220,7 +272,7 @@ static void sendRedirect(int const client, int const httpcode, std::string const headers.push_back(location); sendHead(client, httpcode, headers); if (content == true) - sendData(client, response); + sendData(client, headers, response); } /*}}}*/ static int filter_hidden_files(const struct dirent *a) /*{{{*/ @@ -262,16 +314,15 @@ static int grouped_alpha_case_sort(const struct dirent **a, const struct dirent } /*}}}*/ static void sendDirectoryListing(int const client, std::string const &dir,/*{{{*/ - std::string const &request, bool content) + std::string const &request, bool content, std::list &headers) { - std::list headers; std::ostringstream listing; struct dirent **namelist; int const counter = scandir(dir.c_str(), &namelist, filter_hidden_files, grouped_alpha_case_sort); if (counter == -1) { - sendError(client, 500, request, content); + sendError(client, 500, request, content, "scandir failed", headers); return; } @@ -310,18 +361,18 @@ static void sendDirectoryListing(int const client, std::string const &dir,/*{{{* addDataHeaders(headers, response); sendHead(client, 200, headers); if (content == true) - sendData(client, response); + sendData(client, headers, response); } /*}}}*/ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ std::string &filename, std::string ¶ms, bool &sendContent, - bool &closeConnection) + bool &closeConnection, std::list &headers) { if (strncmp(request.c_str(), "HEAD ", 5) == 0) sendContent = false; if (strncmp(request.c_str(), "GET ", 4) != 0) { - sendError(client, 501, request, true); + sendError(client, 501, request, true, "", headers); return false; } @@ -332,7 +383,7 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ if (lineend == std::string::npos || filestart == std::string::npos || fileend == std::string::npos || filestart == fileend) { - sendError(client, 500, request, sendContent, "Filename can't be extracted"); + sendError(client, 500, request, sendContent, "Filename can't be extracted", headers); return false; } @@ -344,14 +395,14 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ closeConnection = strcasecmp(LookupTag(request, "Connection", "Keep-Alive").c_str(), "close") == 0; else { - sendError(client, 500, request, sendContent, "Not a HTTP/1.{0,1} request"); + sendError(client, 500, request, sendContent, "Not a HTTP/1.{0,1} request", headers); return false; } filename = request.substr(filestart, fileend - filestart); if (filename.find(' ') != std::string::npos) { - sendError(client, 500, request, sendContent, "Filename contains an unencoded space"); + sendError(client, 500, request, sendContent, "Filename contains an unencoded space", headers); return false; } @@ -359,7 +410,7 @@ static bool parseFirstLine(int const client, std::string 
const &request,/*{{{*/ if (host.empty() == true) { // RFC 2616 §14.23 requires Host - sendError(client, 400, request, sendContent, "Host header is required"); + sendError(client, 400, request, sendContent, "Host header is required", headers); return false; } host = "http://" + host; @@ -370,7 +421,7 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ { if (absolute.find("uri") == std::string::npos) { - sendError(client, 400, request, sendContent, "Request is absoluteURI, but configured to not accept that"); + sendError(client, 400, request, sendContent, "Request is absoluteURI, but configured to not accept that", headers); return false; } @@ -382,9 +433,9 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ if (authConf.empty() != auth.empty()) { if (auth.empty()) - sendError(client, 407, request, sendContent, "Proxy requires authentication"); + sendError(client, 407, request, sendContent, "Proxy requires authentication", headers); else - sendError(client, 407, request, sendContent, "Client wants to authenticate to proxy, but proxy doesn't need it"); + sendError(client, 407, request, sendContent, "Client wants to authenticate to proxy, but proxy doesn't need it", headers); return false; } if (authConf.empty() == false) @@ -395,7 +446,7 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ auth.erase(0, strlen(basic)); if (auth != authConf) { - sendError(client, 407, request, sendContent, "Proxy-Authentication doesn't match"); + sendError(client, 407, request, sendContent, "Proxy-Authentication doesn't match", headers); return false; } } @@ -410,7 +461,7 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ } else if (absolute.find("path") == std::string::npos && APT::String::Startswith(filename, "/_config/") == false) { - sendError(client, 400, request, sendContent, "Request is absolutePath, but configured to not accept that"); + sendError(client, 400, request, sendContent, "Request is absolutePath, but configured to not accept that", headers); return false; } @@ -421,9 +472,9 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ if (authConf.empty() != auth.empty()) { if (auth.empty()) - sendError(client, 401, request, sendContent, "Server requires authentication"); + sendError(client, 401, request, sendContent, "Server requires authentication", headers); else - sendError(client, 401, request, sendContent, "Client wants to authenticate to server, but server doesn't need it"); + sendError(client, 401, request, sendContent, "Client wants to authenticate to server, but server doesn't need it", headers); return false; } if (authConf.empty() == false) @@ -434,13 +485,12 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ auth.erase(0, strlen(basic)); if (auth != authConf) { - sendError(client, 401, request, sendContent, "Authentication doesn't match"); + sendError(client, 401, request, sendContent, "Authentication doesn't match", headers); return false; } } else { - std::list headers; headers.push_back("WWW-Authenticate: Basic"); sendError(client, 401, request, sendContent, "Unsupported Authentication Scheme", headers); return false; @@ -463,7 +513,8 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ filename.find_first_of("\r\n\t\f\v") != std::string::npos || filename.find("/../") != std::string::npos) { - sendError(client, 400, request, sendContent, "Filename contains illegal 
character (sequence)"); + std::list headers; + sendError(client, 400, request, sendContent, "Filename contains illegal character (sequence)", headers); return false; } @@ -499,7 +550,8 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ return true; } /*}}}*/ -static bool handleOnTheFlyReconfiguration(int const client, std::string const &request, std::vector parts)/*{{{*/ +static bool handleOnTheFlyReconfiguration(int const client, std::string const &request,/*{{{*/ + std::vector parts, std::list &headers) { size_t const pcount = parts.size(); for (size_t i = 0; i < pcount; ++i) @@ -507,40 +559,38 @@ static bool handleOnTheFlyReconfiguration(int const client, std::string const &r if (pcount == 4 && parts[1] == "set") { _config->Set(parts[2], parts[3]); - sendSuccess(client, request, true, "Option '" + parts[2] + "' was set to '" + parts[3] + "'!"); + sendSuccess(client, request, true, "Option '" + parts[2] + "' was set to '" + parts[3] + "'!", headers); return true; } else if (pcount == 4 && parts[1] == "find") { - std::list headers; std::string response = _config->Find(parts[2], parts[3]); addDataHeaders(headers, response); sendHead(client, 200, headers); - sendData(client, response); + sendData(client, headers, response); return true; } else if (pcount == 3 && parts[1] == "find") { - std::list headers; if (_config->Exists(parts[2]) == true) { std::string response = _config->Find(parts[2]); addDataHeaders(headers, response); sendHead(client, 200, headers); - sendData(client, response); + sendData(client, headers, response); return true; } - sendError(client, 404, request, "Requested Configuration option doesn't exist."); + sendError(client, 404, request, true, "Requested Configuration option doesn't exist", headers); return false; } else if (pcount == 3 && parts[1] == "clear") { _config->Clear(parts[2]); - sendSuccess(client, request, true, "Option '" + parts[2] + "' was cleared."); + sendSuccess(client, request, true, "Option '" + parts[2] + "' was cleared.", headers); return true; } - sendError(client, 400, request, true, "Unknown on-the-fly configuration request"); + sendError(client, 400, request, true, "Unknown on-the-fly configuration request", headers); return false; } /*}}}*/ @@ -549,18 +599,22 @@ static void * handleClient(void * voidclient) /*{{{*/ int client = *((int*)(voidclient)); std::clog << "ACCEPT client " << client << std::endl; std::vector messages; - while (ReadMessages(client, messages)) + bool closeConnection = false; + std::list headers; + while (closeConnection == false && ReadMessages(client, messages)) { - bool closeConnection = false; + // if we announced a closing, do the close + if (std::find(headers.begin(), headers.end(), std::string("Connection: close")) != headers.end()) + break; + headers.clear(); for (std::vector::const_iterator m = messages.begin(); m != messages.end() && closeConnection == false; ++m) { std::clog << ">>> REQUEST from " << client << " >>>" << std::endl << *m << std::endl << "<<<<<<<<<<<<<<<<" << std::endl; - std::list headers; std::string filename; std::string params; bool sendContent = true; - if (parseFirstLine(client, *m, filename, params, sendContent, closeConnection) == false) + if (parseFirstLine(client, *m, filename, params, sendContent, closeConnection, headers) == false) continue; // special webserver command request @@ -569,7 +623,7 @@ static void * handleClient(void * voidclient) /*{{{*/ std::vector parts = VectorizeString(filename, '/'); if (parts[0] == "_config") { - 
handleOnTheFlyReconfiguration(client, *m, parts); + handleOnTheFlyReconfiguration(client, *m, parts, headers); continue; } } @@ -601,7 +655,7 @@ static void * handleClient(void * voidclient) /*{{{*/ { char error[300]; regerror(res, pattern, error, sizeof(error)); - sendError(client, 500, *m, sendContent, error); + sendError(client, 500, *m, sendContent, error, headers); continue; } if (regexec(pattern, filename.c_str(), 0, 0, 0) == 0) @@ -620,7 +674,7 @@ static void * handleClient(void * voidclient) /*{{{*/ if (_config->FindB("aptwebserver::support::http", true) == false && LookupTag(*m, "Host").find(":4433") == std::string::npos) { - sendError(client, 400, *m, sendContent, "HTTP disabled, all requests must be HTTPS"); + sendError(client, 400, *m, sendContent, "HTTP disabled, all requests must be HTTPS", headers); continue; } else if (RealFileExists(filename) == true) @@ -676,17 +730,16 @@ static void * handleClient(void * voidclient) /*{{{*/ headers.push_back(contentrange.str()); sendHead(client, 206, headers); if (sendContent == true) - sendFile(client, data); + sendFile(client, headers, data); continue; } else { - headers.push_back("Content-Length: 0"); std::ostringstream contentrange; contentrange << "Content-Range: bytes */" << filesize; headers.push_back(contentrange.str()); - sendHead(client, 416, headers); - continue; + sendError(client, 416, *m, sendContent, "", headers); + break; } } } @@ -695,22 +748,20 @@ static void * handleClient(void * voidclient) /*{{{*/ addFileHeaders(headers, data); sendHead(client, 200, headers); if (sendContent == true) - sendFile(client, data); + sendFile(client, headers, data); } else if (DirectoryExists(filename) == true) { if (filename[filename.length()-1] == '/') - sendDirectoryListing(client, filename, *m, sendContent); + sendDirectoryListing(client, filename, *m, sendContent, headers); else sendRedirect(client, 301, filename.append("/"), *m, sendContent); } else - sendError(client, 404, *m, sendContent); + sendError(client, 404, *m, sendContent, "", headers); } _error->DumpErrors(std::cerr); messages.clear(); - if (closeConnection == true) - break; } close(client); std::clog << "CLOSE client " << client << std::endl; -- cgit v1.2.3-70-g09d2 From 92e8c1ff287ab829de825e00cdf94744e699ff97 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sat, 29 Nov 2014 17:59:52 +0100 Subject: dispose http(s) 416 error page as non-content MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Real webservers (like apache) actually send an error page with a 416 response, but our client didn't expect it, leaving the page on the socket to be parsed as the response for the next request (http) or as file content (https), which isn't what we want at all… The symptom is a "Bad header line" error, as HTML usually doesn't parse well as an HTTP header. This manifests itself e.g. if we have a complete file (or larger) in partial/ which isn't discarded by If-Range because the server doesn't support it (or the file is just newer, think: mirror rotation). It is a sort-of regression of 78c72d0ce22e00b194251445aae306df357d5c1a, which removed the "filesize - 1" trick, but that trick had its own problems… To properly test this, our webserver gains the ability to reply with "Transfer-Encoding: chunked", as most real webservers will use it to send dynamically generated error pages. (The tests and their binary helpers had to be slightly modified to apply, but the patch to fix the issue itself is unchanged.)
Closes: 768797 --- cmdline/apt-helper.cc | 35 ++++-- methods/http.cc | 2 + methods/https.cc | 12 +- methods/server.cc | 26 +++-- methods/server.h | 5 +- test/integration/framework | 6 +- test/integration/test-apt-helper | 24 ++-- test/integration/test-partial-file-support | 62 +++++++++- test/interactive-helper/aptwebserver.cc | 181 ++++++++++++++++++----------- 9 files changed, 241 insertions(+), 112 deletions(-) (limited to 'methods') diff --git a/cmdline/apt-helper.cc b/cmdline/apt-helper.cc index dd43ea1bc..63f70983c 100644 --- a/cmdline/apt-helper.cc +++ b/cmdline/apt-helper.cc @@ -48,23 +48,34 @@ static bool DoDownloadFile(CommandLine &CmdL) if (CmdL.FileSize() <= 2) return _error->Error(_("Must specify at least one pair url/filename")); - pkgAcquire Fetcher; AcqTextStatus Stat(ScreenWidth, _config->FindI("quiet",0)); Fetcher.Setup(&Stat); - std::string download_uri = CmdL.FileList[1]; - std::string targetfile = CmdL.FileList[2]; - std::string hash; - if (CmdL.FileSize() > 3) - hash = CmdL.FileList[3]; - // we use download_uri as descr and targetfile as short-descr - new pkgAcqFile(&Fetcher, download_uri, hash, 0, download_uri, targetfile, - "dest-dir-ignored", targetfile); - Fetcher.Run(); + + size_t fileind = 0; + std::vector targetfiles; + while (fileind + 2 <= CmdL.FileSize()) + { + std::string download_uri = CmdL.FileList[fileind + 1]; + std::string targetfile = CmdL.FileList[fileind + 2]; + std::string hash; + if (CmdL.FileSize() > fileind + 3) + hash = CmdL.FileList[fileind + 3]; + // we use download_uri as descr and targetfile as short-descr + new pkgAcqFile(&Fetcher, download_uri, hash, 0, download_uri, targetfile, + "dest-dir-ignored", targetfile); + targetfiles.push_back(targetfile); + fileind += 3; + } + bool Failed = false; - if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true || - FileExists(targetfile) == false) + if (AcquireRun(Fetcher, 0, &Failed, NULL) == false || Failed == true) return _error->Error(_("Download Failed")); + if (targetfiles.empty() == false) + for (std::vector::const_iterator f = targetfiles.begin(); f != targetfiles.end(); ++f) + if (FileExists(*f) == false) + return _error->Error(_("Download Failed")); + return true; } diff --git a/methods/http.cc b/methods/http.cc index f2a4a4db6..1b996db98 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -440,6 +440,8 @@ bool HttpServerState::RunData(FileFd * const File) loss of the connection means we are done */ if (Encoding == Closes) In.Limit(-1); + else if (JunkSize != 0) + In.Limit(JunkSize); else In.Limit(Size - StartPos); diff --git a/methods/https.cc b/methods/https.cc index 0499af0c5..65a744e2a 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -59,6 +59,9 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { me->Server->Result = 200; me->Server->StartPos = me->Server->Size; + // the actual size is not important for https as curl will deal with it + // by itself and e.g. doesn't bother us with transport-encoding… + me->Server->JunkSize = std::numeric_limits::max(); } else me->Server->StartPos = 0; @@ -76,13 +79,18 @@ size_t HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) { HttpsMethod *me = (HttpsMethod *)userp; + size_t buffer_size = size * nmemb; + // we don't need to count the junk here, just drop anything we get as + // we don't always know how long it would be, e.g. in chunked encoding. 
+ if (me->Server->JunkSize != 0) + return buffer_size; if (me->Res.Size == 0) me->URIStart(me->Res); - if(me->File->Write(buffer, size*nmemb) != true) + if(me->File->Write(buffer, buffer_size) != true) return false; - return size*nmemb; + return buffer_size; } int diff --git a/methods/server.cc b/methods/server.cc index 92d94e638..cb0341d5f 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -55,6 +55,7 @@ ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File, Minor = 0; Result = 0; Size = 0; + JunkSize = 0; StartPos = 0; Encoding = Closes; HaveContent = false; @@ -163,14 +164,14 @@ bool ServerState::HeaderLine(string Line) Encoding = Stream; HaveContent = true; - // The length is already set from the Content-Range header - if (StartPos != 0) - return true; + unsigned long long * SizePtr = &Size; + if (Result == 416) + SizePtr = &JunkSize; - Size = strtoull(Val.c_str(), NULL, 10); - if (Size >= std::numeric_limits::max()) + *SizePtr = strtoull(Val.c_str(), NULL, 10); + if (*SizePtr >= std::numeric_limits::max()) return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header")); - else if (Size == 0) + else if (*SizePtr == 0) HaveContent = false; return true; } @@ -187,10 +188,7 @@ bool ServerState::HeaderLine(string Line) // §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416 if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1) - { - StartPos = 1; // ignore Content-Length, it would override Size - HaveContent = false; - } + ; // we got the expected filesize which is all we wanted else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2) return _error->Error(_("The HTTP server sent an invalid Content-Range header")); if ((unsigned long long)StartPos > Size) @@ -308,9 +306,15 @@ ServerMethod::DealWithHeaders(FetchResult &Res) if ((unsigned long long)SBuf.st_size == Server->Size) { // the file is completely downloaded, but was not moved + if (Server->HaveContent == true) + { + // Send to error page to dev/null + FileFd DevNull("/dev/null",FileFd::WriteExists); + Server->RunData(&DevNull); + } + Server->HaveContent = false; Server->StartPos = Server->Size; Server->Result = 200; - Server->HaveContent = false; } else if (unlink(Queue->DestFile.c_str()) == 0) { diff --git a/methods/server.h b/methods/server.h index f5e68d902..1b81e3549 100644 --- a/methods/server.h +++ b/methods/server.h @@ -34,7 +34,8 @@ struct ServerState char Code[360]; // These are some statistics from the last parsed header lines - unsigned long long Size; + unsigned long long Size; // size of the usable content (aka: the file) + unsigned long long JunkSize; // size of junk content (aka: server error pages) unsigned long long StartPos; time_t Date; bool HaveContent; @@ -71,7 +72,7 @@ struct ServerState RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri); bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; - virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; + virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; State = Header; Persistent = false; Pipeline = true;}; virtual bool WriteResponse(std::string const &Data) = 0; diff --git a/test/integration/framework b/test/integration/framework index df1942ff9..ac482a7a0 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -1064,8 +1064,8 @@ 
acquire::cdrom::autodetect 0;" > rootdir/etc/apt/apt.conf.d/00cdrom } downloadfile() { - local PROTO="$(echo "$1" | cut -d':' -f 1 )" - apthelper -o Debug::Acquire::${PROTO}=1 \ + local PROTO="${1%%:*}" + apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ download-file "$1" "$2" 2>&1 || true # only if the file exists the download was successful if [ -e "$2" ]; then @@ -1221,7 +1221,7 @@ testsuccess() { msgtest 'Test for successful execution of' "$*" fi local OUTPUT="${TMPWORKINGDIRECTORY}/rootdir/tmp/testsuccess.output" - if $@ >${OUTPUT} 2>&1; then + if "$@" >${OUTPUT} 2>&1; then msgpass else echo >&2 diff --git a/test/integration/test-apt-helper b/test/integration/test-apt-helper index c749224ca..31e471677 100755 --- a/test/integration/test-apt-helper +++ b/test/integration/test-apt-helper @@ -10,34 +10,36 @@ configarchitecture "i386" changetohttpswebserver test_apt_helper_download() { - echo "foo" > aptarchive/foo + echo 'foo' > aptarchive/foo + echo 'bar' > aptarchive/foo2 msgtest 'apt-file download-file md5sum' - apthelper -qq download-file http://localhost:8080/foo foo2 MD5Sum:d3b07384d113edec49eaa6238ad5ff00 && msgpass || msgfail + testsuccess --nomsg apthelper download-file http://localhost:8080/foo foo2 MD5Sum:d3b07384d113edec49eaa6238ad5ff00 testfileequal foo2 'foo' msgtest 'apt-file download-file sha1' - apthelper -qq download-file http://localhost:8080/foo foo1 SHA1:f1d2d2f924e986ac86fdf7b36c94bcdf32beec15 && msgpass || msgfail + testsuccess --nomsg apthelper download-file http://localhost:8080/foo foo1 SHA1:f1d2d2f924e986ac86fdf7b36c94bcdf32beec15 testfileequal foo1 'foo' msgtest 'apt-file download-file sha256' - apthelper -qq download-file http://localhost:8080/foo foo3 SHA256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c && msgpass || msgfail + testsuccess --nomsg apthelper download-file http://localhost:8080/foo foo3 SHA256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c testfileequal foo3 'foo' msgtest 'apt-file download-file no-hash' - apthelper -qq download-file http://localhost:8080/foo foo4 && msgpass || msgfail + testsuccess --nomsg apthelper download-file http://localhost:8080/foo foo4 testfileequal foo4 'foo' msgtest 'apt-file download-file wrong hash' - if ! 
apthelper -qq download-file http://localhost:8080/foo foo5 MD5Sum:aabbcc 2>&1 2> download.stderr; then - msgpass - else - msgfail - fi - testfileequal download.stderr 'E: Failed to fetch http://localhost:8080/foo Hash Sum mismatch + testfailure --nomsg apthelper -qq download-file http://localhost:8080/foo foo5 MD5Sum:aabbcc + testfileequal rootdir/tmp/testfailure.output 'E: Failed to fetch http://localhost:8080/foo Hash Sum mismatch E: Download Failed' testfileequal foo5.FAILED 'foo' + + msgtest 'apt-file download-file md5sum sha1' + testsuccess --nomsg apthelper download-file http://localhost:8080/foo foo6 MD5Sum:d3b07384d113edec49eaa6238ad5ff00 http://localhost:8080/foo2 foo7 SHA1:e242ed3bffccdf271b7fbaf34ed72d089537b42f + testfileequal foo6 'foo' + testfileequal foo7 'bar' } test_apt_helper_detect_proxy() { diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support index 5ab326def..160d451b6 100755 --- a/test/integration/test-partial-file-support +++ b/test/integration/test-partial-file-support @@ -24,13 +24,25 @@ testdownloadfile() { else msgpass fi - cat "$DOWNLOADLOG" | while read field hash; do + sed -e '/^ <- / s#%20# #g' -e '/^ <- / s#%0a#\n#g' "$DOWNLOADLOG" | grep '^.*-Hash: ' > receivedhashes.log + testsuccess test -s receivedhashes.log + local HASHES_OK=0 + local HASHES_BAD=0 + while read field hash; do local EXPECTED case "$field" in 'MD5Sum-Hash:') EXPECTED="$(md5sum "$TESTFILE" | cut -d' ' -f 1)";; 'SHA1-Hash:') EXPECTED="$(sha1sum "$TESTFILE" | cut -d' ' -f 1)";; 'SHA256-Hash:') EXPECTED="$(sha256sum "$TESTFILE" | cut -d' ' -f 1)";; 'SHA512-Hash:') EXPECTED="$(sha512sum "$TESTFILE" | cut -d' ' -f 1)";; + 'Checksum-FileSize-Hash:') + #filesize is too weak to check for != + if [ "$4" = '=' ]; then + EXPECTED="$(stat -c '%s' "$TESTFILE")" + else + continue + fi + ;; *) continue;; esac if [ "$4" = '=' ]; then @@ -40,15 +52,41 @@ testdownloadfile() { fi if [ "$EXPECTED" "$4" "$hash" ]; then msgpass + HASHES_OK=$((HASHES_OK+1)); else - cat >&2 "$DOWNLOADLOG" msgfail "expected: $EXPECTED ; got: $hash" + HASHES_BAD=$((HASHES_BAD+1)); fi - done + done < receivedhashes.log + msgtest 'At least one good hash and no bad ones' + if [ $HASHES_OK -eq 0 ] || [ $HASHES_BAD -ne 0 ]; then + cat >&2 "$DOWNLOADLOG" + msgfail + else + msgpass + fi } TESTFILE='aptarchive/testfile' cp -a ${TESTDIR}/framework $TESTFILE +cp -a ${TESTDIR}/framework "${TESTFILE}2" + +followuprequest() { + local DOWN='./testfile' + + copysource $TESTFILE 1M $DOWN + testdownloadfile 'completely downloaded file' "${1}/testfile" "$DOWN" '=' + testwebserverlaststatuscode '416' "$DOWNLOADLOG" + + copysource $TESTFILE 1M $DOWN + copysource "${TESTFILE}2" 20 "${DOWN}2" + msgtest 'Testing download of files with' 'completely downloaded file + partial file' + testsuccess --nomsg apthelper -o Debug::Acquire::${1%%:*}=1 -o Debug::pkgAcquire::Worker=1 \ + download-file "$1/testfile" "$DOWN" '' "$1/testfile2" "${DOWN}2" + testwebserverlaststatuscode '206' 'rootdir/tmp/testsuccess.output' + testsuccess diff -u "$TESTFILE" "${DOWN}" + testsuccess diff -u "${DOWN}" "${DOWN}2" +} testrun() { webserverconfig 'aptwebserver::support::range' 'true' @@ -65,9 +103,11 @@ testrun() { testdownloadfile 'invalid partial data' "${1}/testfile" './testfile' '!=' testwebserverlaststatuscode '206' "$DOWNLOADLOG" - copysource $TESTFILE 1M ./testfile - testdownloadfile 'completely downloaded file' "${1}/testfile" './testfile' '=' - testwebserverlaststatuscode '416' "$DOWNLOADLOG" + webserverconfig 
'aptwebserver::closeOnError' 'false' + followuprequest "$1" + webserverconfig 'aptwebserver::closeOnError' 'true' + followuprequest "$1" + webserverconfig 'aptwebserver::closeOnError' 'false' copysource /dev/zero 1M ./testfile testdownloadfile 'too-big partial file' "${1}/testfile" './testfile' '=' @@ -85,8 +125,18 @@ testrun() { testwebserverlaststatuscode '200' "$DOWNLOADLOG" } +msgmsg 'http: Test with Content-Length' +webserverconfig 'aptwebserver::chunked-transfer-encoding' 'false' +testrun 'http://localhost:8080' +msgmsg 'http: Test with Transfer-Encoding: chunked' +webserverconfig 'aptwebserver::chunked-transfer-encoding' 'true' testrun 'http://localhost:8080' changetohttpswebserver +msgmsg 'https: Test with Content-Length' +webserverconfig 'aptwebserver::chunked-transfer-encoding' 'false' +testrun 'https://localhost:4433' +msgmsg 'https: Test with Transfer-Encoding: chunked' +webserverconfig 'aptwebserver::chunked-transfer-encoding' 'true' testrun 'https://localhost:4433' diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index 34476e1af..cd52da692 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -19,6 +19,8 @@ #include #include #include + +#include #include #include #include @@ -79,12 +81,21 @@ static char const * httpcodeToStr(int const httpcode) /*{{{*/ return NULL; } /*}}}*/ +static bool chunkedTransferEncoding(std::list const &headers) { + if (std::find(headers.begin(), headers.end(), "Transfer-Encoding: chunked") != headers.end()) + return true; + if (_config->FindB("aptwebserver::chunked-transfer-encoding", false) == true) + return true; + return false; +} static void addFileHeaders(std::list &headers, FileFd &data)/*{{{*/ { - std::ostringstream contentlength; - contentlength << "Content-Length: " << data.FileSize(); - headers.push_back(contentlength.str()); - + if (chunkedTransferEncoding(headers) == false) + { + std::ostringstream contentlength; + contentlength << "Content-Length: " << data.FileSize(); + headers.push_back(contentlength.str()); + } std::string lastmodified("Last-Modified: "); lastmodified.append(TimeRFC1123(data.ModificationTime())); headers.push_back(lastmodified); @@ -92,9 +103,12 @@ static void addFileHeaders(std::list &headers, FileFd &data)/*{{{*/ /*}}}*/ static void addDataHeaders(std::list &headers, std::string &data)/*{{{*/ { - std::ostringstream contentlength; - contentlength << "Content-Length: " << data.size(); - headers.push_back(contentlength.str()); + if (chunkedTransferEncoding(headers) == false) + { + std::ostringstream contentlength; + contentlength << "Content-Length: " << data.size(); + headers.push_back(contentlength.str()); + } } /*}}}*/ static bool sendHead(int const client, int const httpcode, std::list &headers)/*{{{*/ @@ -114,6 +128,9 @@ static bool sendHead(int const client, int const httpcode, std::list>> RESPONSE to " << client << " >>>" << std::endl; bool Success = true; for (std::list::const_iterator h = headers.begin(); @@ -130,25 +147,55 @@ static bool sendHead(int const client, int const httpcode, std::list const &headers, FileFd &data)/*{{{*/ { bool Success = true; + bool const chunked = chunkedTransferEncoding(headers); char buffer[500]; unsigned long long actual = 0; while ((Success &= data.Read(buffer, sizeof(buffer), &actual)) == true) { if (actual == 0) break; - Success &= FileFd::Write(client, buffer, actual); + + if (chunked == true) + { + std::string size; + strprintf(size, "%llX\r\n", actual); + Success &= 
FileFd::Write(client, size.c_str(), size.size()); + Success &= FileFd::Write(client, buffer, actual); + Success &= FileFd::Write(client, "\r\n", strlen("\r\n")); + } + else + Success &= FileFd::Write(client, buffer, actual); + } + if (chunked == true) + { + char const * const finish = "0\r\n\r\n"; + Success &= FileFd::Write(client, finish, strlen(finish)); } if (Success == false) - std::cerr << "SENDFILE: READ/WRITE ERROR to " << client << std::endl; + std::cerr << "SENDFILE:" << (chunked ? " CHUNKED" : "") << " READ/WRITE ERROR to " << client << std::endl; return Success; } /*}}}*/ -static bool sendData(int const client, std::string const &data) /*{{{*/ +static bool sendData(int const client, std::list const &headers, std::string const &data)/*{{{*/ { - if (FileFd::Write(client, data.c_str(), data.size()) == false) + if (chunkedTransferEncoding(headers) == true) + { + unsigned long long const ullsize = data.length(); + std::string size; + strprintf(size, "%llX\r\n", ullsize); + char const * const finish = "\r\n0\r\n\r\n"; + if (FileFd::Write(client, size.c_str(), size.length()) == false || + FileFd::Write(client, data.c_str(), ullsize) == false || + FileFd::Write(client, finish, strlen(finish)) == false) + { + std::cerr << "SENDDATA: CHUNK WRITE ERROR to " << client << std::endl; + return false; + } + } + else if (FileFd::Write(client, data.c_str(), data.size()) == false) { std::cerr << "SENDDATA: WRITE ERROR to " << client << std::endl; return false; @@ -157,34 +204,38 @@ static bool sendData(int const client, std::string const &data) /*{{{*/ } /*}}}*/ static void sendError(int const client, int const httpcode, std::string const &request,/*{{{*/ - bool content, std::string const &error = "") + bool const content, std::string const &error, std::list &headers) { - std::list headers; std::string response(""); response.append(httpcodeToStr(httpcode)).append(""); response.append("
<body><h1>").append(httpcodeToStr(httpcode)).append("</h1>");
    if (httpcode != 200)
-   {
-      if (error.empty() == false)
-	 response.append("<p><em>Error</em>: ").append(error).append("</p>");
-      response.append("This error is a result of the request:<pre>");
-   }
+      response.append("<p><em>Error</em>: ");
+   else
+      response.append("<p><em>Success</em>: ");
+   if (error.empty() == false)
+      response.append(error);
+   else
+      response.append(httpcodeToStr(httpcode));
+   if (httpcode != 200)
+      response.append("</p>This error is a result of the request:<pre>");
    else
-   {
-      if (error.empty() == false)
-	 response.append("<p><em>Success</em>: ").append(error).append("</p>");
       response.append("The successfully executed operation was requested by:<pre>");
-   }
    response.append(request).append("</pre></body></html>
"); + if (httpcode != 200) + { + if (_config->FindB("aptwebserver::closeOnError", false) == true) + headers.push_back("Connection: close"); + } addDataHeaders(headers, response); sendHead(client, httpcode, headers); if (content == true) - sendData(client, response); + sendData(client, headers, response); } static void sendSuccess(int const client, std::string const &request, - bool content, std::string const &error = "") + bool const content, std::string const &error, std::list &headers) { - sendError(client, 200, request, content, error); + sendError(client, 200, request, content, error, headers); } /*}}}*/ static void sendRedirect(int const client, int const httpcode, std::string const &uri,/*{{{*/ @@ -221,7 +272,7 @@ static void sendRedirect(int const client, int const httpcode, std::string const headers.push_back(location); sendHead(client, httpcode, headers); if (content == true) - sendData(client, response); + sendData(client, headers, response); } /*}}}*/ static int filter_hidden_files(const struct dirent *a) /*{{{*/ @@ -263,16 +314,15 @@ static int grouped_alpha_case_sort(const struct dirent **a, const struct dirent } /*}}}*/ static void sendDirectoryListing(int const client, std::string const &dir,/*{{{*/ - std::string const &request, bool content) + std::string const &request, bool content, std::list &headers) { - std::list headers; std::ostringstream listing; struct dirent **namelist; int const counter = scandir(dir.c_str(), &namelist, filter_hidden_files, grouped_alpha_case_sort); if (counter == -1) { - sendError(client, 500, request, content); + sendError(client, 500, request, content, "scandir failed", headers); return; } @@ -311,18 +361,18 @@ static void sendDirectoryListing(int const client, std::string const &dir,/*{{{* addDataHeaders(headers, response); sendHead(client, 200, headers); if (content == true) - sendData(client, response); + sendData(client, headers, response); } /*}}}*/ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ std::string &filename, std::string ¶ms, bool &sendContent, - bool &closeConnection) + bool &closeConnection, std::list &headers) { if (strncmp(request.c_str(), "HEAD ", 5) == 0) sendContent = false; if (strncmp(request.c_str(), "GET ", 4) != 0) { - sendError(client, 501, request, true); + sendError(client, 501, request, true, "", headers); return false; } @@ -333,7 +383,7 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ if (lineend == std::string::npos || filestart == std::string::npos || fileend == std::string::npos || filestart == fileend) { - sendError(client, 500, request, sendContent, "Filename can't be extracted"); + sendError(client, 500, request, sendContent, "Filename can't be extracted", headers); return false; } @@ -345,14 +395,14 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ closeConnection = strcasecmp(LookupTag(request, "Connection", "Keep-Alive").c_str(), "close") == 0; else { - sendError(client, 500, request, sendContent, "Not a HTTP/1.{0,1} request"); + sendError(client, 500, request, sendContent, "Not a HTTP/1.{0,1} request", headers); return false; } filename = request.substr(filestart, fileend - filestart); if (filename.find(' ') != std::string::npos) { - sendError(client, 500, request, sendContent, "Filename contains an unencoded space"); + sendError(client, 500, request, sendContent, "Filename contains an unencoded space", headers); return false; } @@ -360,7 +410,7 @@ static bool parseFirstLine(int const client, std::string 
const &request,/*{{{*/ if (host.empty() == true) { // RFC 2616 §14.23 requires Host - sendError(client, 400, request, sendContent, "Host header is required"); + sendError(client, 400, request, sendContent, "Host header is required", headers); return false; } host = "http://" + host; @@ -371,7 +421,7 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ { if (absolute.find("uri") == std::string::npos) { - sendError(client, 400, request, sendContent, "Request is absoluteURI, but configured to not accept that"); + sendError(client, 400, request, sendContent, "Request is absoluteURI, but configured to not accept that", headers); return false; } // strip the host from the request to make it an absolute path @@ -379,7 +429,7 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ } else if (absolute.find("path") == std::string::npos) { - sendError(client, 400, request, sendContent, "Request is absolutePath, but configured to not accept that"); + sendError(client, 400, request, sendContent, "Request is absolutePath, but configured to not accept that", headers); return false; } @@ -398,7 +448,8 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ filename.find_first_of("\r\n\t\f\v") != std::string::npos || filename.find("/../") != std::string::npos) { - sendError(client, 400, request, sendContent, "Filename contains illegal character (sequence)"); + std::list headers; + sendError(client, 400, request, sendContent, "Filename contains illegal character (sequence)", headers); return false; } @@ -434,46 +485,45 @@ static bool parseFirstLine(int const client, std::string const &request,/*{{{*/ return true; } /*}}}*/ -static bool handleOnTheFlyReconfiguration(int const client, std::string const &request, std::vector const &parts)/*{{{*/ +static bool handleOnTheFlyReconfiguration(int const client, std::string const &request,/*{{{*/ + std::vector parts, std::list &headers) { size_t const pcount = parts.size(); if (pcount == 4 && parts[1] == "set") { _config->Set(parts[2], parts[3]); - sendSuccess(client, request, true, "Option '" + parts[2] + "' was set to '" + parts[3] + "'!"); + sendSuccess(client, request, true, "Option '" + parts[2] + "' was set to '" + parts[3] + "'!", headers); return true; } else if (pcount == 4 && parts[1] == "find") { - std::list headers; std::string response = _config->Find(parts[2], parts[3]); addDataHeaders(headers, response); sendHead(client, 200, headers); - sendData(client, response); + sendData(client, headers, response); return true; } else if (pcount == 3 && parts[1] == "find") { - std::list headers; if (_config->Exists(parts[2]) == true) { std::string response = _config->Find(parts[2]); addDataHeaders(headers, response); sendHead(client, 200, headers); - sendData(client, response); + sendData(client, headers, response); return true; } - sendError(client, 404, request, "Requested Configuration option doesn't exist."); + sendError(client, 404, request, true, "Requested Configuration option doesn't exist", headers); return false; } else if (pcount == 3 && parts[1] == "clear") { _config->Clear(parts[2]); - sendSuccess(client, request, true, "Option '" + parts[2] + "' was cleared."); + sendSuccess(client, request, true, "Option '" + parts[2] + "' was cleared.", headers); return true; } - sendError(client, 400, request, true, "Unknown on-the-fly configuration request"); + sendError(client, 400, request, true, "Unknown on-the-fly configuration request", headers); return false; } /*}}}*/ @@ 
-482,18 +532,22 @@ static void * handleClient(void * voidclient) /*{{{*/ int client = *((int*)(voidclient)); std::clog << "ACCEPT client " << client << std::endl; std::vector messages; - while (ReadMessages(client, messages)) + bool closeConnection = false; + std::list headers; + while (closeConnection == false && ReadMessages(client, messages)) { - bool closeConnection = false; + // if we announced a closing, do the close + if (std::find(headers.begin(), headers.end(), std::string("Connection: close")) != headers.end()) + break; + headers.clear(); for (std::vector::const_iterator m = messages.begin(); m != messages.end() && closeConnection == false; ++m) { std::clog << ">>> REQUEST from " << client << " >>>" << std::endl << *m << std::endl << "<<<<<<<<<<<<<<<<" << std::endl; - std::list headers; std::string filename; std::string params; bool sendContent = true; - if (parseFirstLine(client, *m, filename, params, sendContent, closeConnection) == false) + if (parseFirstLine(client, *m, filename, params, sendContent, closeConnection, headers) == false) continue; // special webserver command request @@ -502,7 +556,7 @@ static void * handleClient(void * voidclient) /*{{{*/ std::vector parts = VectorizeString(filename, '/'); if (parts[0] == "_config") { - handleOnTheFlyReconfiguration(client, *m, parts); + handleOnTheFlyReconfiguration(client, *m, parts, headers); continue; } } @@ -534,7 +588,7 @@ static void * handleClient(void * voidclient) /*{{{*/ { char error[300]; regerror(res, pattern, error, sizeof(error)); - sendError(client, 500, *m, sendContent, error); + sendError(client, 500, *m, sendContent, error, headers); continue; } if (regexec(pattern, filename.c_str(), 0, 0, 0) == 0) @@ -553,7 +607,7 @@ static void * handleClient(void * voidclient) /*{{{*/ if (_config->FindB("aptwebserver::support::http", true) == false && LookupTag(*m, "Host").find(":4433") == std::string::npos) { - sendError(client, 400, *m, sendContent, "HTTP disabled, all requests must be HTTPS"); + sendError(client, 400, *m, sendContent, "HTTP disabled, all requests must be HTTPS", headers); continue; } else if (RealFileExists(filename) == true) @@ -609,17 +663,16 @@ static void * handleClient(void * voidclient) /*{{{*/ headers.push_back(contentrange.str()); sendHead(client, 206, headers); if (sendContent == true) - sendFile(client, data); + sendFile(client, headers, data); continue; } else { - headers.push_back("Content-Length: 0"); std::ostringstream contentrange; contentrange << "Content-Range: bytes */" << filesize; headers.push_back(contentrange.str()); - sendHead(client, 416, headers); - continue; + sendError(client, 416, *m, sendContent, "", headers); + break; } } } @@ -628,22 +681,20 @@ static void * handleClient(void * voidclient) /*{{{*/ addFileHeaders(headers, data); sendHead(client, 200, headers); if (sendContent == true) - sendFile(client, data); + sendFile(client, headers, data); } else if (DirectoryExists(filename) == true) { if (filename[filename.length()-1] == '/') - sendDirectoryListing(client, filename, *m, sendContent); + sendDirectoryListing(client, filename, *m, sendContent, headers); else sendRedirect(client, 301, filename.append("/"), *m, sendContent); } else - sendError(client, 404, *m, sendContent); + sendError(client, 404, *m, sendContent, "", headers); } _error->DumpErrors(std::cerr); messages.clear(); - if (closeConnection == true) - break; } close(client); std::clog << "CLOSE client " << client << std::endl; -- cgit v1.2.3-70-g09d2 From 9127d7aecf01f2999a2589e4b0503288518b2927 Mon Sep 17 
00:00:00 2001 From: Michael Vogt Date: Mon, 5 Jan 2015 10:27:53 +0100 Subject: Fix missing URIStart() for https downloads Add an explicit ReceivedData flag to HttpsMethod that indicates when we got data from the connection so that we can send URIStart() to the parent. This is needed because URIStart got moved in f9b4f12d from the progress_callback to write_data() and it only checks for Res.Size. In the old code, if progress_callback is called by libcurl (and sets Res.Size) before write_data is called, then URIStart() is never sent. Making this an explicit ReceivedData variable fixes this issue. --- methods/https.cc | 9 +++++++-- methods/https.h | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) (limited to 'methods') diff --git a/methods/https.cc b/methods/https.cc index 65a744e2a..3a5981b58 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -85,8 +85,12 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) if (me->Server->JunkSize != 0) return buffer_size; - if (me->Res.Size == 0) + if (me->ReceivedData == false) + { me->URIStart(me->Res); + me->ReceivedData = true; + } + if(me->File->Write(buffer, buffer_size) != true) return false; @@ -95,7 +99,7 @@ int HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/, - double /*ultotal*/, double /*ulnow*/) + double /*ultotal*/, double /*ulnow*/) { HttpsMethod *me = (HttpsMethod *)clientp; if(dltotal > 0 && me->Res.Size == 0) { @@ -179,6 +183,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) char curl_errorstr[CURL_ERROR_SIZE]; URI Uri = Itm->Uri; string remotehost = Uri.Host; + ReceivedData = false; // TODO: // - http::Pipeline-Depth diff --git a/methods/https.h b/methods/https.h index faac8a3cd..411b71440 100644 --- a/methods/https.h +++ b/methods/https.h @@ -66,6 +66,7 @@ class HttpsMethod : public pkgAcqMethod CURL *curl; FetchResult Res; HttpsServerState *Server; + bool ReceivedData; public: FileFd *File; -- cgit v1.2.3-70-g09d2 From 0c2dc43d4fe1d026650b5e2920a021557f9534a6 Mon Sep 17 00:00:00 2001 From: Tomasz Buchert Date: Mon, 16 Feb 2015 00:57:29 +0100 Subject: Fix crash in the apt-transport-https when Owner is NULL Do not crash in ServerState::HeaderLine if there is no Owner. Closes: #778375 --- methods/server.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'methods') diff --git a/methods/server.cc b/methods/server.cc index cb0341d5f..e321e0230 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -129,7 +129,7 @@ bool ServerState::HeaderLine(string Line) if (elements == 3) { Code[0] = '\0'; - if (Owner->Debug == true) + if (Owner != NULL && Owner->Debug == true) clog << "HTTP server doesn't give Reason-Phrase for " << Result << std::endl; } else if (elements != 4) -- cgit v1.2.3-70-g09d2 From 905fba60a046646a26a56b4c5d4a5dc7d5906f0d Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 9 Mar 2015 01:54:46 +0100 Subject: derive more of https from http method Bug #778375 uncovered that https wasn't properly integrated into the class family tree of http as it was supposed to be, leading to a NULL pointer dereference. Fixing this 'properly' was deemed too much diff for practically no gain that late in the release, so commit 0c2dc43d4fe1d026650b5e2920a021557f9534a6 just fixed the symptom, while this commit fixes the cause and adds a test.
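To make the intent of the refactoring easier to follow before reading the diff, here is a minimal, compilable sketch of the class-hierarchy shape the patch below establishes. Only the names ServerState, ServerMethod, HttpsServerState, HttpsMethod and CreateServerState come from the actual patch; all members and bodies are heavily condensed stand-ins for the real classes in methods/server.h and methods/https.h:

struct URI { /* stand-in for APT's parsed URI type */ };

class ServerMethod;

struct ServerState
{
   URI ServerName;
   ServerMethod * const Owner;   // https used to pass NULL here, see #778375
   ServerState(URI Srv, ServerMethod * Own) : ServerName(Srv), Owner(Own) {}
   virtual ~ServerState() {}
};

class ServerMethod
{
   public:
   // each transport builds its own concrete state via this factory,
   // but callers only ever see the common ServerState interface
   virtual ServerState * CreateServerState(URI uri) = 0;
   virtual ~ServerMethod() {}
};

struct HttpsServerState : public ServerState
{
   HttpsServerState(URI Srv, ServerMethod * Own) : ServerState(Srv, Own) {}
};

class HttpsMethod : public ServerMethod
{
   public:
   virtual ServerState * CreateServerState(URI uri)
   {
      // handing over 'this' instead of NULL gives HeaderLine a valid Owner
      return new HttpsServerState(uri, this);
   }
};

int main()
{
   HttpsMethod https;
   ServerState * const state = https.CreateServerState(URI());
   int const ret = (state->Owner == 0) ? 1 : 0;
   delete state;
   return ret;
}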
--- methods/http.cc | 2 - methods/https.cc | 37 +++++---- methods/https.h | 16 ++-- methods/server.cc | 7 +- methods/server.h | 3 +- .../test-bug-778375-server-has-no-reason-phrase | 40 ++++++++++ test/interactive-helper/aptwebserver.cc | 88 +++++++++++----------- 7 files changed, 124 insertions(+), 69 deletions(-) create mode 100755 test/integration/test-bug-778375-server-has-no-reason-phrase (limited to 'methods') diff --git a/methods/http.cc b/methods/http.cc index ad1347d36..021b284d0 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -772,8 +772,6 @@ bool HttpMethod::Configuration(string Message) if (ServerMethod::Configuration(Message) == false) return false; - DropPrivsOrDie(); - AllowRedirect = _config->FindB("Acquire::http::AllowRedirect",true); PipelineDepth = _config->FindI("Acquire::http::Pipeline-Depth", PipelineDepth); diff --git a/methods/https.cc b/methods/https.cc index 37a8ff5fd..32de42e4b 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -37,16 +37,6 @@ /*}}}*/ using namespace std; -bool HttpsMethod::Configuration(std::string Message) -{ - if (pkgAcqMethod::Configuration(Message) == false) - return false; - - DropPrivsOrDie(); - - return true; -} - size_t HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { @@ -131,7 +121,7 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/, } // HttpsServerState::HttpsServerState - Constructor /*{{{*/ -HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * /*Owner*/) : ServerState(Srv, NULL) +HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner) { TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut); ReceivedData = false; @@ -335,13 +325,11 @@ bool HttpsMethod::Fetch(FetchItem *Itm) curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, timeout); // set redirect options and default to 10 redirects - bool const AllowRedirect = _config->FindB("Acquire::https::AllowRedirect", - _config->FindB("Acquire::http::AllowRedirect",true)); curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, AllowRedirect); curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 10); // debug - if(_config->FindB("Debug::Acquire::https", false)) + if (Debug == true) curl_easy_setopt(curl, CURLOPT_VERBOSE, true); // error handling @@ -378,7 +366,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) // go for it - if the file exists, append on it File = new FileFd(Itm->DestFile, FileFd::WriteAny); - Server = new HttpsServerState(Itm->Uri, this); + Server = CreateServerState(Itm->Uri); // keep apt updated Res.Filename = Itm->DestFile; @@ -473,6 +461,25 @@ bool HttpsMethod::Fetch(FetchItem *Itm) return true; } + /*}}}*/ +// HttpsMethod::Configuration - Handle a configuration message /*{{{*/ +bool HttpsMethod::Configuration(string Message) +{ + if (ServerMethod::Configuration(Message) == false) + return false; + + AllowRedirect = _config->FindB("Acquire::https::AllowRedirect", + _config->FindB("Acquire::http::AllowRedirect", true)); + Debug = _config->FindB("Debug::Acquire::https",false); + + return true; +} + /*}}}*/ +ServerState * HttpsMethod::CreateServerState(URI uri) /*{{{*/ +{ + return new HttpsServerState(uri, this); +} + /*}}}*/ int main() { diff --git a/methods/https.h b/methods/https.h index 6917a6ff6..433a84680 100644 --- a/methods/https.h +++ b/methods/https.h @@ -50,17 +50,14 @@ class HttpsServerState : public ServerState HttpsServerState(URI Srv, HttpsMethod *Owner); virtual ~HttpsServerState() {Close();}; - - bool ReceivedData; }; -class HttpsMethod : public pkgAcqMethod +class 
HttpsMethod : public ServerMethod { // minimum speed in bytes/se that triggers download timeout handling static const int DL_MIN_SPEED = 10; virtual bool Fetch(FetchItem *); - virtual bool Configuration(std::string Message); static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp); static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp); @@ -69,12 +66,19 @@ class HttpsMethod : public pkgAcqMethod void SetupProxy(); CURL *curl; FetchResult Res; - HttpsServerState *Server; + ServerState *Server; + + // Used by ServerMethods unused by https + virtual void SendReq(FetchItem *) { exit(42); } + virtual void RotateDNS() { exit(42); } public: FileFd *File; - HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), Server(NULL), File(NULL) + virtual bool Configuration(std::string Message); + virtual ServerState * CreateServerState(URI uri); + + HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL) { curl = curl_easy_init(); }; diff --git a/methods/server.cc b/methods/server.cc index c17f27f73..91ec824d1 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -238,7 +238,12 @@ ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOu bool ServerMethod::Configuration(string Message) /*{{{*/ { - return pkgAcqMethod::Configuration(Message); + if (pkgAcqMethod::Configuration(Message) == false) + return false; + + DropPrivsOrDie(); + + return true; } /*}}}*/ diff --git a/methods/server.h b/methods/server.h index b974ec89a..3b232dcac 100644 --- a/methods/server.h +++ b/methods/server.h @@ -37,6 +37,7 @@ struct ServerState unsigned long long Size; // size of the usable content (aka: the file) unsigned long long JunkSize; // size of junk content (aka: server error pages) unsigned long long StartPos; + bool ReceivedData; time_t Date; bool HaveContent; enum {Chunked,Stream,Closes} Encoding; @@ -75,7 +76,7 @@ struct ServerState bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; - StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; + StartPos = 0; ReceivedData = false; Encoding = Closes; time(&Date); HaveContent = false; State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;}; virtual bool WriteResponse(std::string const &Data) = 0; diff --git a/test/integration/test-bug-778375-server-has-no-reason-phrase b/test/integration/test-bug-778375-server-has-no-reason-phrase new file mode 100755 index 000000000..23481ef88 --- /dev/null +++ b/test/integration/test-bug-778375-server-has-no-reason-phrase @@ -0,0 +1,40 @@ +#!/bin/sh +set -e + +TESTDIR=$(readlink -f $(dirname $0)) +. 
$TESTDIR/framework + +setupenvironment +configarchitecture 'native' + +echo 'found' > aptarchive/working +changetohttpswebserver -o 'aptwebserver::redirect::replace::/redirectme/=/' \ + -o 'aptwebserver::httpcode::200=200' -o 'aptwebserver::httpcode::404=404' \ + -o 'aptwebserver::httpcode::301=301' + +testdownload() { + rm -f downfile + msgtest "download of a $1 via" "${3%%:*}" + $2 --nomsg downloadfile "$3" downfile + + cp rootdir/tmp/testsuccess.output download.log + #looking for "HTTP server doesn't give Reason-Phrase for 200" + testsuccess grep 'give Reason-Phrase for' download.log + + if [ "$2" = 'testsuccess' ]; then + testfileequal downfile 'found' + else + testfailure test -e downfile + fi +} + +runtest() { + testdownload 'file works' 'testsuccess' "$1/working" + testdownload 'file via redirect works' 'testsuccess' "$1/redirectme/working" + + testdownload 'non-existent file fails' 'testfailure' "$1/failing" + testdownload 'non-existent file via redirect fails' 'testfailure' "$1/redirectme/failing" +} + +runtest 'http://localhost:8080' +runtest 'https://localhost:4433' diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index 3403bbdd2..644629a33 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -27,58 +27,58 @@ #include #include -static char const * httpcodeToStr(int const httpcode) /*{{{*/ +static std::string httpcodeToStr(int const httpcode) /*{{{*/ { switch (httpcode) { // Informational 1xx - case 100: return "100 Continue"; - case 101: return "101 Switching Protocols"; + case 100: return _config->Find("aptwebserver::httpcode::100", "100 Continue"); + case 101: return _config->Find("aptwebserver::httpcode::101", "101 Switching Protocols"); // Successful 2xx - case 200: return "200 OK"; - case 201: return "201 Created"; - case 202: return "202 Accepted"; - case 203: return "203 Non-Authoritative Information"; - case 204: return "204 No Content"; - case 205: return "205 Reset Content"; - case 206: return "206 Partial Content"; + case 200: return _config->Find("aptwebserver::httpcode::200", "200 OK"); + case 201: return _config->Find("aptwebserver::httpcode::201", "201 Created"); + case 202: return _config->Find("aptwebserver::httpcode::202", "202 Accepted"); + case 203: return _config->Find("aptwebserver::httpcode::203", "203 Non-Authoritative Information"); + case 204: return _config->Find("aptwebserver::httpcode::204", "204 No Content"); + case 205: return _config->Find("aptwebserver::httpcode::205", "205 Reset Content"); + case 206: return _config->Find("aptwebserver::httpcode::206", "206 Partial Content"); // Redirections 3xx - case 300: return "300 Multiple Choices"; - case 301: return "301 Moved Permanently"; - case 302: return "302 Found"; - case 303: return "303 See Other"; - case 304: return "304 Not Modified"; - case 305: return "304 Use Proxy"; - case 307: return "307 Temporary Redirect"; + case 300: return _config->Find("aptwebserver::httpcode::300", "300 Multiple Choices"); + case 301: return _config->Find("aptwebserver::httpcode::301", "301 Moved Permanently"); + case 302: return _config->Find("aptwebserver::httpcode::302", "302 Found"); + case 303: return _config->Find("aptwebserver::httpcode::303", "303 See Other"); + case 304: return _config->Find("aptwebserver::httpcode::304", "304 Not Modified"); + case 305: return _config->Find("aptwebserver::httpcode::305", "305 Use Proxy"); + case 307: return _config->Find("aptwebserver::httpcode::307", "307 Temporary Redirect"); 
// Client errors 4xx - case 400: return "400 Bad Request"; - case 401: return "401 Unauthorized"; - case 402: return "402 Payment Required"; - case 403: return "403 Forbidden"; - case 404: return "404 Not Found"; - case 405: return "405 Method Not Allowed"; - case 406: return "406 Not Acceptable"; - case 407: return "407 Proxy Authentication Required"; - case 408: return "408 Request Time-out"; - case 409: return "409 Conflict"; - case 410: return "410 Gone"; - case 411: return "411 Length Required"; - case 412: return "412 Precondition Failed"; - case 413: return "413 Request Entity Too Large"; - case 414: return "414 Request-URI Too Large"; - case 415: return "415 Unsupported Media Type"; - case 416: return "416 Requested range not satisfiable"; - case 417: return "417 Expectation Failed"; - case 418: return "418 I'm a teapot"; + case 400: return _config->Find("aptwebserver::httpcode::400", "400 Bad Request"); + case 401: return _config->Find("aptwebserver::httpcode::401", "401 Unauthorized"); + case 402: return _config->Find("aptwebserver::httpcode::402", "402 Payment Required"); + case 403: return _config->Find("aptwebserver::httpcode::403", "403 Forbidden"); + case 404: return _config->Find("aptwebserver::httpcode::404", "404 Not Found"); + case 405: return _config->Find("aptwebserver::httpcode::405", "405 Method Not Allowed"); + case 406: return _config->Find("aptwebserver::httpcode::406", "406 Not Acceptable"); + case 407: return _config->Find("aptwebserver::httpcode::407", "407 Proxy Authentication Required"); + case 408: return _config->Find("aptwebserver::httpcode::408", "408 Request Time-out"); + case 409: return _config->Find("aptwebserver::httpcode::409", "409 Conflict"); + case 410: return _config->Find("aptwebserver::httpcode::410", "410 Gone"); + case 411: return _config->Find("aptwebserver::httpcode::411", "411 Length Required"); + case 412: return _config->Find("aptwebserver::httpcode::412", "412 Precondition Failed"); + case 413: return _config->Find("aptwebserver::httpcode::413", "413 Request Entity Too Large"); + case 414: return _config->Find("aptwebserver::httpcode::414", "414 Request-URI Too Large"); + case 415: return _config->Find("aptwebserver::httpcode::415", "415 Unsupported Media Type"); + case 416: return _config->Find("aptwebserver::httpcode::416", "416 Requested range not satisfiable"); + case 417: return _config->Find("aptwebserver::httpcode::417", "417 Expectation Failed"); + case 418: return _config->Find("aptwebserver::httpcode::418", "418 I'm a teapot"); // Server error 5xx - case 500: return "500 Internal Server Error"; - case 501: return "501 Not Implemented"; - case 502: return "502 Bad Gateway"; - case 503: return "503 Service Unavailable"; - case 504: return "504 Gateway Time-out"; - case 505: return "505 HTTP Version not supported"; - } - return NULL; + case 500: return _config->Find("aptwebserver::httpcode::500", "500 Internal Server Error"); + case 501: return _config->Find("aptwebserver::httpcode::501", "501 Not Implemented"); + case 502: return _config->Find("aptwebserver::httpcode::502", "502 Bad Gateway"); + case 503: return _config->Find("aptwebserver::httpcode::503", "503 Service Unavailable"); + case 504: return _config->Find("aptwebserver::httpcode::504", "504 Gateway Time-out"); + case 505: return _config->Find("aptwebserver::httpcode::505", "505 HTTP Version not supported"); + } + return ""; } /*}}}*/ static bool chunkedTransferEncoding(std::list const &headers) { -- cgit v1.2.3-70-g09d2 From 1296bc7c466181a7978c313c40a041b34ce3eaeb 
Mon Sep 17 00:00:00 2001 From: Robert Edmonds Date: Sun, 22 Mar 2015 00:12:45 -0400 Subject: HttpsMethod::Fetch(): Zero the FetchResult object when leaving due to 404 --- methods/https.cc | 2 ++ 1 file changed, 2 insertions(+) (limited to 'methods') diff --git a/methods/https.cc b/methods/https.cc index 3a5981b58..f2b00dd64 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -402,6 +402,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm) _error->Error("%s", err); // unlink, no need keep 401/404 page content in partial/ unlink(File->Name().c_str()); + Res.Size = 0; + Res.LastModified = 0; return false; } -- cgit v1.2.3-70-g09d2 From a2679f55b5b14092db88fab3799f06e6b68e439e Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Tue, 7 Apr 2015 14:34:04 +0200 Subject: properly handle expected filesize in https The worker expects that the methods tell it when they start or finish downloading a file. Various information pieces are passed along in this report, including the (expected) filesize. https is using a "global" struct for reporting, which made it 'reuse' incorrect values in some cases, like a non-existent InRelease falling back to Release{,.gpg}, resulting in an incorrect size-mismatch warning that scares and desensitizes users, as well as being subject to a race between the write_data and progress callbacks that generates incorrect progress reporting and potentially the same error message. Other branches as well as the bug reports contain 'better' fixes making the struct local and other sensible changes, but they are larger as a result, so in this version we opted for a short diff with minimal effect above all else. Closes: 777565, 781509 Thanks: Robert Edmonds and Anders Kaseorg for initial patches --- methods/https.cc | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) (limited to 'methods') diff --git a/methods/https.cc b/methods/https.cc index 3a5981b58..cb11159bc 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -68,6 +68,8 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) me->File->Truncate(me->Server->StartPos); me->File->Seek(me->Server->StartPos); + + me->Res.Size = me->Server->Size; } else if (me->Server->HeaderLine(line) == false) return 0; @@ -97,17 +99,6 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) return buffer_size; } -int -HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/, - double /*ultotal*/, double /*ulnow*/) -{ - HttpsMethod *me = (HttpsMethod *)clientp; - if(dltotal > 0 && me->Res.Size == 0) { - me->Res.Size = (unsigned long long)dltotal; - } - return 0; -} - // HttpsServerState::HttpsServerState - Constructor /*{{{*/ HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * /*Owner*/) : ServerState(Srv, NULL) { @@ -201,10 +192,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm) curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this); curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data); curl_easy_setopt(curl, CURLOPT_WRITEDATA, this); - curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback); - curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this); // options - curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false); + curl_easy_setopt(curl, CURLOPT_NOPROGRESS, true); curl_easy_setopt(curl, CURLOPT_FILETIME, true); // only allow curl to handle https, not the other stuff it supports curl_easy_setopt(curl, CURLOPT_PROTOCOLS, CURLPROTO_HTTPS); @@ -357,6 +346,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) // go for it - if the file exists, append on it File = new
FileFd(Itm->DestFile, FileFd::WriteAny); Server = new HttpsServerState(Itm->Uri, this); + Res = FetchResult(); // keep apt updated Res.Filename = Itm->DestFile; -- cgit v1.2.3-70-g09d2 From b8eba208daebe3e3f235983e44da9c398d6f7a57 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Tue, 10 Mar 2015 14:11:54 +0100 Subject: reimplement the last uses of sprintf Working with strings C-style is complicated and error-prone, so by converting to C++ style we gain some simplicity and avoid buffer overflows by later extensions. Git-Dch: Ignore --- apt-pkg/contrib/cdromutl.cc | 31 ++++---- apt-pkg/contrib/strutl.cc | 126 ++++++++++++++------------------ apt-private/acqprogress.cc | 173 ++++++++++++++++++++++---------------------- apt-private/acqprogress.h | 4 +- ftparchive/writer.cc | 8 +- methods/ftp.cc | 16 ++-- test/libapt/strutil_test.cc | 22 ++++++ test/libapt/uri_test.cc | 12 +++ 8 files changed, 206 insertions(+), 186 deletions(-) (limited to 'methods') diff --git a/apt-pkg/contrib/cdromutl.cc b/apt-pkg/contrib/cdromutl.cc index 936e377fb..6eb917457 100644 --- a/apt-pkg/contrib/cdromutl.cc +++ b/apt-pkg/contrib/cdromutl.cc @@ -207,7 +207,6 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version) /* Run over the directory, we assume that the reader order will never change as the media is read-only. In theory if the kernel did some sort of wacked caching this might not be true.. */ - char S[300]; for (struct dirent *Dir = readdir(D); Dir != 0; Dir = readdir(D)) { // Skip some files.. @@ -215,30 +214,32 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version) strcmp(Dir->d_name,"..") == 0) continue; + std::string S; if (Version <= 1) { - sprintf(S,"%lu",(unsigned long)Dir->d_ino); + strprintf(S, "%lu", (unsigned long)Dir->d_ino); } else { struct stat Buf; if (stat(Dir->d_name,&Buf) != 0) continue; - sprintf(S,"%lu",(unsigned long)Buf.st_mtime); + strprintf(S, "%lu", (unsigned long)Buf.st_mtime); } - - Hash.Add(S); + + Hash.Add(S.c_str()); Hash.Add(Dir->d_name); }; - + if (chdir(StartDir.c_str()) != 0) { _error->Errno("chdir",_("Unable to change to %s"),StartDir.c_str()); closedir(D); return false; } closedir(D); - + // Some stats from the fsys + std::string S; if (_config->FindB("Debug::identcdrom",false) == false) { struct statvfs Buf; @@ -248,19 +249,19 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version) // We use a kilobyte block size to advoid overflow if (writable_media) { - sprintf(S,"%lu",(long)(Buf.f_blocks*(Buf.f_bsize/1024))); + strprintf(S, "%lu", (unsigned long)(Buf.f_blocks*(Buf.f_bsize/1024))); } else { - sprintf(S,"%lu %lu",(long)(Buf.f_blocks*(Buf.f_bsize/1024)), - (long)(Buf.f_bfree*(Buf.f_bsize/1024))); + strprintf(S, "%lu %lu", (unsigned long)(Buf.f_blocks*(Buf.f_bsize/1024)), + (unsigned long)(Buf.f_bfree*(Buf.f_bsize/1024))); } - Hash.Add(S); - sprintf(S,"-%u",Version); + Hash.Add(S.c_str()); + strprintf(S, "-%u", Version); } else - sprintf(S,"-%u.debug",Version); - + strprintf(S, "-%u.debug", Version); + Res = Hash.Result().Value() + S; - return true; + return true; } /*}}}*/ // FindMountPointForDevice - Find mountpoint for the given device /*{{{*/ diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc index 0ac587a9e..0db4c57b2 100644 --- a/apt-pkg/contrib/strutl.cc +++ b/apt-pkg/contrib/strutl.cc @@ -324,21 +324,19 @@ bool ParseCWord(const char *&String,string &Res) /* */ string QuoteString(const string &Str, const char *Bad) { - string Res; + std::stringstream Res; for (string::const_iterator I = Str.begin(); I != Str.end(); ++I) { -
if (strchr(Bad,*I) != 0 || isprint(*I) == 0 || + if (strchr(Bad,*I) != 0 || isprint(*I) == 0 || *I == 0x25 || // percent '%' char *I <= 0x20 || *I >= 0x7F) // control chars { - char Buf[10]; - sprintf(Buf,"%%%02x",(int)*I); - Res += Buf; + ioprintf(Res,"%%%02x",(int)*I); } else - Res += *I; + Res << *I; } - return Res; + return Res.str(); } /*}}}*/ // DeQuoteString - Convert a string from quoted from /*{{{*/ @@ -379,13 +377,12 @@ string DeQuoteString(string::const_iterator const &begin, YottaBytes (E24) */ string SizeToStr(double Size) { - char S[300]; double ASize; if (Size >= 0) ASize = Size; else ASize = -1*Size; - + /* bytes, KiloBytes, MegaBytes, GigaBytes, TeraBytes, PetaBytes, ExaBytes, ZettaBytes, YottaBytes */ char Ext[] = {'\0','k','M','G','T','P','E','Z','Y'}; @@ -394,20 +391,21 @@ string SizeToStr(double Size) { if (ASize < 100 && I != 0) { - sprintf(S,"%'.1f %c",ASize,Ext[I]); - break; + std::string S; + strprintf(S, "%'.1f %c", ASize, Ext[I]); + return S; } - + if (ASize < 10000) { - sprintf(S,"%'.0f %c",ASize,Ext[I]); - break; + std::string S; + strprintf(S, "%'.0f %c", ASize, Ext[I]); + return S; } ASize /= 1000.0; I++; } - - return S; + return ""; } /*}}}*/ // TimeToStr - Convert the time into a string /*{{{*/ @@ -415,36 +413,27 @@ string SizeToStr(double Size) /* Converts a number of seconds to a hms format */ string TimeToStr(unsigned long Sec) { - char S[300]; - - while (1) + std::string S; + if (Sec > 60*60*24) { - if (Sec > 60*60*24) - { - //d means days, h means hours, min means minutes, s means seconds - sprintf(S,_("%lid %lih %limin %lis"),Sec/60/60/24,(Sec/60/60) % 24,(Sec/60) % 60,Sec % 60); - break; - } - - if (Sec > 60*60) - { - //h means hours, min means minutes, s means seconds - sprintf(S,_("%lih %limin %lis"),Sec/60/60,(Sec/60) % 60,Sec % 60); - break; - } - - if (Sec > 60) - { - //min means minutes, s means seconds - sprintf(S,_("%limin %lis"),Sec/60,Sec % 60); - break; - } - - //s means seconds - sprintf(S,_("%lis"),Sec); - break; + //TRANSLATOR: d means days, h means hours, min means minutes, s means seconds + strprintf(S,_("%lid %lih %limin %lis"),Sec/60/60/24,(Sec/60/60) % 24,(Sec/60) % 60,Sec % 60); + } + else if (Sec > 60*60) + { + //TRANSLATOR: h means hours, min means minutes, s means seconds + strprintf(S,_("%lih %limin %lis"),Sec/60/60,(Sec/60) % 60,Sec % 60); + } + else if (Sec > 60) + { + //TRANSLATOR: min means minutes, s means seconds + strprintf(S,_("%limin %lis"),Sec/60,Sec % 60); + } + else + { + //TRANSLATOR: s means seconds + strprintf(S,_("%lis"),Sec); } - return S; } /*}}}*/ @@ -1423,7 +1412,7 @@ size_t strv_length(const char **str_array) ; return i; } - + /*}}}*/ // DeEscapeString - unescape (\0XX and \xXX) from a string /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -1605,51 +1594,46 @@ void URI::CopyFrom(const string &U) /* */ URI::operator string() { - string Res; - + std::stringstream Res; + if (Access.empty() == false) - Res = Access + ':'; - + Res << Access << ':'; + if (Host.empty() == false) - { + { if (Access.empty() == false) - Res += "//"; - + Res << "//"; + if (User.empty() == false) { // FIXME: Technically userinfo is permitted even less // characters than these, but this is not conveniently // expressed with a blacklist. 
- Res += QuoteString(User, ":/?#[]@"); + Res << QuoteString(User, ":/?#[]@"); if (Password.empty() == false) - Res += ":" + QuoteString(Password, ":/?#[]@"); - Res += "@"; + Res << ":" << QuoteString(Password, ":/?#[]@"); + Res << "@"; } - + // Add RFC 2732 escaping characters - if (Access.empty() == false && - (Host.find('/') != string::npos || Host.find(':') != string::npos)) - Res += '[' + Host + ']'; + if (Access.empty() == false && Host.find_first_of("/:") != string::npos) + Res << '[' << Host << ']'; else - Res += Host; - + Res << Host; + if (Port != 0) - { - char S[30]; - sprintf(S,":%u",Port); - Res += S; - } + Res << ':' << Port; } - + if (Path.empty() == false) { if (Path[0] != '/') - Res += "/" + Path; + Res << "/" << Path; else - Res += Path; + Res << Path; } - - return Res; + + return Res.str(); } /*}}}*/ // URI::SiteOnly - Return the schema and site for the URI /*{{{*/ diff --git a/apt-private/acqprogress.cc b/apt-private/acqprogress.cc index 14a53eacb..e834d7d6a 100644 --- a/apt-private/acqprogress.cc +++ b/apt-private/acqprogress.cc @@ -23,6 +23,7 @@ #include #include #include +#include #include #include @@ -34,9 +35,8 @@ using namespace std; // --------------------------------------------------------------------- /* */ AcqTextStatus::AcqTextStatus(unsigned int &ScreenWidth,unsigned int const Quiet) : - pkgAcquireStatus(), ScreenWidth(ScreenWidth), ID(0), Quiet(Quiet) + pkgAcquireStatus(), ScreenWidth(ScreenWidth), LastLineLength(0), ID(0), Quiet(Quiet) { - BlankLine[0] = 0; // testcases use it to disable pulses without disabling other user messages if (Quiet == 0 && _config->FindB("quiet::NoUpdate", false) == true) this->Quiet = 1; @@ -48,7 +48,7 @@ AcqTextStatus::AcqTextStatus(unsigned int &ScreenWidth,unsigned int const Quiet) void AcqTextStatus::Start() { pkgAcquireStatus::Start(); - BlankLine[0] = 0; + LastLineLength = 0; ID = 1; } /*}}}*/ @@ -60,8 +60,7 @@ void AcqTextStatus::IMSHit(pkgAcquire::ItemDesc &Itm) if (Quiet > 1) return; - if (Quiet <= 0) - cout << '\r' << BlankLine << '\r'; + clearLastLine(); cout << _("Hit ") << Itm.Description; cout << endl; @@ -82,8 +81,7 @@ void AcqTextStatus::Fetch(pkgAcquire::ItemDesc &Itm) if (Quiet > 1) return; - if (Quiet <= 0) - cout << '\r' << BlankLine << '\r'; + clearLastLine(); cout << _("Get:") << Itm.Owner->ID << ' ' << Itm.Description; if (Itm.Owner->FileSize != 0) @@ -111,8 +109,7 @@ void AcqTextStatus::Fail(pkgAcquire::ItemDesc &Itm) if (Itm.Owner->Status == pkgAcquire::Item::StatIdle) return; - if (Quiet <= 0) - cout << '\r' << BlankLine << '\r'; + clearLastLine(); if (Itm.Owner->Status == pkgAcquire::Item::StatDone) { @@ -140,8 +137,7 @@ void AcqTextStatus::Stop() if (Quiet > 1) return; - if (Quiet <= 0) - cout << '\r' << BlankLine << '\r' << flush; + clearLastLine(); if (_config->FindB("quiet::NoStatistic", false) == true) return; @@ -167,77 +163,66 @@ bool AcqTextStatus::Pulse(pkgAcquire *Owner) enum {Long = 0,Medium,Short} Mode = Medium; - char Buffer[sizeof(BlankLine)]; - char *End = Buffer + sizeof(Buffer); - char *S = Buffer; - if (ScreenWidth >= sizeof(Buffer)) - ScreenWidth = sizeof(Buffer)-1; - - // Put in the percent done - sprintf(S,"%.0f%%", Percent); - - bool Shown = false; - for (pkgAcquire::Worker *I = Owner->WorkersBegin(); I != 0; - I = Owner->WorkerStep(I)) + std::string Line; { - S += strlen(S); - - // There is no item running - if (I->CurrentItem == 0) + std::stringstream S; + for (pkgAcquire::Worker *I = Owner->WorkersBegin(); I != 0; + I = Owner->WorkerStep(I)) { - if (I->Status.empty() == false) 
+ // There is no item running + if (I->CurrentItem == 0) { - snprintf(S,End-S," [%s]",I->Status.c_str()); - Shown = true; - } - - continue; - } - - Shown = true; + if (I->Status.empty() == false) + S << " [" << I->Status << "]"; - // Add in the short description - if (I->CurrentItem->Owner->ID != 0) - snprintf(S,End-S," [%lu %s",I->CurrentItem->Owner->ID, - I->CurrentItem->ShortDesc.c_str()); - else - snprintf(S,End-S," [%s",I->CurrentItem->ShortDesc.c_str()); - S += strlen(S); + continue; + } - // Show the short mode string - if (I->CurrentItem->Owner->ActiveSubprocess.empty() == false) - { - snprintf(S,End-S, " %s", I->CurrentItem->Owner->ActiveSubprocess.c_str()); - S += strlen(S); - } + // Add in the short description + S << " ["; + if (I->CurrentItem->Owner->ID != 0) + S << I->CurrentItem->Owner->ID << " "; + S << I->CurrentItem->ShortDesc; - // Add the current progress - if (Mode == Long) - snprintf(S,End-S," %llu",I->CurrentSize); - else - { - if (Mode == Medium || I->TotalSize == 0) - snprintf(S,End-S," %sB",SizeToStr(I->CurrentSize).c_str()); - } - S += strlen(S); + // Show the short mode string + if (I->CurrentItem->Owner->ActiveSubprocess.empty() == false) + S << " " << I->CurrentItem->Owner->ActiveSubprocess; - // Add the total size and percent - if (I->TotalSize > 0 && I->CurrentItem->Owner->Complete == false) - { - if (Mode == Short) - snprintf(S,End-S," %.0f%%", - (I->CurrentSize*100.0)/I->TotalSize); + // Add the current progress + if (Mode == Long) + S << " " << I->CurrentSize; else - snprintf(S,End-S,"/%sB %.0f%%",SizeToStr(I->TotalSize).c_str(), + { + if (Mode == Medium || I->TotalSize == 0) + S << " " << SizeToStr(I->CurrentSize) << "B"; + } + + // Add the total size and percent + if (I->TotalSize > 0 && I->CurrentItem->Owner->Complete == false) + { + if (Mode == Short) + ioprintf(S, " %.0f%%", (I->CurrentSize*100.0)/I->TotalSize); + else + ioprintf(S, "/%sB %.0f%%", SizeToStr(I->TotalSize).c_str(), (I->CurrentSize*100.0)/I->TotalSize); + } + S << "]"; } - S += strlen(S); - snprintf(S,End-S,"]"); - } - // Show something.. 
- if (Shown == false) - snprintf(S,End-S,_(" [Working]")); + // Show at least something + Line = S.str(); + S.clear(); + if (Line.empty() == true) + Line = _(" [Working]"); + } + // Put in the percent done + { + std::stringstream S; + ioprintf(S, "%.0f%%", Percent); + S << Line; + Line = S.str(); + S.clear(); + } /* Put in the ETA and cps meter, block off signals to prevent strangeness during resizing */ @@ -248,34 +233,33 @@ bool AcqTextStatus::Pulse(pkgAcquire *Owner) if (CurrentCPS != 0) { - char Tmp[300]; unsigned long long ETA = (TotalBytes - CurrentBytes)/CurrentCPS; - sprintf(Tmp," %sB/s %s",SizeToStr(CurrentCPS).c_str(),TimeToStr(ETA).c_str()); - unsigned int Len = strlen(Buffer); - unsigned int LenT = strlen(Tmp); - if (Len + LenT < ScreenWidth) + std::string Tmp = " " + SizeToStr(CurrentCPS) + "B/s " + TimeToStr(ETA); + size_t alignment = Line.length() + Tmp.length(); + if (alignment < ScreenWidth) { - memset(Buffer + Len,' ',ScreenWidth - Len); - strcpy(Buffer + ScreenWidth - LenT,Tmp); + alignment = ScreenWidth - alignment; + for (size_t i = 0; i < alignment; ++i) + Line.append(" "); + Line.append(Tmp); } } - Buffer[ScreenWidth] = 0; - BlankLine[ScreenWidth] = 0; + if (Line.length() > ScreenWidth) + Line.erase(ScreenWidth); sigprocmask(SIG_SETMASK,&OldSigs,0); // Draw the current status if (_config->FindB("Apt::Color", false) == true) cout << _config->Find("APT::Color::Yellow"); - if (strlen(Buffer) == strlen(BlankLine)) - cout << '\r' << Buffer << flush; + if (LastLineLength > Line.length()) + clearLastLine(); else - cout << '\r' << BlankLine << '\r' << Buffer << flush; + cout << '\r'; + cout << Line << flush; if (_config->FindB("Apt::Color", false) == true) cout << _config->Find("APT::Color::Neutral") << flush; - memset(BlankLine,' ',strlen(Buffer)); - BlankLine[strlen(Buffer)] = 0; - + LastLineLength = Line.length(); Update = false; return true; @@ -296,8 +280,7 @@ bool AcqTextStatus::MediaChange(string Media,string Drive) return false; - if (Quiet <= 0) - cout << '\r' << BlankLine << '\r'; + clearLastLine(); ioprintf(cout,_("Media change: please insert the disc labeled\n" " '%s'\n" "in the drive '%s' and press enter\n"), @@ -317,3 +300,17 @@ bool AcqTextStatus::MediaChange(string Media,string Drive) return bStatus; } /*}}}*/ +void AcqTextStatus::clearLastLine() { /*{{{*/ + if (Quiet > 0) + return; + + // do not try to clear more than the (now smaller) screen + if (LastLineLength > ScreenWidth) + LastLineLength = ScreenWidth; + + std::cout << '\r'; + for (size_t i = 0; i < LastLineLength; ++i) + std::cout << ' '; + std::cout << '\r' << std::flush; +} + /*}}}*/ diff --git a/apt-private/acqprogress.h b/apt-private/acqprogress.h index 71a10d78a..ab8170126 100644 --- a/apt-private/acqprogress.h +++ b/apt-private/acqprogress.h @@ -17,10 +17,12 @@ class APT_PUBLIC AcqTextStatus : public pkgAcquireStatus { unsigned int &ScreenWidth; - char BlankLine[1024]; + size_t LastLineLength; unsigned long ID; unsigned long Quiet; + void clearLastLine(); + public: virtual bool MediaChange(std::string Media,std::string Drive); diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index a63d8846b..db68c21f0 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -440,9 +440,6 @@ bool PackagesWriter::DoPackage(string FileName) OverItem->Priority = Tags.FindS("Priority"); } - char Size[40]; - sprintf(Size,"%llu", (unsigned long long) FileSize); - // Strip the DirStrip prefix from the FileName and add the PathPrefix string NewFileName; if (DirStrip.empty() == false && @@ -466,7 +463,10 
@@ bool PackagesWriter::DoPackage(string FileName) // This lists all the changes to the fields we are going to make. std::vector Changes; - Changes.push_back(SetTFRewriteData("Size", Size)); + std::string Size; + strprintf(Size, "%llu", (unsigned long long) FileSize); + Changes.push_back(SetTFRewriteData("Size", Size.c_str())); + for (HashStringList::const_iterator hs = Db.HashesList.begin(); hs != Db.HashesList.end(); ++hs) { if (hs->HashType() == "MD5Sum") diff --git a/methods/ftp.cc b/methods/ftp.cc index 0504e5872..7764acf6a 100644 --- a/methods/ftp.cc +++ b/methods/ftp.cc @@ -259,19 +259,21 @@ bool FTPConn::Login() { if (Opts->Value.empty() == true) continue; - + // Substitute the variables into the command - char SitePort[20]; - if (ServerName.Port != 0) - sprintf(SitePort,"%u",ServerName.Port); - else - strcpy(SitePort,"21"); string Tmp = Opts->Value; Tmp = SubstVar(Tmp,"$(PROXY_USER)",Proxy.User); Tmp = SubstVar(Tmp,"$(PROXY_PASS)",Proxy.Password); Tmp = SubstVar(Tmp,"$(SITE_USER)",User); Tmp = SubstVar(Tmp,"$(SITE_PASS)",Pass); - Tmp = SubstVar(Tmp,"$(SITE_PORT)",SitePort); + if (ServerName.Port != 0) + { + std::string SitePort; + strprintf(SitePort, "%u", ServerName.Port); + Tmp = SubstVar(Tmp,"$(SITE_PORT)", SitePort); + } + else + Tmp = SubstVar(Tmp,"$(SITE_PORT)", "21"); Tmp = SubstVar(Tmp,"$(SITE)",ServerName.Host); // Send the command diff --git a/test/libapt/strutil_test.cc b/test/libapt/strutil_test.cc index 9bc3c76fd..23dc08727 100644 --- a/test/libapt/strutil_test.cc +++ b/test/libapt/strutil_test.cc @@ -97,6 +97,28 @@ TEST(StrUtilTest,StartsWith) EXPECT_FALSE(Startswith("abcd", "x")); EXPECT_FALSE(Startswith("abcd", "abcndefg")); } +TEST(StrUtilTest,TimeToStr) +{ + EXPECT_EQ("0s", TimeToStr(0)); + EXPECT_EQ("42s", TimeToStr(42)); + EXPECT_EQ("9min 21s", TimeToStr((9*60) + 21)); + EXPECT_EQ("20min 42s", TimeToStr((20*60) + 42)); + EXPECT_EQ("10h 42min 21s", TimeToStr((10*3600) + (42*60) + 21)); + EXPECT_EQ("10h 42min 21s", TimeToStr((10*3600) + (42*60) + 21)); + EXPECT_EQ("1988d 3h 29min 7s", TimeToStr((1988*86400) + (3*3600) + (29*60) + 7)); + + EXPECT_EQ("59s", TimeToStr(59)); + EXPECT_EQ("60s", TimeToStr(60)); + EXPECT_EQ("1min 1s", TimeToStr(61)); + EXPECT_EQ("59min 59s", TimeToStr(3599)); + EXPECT_EQ("60min 0s", TimeToStr(3600)); + EXPECT_EQ("1h 0min 1s", TimeToStr(3601)); + EXPECT_EQ("1h 1min 0s", TimeToStr(3660)); + EXPECT_EQ("23h 59min 59s", TimeToStr(86399)); + EXPECT_EQ("24h 0min 0s", TimeToStr(86400)); + EXPECT_EQ("1d 0h 0min 1s", TimeToStr(86401)); + EXPECT_EQ("1d 0h 1min 0s", TimeToStr(86460)); +} TEST(StrUtilTest,SubstVar) { EXPECT_EQ("", SubstVar("", "fails", "passes")); diff --git a/test/libapt/uri_test.cc b/test/libapt/uri_test.cc index 1662f51f0..5d5ae9679 100644 --- a/test/libapt/uri_test.cc +++ b/test/libapt/uri_test.cc @@ -12,6 +12,7 @@ TEST(URITest, BasicHTTP) EXPECT_EQ(90, U.Port); EXPECT_EQ("www.debian.org", U.Host); EXPECT_EQ("/temp/test", U.Path); + EXPECT_EQ("http://www.debian.org:90/temp/test", (std::string)U); // Login data U = URI("http://jgg:foo@ualberta.ca/blah"); EXPECT_EQ("http", U.Access); @@ -20,6 +21,7 @@ TEST(URITest, BasicHTTP) EXPECT_EQ(0, U.Port); EXPECT_EQ("ualberta.ca", U.Host); EXPECT_EQ("/blah", U.Path); + EXPECT_EQ("http://jgg:foo@ualberta.ca/blah", (std::string)U); } TEST(URITest, SingeSlashFile) { @@ -30,6 +32,7 @@ TEST(URITest, SingeSlashFile) EXPECT_EQ(0, U.Port); EXPECT_EQ("", U.Host); EXPECT_EQ("/usr/bin/foo", U.Path); + EXPECT_EQ("file:/usr/bin/foo", (std::string)U); } TEST(URITest, BasicCDROM) { @@ -40,6 +43,7 @@ 
TEST(URITest, BasicCDROM) EXPECT_EQ(0, U.Port); EXPECT_EQ("Moo Cow Rom", U.Host); EXPECT_EQ("/debian", U.Path); + EXPECT_EQ("cdrom://Moo Cow Rom/debian", (std::string)U); } TEST(URITest, RelativeGzip) { @@ -50,6 +54,7 @@ TEST(URITest, RelativeGzip) EXPECT_EQ(0, U.Port); EXPECT_EQ(".", U.Host); EXPECT_EQ("/bar/cow", U.Path); + EXPECT_EQ("gzip://./bar/cow", (std::string)U); } TEST(URITest, NoSlashFTP) { @@ -60,6 +65,7 @@ TEST(URITest, NoSlashFTP) EXPECT_EQ(0, U.Port); EXPECT_EQ("ftp.fr.debian.org", U.Host); EXPECT_EQ("/debian/pool/main/x/xtel/xtel_3.2.1-15_i386.deb", U.Path); + EXPECT_EQ("ftp://ftp.fr.debian.org/debian/pool/main/x/xtel/xtel_3.2.1-15_i386.deb", (std::string)U); } TEST(URITest, RFC2732) { @@ -70,6 +76,7 @@ TEST(URITest, RFC2732) EXPECT_EQ(0, U.Port); EXPECT_EQ("1080::8:800:200C:417A", U.Host); EXPECT_EQ("/foo", U.Path); + EXPECT_EQ("http://[1080::8:800:200C:417A]/foo", (std::string)U); // with port U = URI("http://[::FFFF:129.144.52.38]:80/index.html"); EXPECT_EQ("http", U.Access); @@ -78,6 +85,7 @@ TEST(URITest, RFC2732) EXPECT_EQ(80, U.Port); EXPECT_EQ("::FFFF:129.144.52.38", U.Host); EXPECT_EQ("/index.html", U.Path); + EXPECT_EQ("http://[::FFFF:129.144.52.38]:80/index.html", (std::string)U); // extra colon U = URI("http://[::FFFF:129.144.52.38:]:80/index.html"); EXPECT_EQ("http", U.Access); @@ -86,6 +94,7 @@ TEST(URITest, RFC2732) EXPECT_EQ(80, U.Port); EXPECT_EQ("::FFFF:129.144.52.38:", U.Host); EXPECT_EQ("/index.html", U.Path); + EXPECT_EQ("http://[::FFFF:129.144.52.38:]:80/index.html", (std::string)U); // extra colon port U = URI("http://[::FFFF:129.144.52.38:]/index.html"); EXPECT_EQ("http", U.Access); @@ -94,6 +103,7 @@ TEST(URITest, RFC2732) EXPECT_EQ(0, U.Port); EXPECT_EQ("::FFFF:129.144.52.38:", U.Host); EXPECT_EQ("/index.html", U.Path); + EXPECT_EQ("http://[::FFFF:129.144.52.38:]/index.html", (std::string)U); // My Evil Corruption of RFC 2732 to handle CDROM names! // Fun for the whole family! */ U = URI("cdrom:[The Debian 1.2 disk, 1/2 R1:6]/debian/"); @@ -103,6 +113,7 @@ TEST(URITest, RFC2732) EXPECT_EQ(0, U.Port); EXPECT_EQ("The Debian 1.2 disk, 1/2 R1:6", U.Host); EXPECT_EQ("/debian/", U.Path); + EXPECT_EQ("cdrom://[The Debian 1.2 disk, 1/2 R1:6]/debian/", (std::string)U); // no brackets U = URI("cdrom:Foo Bar Cow/debian/"); EXPECT_EQ("cdrom", U.Access); @@ -111,6 +122,7 @@ TEST(URITest, RFC2732) EXPECT_EQ(0, U.Port); EXPECT_EQ("Foo Bar Cow", U.Host); EXPECT_EQ("/debian/", U.Path); + EXPECT_EQ("cdrom://Foo Bar Cow/debian/", (std::string)U); // percent encoded U = URI("ftp://foo:b%40r@example.org"); EXPECT_EQ("foo", U.User); -- cgit v1.2.3-70-g09d2 From 3a53f6a1510d332e24c3330a69b987f2341d1a94 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Mon, 13 Apr 2015 12:57:22 -0400 Subject: Revert "HttpsMethod::Fetch(): Zero the FetchResult object when leaving due to 404" This reverts commit 1296bc7c466181a7978c313c40a041b34ce3eaeb. 
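As an aside on the pattern this revert touches (generic illustration, not APT code): zeroing selected fields of a result object by hand has to be kept in sync with every field the struct ever grows, whereas reassigning a freshly constructed value, the approach taken elsewhere in this series by the 'Res = FetchResult();' hunk of the "properly handle expected filesize in https" patch, resets everything at once:

#include <ctime>

struct FetchResult
{
   unsigned long long Size;
   time_t LastModified;
   FetchResult() : Size(0), LastModified(0) {}
};

static FetchResult Res;        // long-lived, reused across requests

// option a: zero fields one by one; silently misses fields added later
static void resetByHand()
{
   Res.Size = 0;
   Res.LastModified = 0;
}

// option b: reassign a default-constructed value; always complete
static void resetByConstruction()
{
   Res = FetchResult();
}

int main()
{
   Res.Size = 42;
   resetByHand();
   Res.LastModified = 23;
   resetByConstruction();
   return (Res.Size == 0 && Res.LastModified == 0) ? 0 : 1;
}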
--- methods/https.cc | 2 -- 1 file changed, 2 deletions(-) (limited to 'methods') diff --git a/methods/https.cc b/methods/https.cc index f2b00dd64..3a5981b58 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -402,8 +402,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm) _error->Error("%s", err); // unlink, no need keep 401/404 page content in partial/ unlink(File->Name().c_str()); - Res.Size = 0; - Res.LastModified = 0; return false; } -- cgit v1.2.3-70-g09d2 From bb948ef562862e5cc9fcfb3d7b5e41c70382adeb Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Fri, 27 Mar 2015 11:14:44 +0100 Subject: do not unlink https file on general error It might be quite interesting to see which file (content) made curl freak out, and other methods keep the file around as well. Git-Dch: Ignore --- methods/https.cc | 1 - 1 file changed, 1 deletion(-) (limited to 'methods') diff --git a/methods/https.cc b/methods/https.cc index 32de42e4b..c69e84d3a 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -386,7 +386,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm) if (success != 0) { _error->Error("%s", curl_errorstr); - unlink(File->Name().c_str()); return false; } -- cgit v1.2.3-70-g09d2 From 27925d82dd0cbae74d48040363fe6f6c2bae5215 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Fri, 27 Mar 2015 15:53:43 +0100 Subject: improve https method queue progress reporting The worker expects that the methods tell it when they start or finish downloading a file. Various information pieces are passed along in this report, including the (expected) filesize. https was using a "global" struct for reporting, which made it 'reuse' incorrect values in some cases, like a non-existent InRelease falling back to Release{,.gpg}, resulting in a size-mismatch warning. By reducing the scope and redesigning how the values are set, we can fix this and related issues. Closes: 777565, 781509 Thanks: Robert Edmonds and Anders Kaseorg for initial patches
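The core of the redesign is visible right at the top of the diff that follows: instead of handing libcurl the long-lived HttpsMethod object, a small per-request struct is passed through curl's user pointer, so callback results cannot leak into the next request. A stripped-down sketch of that technique; FetchResult and HttpsMethod here are hypothetical stand-ins, and CURLOPT_WRITEHEADER is the spelling the patch itself uses (today better known as CURLOPT_HEADERDATA):

#include <curl/curl.h>

struct FetchResult { double Size; FetchResult() : Size(0) {} };
struct HttpsMethod { /* long-lived transport state */ };

// bundle long-lived and per-request state for the header callback
struct CURLUserPointer
{
   HttpsMethod * const https;
   FetchResult * const Res;
   CURLUserPointer(HttpsMethod * const https, FetchResult * const Res)
      : https(https), Res(Res) {}
};

static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
{
   CURLUserPointer *me = static_cast<CURLUserPointer *>(userp);
   // a real implementation would parse the header line in 'buffer' and
   // record e.g. the expected filesize in me->Res->Size
   (void)buffer; (void)me;
   return size * nmemb;
}

int main()
{
   CURL * const curl = curl_easy_init();
   if (curl == NULL)
      return 1;
   HttpsMethod method;
   FetchResult Res;                      // constructed fresh for this request
   CURLUserPointer userp(&method, &Res);
   curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/");
   curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header);
   curl_easy_setopt(curl, CURLOPT_WRITEHEADER, &userp);
   // curl_easy_perform(curl) would now report into this request's Res only
   curl_easy_cleanup(curl);
   return 0;
}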
--- methods/https.cc | 82 ++++++++++----------- methods/https.h | 2 +- methods/server.h | 3 +- test/integration/framework | 20 +++++- test/integration/test-apt-https-no-redirect | 15 ++-- test/integration/test-apt-update-expected-size | 86 +++++++++++++++-------- test/integration/test-bug-602412-dequote-redirect | 3 +- 7 files changed, 121 insertions(+), 90 deletions(-) (limited to 'methods') diff --git a/methods/https.cc b/methods/https.cc index c69e84d3a..70f6a1046 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -37,11 +37,17 @@ /*}}}*/ using namespace std; +struct APT_HIDDEN CURLUserPointer { + HttpsMethod * const https; + HttpsMethod::FetchResult * const Res; + CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res) : https(https), Res(Res) {} +}; + size_t HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { size_t len = size * nmemb; - HttpsMethod *me = (HttpsMethod *)userp; + CURLUserPointer *me = (CURLUserPointer *)userp; std::string line((char*) buffer, len); for (--len; len > 0; --len) if (isspace(line[len]) == 0) @@ -53,23 +59,33 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) if (line.empty() == true) { - if (me->Server->Result != 416 && me->Server->StartPos != 0) + if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0) ; - else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize()) + else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize()) { - me->Server->Result = 200; - me->Server->StartPos = me->Server->Size; + me->https->Server->Result = 200; + me->https->Server->StartPos = me->https->Server->Size; // the actual size is not important for https as curl will deal with it // by itself and e.g.
doesn't bother us with transport-encoding… - me->Server->JunkSize = std::numeric_limits::max(); + me->https->Server->JunkSize = std::numeric_limits::max(); } else - me->Server->StartPos = 0; + me->https->Server->StartPos = 0; + + me->https->File->Truncate(me->https->Server->StartPos); + me->https->File->Seek(me->https->Server->StartPos); + + me->Res->LastModified = me->https->Server->Date; + me->Res->Size = me->https->Server->Size; + me->Res->ResumePoint = me->https->Server->StartPos; - me->File->Truncate(me->Server->StartPos); - me->File->Seek(me->Server->StartPos); + // we expect valid data, so tell our caller we get the file now + if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 && + me->https->Server->JunkSize == 0 && + me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint) + me->https->URIStart(*me->Res); } - else if (me->Server->HeaderLine(line) == false) + else if (me->https->Server->HeaderLine(line) == false) return 0; return size*nmemb; @@ -85,12 +101,6 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) if (me->Server->JunkSize != 0) return buffer_size; - if (me->Server->ReceivedData == false) - { - me->URIStart(me->Res); - me->Server->ReceivedData = true; - } - if(me->File->Write(buffer, buffer_size) != true) return 0; @@ -109,27 +119,15 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) return buffer_size; } -int -HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/, - double /*ultotal*/, double /*ulnow*/) -{ - HttpsMethod *me = (HttpsMethod *)clientp; - if(dltotal > 0 && me->Res.Size == 0) { - me->Res.Size = (unsigned long long)dltotal; - } - return 0; -} - // HttpsServerState::HttpsServerState - Constructor /*{{{*/ HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner) { TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut); - ReceivedData = false; Reset(); } /*}}}*/ -void HttpsMethod::SetupProxy() /*{{{*/ +void HttpsMethod::SetupProxy() /*{{{*/ { URI ServerName = Queue->Uri; @@ -207,16 +205,16 @@ bool HttpsMethod::Fetch(FetchItem *Itm) maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc")); + FetchResult Res; + CURLUserPointer userp(this, &Res); // callbacks curl_easy_setopt(curl, CURLOPT_URL, static_cast(Uri).c_str()); curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header); - curl_easy_setopt(curl, CURLOPT_WRITEHEADER, this); + curl_easy_setopt(curl, CURLOPT_WRITEHEADER, &userp); curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data); curl_easy_setopt(curl, CURLOPT_WRITEDATA, this); - curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback); - curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this); // options - curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false); + curl_easy_setopt(curl, CURLOPT_NOPROGRESS, true); curl_easy_setopt(curl, CURLOPT_FILETIME, true); // only allow curl to handle https, not the other stuff it supports curl_easy_setopt(curl, CURLOPT_PROTOCOLS, CURLPROTO_HTTPS); @@ -414,24 +412,23 @@ bool HttpsMethod::Fetch(FetchItem *Itm) return false; } - struct stat resultStat; - if (unlikely(stat(File->Name().c_str(), &resultStat) != 0)) - { - _error->Errno("stat", "Unable to access file %s", File->Name().c_str()); - return false; - } - Res.Size = resultStat.st_size; - // invalid range-request if (Server->Result == 416) { unlink(File->Name().c_str()); - Res.Size = 0; delete File; Redirect(Itm->Uri); return true; } + struct stat resultStat; + if (unlikely(stat(File->Name().c_str(), &resultStat) 
!= 0)) + { + _error->Errno("stat", "Unable to access file %s", File->Name().c_str()); + return false; + } + Res.Size = resultStat.st_size; + // Timestamp curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified); if (Res.LastModified != -1) @@ -455,7 +452,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm) URIDone(Res); // cleanup - Res.Size = 0; delete File; return true; diff --git a/methods/https.h b/methods/https.h index 433a84680..4cc48fc34 100644 --- a/methods/https.h +++ b/methods/https.h @@ -65,7 +65,6 @@ class HttpsMethod : public ServerMethod double ultotal, double ulnow); void SetupProxy(); CURL *curl; - FetchResult Res; ServerState *Server; // Used by ServerMethods unused by https @@ -77,6 +76,7 @@ class HttpsMethod : public ServerMethod virtual bool Configuration(std::string Message); virtual ServerState * CreateServerState(URI uri); + using pkgAcqMethod::FetchResult; HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL) { diff --git a/methods/server.h b/methods/server.h index 3b232dcac..b974ec89a 100644 --- a/methods/server.h +++ b/methods/server.h @@ -37,7 +37,6 @@ struct ServerState unsigned long long Size; // size of the usable content (aka: the file) unsigned long long JunkSize; // size of junk content (aka: server error pages) unsigned long long StartPos; - bool ReceivedData; time_t Date; bool HaveContent; enum {Chunked,Stream,Closes} Encoding; @@ -76,7 +75,7 @@ struct ServerState bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; - StartPos = 0; ReceivedData = false; Encoding = Closes; time(&Date); HaveContent = false; + StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;}; virtual bool WriteResponse(std::string const &Data) = 0; diff --git a/test/integration/framework b/test/integration/framework index ec23e41e6..50c027a2c 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -1125,8 +1125,10 @@ acquire::cdrom::autodetect 0;" > rootdir/etc/apt/apt.conf.d/00cdrom downloadfile() { local PROTO="${1%%:*}" - apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ - download-file "$1" "$2" 2>&1 || true + if ! 
apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ + download-file "$1" "$2" 2>&1 ; then + return 1 + fi # only if the file exists the download was successful if [ -r "$2" ]; then return 0 @@ -1407,6 +1409,20 @@ testfailureequal() { testfileequal "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" "$CMP" } +testfailuremsg() { + local CMP="$1" + shift + testfailure "$@" + msgtest 'Check that the output of the previous failed command has expected' 'failures and warnings' + grep '^\(W\|E\):' "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" > "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailureequal.output" 2>&1 || true + if echo "$CMP" | checkdiff - "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailureequal.output"; then + msgpass + else + echo '### Complete output ###' + cat "${TMPWORKINGDIRECTORY}/rootdir/tmp/testfailure.output" + msgfail + fi +} testfilestats() { msgtest "Test that file $1 has $2 $3" "$4" diff --git a/test/integration/test-apt-https-no-redirect b/test/integration/test-apt-https-no-redirect index bc744d6f2..c91c78916 100755 --- a/test/integration/test-apt-https-no-redirect +++ b/test/integration/test-apt-https-no-redirect @@ -14,22 +14,15 @@ echo 'alright' > aptarchive/working changetohttpswebserver -o 'aptwebserver::redirect::replace::/redirectme/=http://localhost:8080/' msgtest 'download of a file works via' 'http' -downloadfile 'http://localhost:8080/working' httpfile >/dev/null 2>&1 && msgpass || msgfail +testsuccess --nomsg downloadfile 'http://localhost:8080/working' httpfile testfileequal httpfile 'alright' msgtest 'download of a file works via' 'https' -downloadfile 'https://localhost:4433/working' httpsfile >/dev/null 2>&1 && msgpass || msgfail +testsuccess --nomsg downloadfile 'https://localhost:4433/working' httpsfile testfileequal httpsfile 'alright' msgtest 'download of a file does not work if' 'https redirected to http' -downloadfile 'https://localhost:4433/redirectme/working' redirectfile >curloutput 2>&1 && msgfail || msgpass +testfailure --nomsg downloadfile 'https://localhost:4433/redirectme/working' redirectfile msgtest 'libcurl has forbidden access in last request to' 'http resource' -if grep -q -E -- 'Protocol "?http"? not supported or disabled in libcurl' curloutput; then - msgpass -else - cat curloutput - msgfail -fi - - +testsuccess --nomsg grep -q -E -- 'Protocol "?http"? 
not supported or disabled in libcurl' rootdir/tmp/testfailure.output diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size index 9711c293a..22de13ea5 100755 --- a/test/integration/test-apt-update-expected-size +++ b/test/integration/test-apt-update-expected-size @@ -10,35 +10,61 @@ configarchitecture "i386" insertpackage 'unstable' 'apt' 'all' '1.0' setupaptarchive --no-update +cp -a aptarchive/dists aptarchive/dists.good + +test_inreleasetoobig() { + # make InRelease really big to trigger fallback + dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null + touch -d '+1hour' aptarchive/dists/unstable/InRelease + testsuccess aptget update -o Apt::Get::List-Cleanup=0 -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 + msgtest 'Check that the max write warning is triggered' + cp rootdir/tmp/testsuccess.output update.output + testsuccess --nomsg grep -q 'Writing more data than expected' update.output + rm -f update.output + # ensure the failed InRelease file got renamed + testsuccess ls rootdir/var/lib/apt/lists/partial/*InRelease.FAILED +} + +test_packagestoobig() { + # append junk at the end of the Packages.gz/Packages + SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" + find aptarchive/dists -name 'Packages*' | while read pkg; do + echo "1234567890" >> "$pkg" + touch -d '+1hour' "$pkg" + done + NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" + testfailuremsg "W: Failed to fetch ${1}/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) +E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -o Debug::pkgAcquire::Worker=0 +} + +methodtest() { + msgmsg 'Test with' "$1" 'and clean start' + rm -rf rootdir/var/lib/apt/lists rootdir/var/lib/apt/lists.good + # normal update works fine + testsuccess aptget update + mv rootdir/var/lib/apt/lists rootdir/var/lib/apt/lists.good + + # starting fresh works + test_inreleasetoobig "$1" + rm -rf aptarchive/dists rootdir/var/lib/apt/lists + cp -a aptarchive/dists.good aptarchive/dists + test_packagestoobig "$1" + rm -rf aptarchive/dists rootdir/var/lib/apt/lists + cp -a aptarchive/dists.good aptarchive/dists + + msgmsg 'Test with' "$1" 'and existing old data' + cp -a rootdir/var/lib/apt/lists.good rootdir/var/lib/apt/lists + test_inreleasetoobig "$1" + rm -rf aptarchive/dists rootdir/var/lib/apt/lists + cp -a rootdir/var/lib/apt/lists.good rootdir/var/lib/apt/lists + cp -a aptarchive/dists.good aptarchive/dists + test_packagestoobig "$1" + rm -rf aptarchive/dists + cp -a aptarchive/dists.good aptarchive/dists +} + changetowebserver +methodtest 'http://localhost:8080' -# normal update works fine -testsuccess aptget update - -# make InRelease really big to trigger fallback -mv aptarchive/dists/unstable/InRelease aptarchive/dists/unstable/InRelease.good -dd if=/dev/zero of=aptarchive/dists/unstable/InRelease bs=1M count=2 2>/dev/null -touch -d '+1hour' aptarchive/dists/unstable/InRelease -testsuccess aptget update -o Apt::Get::List-Cleanup=0 -o acquire::MaxReleaseFileSize=$((1*1000*1000)) -o Debug::pkgAcquire::worker=0 -msgtest 'Check that the max write warning is triggered' -if grep -q "Writing more data than expected" rootdir/tmp/testsuccess.output; then - msgpass -else - cat rootdir/tmp/testsuccess.output - msgfail -fi -# ensure the failed InRelease file got renamed -testsuccess ls 
rootdir/var/lib/apt/lists/partial/*InRelease.FAILED -mv aptarchive/dists/unstable/InRelease.good aptarchive/dists/unstable/InRelease - - -# append junk at the end of the Packages.gz/Packages -SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" -find aptarchive -name 'Packages*' | while read pkg; do - echo "1234567890" >> "$pkg" -done -NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" -rm -f rootdir/var/lib/apt/lists/localhost* -testfailureequal "W: Failed to fetch http://localhost:8080/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) - -E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq +changetohttpswebserver +methodtest 'https://localhost:4433' diff --git a/test/integration/test-bug-602412-dequote-redirect b/test/integration/test-bug-602412-dequote-redirect index d3573a79a..384c8b113 100755 --- a/test/integration/test-bug-602412-dequote-redirect +++ b/test/integration/test-bug-602412-dequote-redirect @@ -8,7 +8,7 @@ configarchitecture 'amd64' buildsimplenativepackage 'unrelated' 'all' '0.5~squeeze1' 'unstable' -setupaptarchive +setupaptarchive --no-update changetowebserver -o aptwebserver::redirect::replace::/pool/=/newpool/ \ -o aptwebserver::redirect::replace::/dists/=/newdists/ @@ -16,6 +16,7 @@ mv aptarchive/pool aptarchive/newpool mv aptarchive/dists aptarchive/newdists testrun() { + msgmsg 'Test redirection works in method boundaries' "$1" msgtest 'Test redirection works in' 'apt-get update' testsuccess --nomsg aptget update -- cgit v1.2.3-70-g09d2 From 936d5613e4b6145798c5a1d0c484158115576fa8 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Fri, 27 Mar 2015 18:56:44 +0100 Subject: remove duplicated check for same file copy Git-Dch: Ignore --- methods/copy.cc | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) (limited to 'methods') diff --git a/methods/copy.cc b/methods/copy.cc index a23c0316c..f3cd84215 100644 --- a/methods/copy.cc +++ b/methods/copy.cc @@ -65,17 +65,9 @@ bool CopyMethod::Fetch(FetchItem *Itm) Res.Size = Buf.st_size; Res.Filename = Itm->DestFile; Res.LastModified = Buf.st_mtime; - Res.IMSHit = false; + Res.IMSHit = false; URIStart(Res); - // when the files are identical, just compute the hashes - if(File == Itm->DestFile) - { - CalculateHashes(Res); - URIDone(Res); - return true; - } - // just calc the hashes if the source and destination are identical if (File == Itm->DestFile) { -- cgit v1.2.3-70-g09d2 From a09f6eb8fc67cd2d836019f448f18580396185e5 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Fri, 27 Mar 2015 19:59:44 +0100 Subject: send Alt-* info for uncompressed based on any compressions The 'file' method sends information about the uncompressed file, if it can find it, in addition to the compressed file. This was done only for gzip so far, but we support more compression types. That this information isn't used a lot is a different story.
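The lookup works roughly like this, as a minimal standalone sketch (a hypothetical helper, not the patch itself; it assumes the APT headers declaring APT::Configuration::getCompressorExtensions() and APT::String::Endswith() as used in the diff below, and glosses over the exact offset arithmetic):

   #include <apt-pkg/aptconfiguration.h>
   #include <apt-pkg/strutl.h>
   #include <string>
   #include <vector>
   #include <sys/stat.h>

   // Probe for an uncompressed sibling of a compressed index by stripping
   // any known compressor extension instead of only the hardcoded ".gz".
   static bool FindUncompressedSibling(std::string const &File, std::string &Out)
   {
      std::vector<std::string> const exts = APT::Configuration::getCompressorExtensions();
      for (std::vector<std::string>::const_iterator e = exts.begin(); e != exts.end(); ++e)
      {
         if (APT::String::Endswith(File, *e) == false)
            continue;
         std::string const candidate = File.substr(0, File.length() - e->length());
         struct stat Buf;
         if (stat(candidate.c_str(), &Buf) == 0)
         {
            Out = candidate;
            return true;
         }
         // keep looking: extensions like '.gz' and '.tar.gz' overlap
      }
      return false;
   }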
Git-Dch: Ignore --- methods/file.cc | 43 ++++++++++++++++++-------------- test/integration/framework | 2 +- test/integration/test-compressed-indexes | 2 +- 3 files changed, 26 insertions(+), 21 deletions(-) (limited to 'methods') diff --git a/methods/file.cc b/methods/file.cc index 12db62203..5d9d7b951 100644 --- a/methods/file.cc +++ b/methods/file.cc @@ -16,6 +16,7 @@ #include #include +#include #include #include #include @@ -33,7 +34,7 @@ class FileMethod : public pkgAcqMethod public: - FileMethod() : pkgAcqMethod("1.0",SingleInstance | LocalOnly) {}; + FileMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig | LocalOnly) {}; }; // FileMethod::Fetch - Fetch a file /*{{{*/ @@ -58,27 +59,31 @@ bool FileMethod::Fetch(FetchItem *Itm) if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0) Res.IMSHit = true; } - - // See if we can compute a file without a .gz exentsion - std::string::size_type Pos = File.rfind(".gz"); - if (Pos + 3 == File.length()) + + // See if the uncompressed file exists and reuse it + std::vector extensions = APT::Configuration::getCompressorExtensions(); + for (std::vector::const_iterator ext = extensions.begin(); ext != extensions.end(); ++ext) { - File = std::string(File,0,Pos); - if (stat(File.c_str(),&Buf) == 0) + if (APT::String::Endswith(File, *ext) == true) { - FetchResult AltRes; - AltRes.Size = Buf.st_size; - AltRes.Filename = File; - AltRes.LastModified = Buf.st_mtime; - AltRes.IMSHit = false; - if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0) - AltRes.IMSHit = true; - - URIDone(Res,&AltRes); - return true; - } + std::string const unfile = File.substr(0, File.length() - ext->length() - 1); + if (stat(unfile.c_str(),&Buf) == 0) + { + FetchResult AltRes; + AltRes.Size = Buf.st_size; + AltRes.Filename = unfile; + AltRes.LastModified = Buf.st_mtime; + AltRes.IMSHit = false; + if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0) + AltRes.IMSHit = true; + + URIDone(Res,&AltRes); + return true; + } + // no break here as we could have situations similar to '.gz' vs '.tar.gz' here + } } - + if (Res.Filename.empty() == true) return _error->Error(_("File not found")); diff --git a/test/integration/framework b/test/integration/framework index 50c027a2c..994956b74 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -3,7 +3,7 @@ EXIT_CODE=0 # we all like colorful messages -if [ "$MSGCOLOR" != 'NO' ]; then +if [ "$MSGCOLOR" != 'NO' ] && [ "$MSGCOLOR" != 'ALWAYS' ]; then if [ ! -t 1 ]; then # but check that we output to a terminal export MSGCOLOR='NO' fi diff --git a/test/integration/test-compressed-indexes b/test/integration/test-compressed-indexes index 5b966754c..c6b292baa 100755 --- a/test/integration/test-compressed-indexes +++ b/test/integration/test-compressed-indexes @@ -61,7 +61,7 @@ testrun() { cd downloaded testsuccess --nomsg aptget download testpkg msgtest '\tdeb file is present'; testsuccess --nomsg test -f testpkg_1.0_i386.deb - rm testpkg_1.0_i386.deb + rm -f testpkg_1.0_i386.deb cd - >/dev/null testsuccessequal 'Reading package lists... Building dependency tree... -- cgit v1.2.3-70-g09d2 From 117038bac90261351518870b3f48136f134d4bfc Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 30 Mar 2015 19:52:32 +0200 Subject: handle servers closing encoded connections correctly Servers who advertise that they close the connection get the 'Closes' encoding flag, but this conflicts with servers who respond with a transfer-encoding (e.g. chunked encoding) as it is saved in the same flag.
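(As an illustrative sketch of the conflict, using the flags declared in methods/server.h: one enum slot holds two independent facts, so a chunked reply on a connection that will close cannot be represented.)

   enum {Chunked,Stream,Closes} Encoding; // one slot for the transfer encoding
   bool Persistent;                       // separate fact: will the connection stay open?
   // Announcing 'Connection: close' used to force Encoding = Closes, losing
   // the information that the reply is chunked; a test like the old
   //    if (Encoding == Closes) In.Limit(-1);
   // therefore conflated 'how is it encoded' with 'will it close'.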
We have a better flag for the keep-alive (or not) of the connection anyway, so we check this instead of the encoding. This is in practice not much of a problem as real servers we talk to are HTTP/1.1 servers (with keep-alive) and there isn't much point in doing chunked encoding if you are going to close anyway, but our simple testserver stumbles over this if pressed and it's a bit cleaner, too. Git-Dch: Ignore --- methods/http.cc | 6 +++--- test/interactive-helper/aptwebserver.cc | 28 +++++++++++++++++++++------- 2 files changed, 24 insertions(+), 10 deletions(-) (limited to 'methods') diff --git a/methods/http.cc b/methods/http.cc index 021b284d0..947002cc6 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -442,7 +442,7 @@ bool HttpServerState::RunData(FileFd * const File) { /* Closes encoding is used when the server did not specify a size, the loss of the connection means we are done */ - if (Encoding == Closes) + if (Persistent == false) In.Limit(-1); else if (JunkSize != 0) In.Limit(JunkSize); @@ -524,7 +524,7 @@ bool HttpServerState::Die(FileFd &File) // See if this is because the server finished the data stream if (In.IsLimit() == false && State != HttpServerState::Header && - Encoding != HttpServerState::Closes) + Persistent == true) { Close(); if (LErrno == 0) @@ -571,7 +571,7 @@ bool HttpServerState::Flush(FileFd * const File) return true; } - if (In.IsLimit() == true || Encoding == ServerState::Closes) + if (In.IsLimit() == true || Persistent == false) return true; } return false; diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index 644629a33..86d5c06f0 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -598,17 +598,24 @@ static void * handleClient(void * voidclient) /*{{{*/ { int client = *((int*)(voidclient)); std::clog << "ACCEPT client " << client << std::endl; - std::vector messages; bool closeConnection = false; - std::list headers; - while (closeConnection == false && ReadMessages(client, messages)) + while (closeConnection == false) { - // if we announced a closing, do the close - if (std::find(headers.begin(), headers.end(), std::string("Connection: close")) != headers.end()) + std::vector messages; + if (ReadMessages(client, messages) == false) break; - headers.clear(); + + std::list headers; for (std::vector::const_iterator m = messages.begin(); m != messages.end() && closeConnection == false; ++m) { + // if we announced a closing in previous response, do the close now + if (std::find(headers.begin(), headers.end(), std::string("Connection: close")) != headers.end()) + { + closeConnection = true; + break; + } + headers.clear(); + std::clog << ">>> REQUEST from " << client << " >>>" << std::endl << *m << std::endl << "<<<<<<<<<<<<<<<<" << std::endl; std::string filename; @@ -760,9 +767,16 @@ static void * handleClient(void * voidclient) /*{{{*/ else sendError(client, 404, *m, sendContent, "", headers); } + + // if we announced a closing in the last response, do the close now + if (std::find(headers.begin(), headers.end(), std::string("Connection: close")) != headers.end()) + closeConnection = true; + + if (_error->PendingError() == true) + break; _error->DumpErrors(std::cerr); - messages.clear(); } + _error->DumpErrors(std::cerr); close(client); std::clog << "CLOSE client " << client << std::endl; return NULL; -- cgit v1.2.3-70-g09d2 From 9224ce3d4d1ea0428a70e75134998e08aa45b1e6 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 30 Mar 2015 20:47:13 +0200
Subject: calculate only expected hashes in methods Methods get told which hashes are expected by the acquire system, which means we can use this list to restrict what we calculate in the methods, as any extra we are calculating is wasted effort since we can't compare it with anything anyway. Adding support for a new hash algorithm is therefore 'free' now and if an algorithm is no longer provided in a repository for a file, we automatically stop calculating it. In practice this results in a speed-up in Debian as we don't have SHA512 here (so far), so we practically stop calculating it. --- apt-pkg/contrib/hashes.cc | 63 +++++++++++++++++++++++++++++++++----------- apt-pkg/contrib/hashes.h | 21 ++++++++++++--- ftparchive/cachedb.cc | 4 +-- ftparchive/writer.cc | 4 +-- methods/cdrom.cc | 2 +- methods/copy.cc | 10 +++---- methods/file.cc | 2 +- methods/ftp.cc | 2 +- methods/gzip.cc | 2 +- methods/http.cc | 14 +++++----- methods/http.h | 2 +- methods/https.cc | 2 +- methods/https.h | 2 +- methods/rred.cc | 2 +- methods/rsh.cc | 2 +- methods/server.cc | 2 +- methods/server.h | 2 +- test/libapt/hashsums_test.cc | 24 +++++++++++++++++ 18 files changed, 116 insertions(+), 46 deletions(-) (limited to 'methods') diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc index 6e7080bc9..953465091 100644 --- a/apt-pkg/contrib/hashes.cc +++ b/apt-pkg/contrib/hashes.cc @@ -250,28 +250,34 @@ bool HashStringList::operator!=(HashStringList const &other) const class PrivateHashes { public: unsigned long long FileSize; + unsigned int CalcHashes; - PrivateHashes() : FileSize(0) {} + PrivateHashes(unsigned int const CalcHashes) : FileSize(0), CalcHashes(CalcHashes) {} }; /*}}}*/ // Hashes::Add* - Add the contents of data or FD /*{{{*/ -bool Hashes::Add(const unsigned char * const Data,unsigned long long const Size, unsigned int const Hashes) +bool Hashes::Add(const unsigned char * const Data, unsigned long long const Size) { bool Res = true; APT_IGNORE_DEPRECATED_PUSH - if ((Hashes & MD5SUM) == MD5SUM) + if ((d->CalcHashes & MD5SUM) == MD5SUM) Res &= MD5.Add(Data, Size); - if ((Hashes & SHA1SUM) == SHA1SUM) + if ((d->CalcHashes & SHA1SUM) == SHA1SUM) Res &= SHA1.Add(Data, Size); - if ((Hashes & SHA256SUM) == SHA256SUM) + if ((d->CalcHashes & SHA256SUM) == SHA256SUM) Res &= SHA256.Add(Data, Size); - if ((Hashes & SHA512SUM) == SHA512SUM) + if ((d->CalcHashes & SHA512SUM) == SHA512SUM) Res &= SHA512.Add(Data, Size); APT_IGNORE_DEPRECATED_POP d->FileSize += Size; return Res; } -bool Hashes::AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes) +bool Hashes::Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes) +{ + d->CalcHashes = Hashes; + return Add(Data, Size); +} +bool Hashes::AddFD(int const Fd,unsigned long long Size) { unsigned char Buf[64*64]; bool const ToEOF = (Size == UntilEOF); @@ -285,12 +291,17 @@ bool Hashes::AddFD(int const Fd,unsigned long long Size, unsigned int const Hash if (ToEOF && Res == 0) // EOF break; Size -= Res; - if (Add(Buf, Res, Hashes) == false) + if (Add(Buf, Res) == false) return false; } return true; } -bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes) +bool Hashes::AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes) +{ + d->CalcHashes = Hashes; + return AddFD(Fd, Size); +} +bool Hashes::AddFD(FileFd &Fd,unsigned long long Size) { unsigned char Buf[64*64]; bool const ToEOF = (Size == 0); @@ -309,20 +320,29 @@ bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, 
unsigned int const Hashes else if (a == 0) // EOF break; Size -= a; - if (Add(Buf, a, Hashes) == false) + if (Add(Buf, a) == false) return false; } return true; +} +bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes) +{ + d->CalcHashes = Hashes; + return AddFD(Fd, Size); } /*}}}*/ HashStringList Hashes::GetHashStringList() { HashStringList hashes; APT_IGNORE_DEPRECATED_PUSH - hashes.push_back(HashString("MD5Sum", MD5.Result().Value())); - hashes.push_back(HashString("SHA1", SHA1.Result().Value())); - hashes.push_back(HashString("SHA256", SHA256.Result().Value())); - hashes.push_back(HashString("SHA512", SHA512.Result().Value())); + if ((d->CalcHashes & MD5SUM) == MD5SUM) + hashes.push_back(HashString("MD5Sum", MD5.Result().Value())); + if ((d->CalcHashes & SHA1SUM) == SHA1SUM) + hashes.push_back(HashString("SHA1", SHA1.Result().Value())); + if ((d->CalcHashes & SHA256SUM) == SHA256SUM) + hashes.push_back(HashString("SHA256", SHA256.Result().Value())); + if ((d->CalcHashes & SHA512SUM) == SHA512SUM) + hashes.push_back(HashString("SHA512", SHA512.Result().Value())); APT_IGNORE_DEPRECATED_POP std::string SizeStr; strprintf(SizeStr, "%llu", d->FileSize); @@ -330,6 +350,19 @@ APT_IGNORE_DEPRECATED_POP return hashes; } APT_IGNORE_DEPRECATED_PUSH -Hashes::Hashes() { d = new PrivateHashes(); } +Hashes::Hashes() { d = new PrivateHashes(~0); } +Hashes::Hashes(unsigned int const Hashes) { d = new PrivateHashes(Hashes); } +Hashes::Hashes(HashStringList const &Hashes) { + unsigned int calcHashes = Hashes.usable() ? 0 : ~0; + if (Hashes.find("MD5Sum") != NULL) + calcHashes |= MD5SUM; + if (Hashes.find("SHA1") != NULL) + calcHashes |= SHA1SUM; + if (Hashes.find("SHA256") != NULL) + calcHashes |= SHA256SUM; + if (Hashes.find("SHA512") != NULL) + calcHashes |= SHA512SUM; + d = new PrivateHashes(calcHashes); +} Hashes::~Hashes() { delete d; } APT_IGNORE_DEPRECATED_POP diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h index 154862457..ac13c8ace 100644 --- a/apt-pkg/contrib/hashes.h +++ b/apt-pkg/contrib/hashes.h @@ -178,7 +178,8 @@ class Hashes static const int UntilEOF = 0; - bool Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes = ~0); + bool Add(const unsigned char * const Data, unsigned long long const Size); + APT_DEPRECATED bool Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes); inline bool Add(const char * const Data) {return Add((unsigned char const * const)Data,strlen(Data));}; inline bool Add(const unsigned char * const Beg,const unsigned char * const End) @@ -186,13 +187,24 @@ class Hashes enum SupportedHashes { MD5SUM = (1 << 0), SHA1SUM = (1 << 1), SHA256SUM = (1 << 2), SHA512SUM = (1 << 3) }; - bool AddFD(int const Fd,unsigned long long Size = 0, unsigned int const Hashes = ~0); - bool AddFD(FileFd &Fd,unsigned long long Size = 0, unsigned int const Hashes = ~0); + bool AddFD(int const Fd,unsigned long long Size = 0); + APT_DEPRECATED bool AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes); + bool AddFD(FileFd &Fd,unsigned long long Size = 0); + APT_DEPRECATED bool AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes); HashStringList GetHashStringList(); APT_IGNORE_DEPRECATED_PUSH + /** create a Hashes object to calculate all supported hashes + * + * If ALL is too much, you can limit which Hashes are calculated + * with the following other constructors which mention explicitly + * which hashes to generate. 
*/ Hashes(); + /** @param Hashes bitflag composed of #SupportedHashes */ + Hashes(unsigned int const Hashes); + /** @param Hashes is a list of hashes */ + Hashes(HashStringList const &Hashes); virtual ~Hashes(); APT_IGNORE_DEPRECATED_POP @@ -208,15 +220,16 @@ APT_IGNORE_DEPRECATED_POP } public: +APT_IGNORE_DEPRECATED_PUSH APT_DEPRECATED bool AddFD(int const Fd, unsigned long long Size, bool const addMD5, bool const addSHA1, bool const addSHA256, bool const addSHA512) { return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512)); }; - APT_DEPRECATED bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5, bool const addSHA1, bool const addSHA256, bool const addSHA512) { return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512)); }; +APT_IGNORE_DEPRECATED_POP }; #endif diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index 1dc268594..cc3527ea4 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -441,8 +441,8 @@ bool CacheDB::GetHashes(bool const GenOnly, unsigned int const DoHashes) if (OpenFile() == false) return false; - Hashes hashes; - if (Fd->Seek(0) == false || hashes.AddFD(*Fd, CurStat.FileSize, FlHashes) == false) + Hashes hashes(FlHashes); + if (Fd->Seek(0) == false || hashes.AddFD(*Fd, CurStat.FileSize) == false) return false; HashStringList hl = hashes.GetHashStringList(); diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index db68c21f0..593278590 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -1075,8 +1075,8 @@ bool ReleaseWriter::DoPackage(string FileName) CheckSums[NewFileName].size = fd.Size(); - Hashes hs; - hs.AddFD(fd, 0, DoHashes); + Hashes hs(DoHashes); + hs.AddFD(fd); CheckSums[NewFileName].Hashes = hs.GetHashStringList(); fd.Close(); diff --git a/methods/cdrom.cc b/methods/cdrom.cc index 74e2ecc6b..10cb29f66 100644 --- a/methods/cdrom.cc +++ b/methods/cdrom.cc @@ -266,7 +266,7 @@ bool CDROMMethod::Fetch(FetchItem *Itm) Res.LastModified = Buf.st_mtime; Res.Size = Buf.st_size; - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); FileFd Fd(Res.Filename, FileFd::ReadOnly); Hash.AddFD(Fd); Res.TakeHashes(Hash); diff --git a/methods/copy.cc b/methods/copy.cc index f3cd84215..a8e289df5 100644 --- a/methods/copy.cc +++ b/methods/copy.cc @@ -28,16 +28,16 @@ class CopyMethod : public pkgAcqMethod { virtual bool Fetch(FetchItem *Itm); - void CalculateHashes(FetchResult &Res); + void CalculateHashes(FetchItem const * const Itm, FetchResult &Res); public: CopyMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig) {}; }; -void CopyMethod::CalculateHashes(FetchResult &Res) +void CopyMethod::CalculateHashes(FetchItem const * const Itm, FetchResult &Res) { - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); FileFd::CompressMode CompressMode = FileFd::None; if (_config->FindB("Acquire::GzipIndexes", false) == true) CompressMode = FileFd::Extension; @@ -71,7 +71,7 @@ bool CopyMethod::Fetch(FetchItem *Itm) // just calc the hashes if the source and destination are identical if (File == Itm->DestFile) { - CalculateHashes(Res); + CalculateHashes(Itm, Res); URIDone(Res); return true; } @@ -104,7 +104,7 @@ bool CopyMethod::Fetch(FetchItem *Itm) if (utimes(Res.Filename.c_str(), times) != 0) return _error->Errno("utimes",_("Failed to set modification time")); - CalculateHashes(Res); + CalculateHashes(Itm, Res); URIDone(Res); return true; diff --git a/methods/file.cc b/methods/file.cc index 5d9d7b951..043ab04b8 100644 --- a/methods/file.cc +++ b/methods/file.cc @@ -87,7 +87,7 @@ bool 
FileMethod::Fetch(FetchItem *Itm) if (Res.Filename.empty() == true) return _error->Error(_("File not found")); - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); FileFd Fd(Res.Filename, FileFd::ReadOnly); Hash.AddFD(Fd); Res.TakeHashes(Hash); diff --git a/methods/ftp.cc b/methods/ftp.cc index 7764acf6a..92d8573f1 100644 --- a/methods/ftp.cc +++ b/methods/ftp.cc @@ -1064,7 +1064,7 @@ bool FtpMethod::Fetch(FetchItem *Itm) } // Open the file - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); { FileFd Fd(Itm->DestFile,FileFd::WriteAny); if (_error->PendingError() == true) diff --git a/methods/gzip.cc b/methods/gzip.cc index 387c05f2e..65519633c 100644 --- a/methods/gzip.cc +++ b/methods/gzip.cc @@ -91,7 +91,7 @@ bool GzipMethod::Fetch(FetchItem *Itm) return false; // Read data from source, generate checksums and write - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); bool Failed = false; while (1) { diff --git a/methods/http.cc b/methods/http.cc index 947002cc6..e4773b0e2 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -64,8 +64,8 @@ const unsigned int CircleBuf::BW_HZ=10; // CircleBuf::CircleBuf - Circular input buffer /*{{{*/ // --------------------------------------------------------------------- /* */ -CircleBuf::CircleBuf(unsigned long long Size) - : Size(Size), Hash(0), TotalWriten(0) +CircleBuf::CircleBuf(unsigned long long Size) + : Size(Size), Hash(NULL), TotalWriten(0) { Buf = new unsigned char[Size]; Reset(); @@ -84,10 +84,10 @@ void CircleBuf::Reset() TotalWriten = 0; MaxGet = (unsigned long long)-1; OutQueue = string(); - if (Hash != 0) + if (Hash != NULL) { delete Hash; - Hash = new Hashes; + Hash = NULL; } } /*}}}*/ @@ -222,7 +222,7 @@ bool CircleBuf::Write(int Fd) TotalWriten += Res; - if (Hash != 0) + if (Hash != NULL) Hash->Add(Buf + (OutP%Size),Res); OutP += Res; @@ -484,10 +484,10 @@ APT_PURE bool HttpServerState::IsOpen() /*{{{*/ return (ServerFd != -1); } /*}}}*/ -bool HttpServerState::InitHashes(FileFd &File) /*{{{*/ +bool HttpServerState::InitHashes(FileFd &File, HashStringList const &ExpectedHashes)/*{{{*/ { delete In.Hash; - In.Hash = new Hashes; + In.Hash = new Hashes(ExpectedHashes); // Set the expected size and read file for the hashes File.Truncate(StartPos); diff --git a/methods/http.h b/methods/http.h index 40a88a7be..6dc872659 100644 --- a/methods/http.h +++ b/methods/http.h @@ -111,7 +111,7 @@ struct HttpServerState: public ServerState virtual bool Open(); virtual bool IsOpen(); virtual bool Close(); - virtual bool InitHashes(FileFd &File); + virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes); virtual Hashes * GetHashes(); virtual bool Die(FileFd &File); virtual bool Flush(FileFd * const File); diff --git a/methods/https.cc b/methods/https.cc index 70f6a1046..81903b239 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -443,7 +443,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) Res.LastModified = resultStat.st_mtime; // take hashes - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); FileFd Fd(Res.Filename, FileFd::ReadOnly); Hash.AddFD(Fd); Res.TakeHashes(Hash); diff --git a/methods/https.h b/methods/https.h index 4cc48fc34..dc0ff3322 100644 --- a/methods/https.h +++ b/methods/https.h @@ -42,7 +42,7 @@ class HttpsServerState : public ServerState virtual bool Open() { return false; } virtual bool IsOpen() { return false; } virtual bool Close() { return false; } - virtual bool InitHashes(FileFd &/*File*/) { return false; } + virtual bool InitHashes(FileFd &/*File*/, HashStringList const &/*ExpectedHashes*/) { return false; } 
virtual Hashes * GetHashes() { return NULL; } virtual bool Die(FileFd &/*File*/) { return false; } virtual bool Flush(FileFd * const /*File*/) { return false; } diff --git a/methods/rred.cc b/methods/rred.cc index 774b58a40..554ac99b4 100644 --- a/methods/rred.cc +++ b/methods/rred.cc @@ -581,7 +581,7 @@ class RredMethod : public pkgAcqMethod { FILE *inp = fopen(Path.c_str(), "r"); FILE *out = fopen(Itm->DestFile.c_str(), "w"); - Hashes hash; + Hashes hash(Itm->ExpectedHashes); patch.apply_against_file(out, inp, &hash); diff --git a/methods/rsh.cc b/methods/rsh.cc index 0e949160b..52349c61c 100644 --- a/methods/rsh.cc +++ b/methods/rsh.cc @@ -477,7 +477,7 @@ bool RSHMethod::Fetch(FetchItem *Itm) } // Open the file - Hashes Hash; + Hashes Hash(Itm->ExpectedHashes); { FileFd Fd(Itm->DestFile,FileFd::WriteAny); if (_error->PendingError() == true) diff --git a/methods/server.cc b/methods/server.cc index 91ec824d1..e403f1071 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -357,7 +357,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res) FailFd = File->Fd(); FailTime = Server->Date; - if (Server->InitHashes(*File) == false) + if (Server->InitHashes(*File, Queue->ExpectedHashes) == false) { _error->Errno("read",_("Problem hashing file")); return ERROR_NOT_FROM_SERVER; diff --git a/methods/server.h b/methods/server.h index b974ec89a..45622dd34 100644 --- a/methods/server.h +++ b/methods/server.h @@ -85,7 +85,7 @@ struct ServerState virtual bool Open() = 0; virtual bool IsOpen() = 0; virtual bool Close() = 0; - virtual bool InitHashes(FileFd &File) = 0; + virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes) = 0; virtual Hashes * GetHashes() = 0; virtual bool Die(FileFd &File) = 0; virtual bool Flush(FileFd * const File) = 0; diff --git a/test/libapt/hashsums_test.cc b/test/libapt/hashsums_test.cc index a19a0befd..edcd8a11a 100644 --- a/test/libapt/hashsums_test.cc +++ b/test/libapt/hashsums_test.cc @@ -193,6 +193,30 @@ TEST(HashSumsTest, FileBased) EXPECT_EQ(FileSize, list.find("Checksum-FileSize")->HashValue()); } fd.Seek(0); + { + Hashes hashes(Hashes::MD5SUM | Hashes::SHA512SUM); + hashes.AddFD(fd); + HashStringList list = hashes.GetHashStringList(); + EXPECT_FALSE(list.empty()); + EXPECT_EQ(3, list.size()); + EXPECT_EQ(md5.Value(), list.find("MD5Sum")->HashValue()); + EXPECT_EQ(NULL, list.find("SHA1")); + EXPECT_EQ(NULL, list.find("SHA256")); + EXPECT_EQ(sha512.Value(), list.find("SHA512")->HashValue()); + EXPECT_EQ(FileSize, list.find("Checksum-FileSize")->HashValue()); + fd.Seek(0); + Hashes hashes2(list); + hashes2.AddFD(fd); + list = hashes2.GetHashStringList(); + EXPECT_FALSE(list.empty()); + EXPECT_EQ(3, list.size()); + EXPECT_EQ(md5.Value(), list.find("MD5Sum")->HashValue()); + EXPECT_EQ(NULL, list.find("SHA1")); + EXPECT_EQ(NULL, list.find("SHA256")); + EXPECT_EQ(sha512.Value(), list.find("SHA512")->HashValue()); + EXPECT_EQ(FileSize, list.find("Checksum-FileSize")->HashValue()); + } + fd.Seek(0); { MD5Summation MD5; MD5.AddFD(fd.Fd()); -- cgit v1.2.3-70-g09d2 From 34faa8f7ae2526f46cd1f84bb6962ad06d841e5e Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sat, 11 Apr 2015 10:23:52 +0200 Subject: calculate hashes while downloading in https MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We do this in HTTP already to give the CPU some exercise while the disk is heavily spinning (or flashing?) 
to store the data avoiding the need to reread the entire file again later on to calculate the hashes – which happens outside of the eyes of progress reporting, so you might end up with a bunch of https workers 'stuck' at 100% while they were busy calculating hashes. This is a bummer for everyone using apt as a connection speedtest as the https method works slower now (not really, it just isn't reporting done too early anymore). --- methods/http.cc | 8 +++--- methods/http.h | 2 +- methods/https.cc | 38 ++++++++++++++++++--------- methods/https.h | 6 +++-- methods/server.cc | 8 +++++- methods/server.h | 3 ++- test/integration/test-apt-download-progress | 6 +++-- 7 files changed, 47 insertions(+), 24 deletions(-) (limited to 'methods') diff --git a/methods/http.cc b/methods/http.cc index e4773b0e2..af3d5ccb6 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -484,16 +484,14 @@ APT_PURE bool HttpServerState::IsOpen() /*{{{*/ return (ServerFd != -1); } /*}}}*/ -bool HttpServerState::InitHashes(FileFd &File, HashStringList const &ExpectedHashes)/*{{{*/ +bool HttpServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/ { delete In.Hash; In.Hash = new Hashes(ExpectedHashes); - - // Set the expected size and read file for the hashes - File.Truncate(StartPos); - return In.Hash->AddFD(File, StartPos); + return true; } /*}}}*/ + APT_PURE Hashes * HttpServerState::GetHashes() /*{{{*/ { return In.Hash; diff --git a/methods/http.h b/methods/http.h index 6dc872659..e73871931 100644 --- a/methods/http.h +++ b/methods/http.h @@ -111,7 +111,7 @@ struct HttpServerState: public ServerState virtual bool Open(); virtual bool IsOpen(); virtual bool Close(); - virtual bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes); + virtual bool InitHashes(HashStringList const &ExpectedHashes); virtual Hashes * GetHashes(); virtual bool Die(FileFd &File); virtual bool Flush(FileFd * const File); diff --git a/methods/https.cc b/methods/https.cc index 81903b239..c6b75d9ad 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -72,18 +72,18 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) else me->https->Server->StartPos = 0; - me->https->File->Truncate(me->https->Server->StartPos); - me->https->File->Seek(me->https->Server->StartPos); - me->Res->LastModified = me->https->Server->Date; me->Res->Size = me->https->Server->Size; me->Res->ResumePoint = me->https->Server->StartPos; // we expect valid data, so tell our caller we get the file now - if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 && - me->https->Server->JunkSize == 0 && - me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint) - me->https->URIStart(*me->Res); + if (me->https->Server->Result >= 200 && me->https->Server->Result < 300) + { + if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint) + me->https->URIStart(*me->Res); + if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false) + return 0; + } } else if (me->https->Server->HeaderLine(line) == false) return 0; @@ -116,16 +116,31 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp) } } + if (me->Server->GetHashes()->Add((unsigned char const * const)buffer, buffer_size) == false) + return 0; + return buffer_size; } // HttpsServerState::HttpsServerState - Constructor /*{{{*/ -HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner) +HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : 
ServerState(Srv, Owner), Hash(NULL) { TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut); Reset(); } /*}}}*/ +bool HttpsServerState::InitHashes(HashStringList const &ExpectedHashes) /*{{{*/ +{ + delete Hash; + Hash = new Hashes(ExpectedHashes); + return true; +} + /*}}}*/ +APT_PURE Hashes * HttpsServerState::GetHashes() /*{{{*/ +{ + return Hash; +} + /*}}}*/ void HttpsMethod::SetupProxy() /*{{{*/ { @@ -365,6 +380,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm) // go for it - if the file exists, append on it File = new FileFd(Itm->DestFile, FileFd::WriteAny); Server = CreateServerState(Itm->Uri); + if (Server->InitHashes(Itm->ExpectedHashes) == false) + return false; // keep apt updated Res.Filename = Itm->DestFile; @@ -443,10 +460,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) Res.LastModified = resultStat.st_mtime; // take hashes - Hashes Hash(Itm->ExpectedHashes); - FileFd Fd(Res.Filename, FileFd::ReadOnly); - Hash.AddFD(Fd); - Res.TakeHashes(Hash); + Res.TakeHashes(*(Server->GetHashes())); // keep apt updated URIDone(Res); diff --git a/methods/https.h b/methods/https.h index dc0ff3322..6e32e8d3d 100644 --- a/methods/https.h +++ b/methods/https.h @@ -29,6 +29,8 @@ class FileFd; class HttpsServerState : public ServerState { + Hashes * Hash; + protected: virtual bool ReadHeaderLines(std::string &/*Data*/) { return false; } virtual bool LoadNextResponse(bool const /*ToFile*/, FileFd * const /*File*/) { return false; } @@ -42,8 +44,8 @@ class HttpsServerState : public ServerState virtual bool Open() { return false; } virtual bool IsOpen() { return false; } virtual bool Close() { return false; } - virtual bool InitHashes(FileFd &/*File*/, HashStringList const &/*ExpectedHashes*/) { return false; } - virtual Hashes * GetHashes() { return NULL; } + virtual bool InitHashes(HashStringList const &ExpectedHashes); + virtual Hashes * GetHashes(); virtual bool Die(FileFd &/*File*/) { return false; } virtual bool Flush(FileFd * const /*File*/) { return false; } virtual bool Go(bool /*ToFile*/, FileFd * const /*File*/) { return false; } diff --git a/methods/server.cc b/methods/server.cc index e403f1071..2116926b0 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -235,6 +235,12 @@ ServerState::ServerState(URI Srv, ServerMethod *Owner) : ServerName(Srv), TimeOu Reset(); } /*}}}*/ +bool ServerState::AddPartialFileToHashes(FileFd &File) /*{{{*/ +{ + File.Truncate(StartPos); + return GetHashes()->AddFD(File, StartPos); +} + /*}}}*/ bool ServerMethod::Configuration(string Message) /*{{{*/ { @@ -357,7 +363,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res) FailFd = File->Fd(); FailTime = Server->Date; - if (Server->InitHashes(*File, Queue->ExpectedHashes) == false) + if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false) { _error->Errno("read",_("Problem hashing file")); return ERROR_NOT_FROM_SERVER; diff --git a/methods/server.h b/methods/server.h index 45622dd34..1b1f754a3 100644 --- a/methods/server.h +++ b/methods/server.h @@ -72,6 +72,7 @@ struct ServerState }; /** \brief Get the headers before the data */ RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri); + bool AddPartialFileToHashes(FileFd &File); bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; @@ -85,7 +86,7 @@ struct ServerState virtual bool Open() = 0; virtual bool IsOpen() = 0; virtual bool Close() = 0; - virtual 
bool InitHashes(FileFd &File, HashStringList const &ExpectedHashes) = 0; + virtual bool InitHashes(HashStringList const &ExpectedHashes) = 0; virtual Hashes * GetHashes() = 0; virtual bool Die(FileFd &File) = 0; virtual bool Flush(FileFd * const File) = 0; diff --git a/test/integration/test-apt-download-progress b/test/integration/test-apt-download-progress index b2c9effe6..07c5e09c5 100755 --- a/test/integration/test-apt-download-progress +++ b/test/integration/test-apt-download-progress @@ -26,14 +26,16 @@ assertprogress() { TESTFILE=testfile.big testsuccess dd if=/dev/zero of=./aptarchive/$TESTFILE bs=800k count=1 +OPT='-o APT::Status-Fd=3 -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 -o Debug::Acquire::https=1' + msgtest 'download progress works via' 'http' exec 3> apt-progress.log -testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE -o APT::Status-Fd=3 -o Acquire::http::Dl-Limit=800 +testsuccess --nomsg apthelper download-file "http://localhost:8080/$TESTFILE" http-$TESTFILE $OPT -o Acquire::http::Dl-Limit=800 assertprogress apt-progress.log msgtest 'download progress works via' 'https' exec 3> apt-progress.log -testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE -o APT::Status-Fd=3 -o Acquire::https::Dl-Limit=800 +testsuccess --nomsg apthelper download-file "https://localhost:4433/$TESTFILE" https-$TESTFILE $OPT -o Acquire::https::Dl-Limit=800 assertprogress apt-progress.log # cleanup -- cgit v1.2.3-70-g09d2 From dcbb364fc69e1108b3fea3adb12a7ba83d9af467 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Tue, 12 May 2015 00:30:16 +0200 Subject: detect 416 complete file in partial by expected hash If we have the expected hashes, we can use them to check whether the file we have in partial/ (the one we got a 416 for) is the expected file. We detected this with same-size before, but not every server sends a good Content-Range header with a 416 response. --- methods/https.cc | 37 +++++++++++++++++++++------ methods/https.h | 1 + methods/server.cc | 19 +++++++++++--- test/integration/framework | 2 +- test/integration/test-apt-update-transactions | 1 + test/integration/test-partial-file-support | 10 ++++++-- test/interactive-helper/aptwebserver.cc | 9 ++++--- 7 files changed, 62 insertions(+), 17 deletions(-) (limited to 'methods') diff --git a/methods/https.cc b/methods/https.cc index c6b75d9ad..712e9ee73 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -40,7 +40,9 @@ using namespace std; struct APT_HIDDEN CURLUserPointer { HttpsMethod * const https; HttpsMethod::FetchResult * const Res; - CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res) : https(https), Res(Res) {} + HttpsMethod::FetchItem const * const Itm; + CURLUserPointer(HttpsMethod * const https, HttpsMethod::FetchResult * const Res, + HttpsMethod::FetchItem const * const Itm) : https(https), Res(Res), Itm(Itm) {} }; size_t @@ -61,13 +63,32 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0) ; - else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize()) + else if (me->https->Server->Result == 416) { - me->https->Server->Result = 200; - me->https->Server->StartPos = me->https->Server->Size; - // the actual size is not important for https as curl will deal with it - // by itself and e.g. 
doesn't bother us with transport-encoding… - me->https->Server->JunkSize = std::numeric_limits::max(); + bool partialHit = false; + if (me->Itm->ExpectedHashes.usable() == true) + { + Hashes resultHashes(me->Itm->ExpectedHashes); + FileFd file(me->Itm->DestFile, FileFd::ReadOnly); + me->https->Server->Size = file.FileSize(); + me->https->Server->Date = file.ModificationTime(); + resultHashes.AddFD(file); + HashStringList const hashList = resultHashes.GetHashStringList(); + partialHit = (me->Itm->ExpectedHashes == hashList); + } + else if (me->https->Server->Result == 416 && me->https->Server->Size == me->https->File->FileSize()) + partialHit = true; + + if (partialHit == true) + { + me->https->Server->Result = 200; + me->https->Server->StartPos = me->https->Server->Size; + // the actual size is not important for https as curl will deal with it + // by itself and e.g. doesn't bother us with transport-encoding… + me->https->Server->JunkSize = std::numeric_limits::max(); + } + else + me->https->Server->StartPos = 0; } else me->https->Server->StartPos = 0; @@ -221,7 +242,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) maybe_add_auth (Uri, _config->FindFile("Dir::Etc::netrc")); FetchResult Res; - CURLUserPointer userp(this, &Res); + CURLUserPointer userp(this, &Res, Itm); // callbacks curl_easy_setopt(curl, CURLOPT_URL, static_cast(Uri).c_str()); curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, parse_header); diff --git a/methods/https.h b/methods/https.h index 6e32e8d3d..57fc292ee 100644 --- a/methods/https.h +++ b/methods/https.h @@ -79,6 +79,7 @@ class HttpsMethod : public ServerMethod virtual bool Configuration(std::string Message); virtual ServerState * CreateServerState(URI uri); using pkgAcqMethod::FetchResult; + using pkgAcqMethod::FetchItem; HttpsMethod() : ServerMethod("1.2",Pipeline | SendConfig), File(NULL) { diff --git a/methods/server.cc b/methods/server.cc index 2116926b0..bd01c3e98 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -269,7 +269,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res) Res.LastModified = Queue->LastModified; return IMS_HIT; } - + /* Redirect * * Note that it is only OK for us to treat all redirection the same @@ -314,7 +314,20 @@ ServerMethod::DealWithHeaders(FetchResult &Res) struct stat SBuf; if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0) { - if ((unsigned long long)SBuf.st_size == Server->Size) + bool partialHit = false; + if (Queue->ExpectedHashes.usable() == true) + { + Hashes resultHashes(Queue->ExpectedHashes); + FileFd file(Queue->DestFile, FileFd::ReadOnly); + Server->Size = file.FileSize(); + Server->Date = file.ModificationTime(); + resultHashes.AddFD(file); + HashStringList const hashList = resultHashes.GetHashStringList(); + partialHit = (Queue->ExpectedHashes == hashList); + } + else if ((unsigned long long)SBuf.st_size == Server->Size) + partialHit = true; + if (partialHit == true) { // the file is completely downloaded, but was not moved if (Server->HaveContent == true) @@ -351,7 +364,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res) // This is some sort of 2xx 'data follows' reply Res.LastModified = Server->Date; Res.Size = Server->Size; - + // Open the file delete File; File = new FileFd(Queue->DestFile,FileFd::WriteAny); diff --git a/test/integration/framework b/test/integration/framework index 03c188189..2a53e8365 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -1128,7 +1128,7 @@ acquire::cdrom::autodetect 0;" > rootdir/etc/apt/apt.conf.d/00cdrom downloadfile() { local 
PROTO="${1%%:*}" if ! apthelper -o Debug::Acquire::${PROTO}=1 -o Debug::pkgAcquire::Worker=1 \ - download-file "$1" "$2" 2>&1 ; then + download-file "$1" "$2" "$3" 2>&1 ; then return 1 fi # only if the file exists the download was successful diff --git a/test/integration/test-apt-update-transactions b/test/integration/test-apt-update-transactions index f028ac0c7..67dd633f9 100755 --- a/test/integration/test-apt-update-transactions +++ b/test/integration/test-apt-update-transactions @@ -63,6 +63,7 @@ testsetup 'file' changetowebserver webserverconfig 'aptwebserver::support::modified-since' 'false' "$1" webserverconfig 'aptwebserver::support::last-modified' 'false' "$1" # curl is clever and sees hits here also +webserverconfig 'aptwebserver::support::range' 'false' "$1" testsetup 'http' diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support index 85046b3eb..c07af7bd0 100755 --- a/test/integration/test-partial-file-support +++ b/test/integration/test-partial-file-support @@ -17,8 +17,8 @@ DOWNLOADLOG='rootdir/tmp/testdownloadfile.log' testdownloadfile() { rm -f "$DOWNLOADLOG" - msgtest "Testing download of file $2 with" "$1" - if ! downloadfile "$2" "$3" > "$DOWNLOADLOG"; then + msgtest "Testing download of file $2 with" "$1 $5" + if ! downloadfile "$2" "$3" "$5" > "$DOWNLOADLOG"; then cat >&2 "$DOWNLOADLOG" msgfail else @@ -78,6 +78,12 @@ followuprequest() { testdownloadfile 'completely downloaded file' "${1}/testfile" "$DOWN" '=' testwebserverlaststatuscode '416' "$DOWNLOADLOG" + webserverconfig 'aptwebserver::support::content-range' 'false' + copysource $TESTFILE 1M $DOWN + testdownloadfile 'completely downloaded file' "${1}/testfile" "$DOWN" '=' "SHA1:$(sha1sum "$TESTFILE" | cut -d' ' -f 1)" + testwebserverlaststatuscode '416' "$DOWNLOADLOG" + webserverconfig 'aptwebserver::support::content-range' 'true' + copysource $TESTFILE 1M $DOWN copysource "${TESTFILE}2" 20 "${DOWN}2" msgtest 'Testing download of files with' 'completely downloaded file + partial file' diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index 6a411e24e..c933060e7 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -745,9 +745,12 @@ static void * handleClient(void * voidclient) /*{{{*/ } else { - std::ostringstream contentrange; - contentrange << "Content-Range: bytes */" << filesize; - headers.push_back(contentrange.str()); + if (_config->FindB("aptwebserver::support::content-range", true) == true) + { + std::ostringstream contentrange; + contentrange << "Content-Range: bytes */" << filesize; + headers.push_back(contentrange.str()); + } sendError(client, 416, *m, sendContent, "", headers); break; } -- cgit v1.2.3-70-g09d2 From 8eafc759544298211cd0bfaa3919afc0fadd47d1 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Wed, 13 May 2015 16:09:12 +0200 Subject: detect Releasefile IMS hits even if the server doesn't Not all servers we are talking to support If-Modified-Since and some are not even sending Last-Modified for us, so in an effort to detect such hits we run a hashsum check on the 'old' compared to the 'new' file; we got the hashes for the 'new' already for "free" from the methods anyway and hence just need to calculate the old ones. This allows us to detect hits even with unsupported servers, which in turn means we benefit from all the new hit behavior also here. 
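The core of the idea as a minimal sketch (a hypothetical helper following the names in the diff below; HashStringList::VerifyFile() recomputes the hashes of the given file and compares them against the list):

   #include <apt-pkg/hashes.h>
   #include <apt-pkg/fileutl.h>
   #include <string>

   // Treat a download as an IMS hit if the freshly received hashes match
   // the copy we already store, even if the server never signalled a hit.
   static bool IsHashBasedIMSHit(HashStringList const &NewHashes, std::string const &FinalFile)
   {
      return NewHashes.usable() == true &&
         RealFileExists(FinalFile) == true &&
         NewHashes.VerifyFile(FinalFile) == true;
   }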
--- apt-pkg/acquire-item.cc | 31 ++++++++++---- apt-pkg/acquire-item.h | 2 +- apt-pkg/acquire-worker.cc | 8 ++-- methods/https.cc | 2 +- test/integration/framework | 9 ++++ test/integration/test-apt-update-expected-size | 1 + test/integration/test-apt-update-not-modified | 58 +++++++++++++++++++++++++- test/integration/test-apt-update-rollback | 1 + test/integration/test-apt-update-transactions | 2 + 9 files changed, 99 insertions(+), 15 deletions(-) (limited to 'methods') diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index 1090912f5..78dace12c 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -253,7 +253,10 @@ bool pkgAcquire::Item::Rename(string From,string To) strprintf(S, _("rename failed, %s (%s -> %s)."), strerror(errno), From.c_str(),To.c_str()); Status = StatError; - ErrorText += S; + if (ErrorText.empty()) + ErrorText = S; + else + ErrorText = ErrorText + ": " + S; return false; } /*}}}*/ @@ -1794,7 +1797,7 @@ void pkgAcqMetaSig::Done(string Message,unsigned long long Size, if(AuthPass == false) { - if(CheckDownloadDone(Message) == true) + if(CheckDownloadDone(Message, Hashes) == true) { // destfile will be modified to point to MetaIndexFile for the // gpgv method, so we need to save it here @@ -1837,7 +1840,8 @@ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ Status = StatDone; } else { _error->Error("%s", downgrade_msg.c_str()); - Rename(MetaIndexFile, MetaIndexFile+".FAILED"); + if (TransactionManager->IMSHit == false) + Rename(MetaIndexFile, MetaIndexFile+".FAILED"); Item::Failed("Message: " + downgrade_msg, Cnf); TransactionManager->AbortTransaction(); return; @@ -1922,12 +1926,12 @@ void pkgAcqMetaIndex::Done(string Message,unsigned long long Size, /*{{{*/ { Item::Done(Message,Size,Hashes,Cfg); - if(CheckDownloadDone(Message)) + if(CheckDownloadDone(Message, Hashes)) { // we have a Release file, now download the Signature, all further // verify/queue for additional downloads will be done in the // pkgAcqMetaSig::Done() code - std::string MetaIndexFile = DestFile; + std::string const MetaIndexFile = DestFile; new pkgAcqMetaSig(Owner, TransactionManager, MetaIndexSigURI, MetaIndexSigURIDesc, MetaIndexSigShortDesc, MetaIndexFile, IndexTargets, @@ -2008,7 +2012,7 @@ void pkgAcqMetaBase::QueueForSignatureVerify(const std::string &MetaIndexFile, } /*}}}*/ // pkgAcqMetaBase::CheckDownloadDone /*{{{*/ -bool pkgAcqMetaBase::CheckDownloadDone(const std::string &Message) +bool pkgAcqMetaBase::CheckDownloadDone(const std::string &Message, HashStringList const &Hashes) { // We have just finished downloading a Release file (it is not // verified yet) @@ -2031,7 +2035,18 @@ bool pkgAcqMetaBase::CheckDownloadDone(const std::string &Message) // make sure to verify against the right file on I-M-S hit IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false); - if(IMSHit) + if (IMSHit == false) + { + // detect IMS-Hits servers haven't detected by Hash comparison + std::string FinalFile = GetFinalFilename(); + if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true) + { + IMSHit = true; + unlink(DestFile.c_str()); + } + } + + if(IMSHit == true) { // for simplicity, the transaction manager is always InRelease // even if it doesn't exist. 
@@ -2273,7 +2288,7 @@ void pkgAcqMetaClearSig::Done(std::string Message,unsigned long long Size, if(AuthPass == false) { - if(CheckDownloadDone(Message) == true) + if(CheckDownloadDone(Message, Hashes) == true) QueueForSignatureVerify(DestFile, DestFile); return; } diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 33a28671c..646de8416 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -429,7 +429,7 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ * \param Message The message block received from the fetch * subprocess. */ - bool CheckDownloadDone(const std::string &Message); + bool CheckDownloadDone(const std::string &Message, HashStringList const &Hashes); /** \brief Queue the downloaded Signature for verification */ void QueueForSignatureVerify(const std::string &MetaIndexFile, diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc index bee01e620..9254e20a3 100644 --- a/apt-pkg/acquire-worker.cc +++ b/apt-pkg/acquire-worker.cc @@ -333,13 +333,12 @@ bool pkgAcquire::Worker::RunMessages() // see if there is a hash to verify HashStringList ReceivedHashes; - HashStringList expectedHashes = Owner->HashSums(); - for (HashStringList::const_iterator hs = expectedHashes.begin(); hs != expectedHashes.end(); ++hs) + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) { - std::string const tagname = hs->HashType() + "-Hash"; + std::string const tagname = std::string(*type) + "-Hash"; std::string const hashsum = LookupTag(Message, tagname.c_str()); if (hashsum.empty() == false) - ReceivedHashes.push_back(HashString(hs->HashType(), hashsum)); + ReceivedHashes.push_back(HashString(*type, hashsum)); } if(_config->FindB("Debug::pkgAcquire::Auth", false) == true) @@ -349,6 +348,7 @@ bool pkgAcquire::Worker::RunMessages() for (HashStringList::const_iterator hs = ReceivedHashes.begin(); hs != ReceivedHashes.end(); ++hs) std::clog << "\t- " << hs->toStr() << std::endl; std::clog << "ExpectedHash:" << endl; + HashStringList expectedHashes = Owner->HashSums(); for (HashStringList::const_iterator hs = expectedHashes.begin(); hs != expectedHashes.end(); ++hs) std::clog << "\t- " << hs->toStr() << std::endl; std::clog << endl; diff --git a/methods/https.cc b/methods/https.cc index 712e9ee73..fa143439a 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -444,7 +444,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm) char err[255]; snprintf(err, sizeof(err) - 1, "HttpError%i", Server->Result); SetFailReason(err); - _error->Error("%s", err); + _error->Error("%i %s", Server->Result, Server->Code); // unlink, no need keep 401/404 page content in partial/ unlink(File->Name().c_str()); return false; diff --git a/test/integration/framework b/test/integration/framework index 2a53e8365..8c8936ead 100644 --- a/test/integration/framework +++ b/test/integration/framework @@ -1005,6 +1005,15 @@ signreleasefiles() { msgdone "info" } +redatereleasefiles() { + local DATE="$(date -d "$1" '+%a, %d %b %Y %H:%M:%S %Z')" + for release in $(find aptarchive/ -name 'Release'); do + sed -i "s/^Date: .*$/Date: ${DATE}/" $release + touch -d "$DATE" $release + done + signreleasefiles "${2:-Joe Sixpack}" +} + webserverconfig() { local WEBSERVER="${3:-http://localhost:8080}" local NOCHECK=false diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size index 7efccaa57..55a5da848 100755 --- a/test/integration/test-apt-update-expected-size +++ b/test/integration/test-apt-update-expected-size @@ -26,6 +26,7 @@ 
test_inreleasetoobig() { } test_packagestoobig() { + redatereleasefiles '+1hour' # append junk at the end of the Packages.gz/Packages SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" find aptarchive/dists -name 'Packages*' | while read pkg; do diff --git a/test/integration/test-apt-update-not-modified b/test/integration/test-apt-update-not-modified index 2dc56e76c..a67ecb760 100755 --- a/test/integration/test-apt-update-not-modified +++ b/test/integration/test-apt-update-not-modified @@ -12,7 +12,7 @@ insertpackage 'unstable' 'apt' 'all' '1.0' setupaptarchive --no-update methodtest() { - msgmsg 'Test with' "$1" + msgmsg 'Test InRelease with' "$1" rm -rf rootdir/var/lib/apt/lists # get our cache populated testsuccess aptget update @@ -36,6 +36,62 @@ Reading package lists..." aptget update Get:1 $1 unstable/main amd64 Packages [$(stat -c '%s' 'aptarchive/dists/unstable/main/binary-amd64/Packages.gz') B] Reading package lists..." aptget update testfileequal 'listsdir.lst' "$(listcurrentlistsdirectory)" + + webserverconfig 'aptwebserver::support::modified-since' 'false' + webserverconfig 'aptwebserver::support::last-modified' 'false' + testsuccessequal "Get:1 $1 unstable InRelease [$(stat -c '%s' 'aptarchive/dists/unstable/InRelease') B] +Reading package lists..." aptget update + webserverconfig 'aptwebserver::support::modified-since' 'true' + webserverconfig 'aptwebserver::support::last-modified' 'true' + + msgmsg 'Test Release.gpg with' "$1" + rm -rf rootdir/var/lib/apt/lists + cp -a aptarchive/dists aptarchive/dists.good + find aptarchive/dists -name 'InRelease' -delete + # get our cache populated + testsuccess aptget update + listcurrentlistsdirectory > listsdir.lst + + # hit again with a good cache + testsuccessequal "Ign $1 unstable InRelease + 404 Not Found +Hit $1 unstable Release +Hit $1 unstable Release.gpg +Reading package lists..." aptget update + testfileequal 'listsdir.lst' "$(listcurrentlistsdirectory)" + + # drop an architecture, which means the file should be gone now + configarchitecture 'i386' + sed '/_binary-amd64_Packages/ d' listsdir.lst > listsdir-without-amd64.lst + testsuccessequal "Ign $1 unstable InRelease + 404 Not Found +Hit $1 unstable Release +Hit $1 unstable Release.gpg +Reading package lists..." aptget update + testfileequal 'listsdir-without-amd64.lst' "$(listcurrentlistsdirectory)" + + # readd arch so its downloaded again + configarchitecture 'amd64' 'i386' + testsuccessequal "Ign $1 unstable InRelease + 404 Not Found +Hit $1 unstable Release +Hit $1 unstable Release.gpg +Get:1 $1 unstable/main amd64 Packages [$(stat -c '%s' 'aptarchive/dists/unstable/main/binary-amd64/Packages.gz') B] +Reading package lists..." aptget update + testfileequal 'listsdir.lst' "$(listcurrentlistsdirectory)" + + webserverconfig 'aptwebserver::support::modified-since' 'false' + webserverconfig 'aptwebserver::support::last-modified' 'false' + testsuccessequal "Ign $1 unstable InRelease + 404 Not Found +Get:1 $1 unstable Release [$(stat -c '%s' 'aptarchive/dists/unstable/Release') B] +Get:2 $1 unstable Release.gpg [$(stat -c '%s' 'aptarchive/dists/unstable/Release.gpg') B] +Reading package lists..." 
aptget update + webserverconfig 'aptwebserver::support::modified-since' 'true' + webserverconfig 'aptwebserver::support::last-modified' 'true' + + rm -rf aptarchive/dists + cp -a aptarchive/dists.good aptarchive/dists } changetowebserver diff --git a/test/integration/test-apt-update-rollback b/test/integration/test-apt-update-rollback index f4500b69d..29fe1ab56 100755 --- a/test/integration/test-apt-update-rollback +++ b/test/integration/test-apt-update-rollback @@ -158,6 +158,7 @@ test_inrelease_to_broken_gzip() { msgmsg "Test InRelease to broken gzip" start_with_good_inrelease + redatereleasefiles '+2hours' # append junk at the end of the compressed file echo "lala" >> $APTARCHIVE/dists/unstable/main/source/Sources.gz touch -d '+2min' $APTARCHIVE/dists/unstable/main/source/Sources.gz diff --git a/test/integration/test-apt-update-transactions b/test/integration/test-apt-update-transactions index 67dd633f9..63b318056 100755 --- a/test/integration/test-apt-update-transactions +++ b/test/integration/test-apt-update-transactions @@ -47,6 +47,7 @@ testrun() { testsetup() { msgmsg 'Test with no initial data over' "$1" + redatereleasefiles 'now' rm -rf rootdir/var/lib/apt/lists mkdir -p rootdir/var/lib/apt/lists/partial listcurrentlistsdirectory > listsdir.lst @@ -55,6 +56,7 @@ testsetup() { msgmsg 'Test with initial data over' "$1" testsuccess aptget update listcurrentlistsdirectory > listsdir.lst + redatereleasefiles '+1hour' testrun 'listsdir.lst' } -- cgit v1.2.3-70-g09d2 From ceafe8a6edc815df2923ba892894617829e9d3c2 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Fri, 22 May 2015 15:28:53 +0200 Subject: Fix endless loop in apt-get update that can cause disk fillup The apt http code parses Content-Length and Content-Range. For both headers the variable "Size" is used, and the semantic of this Size is the total file size. However, Content-Length is not the entire file size for partial file requests. For servers that send the Content-Range header first and then the Content-Length header this can lead to clobbering of Size so that it is less than the real file size. This may lead to a subsequent passing of a negative number into the CircleBuf, which leads to an endless loop that writes data. Thanks to Anton Blanchard for the analysis and initial patch. LP: #1445239 --- methods/http.cc | 2 +- methods/server.cc | 20 +++++++++++++++----- methods/server.h | 3 ++- test/interactive-helper/aptwebserver.cc | 6 +++--- 4 files changed, 21 insertions(+), 10 deletions(-) (limited to 'methods') diff --git a/methods/http.cc b/methods/http.cc index 1b996db98..ad90c9891 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -443,7 +443,7 @@ bool HttpServerState::RunData(FileFd * const File) else if (JunkSize != 0) In.Limit(JunkSize); else - In.Limit(Size - StartPos); + In.Limit(DownloadSize); // Just transfer the whole block.
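The failure mode becomes obvious with concrete numbers: if a ranged request is answered with "Content-Range: bytes 900-999/1000" followed by "Content-Length: 100", a parser that stores both values in the same Size variable computes 100 - 900 as the remaining limit, and with unsigned arithmetic that wraps around to an enormous value instead of a negative one. A short self-contained demonstration of the wraparound (plain C++, independent of APT's CircleBuf):

    #include <iostream>

    int main()
    {
       // Content-Range: bytes 900-999/1000
       unsigned long long Size = 1000;
       unsigned long long const StartPos = 900;

       // Correct: keep the sizes apart, 100 bytes remain to fetch.
       unsigned long long const DownloadSize = Size - StartPos;

       // Buggy: a later "Content-Length: 100" clobbers Size...
       Size = 100;
       unsigned long long const Limit = Size - StartPos;

       std::cout << "DownloadSize: " << DownloadSize << "\n"  // 100
                 << "Limit: " << Limit << "\n";  // 18446744073709550816
       return 0;
    }
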
do diff --git a/methods/server.cc b/methods/server.cc index e321e0230..ba0a8864b 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -164,15 +164,22 @@ bool ServerState::HeaderLine(string Line) Encoding = Stream; HaveContent = true; - unsigned long long * SizePtr = &Size; + unsigned long long * DownloadSizePtr = &DownloadSize; if (Result == 416) - SizePtr = &JunkSize; + DownloadSizePtr = &JunkSize; - *SizePtr = strtoull(Val.c_str(), NULL, 10); - if (*SizePtr >= std::numeric_limits::max()) + *DownloadSizePtr = strtoull(Val.c_str(), NULL, 10); + if (*DownloadSizePtr >= std::numeric_limits::max()) return _error->Errno("HeaderLine", _("The HTTP server sent an invalid Content-Length header")); - else if (*SizePtr == 0) + else if (*DownloadSizePtr == 0) HaveContent = false; + + // On partial content (206) the Content-Length less than the real + // size, so do not set it here but leave that to the Content-Range + // header instead + if(Result != 206 && Size == 0) + Size = DownloadSize; + return true; } @@ -193,6 +200,9 @@ bool ServerState::HeaderLine(string Line) return _error->Error(_("The HTTP server sent an invalid Content-Range header")); if ((unsigned long long)StartPos > Size) return _error->Error(_("This HTTP server has broken range support")); + + // figure out what we will download + DownloadSize = Size - StartPos; return true; } diff --git a/methods/server.h b/methods/server.h index 1b81e3549..ed3cb456a 100644 --- a/methods/server.h +++ b/methods/server.h @@ -34,7 +34,8 @@ struct ServerState char Code[360]; // These are some statistics from the last parsed header lines - unsigned long long Size; // size of the usable content (aka: the file) + unsigned long long Size; // total size of the usable content (aka: the file) + unsigned long long DownloadSize; // size we actually download (can be smaller than Size if we have partial content) unsigned long long JunkSize; // size of junk content (aka: server error pages) unsigned long long StartPos; time_t Date; diff --git a/test/interactive-helper/aptwebserver.cc b/test/interactive-helper/aptwebserver.cc index cd52da692..9c67b67e4 100644 --- a/test/interactive-helper/aptwebserver.cc +++ b/test/interactive-helper/aptwebserver.cc @@ -654,13 +654,13 @@ static void * handleClient(void * voidclient) /*{{{*/ if (filesize > filestart) { data.Skip(filestart); - std::ostringstream contentlength; - contentlength << "Content-Length: " << (filesize - filestart); - headers.push_back(contentlength.str()); std::ostringstream contentrange; contentrange << "Content-Range: bytes " << filestart << "-" << filesize - 1 << "/" << filesize; headers.push_back(contentrange.str()); + std::ostringstream contentlength; + contentlength << "Content-Length: " << (filesize - filestart); + headers.push_back(contentlength.str()); sendHead(client, 206, headers); if (sendContent == true) sendFile(client, headers, data); -- cgit v1.2.3-70-g09d2 From 6291f60e86718697f261519a6818e1d5ee433216 Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Fri, 22 May 2015 15:40:18 +0200 Subject: Rename "Size" in ServerState to TotalFileSize The variable "Size" was misleading and caused bug #1445239. To avoid similar issues in the future, rename it to make the meaning more obvious. 
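With the rename in place each field has exactly one meaning, and the Content-Range handling can be stated compactly. A sketch of the parsing logic under those names (standalone; it mirrors the sscanf format used by ServerState::HeaderLine in the patch above, but is not the real method):

    #include <cstdio>
    #include <string>

    struct RangeInfo
    {
       unsigned long long StartPos;       // first byte the server sends
       unsigned long long TotalFileSize;  // the "/1000" part of the header
       unsigned long long DownloadSize;   // what actually crosses the wire
    };

    // Parse a header value like "bytes 900-999/1000".
    static bool ParseContentRange(std::string const &Val, RangeInfo &Out)
    {
       if (sscanf(Val.c_str(), "bytes %llu-%*u/%llu",
                  &Out.StartPos, &Out.TotalFileSize) != 2)
          return false;  // invalid Content-Range header
       if (Out.StartPos > Out.TotalFileSize)
          return false;  // broken range support on the server
       Out.DownloadSize = Out.TotalFileSize - Out.StartPos;
       return true;
    }
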
git-dch: ignore --- methods/https.cc | 4 ++-- methods/server.cc | 20 ++++++++++---------- methods/server.h | 14 ++++++++++---- 3 files changed, 22 insertions(+), 16 deletions(-) (limited to 'methods') diff --git a/methods/https.cc b/methods/https.cc index 3a5981b58..12fc6c70f 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -55,10 +55,10 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) { if (me->Server->Result != 416 && me->Server->StartPos != 0) ; - else if (me->Server->Result == 416 && me->Server->Size == me->File->FileSize()) + else if (me->Server->Result == 416 && me->Server->TotalFileSize == me->File->FileSize()) { me->Server->Result = 200; - me->Server->StartPos = me->Server->Size; + me->Server->StartPos = me->Server->TotalFileSize; // the actual size is not important for https as curl will deal with it // by itself and e.g. doesn't bother us with transport-encoding… me->Server->JunkSize = std::numeric_limits::max(); diff --git a/methods/server.cc b/methods/server.cc index ba0a8864b..6c05700a5 100644 --- a/methods/server.cc +++ b/methods/server.cc @@ -54,7 +54,7 @@ ServerState::RunHeadersResult ServerState::RunHeaders(FileFd * const File, Major = 0; Minor = 0; Result = 0; - Size = 0; + TotalFileSize = 0; JunkSize = 0; StartPos = 0; Encoding = Closes; @@ -177,8 +177,8 @@ bool ServerState::HeaderLine(string Line) // On partial content (206) the Content-Length less than the real // size, so do not set it here but leave that to the Content-Range // header instead - if(Result != 206 && Size == 0) - Size = DownloadSize; + if(Result != 206 && TotalFileSize == 0) + TotalFileSize = DownloadSize; return true; } @@ -194,15 +194,15 @@ bool ServerState::HeaderLine(string Line) HaveContent = true; // §14.16 says 'byte-range-resp-spec' should be a '*' in case of 416 - if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&Size) == 1) + if (Result == 416 && sscanf(Val.c_str(), "bytes */%llu",&TotalFileSize) == 1) ; // we got the expected filesize which is all we wanted - else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&Size) != 2) + else if (sscanf(Val.c_str(),"bytes %llu-%*u/%llu",&StartPos,&TotalFileSize) != 2) return _error->Error(_("The HTTP server sent an invalid Content-Range header")); - if ((unsigned long long)StartPos > Size) + if ((unsigned long long)StartPos > TotalFileSize) return _error->Error(_("This HTTP server has broken range support")); // figure out what we will download - DownloadSize = Size - StartPos; + DownloadSize = TotalFileSize - StartPos; return true; } @@ -313,7 +313,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res) struct stat SBuf; if (stat(Queue->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0) { - if ((unsigned long long)SBuf.st_size == Server->Size) + if ((unsigned long long)SBuf.st_size == Server->TotalFileSize) { // the file is completely downloaded, but was not moved if (Server->HaveContent == true) @@ -323,7 +323,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res) Server->RunData(&DevNull); } Server->HaveContent = false; - Server->StartPos = Server->Size; + Server->StartPos = Server->TotalFileSize; Server->Result = 200; } else if (unlink(Queue->DestFile.c_str()) == 0) @@ -349,7 +349,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res) // This is some sort of 2xx 'data follows' reply Res.LastModified = Server->Date; - Res.Size = Server->Size; + Res.Size = Server->TotalFileSize; // Open the file delete File; diff --git a/methods/server.h b/methods/server.h index ed3cb456a..8c14282b6 100644 --- 
a/methods/server.h +++ b/methods/server.h @@ -34,10 +34,16 @@ struct ServerState char Code[360]; // These are some statistics from the last parsed header lines - unsigned long long Size; // total size of the usable content (aka: the file) - unsigned long long DownloadSize; // size we actually download (can be smaller than Size if we have partial content) - unsigned long long JunkSize; // size of junk content (aka: server error pages) + + // total size of the usable content (aka: the file) + unsigned long long TotalFileSize; + // size we actually download (can be smaller than Size if we have partial content) + unsigned long long DownloadSize; + // size of junk content (aka: server error pages) + unsigned long long JunkSize; + // The start of the data (for partial content) unsigned long long StartPos; + time_t Date; bool HaveContent; enum {Chunked,Stream,Closes} Encoding; @@ -73,7 +79,7 @@ struct ServerState RunHeadersResult RunHeaders(FileFd * const File, const std::string &Uri); bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;}; - virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0; + virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; TotalFileSize = 0; JunkSize = 0; StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false; State = Header; Persistent = false; Pipeline = true;}; virtual bool WriteResponse(std::string const &Data) = 0; -- cgit v1.2.3-70-g09d2 From 65759e00eff0513c34f584b99420b72fe0e5073e Mon Sep 17 00:00:00 2001 From: Michael Vogt Date: Fri, 22 May 2015 16:27:08 +0200 Subject: Update methods/https.cc now that ServerState::Size is renamed Git-Dch: ignore --- methods/https.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'methods') diff --git a/methods/https.cc b/methods/https.cc index 81060122c..c97367323 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -69,7 +69,7 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp) me->File->Truncate(me->Server->StartPos); me->File->Seek(me->Server->StartPos); - me->Res.Size = me->Server->Size; + me->Res.Size = me->Server->TotalFileSize; } else if (me->Server->HeaderLine(line) == false) return 0; -- cgit v1.2.3-70-g09d2 From 448c38bdcd72b52f11ec5f326f822cf57653f81c Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sat, 6 Jun 2015 12:28:00 +0200 Subject: rework hashsum verification in the acquire system MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Having every item carry its own code to verify the file(s) it handles is an error-prone process and easy to break, especially if items move through various stages (download, uncompress, patching, …). With a giant rework we centralize (most of) the verification to have a better enforcement rate and (hopefully) less chance for bugs, but it breaks the ABI big time in exchange – and as we break it anyway, it is broken even harder. It shouldn't affect most frontends as they don't deal with the acquire system at all or implement their own items, but some do and will need to be patched (might be an opportunity to use apt on-board material). The theory is simple: Items implement methods to decide if hashes need to be checked (in this stage) and to return the expected hashes for this item (in this stage).
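Expressed as an interface, that theory fits in a few lines: the base class drives verification the same way for every item and stage, and subclasses only answer the two questions. A condensed sketch of the contract (illustrative types, not the real class hierarchy):

    #include <string>
    #include <vector>

    // Stand-in for APT's HashStringList: empty means "nothing to check".
    struct Hashes
    {
       std::vector<std::string> List;
       bool usable() const { return List.empty() == false; }
       bool operator==(Hashes const &Other) const { return List == Other.List; }
    };

    struct Item
    {
       // Implemented as broadly as possible: the hashes this item
       // must match in its current stage.
       virtual Hashes GetExpectedHashes() const = 0;
       // As restrictive as possible: only consulted if no hashes
       // were returned - is that acceptable?
       virtual bool HashesRequired() const { return true; }
       virtual ~Item() {}

       // One central verification path instead of per-item checking code.
       bool Verify(Hashes const &Received) const
       {
          Hashes const Expected = GetExpectedHashes();
          if (Expected.usable() == true)
             return Received == Expected;    // a returned hash must match
          return HashesRequired() == false;  // no hashes: okay only if allowed
       }
    };
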
The verification itself is done in worker message passing which has the benefit that a hashsum error is now a proper error for the acquire system rather than a Done() which is later revised to a Failed(). --- apt-pkg/acquire-item.cc | 3750 ++++++++++---------- apt-pkg/acquire-item.h | 677 ++-- apt-pkg/acquire-method.cc | 5 +- apt-pkg/acquire-worker.cc | 189 +- apt-pkg/contrib/hashes.cc | 10 + apt-pkg/contrib/hashes.h | 9 + apt-pkg/deb/debindexfile.cc | 4 +- apt-pkg/deb/debmetaindex.cc | 78 +- apt-pkg/indexrecords.cc | 2 +- apt-pkg/pkgcache.h | 7 +- methods/file.cc | 6 +- test/integration/test-apt-get-source-authenticated | 2 +- test/integration/test-apt-sources-deb822 | 55 +- test/integration/test-apt-update-expected-size | 2 +- test/integration/test-apt-update-file | 4 + test/integration/test-apt-update-nofallback | 11 +- test/integration/test-apt-update-not-modified | 30 +- test/integration/test-apt-update-stale | 9 +- .../test-bug-595691-empty-and-broken-archive-files | 2 +- .../test-ubuntu-bug-1098738-apt-get-source-md5sum | 8 + test/libapt/acqprogress_test.cc | 6 +- test/libapt/hashsums_test.cc | 3 + 22 files changed, 2411 insertions(+), 2458 deletions(-) (limited to 'methods') diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index cf89717c4..ec6ec6e84 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -107,36 +107,296 @@ static bool AllowInsecureRepositories(indexRecords const * const MetaIndexParser } /*}}}*/ +// all ::HashesRequired and ::GetExpectedHashes implementations /*{{{*/ +/* ::GetExpectedHashes is abstract and has to be implemented by all subclasses. + It is best to implement it as broadly as possible, while ::HashesRequired defaults + to true and should be as restrictive as possible for false cases. Note that if + a hash is returned by ::GetExpectedHashes it must match. Only if it doesn't + ::HashesRequired is called to evaluate if its okay to have no hashes. */ +APT_CONST bool pkgAcqTransactionItem::HashesRequired() const +{ + /* signed repositories obviously have a parser and good hashes. + unsigned repositories, too, as even if we can't trust them for security, + we can at least trust them for integrity of the download itself. + Only repositories without a Release file can (obviously) not have + hashes – and they are very uncommon and strongly discouraged */ + return TransactionManager->MetaIndexParser != NULL; +} +HashStringList pkgAcqTransactionItem::GetExpectedHashes() const +{ + return GetExpectedHashesFor(GetMetaKey()); +} + +APT_CONST bool pkgAcqMetaBase::HashesRequired() const +{ + // Release and co have no hashes 'by design'. + return false; +} +HashStringList pkgAcqMetaBase::GetExpectedHashes() const +{ + return HashStringList(); +} + +APT_CONST bool pkgAcqIndexDiffs::HashesRequired() const +{ + /* FIXME: We have only hashes for uncompressed pdiffs. + rred uncompresses them on the fly while parsing. + In StateFetchDiff state we also uncompress on the fly for hash check. + Hashes are checked while searching for (next) patch to apply. 
*/ + return false; +} +HashStringList pkgAcqIndexDiffs::GetExpectedHashes() const +{ + return HashStringList(); +} + +APT_CONST bool pkgAcqIndexMergeDiffs::HashesRequired() const +{ + /* @see #pkgAcqIndexDiffs::HashesRequired, with the difference that + we can check the rred result after all patches are applied as + we know the expected result rather than potentially apply more patches */ + return State == StateApplyDiff; +} +HashStringList pkgAcqIndexMergeDiffs::GetExpectedHashes() const +{ + if (State == StateApplyDiff) + return GetExpectedHashesFor(Target->MetaKey); + return HashStringList(); +} + +APT_CONST bool pkgAcqArchive::HashesRequired() const +{ + return LocalSource == false; +} +HashStringList pkgAcqArchive::GetExpectedHashes() const +{ + // figured out while parsing the records + return ExpectedHashes; +} + +APT_CONST bool pkgAcqFile::HashesRequired() const +{ + // supplied as parameter at creation time, so the caller decides + return ExpectedHashes.usable(); +} +HashStringList pkgAcqFile::GetExpectedHashes() const +{ + return ExpectedHashes; +} + /*}}}*/ +// Acquire::Item::QueueURI and specialisations from child classes /*{{{*/ +bool pkgAcquire::Item::QueueURI(pkgAcquire::ItemDesc &Item) +{ + Owner->Enqueue(Item); + return true; +} +/* The idea here is that an item isn't queued if it exists on disk and the + transition manager was a hit as this means that the files it contains + the checksums for can't be updated either (or they are and we are asking + for a hashsum mismatch to happen which helps nobody) */ +bool pkgAcqTransactionItem::QueueURI(pkgAcquire::ItemDesc &Item) +{ + std::string const FinalFile = GetFinalFilename(); + if (TransactionManager != NULL && TransactionManager->IMSHit == true && + FileExists(FinalFile) == true) + { + PartialFile = DestFile = FinalFile; + Status = StatDone; + return false; + } + return pkgAcquire::Item::QueueURI(Item); +} +/* The transition manager InRelease itself (or its older sisters-in-law + Release & Release.gpg) is always queued as this allows us to rerun gpgv + on it to verify that we aren't stalled with old files */ +bool pkgAcqMetaBase::QueueURI(pkgAcquire::ItemDesc &Item) +{ + return pkgAcquire::Item::QueueURI(Item); +} +/* the Diff/Index needs to queue also the up-to-date complete index file + to ensure that the list cleaner isn't eating it */ +bool pkgAcqDiffIndex::QueueURI(pkgAcquire::ItemDesc &Item) +{ + if (pkgAcqTransactionItem::QueueURI(Item) == true) + return true; + QueueOnIMSHit(); + return false; +} + /*}}}*/ +// Acquire::Item::GetFinalFilename and specialisations for child classes /*{{{*/ +std::string pkgAcquire::Item::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(Desc.URI); +} +std::string pkgAcqDiffIndex::GetFinalFilename() const +{ + // the logic we inherent from pkgAcqBaseIndex isn't what we need here + return pkgAcquire::Item::GetFinalFilename(); +} +std::string pkgAcqIndex::GetFinalFilename() const +{ + std::string const FinalFile = GetFinalFileNameFromURI(Target->URI); + return GetCompressedFileName(Target->URI, FinalFile, CurrentCompressionExtension); +} +std::string pkgAcqMetaSig::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(Target->URI); +} +std::string pkgAcqBaseIndex::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(Target->URI); +} +std::string pkgAcqMetaBase::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(DataTarget.URI); +} +std::string pkgAcqArchive::GetFinalFilename() const +{ + return _config->FindDir("Dir::Cache::Archives") + 
flNotDir(StoreFilename); +} + /*}}}*/ +// pkgAcqTransactionItem::GetMetaKey and specialisations for child classes /*{{{*/ +std::string pkgAcqTransactionItem::GetMetaKey() const +{ + return Target->MetaKey; +} +std::string pkgAcqIndex::GetMetaKey() const +{ + if (Stage == STAGE_DECOMPRESS_AND_VERIFY || CurrentCompressionExtension == "uncompressed") + return Target->MetaKey; + return Target->MetaKey + "." + CurrentCompressionExtension; +} +std::string pkgAcqDiffIndex::GetMetaKey() const +{ + return Target->MetaKey + ".diff/Index"; +} + /*}}}*/ +//pkgAcqTransactionItem::TransactionState and specialisations for child classes /*{{{*/ +bool pkgAcqTransactionItem::TransactionState(TransactionStates const state) +{ + bool const Debug = _config->FindB("Debug::Acquire::Transaction", false); + switch(state) + { + case TransactionAbort: + if(Debug == true) + std::clog << " Cancel: " << DestFile << std::endl; + if (Status == pkgAcquire::Item::StatIdle) + { + Status = pkgAcquire::Item::StatDone; + Dequeue(); + } + break; + case TransactionCommit: + if(PartialFile != "") + { + if(Debug == true) + std::clog << "mv " << PartialFile << " -> "<< DestFile << " # " << DescURI() << std::endl; + + Rename(PartialFile, DestFile); + } else { + if(Debug == true) + std::clog << "rm " << DestFile << " # " << DescURI() << std::endl; + unlink(DestFile.c_str()); + } + break; + } + return true; +} +bool pkgAcqMetaBase::TransactionState(TransactionStates const state) +{ + // Do not remove InRelease on IMSHit of Release.gpg [yes, this is very edgecasey] + if (TransactionManager->IMSHit == false) + return pkgAcqTransactionItem::TransactionState(state); + return true; +} +bool pkgAcqIndex::TransactionState(TransactionStates const state) +{ + if (pkgAcqTransactionItem::TransactionState(state) == false) + return false; + + switch (state) + { + case TransactionAbort: + if (Stage == STAGE_DECOMPRESS_AND_VERIFY) + { + // keep the compressed file, but drop the decompressed + EraseFileName.clear(); + if (PartialFile.empty() == false && flExtension(PartialFile) == "decomp") + unlink(PartialFile.c_str()); + } + break; + case TransactionCommit: + if (EraseFileName.empty() == false) + unlink(EraseFileName.c_str()); + break; + } + return true; +} +bool pkgAcqDiffIndex::TransactionState(TransactionStates const state) +{ + if (pkgAcqTransactionItem::TransactionState(state) == false) + return false; + + switch (state) + { + case TransactionCommit: + break; + case TransactionAbort: + std::string const Partial = GetPartialFileNameFromURI(Target->URI); + unlink(Partial.c_str()); + break; + } + + return true; +} + /*}}}*/ // Acquire::Item::Item - Constructor /*{{{*/ APT_IGNORE_DEPRECATED_PUSH -pkgAcquire::Item::Item(pkgAcquire *Owner, - HashStringList const &ExpectedHashes, - pkgAcqMetaBase *TransactionManager) - : Owner(Owner), FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), - Local(false), QueueCounter(0), TransactionManager(TransactionManager), - ExpectedAdditionalItems(0), ExpectedHashes(ExpectedHashes) +pkgAcquire::Item::Item(pkgAcquire * const Owner) : + FileSize(0), PartialSize(0), Mode(0), Complete(false), Local(false), + QueueCounter(0), ExpectedAdditionalItems(0), Owner(Owner) { Owner->Add(this); Status = StatIdle; - if(TransactionManager != NULL) - TransactionManager->Add(this); } APT_IGNORE_DEPRECATED_POP /*}}}*/ // Acquire::Item::~Item - Destructor /*{{{*/ -// --------------------------------------------------------------------- -/* */ pkgAcquire::Item::~Item() { Owner->Remove(this); } /*}}}*/ +std::string 
pkgAcquire::Item::Custom600Headers() const /*{{{*/ +{ + return std::string(); +} + /*}}}*/ +std::string pkgAcquire::Item::ShortDesc() const /*{{{*/ +{ + return DescURI(); +} + /*}}}*/ +APT_CONST void pkgAcquire::Item::Finished() /*{{{*/ +{ +} + /*}}}*/ +APT_PURE pkgAcquire * pkgAcquire::Item::GetOwner() const /*{{{*/ +{ + return Owner; +} + /*}}}*/ +APT_CONST bool pkgAcquire::Item::IsTrusted() const /*{{{*/ +{ + return false; +} + /*}}}*/ // Acquire::Item::Failed - Item failed to download /*{{{*/ // --------------------------------------------------------------------- /* We return to an idle state if there are still other queues that could fetch this object */ -void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +void pkgAcquire::Item::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) { if(ErrorText.empty()) ErrorText = LookupTag(Message,"Message"); @@ -169,59 +429,28 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) Complete = false; Dequeue(); } - else - Status = StatIdle; - // check fail reason string const FailReason = LookupTag(Message, "FailReason"); - if(FailReason == "MaximumSizeExceeded") + if (FailReason == "MaximumSizeExceeded") RenameOnError(MaximumSizeExceeded); + else if (Status == StatAuthError) + RenameOnError(HashSumMismatch); // report mirror failure back to LP if we actually use a mirror - if(FailReason.empty() == false) + if (FailReason.empty() == false) ReportMirrorFailure(FailReason); else ReportMirrorFailure(ErrorText); -} - /*}}}*/ -bool pkgAcquire::Item::TransactionState(TransactionStates const state) /*{{{*/ -{ - bool const Debug = _config->FindB("Debug::Acquire::Transaction", false); - switch(state) - { - case TransactionAbort: - if(Debug == true) - std::clog << " Cancel: " << DestFile << std::endl; - if (Status == pkgAcquire::Item::StatIdle) - { - Status = pkgAcquire::Item::StatDone; - Dequeue(); - } - break; - case TransactionCommit: - if(PartialFile != "") - { - if(Debug == true) - std::clog << "mv " << PartialFile << " -> "<< DestFile << " # " << DescURI() << std::endl; - Rename(PartialFile, DestFile); - } else { - if(Debug == true) - std::clog << "rm " << DestFile << " # " << DescURI() << std::endl; - unlink(DestFile.c_str()); - } - // mark that this transaction is finished - TransactionManager = 0; - break; - } - return true; + if (QueueCounter > 1) + Status = StatIdle; } /*}}}*/ // Acquire::Item::Start - Item has begun to download /*{{{*/ // --------------------------------------------------------------------- -/* Stash status and the file size. Note that setting Complete means +/* Stash status and the file size. Note that setting Complete means sub-phases of the acquire process such as decompresion are operating */ -void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size) +void pkgAcquire::Item::Start(string const &/*Message*/, unsigned long long const Size) { Status = StatFetching; ErrorText.clear(); @@ -230,22 +459,24 @@ void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size) } /*}}}*/ // Acquire::Item::Done - Item downloaded OK /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringList const &/*Hash*/, - pkgAcquire::MethodConfig * /*Cnf*/) +void pkgAcquire::Item::Done(string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const /*Cnf*/) { // We just downloaded something.. 
string FileName = LookupTag(Message,"Filename"); UsedMirror = LookupTag(Message,"UsedMirror"); - if (Complete == false && !Local && FileName == DestFile) + unsigned long long const downloadedSize = Hashes.FileSize(); + if (downloadedSize != 0) { - if (Owner->Log != 0) - Owner->Log->Fetched(Size,atoi(LookupTag(Message,"Resume-Point","0").c_str())); - } + if (Complete == false && !Local && FileName == DestFile) + { + if (Owner->Log != 0) + Owner->Log->Fetched(Hashes.FileSize(),atoi(LookupTag(Message,"Resume-Point","0").c_str())); + } - if (FileSize == 0) - FileSize= Size; + if (FileSize == 0) + FileSize= downloadedSize; + } Status = StatDone; ErrorText = string(); Owner->Dequeue(this); @@ -255,7 +486,7 @@ void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringLis // --------------------------------------------------------------------- /* This helper function is used by a lot of item methods as their final step */ -bool pkgAcquire::Item::Rename(string From,string To) +bool pkgAcquire::Item::Rename(string const &From,string const &To) { if (From == To || rename(From.c_str(),To.c_str()) == 0) return true; @@ -271,76 +502,40 @@ bool pkgAcquire::Item::Rename(string From,string To) return false; } /*}}}*/ -// Acquire::Item::QueueURI and specialisations from child classes /*{{{*/ -/* The idea here is that an item isn't queued if it exists on disk and the - transition manager was a hit as this means that the files it contains - the checksums for can't be updated either (or they are and we are asking - for a hashsum mismatch to happen which helps nobody) */ -bool pkgAcquire::Item::QueueURI(ItemDesc &Item) +void pkgAcquire::Item::Dequeue() /*{{{*/ { - std::string const FinalFile = GetFinalFilename(); - if (TransactionManager != NULL && TransactionManager->IMSHit == true && - FileExists(FinalFile) == true) - { - PartialFile = DestFile = FinalFile; - Status = StatDone; - return false; - } - - Owner->Enqueue(Item); - return true; + Owner->Dequeue(this); } -/* The transition manager InRelease itself (or its older sisters-in-law - Release & Release.gpg) is always queued as this allows us to rerun gpgv - on it to verify that we aren't stalled with old files */ -bool pkgAcqMetaBase::QueueURI(pkgAcquire::ItemDesc &Item) -{ - Owner->Enqueue(Item); - return true; -} -/* the Diff/Index needs to queue also the up-to-date complete index file - to ensure that the list cleaner isn't eating it */ -bool pkgAcqDiffIndex::QueueURI(pkgAcquire::ItemDesc &Item) -{ - if (pkgAcquire::Item::QueueURI(Item) == true) - return true; - QueueOnIMSHit(); - return false; -} - /*}}}*/ -void pkgAcquire::Item::Dequeue() /*{{{*/ -{ - Owner->Dequeue(this); -} - /*}}}*/ -bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/ + /*}}}*/ +bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/ { if (RealFileExists(DestFile)) Rename(DestFile, DestFile + ".FAILED"); + std::string errtext; switch (error) { case HashSumMismatch: - ErrorText = _("Hash Sum mismatch"); + errtext = _("Hash Sum mismatch"); Status = StatAuthError; ReportMirrorFailure("HashChecksumFailure"); break; case SizeMismatch: - ErrorText = _("Size mismatch"); + errtext = _("Size mismatch"); Status = StatAuthError; ReportMirrorFailure("SizeFailure"); break; case InvalidFormat: - ErrorText = _("Invalid file format"); + errtext = _("Invalid file format"); Status = StatError; // do not report as usually its not the mirrors fault, but Portal/Proxy break; case SignatureError: - ErrorText 
= _("Signature error"); + errtext = _("Signature error"); Status = StatError; break; case NotClearsigned: - ErrorText = _("Does not start with a cleartext signature"); + errtext = _("Does not start with a cleartext signature"); Status = StatError; break; case MaximumSizeExceeded: @@ -351,6 +546,8 @@ bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const // no handling here, done by callers break; } + if (ErrorText.empty()) + ErrorText = errtext; return false; } /*}}}*/ @@ -360,15 +557,8 @@ void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess)/*{{{*/ APT_IGNORE_DEPRECATED(Mode = ActiveSubprocess.c_str();) } /*}}}*/ -// Acquire::Item::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcquire::Item::GetFinalFilename() const -{ - return GetFinalFileNameFromURI(Desc.URI); -} - /*}}}*/ // Acquire::Item::ReportMirrorFailure /*{{{*/ -// --------------------------------------------------------------------- -void pkgAcquire::Item::ReportMirrorFailure(string FailCode) +void pkgAcquire::Item::ReportMirrorFailure(string const &FailCode) { // we only act if a mirror was used at all if(UsedMirror.empty()) @@ -411,2083 +601,1897 @@ void pkgAcquire::Item::ReportMirrorFailure(string FailCode) } } /*}}}*/ -// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* Get the DiffIndex file first and see if there are patches available - * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the - * patches. If anything goes wrong in that process, it will fall back to - * the original packages file - */ -pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser) - : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, - MetaIndexParser) +std::string pkgAcquire::Item::HashSum() const /*{{{*/ { - - Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - - RealURI = Target->URI; - Desc.Owner = this; - Desc.Description = Target->Description + ".diff/Index"; - Desc.ShortDesc = Target->ShortDesc; - Desc.URI = Target->URI + ".diff/Index"; - - DestFile = GetPartialFileNameFromURI(Desc.URI); + HashStringList const hashes = GetExpectedHashes(); + HashString const * const hs = hashes.find(NULL); + return hs != NULL ? 
hs->toStr() : ""; +} + /*}}}*/ - if(Debug) - std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl; +pkgAcqTransactionItem::pkgAcqTransactionItem(pkgAcquire * const Owner, /*{{{*/ + pkgAcqMetaBase * const TransactionManager, IndexTarget const * const Target) : + pkgAcquire::Item(Owner), Target(Target), TransactionManager(TransactionManager) +{ + if (TransactionManager != this) + TransactionManager->Add(this); +} + /*}}}*/ +pkgAcqTransactionItem::~pkgAcqTransactionItem() /*{{{*/ +{ +} + /*}}}*/ +HashStringList pkgAcqTransactionItem::GetExpectedHashesFor(std::string const MetaKey) const /*{{{*/ +{ + if (TransactionManager->MetaIndexParser == NULL) + return HashStringList(); + indexRecords::checkSum * const R = TransactionManager->MetaIndexParser->Lookup(MetaKey); + if (R == NULL) + return HashStringList(); + return R->Hashes; +} + /*}}}*/ - // look for the current package file - CurrentPackagesFile = GetFinalFileNameFromURI(RealURI); +// AcqMetaBase - Constructor /*{{{*/ +pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + std::vector const * const IndexTargets, + IndexTarget const &DataTarget, + indexRecords * const MetaIndexParser) +: pkgAcqTransactionItem(Owner, TransactionManager, NULL), DataTarget(DataTarget), + MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL), IndexTargets(IndexTargets), + AuthPass(false), IMSHit(false) +{ +} + /*}}}*/ +// AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/ +void pkgAcqMetaBase::Add(pkgAcqTransactionItem * const I) +{ + Transaction.push_back(I); +} + /*}}}*/ +// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/ +void pkgAcqMetaBase::AbortTransaction() +{ + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "AbortTransaction: " << TransactionManager << std::endl; - // FIXME: this file:/ check is a hack to prevent fetching - // from local sources. 
this is really silly, and - // should be fixed cleanly as soon as possible - if(!FileExists(CurrentPackagesFile) || - Desc.URI.substr(0,strlen("file:/")) == "file:/") + // ensure the toplevel is in error state too + for (std::vector::iterator I = Transaction.begin(); + I != Transaction.end(); ++I) { - // we don't have a pkg file or we don't want to queue - Failed("No index file, local or canceld by user", NULL); - return; + (*I)->TransactionState(TransactionAbort); } + Transaction.clear(); +} + /*}}}*/ +// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/ +APT_PURE bool pkgAcqMetaBase::TransactionHasError() const +{ + for (std::vector::const_iterator I = Transaction.begin(); + I != Transaction.end(); ++I) + { + switch((*I)->Status) { + case StatDone: break; + case StatIdle: break; + case StatAuthError: return true; + case StatError: return true; + case StatTransientNetworkError: return true; + case StatFetching: break; + } + } + return false; +} + /*}}}*/ +// AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/ +void pkgAcqMetaBase::CommitTransaction() +{ + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "CommitTransaction: " << this << std::endl; - if(Debug) - std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): " - << CurrentPackagesFile << std::endl; - - QueueURI(Desc); - + // move new files into place *and* remove files that are not + // part of the transaction but are still on disk + for (std::vector::iterator I = Transaction.begin(); + I != Transaction.end(); ++I) + { + (*I)->TransactionState(TransactionCommit); + } + Transaction.clear(); } /*}}}*/ -// Acquire::Item::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqDiffIndex::GetFinalFilename() const +// AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/ +void pkgAcqMetaBase::TransactionStageCopy(pkgAcqTransactionItem * const I, + const std::string &From, + const std::string &To) { - // the logic we inherent from pkgAcqBaseIndex isn't what we need here - return pkgAcquire::Item::GetFinalFilename(); + I->PartialFile = From; + I->DestFile = To; } /*}}}*/ -// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// AcqMetaBase::TransactionStageRemoval - Stage a file for removal /*{{{*/ +void pkgAcqMetaBase::TransactionStageRemoval(pkgAcqTransactionItem * const I, + const std::string &FinalFile) +{ + I->PartialFile = ""; + I->DestFile = FinalFile; +} + /*}}}*/ +// AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/ +bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message) +{ + // FIXME: this entire function can do now that we disallow going to + // a unauthenticated state and can cleanly rollback + + string const Final = I->GetFinalFilename(); + if(FileExists(Final)) + { + I->Status = StatTransientNetworkError; + _error->Warning(_("An error occurred during the signature " + "verification. The repository is not updated " + "and the previous index files will be used. 
" + "GPG error: %s: %s\n"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + RunScripts("APT::Update::Auth-Failure"); + return true; + } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) { + /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */ + _error->Error(_("GPG error: %s: %s"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + I->Status = StatError; + return true; + } else { + _error->Warning(_("GPG error: %s: %s"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + } + // gpgv method failed + ReportMirrorFailure("GPGFailure"); + return false; +} + /*}}}*/ +// AcqMetaBase::Custom600Headers - Get header for AcqMetaBase /*{{{*/ // --------------------------------------------------------------------- -/* The only header we use is the last-modified header. */ -#if APT_PKG_ABI >= 413 -string pkgAcqDiffIndex::Custom600Headers() const -#else -string pkgAcqDiffIndex::Custom600Headers() -#endif +string pkgAcqMetaBase::Custom600Headers() const { - string const Final = GetFinalFilename(); + std::string Header = "\nIndex-File: true"; + std::string MaximumSize; + strprintf(MaximumSize, "\nMaximum-Size: %i", + _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000)); + Header += MaximumSize; - if(Debug) - std::clog << "Custom600Header-IMS: " << Final << std::endl; + string const FinalFile = GetFinalFilename(); struct stat Buf; - if (stat(Final.c_str(),&Buf) != 0) - return "\nIndex-File: true"; - - return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + if (stat(FinalFile.c_str(),&Buf) == 0) + Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + + return Header; } /*}}}*/ -void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/ +// AcqMetaBase::QueueForSignatureVerify /*{{{*/ +void pkgAcqMetaBase::QueueForSignatureVerify(pkgAcqTransactionItem * const I, std::string const &File, std::string const &Signature) { - // list cleanup needs to know that this file as well as the already - // present index is ours, so we create an empty diff to save it for us - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, - ExpectedHashes, MetaIndexParser); + AuthPass = true; + I->Desc.URI = "gpgv:" + Signature; + I->DestFile = File; + QueueURI(I->Desc); + I->SetActiveSubprocess("gpgv"); } /*}}}*/ -bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ +// AcqMetaBase::CheckDownloadDone /*{{{*/ +bool pkgAcqMetaBase::CheckDownloadDone(pkgAcqTransactionItem * const I, const std::string &Message, HashStringList const &Hashes) const { - // failing here is fine: our caller will take care of trying to - // get the complete file if patching fails - if(Debug) - std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile - << std::endl; - - FileFd Fd(IndexDiffFile,FileFd::ReadOnly); - pkgTagFile TF(&Fd); - if (_error->PendingError() == true) - return false; - - pkgTagSection Tags; - if(unlikely(TF.Step(Tags) == false)) - return false; - - HashStringList ServerHashes; - unsigned long long ServerSize = 0; + // We have just finished downloading a Release file (it is not + // verified yet) - for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + string const FileName = LookupTag(Message,"Filename"); + if (FileName.empty() == true) { - std::string tagname = *type; - tagname.append("-Current"); - std::string const tmp = Tags.FindS(tagname.c_str()); - if (tmp.empty() == true) - continue; - - string hash; - unsigned long long size; - std::stringstream 
ss(tmp); - ss >> hash >> size; - if (unlikely(hash.empty() == true)) - continue; - if (unlikely(ServerSize != 0 && ServerSize != size)) - continue; - ServerHashes.push_back(HashString(*type, hash)); - ServerSize = size; + I->Status = StatError; + I->ErrorText = "Method gave a blank filename"; + return false; } - if (ServerHashes.usable() == false) + if (FileName != I->DestFile) { - if (Debug == true) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl; + I->Local = true; + I->Desc.URI = "copy:" + FileName; + I->QueueURI(I->Desc); return false; } - if (ServerHashes != HashSums()) + // make sure to verify against the right file on I-M-S hit + bool IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"), false); + if (IMSHit == false && Hashes.usable()) { - if (Debug == true) + // detect IMS-Hits servers haven't detected by Hash comparison + std::string const FinalFile = I->GetFinalFilename(); + if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true) { - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl; - printHashSumComparision(CurrentPackagesFile, ServerHashes, HashSums()); + IMSHit = true; + unlink(I->DestFile.c_str()); } - return false; } - if (ServerHashes.VerifyFile(CurrentPackagesFile) == true) + if(IMSHit == true) { - // we have the same sha1 as the server so we are done here - if(Debug) - std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl; - QueueOnIMSHit(); - return true; + // for simplicity, the transaction manager is always InRelease + // even if it doesn't exist. + if (TransactionManager != NULL) + TransactionManager->IMSHit = true; + I->PartialFile = I->DestFile = I->GetFinalFilename(); } - FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); - Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + // set Item to complete as the remaining work is all local (verify etc) + I->Complete = true; - if(Debug) - std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at " - << fd.Name() << " " << fd.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl; + return true; +} + /*}}}*/ +bool pkgAcqMetaBase::CheckAuthDone(string const &Message) /*{{{*/ +{ + // At this point, the gpgv method has succeeded, so there is a + // valid signature from a key in the trusted keyring. 
We + // perform additional verification of its contents, and use them + // to verify the indexes we are about to download - // parse all of (provided) history - vector available_patches; - bool firstAcceptedHashes = true; - for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + if (TransactionManager->IMSHit == false) { - if (LocalHashes.find(*type) == NULL) - continue; - - std::string tagname = *type; - tagname.append("-History"); - std::string const tmp = Tags.FindS(tagname.c_str()); - if (tmp.empty() == true) - continue; - - string hash, filename; - unsigned long long size; - std::stringstream ss(tmp); - - while (ss >> hash >> size >> filename) + // open the last (In)Release if we have it + std::string const FinalFile = GetFinalFilename(); + std::string FinalRelease; + std::string FinalInRelease; + if (APT::String::Endswith(FinalFile, "InRelease")) { - if (unlikely(hash.empty() == true || filename.empty() == true)) - continue; - - // see if we have a record for this file already - std::vector::iterator cur = available_patches.begin(); - for (; cur != available_patches.end(); ++cur) - { - if (cur->file != filename || unlikely(cur->result_size != size)) - continue; - cur->result_hashes.push_back(HashString(*type, hash)); - break; - } - if (cur != available_patches.end()) - continue; - if (firstAcceptedHashes == true) - { - DiffInfo next; - next.file = filename; - next.result_hashes.push_back(HashString(*type, hash)); - next.result_size = size; - next.patch_size = 0; - available_patches.push_back(next); - } + FinalInRelease = FinalFile; + FinalRelease = FinalFile.substr(0, FinalFile.length() - strlen("InRelease")) + "Release"; + } + else + { + FinalInRelease = FinalFile.substr(0, FinalFile.length() - strlen("Release")) + "InRelease"; + FinalRelease = FinalFile; + } + if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease)) + { + TransactionManager->LastMetaIndexParser = new indexRecords; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + TransactionManager->LastMetaIndexParser->Load(FinalInRelease); else + TransactionManager->LastMetaIndexParser->Load(FinalRelease); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) { - if (Debug == true) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename - << " wasn't in the list for the first parsed hash! (history)" << std::endl; - break; + delete TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; } + _error->RevertToStack(); } - firstAcceptedHashes = false; } - if (unlikely(available_patches.empty() == true)) + if (TransactionManager->MetaIndexParser->Load(DestFile) == false) { - if (Debug) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " - << "Couldn't find any patches for the patch series." 
<< std::endl; + Status = StatAuthError; + ErrorText = TransactionManager->MetaIndexParser->ErrorText; return false; } - for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + if (!VerifyVendor(Message)) { - if (LocalHashes.find(*type) == NULL) - continue; + Status = StatAuthError; + return false; + } - std::string tagname = *type; - tagname.append("-Patches"); - std::string const tmp = Tags.FindS(tagname.c_str()); - if (tmp.empty() == true) - continue; + if (_config->FindB("Debug::pkgAcquire::Auth", false)) + std::cerr << "Signature verification succeeded: " + << DestFile << std::endl; - string hash, filename; - unsigned long long size; - std::stringstream ss(tmp); + // Download further indexes with verification + QueueIndexes(true); - while (ss >> hash >> size >> filename) + return true; +} + /*}}}*/ +void pkgAcqMetaBase::QueueIndexes(bool const verify) /*{{{*/ +{ + // at this point the real Items are loaded in the fetcher + ExpectedAdditionalItems = 0; + + vector ::const_iterator Target; + for (Target = IndexTargets->begin(); + Target != IndexTargets->end(); + ++Target) + { + if (verify == true && TransactionManager->MetaIndexParser->Exists((*Target)->MetaKey) == false) { - if (unlikely(hash.empty() == true || filename.empty() == true)) + // optional target that we do not have in the Release file are skipped + if ((*Target)->IsOptional()) continue; - // see if we have a record for this file already - std::vector::iterator cur = available_patches.begin(); - for (; cur != available_patches.end(); ++cur) - { - if (cur->file != filename) - continue; - if (unlikely(cur->patch_size != 0 && cur->patch_size != size)) - continue; - cur->patch_hashes.push_back(HashString(*type, hash)); - cur->patch_size = size; - break; - } - if (cur != available_patches.end()) - continue; - if (Debug == true) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename - << " wasn't in the list for the first parsed hash! (patches)" << std::endl; - break; + Status = StatAuthError; + strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str()); + return; } + + /* Queue the Index file (Packages, Sources, Translation-$foo + (either diff or full packages files, depending + on the users option) - we also check if the PDiff Index file is listed + in the Meta-Index file. 
Ideal would be if pkgAcqDiffIndex would test this + instead, but passing the required info to it is to much hassle */ + if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false || + TransactionManager->MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)) + new pkgAcqDiffIndex(Owner, TransactionManager, *Target); + else + new pkgAcqIndex(Owner, TransactionManager, *Target); } +} + /*}}}*/ +bool pkgAcqMetaBase::VerifyVendor(string const &Message) /*{{{*/ +{ + string::size_type pos; - bool foundStart = false; - for (std::vector::iterator cur = available_patches.begin(); - cur != available_patches.end(); ++cur) + // check for missing sigs (that where not fatal because otherwise we had + // bombed earlier) + string missingkeys; + string msg = _("There is no public key available for the " + "following key IDs:\n"); + pos = Message.find("NO_PUBKEY "); + if (pos != std::string::npos) { - if (LocalHashes != cur->result_hashes) - continue; - - available_patches.erase(available_patches.begin(), cur); - foundStart = true; - break; + string::size_type start = pos+strlen("NO_PUBKEY "); + string Fingerprint = Message.substr(start, Message.find("\n")-start); + missingkeys += (Fingerprint); } + if(!missingkeys.empty()) + _error->Warning("%s", (msg + missingkeys).c_str()); - if (foundStart == false || unlikely(available_patches.empty() == true)) + string Transformed = TransactionManager->MetaIndexParser->GetExpectedDist(); + + if (Transformed == "../project/experimental") { - if (Debug) - std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " - << "Couldn't find the start of the patch series." << std::endl; - return false; + Transformed = "experimental"; } - // patching with too many files is rather slow compared to a fast download - unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0); - if (fileLimit != 0 && fileLimit < available_patches.size()) + pos = Transformed.rfind('/'); + if (pos != string::npos) { - if (Debug) - std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit - << ") so fallback to complete download" << std::endl; - return false; + Transformed = Transformed.substr(0, pos); } - // calculate the size of all patches we have to get - // note that all sizes are uncompressed, while we download compressed files - unsigned long long patchesSize = 0; - for (std::vector::const_iterator cur = available_patches.begin(); - cur != available_patches.end(); ++cur) - patchesSize += cur->patch_size; - unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); - if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) + if (Transformed == ".") { - if (Debug) - std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100 - << ") so fallback to complete download" << std::endl; - return false; + Transformed = ""; } - // we have something, queue the diffs - string::size_type const last_space = Description.rfind(" "); - if(last_space != string::npos) - Description.erase(last_space, Description.size()-last_space); + if (_config->FindB("Acquire::Check-Valid-Until", true) == true && + TransactionManager->MetaIndexParser->GetValidUntil() > 0) { + time_t const invalid_since = time(NULL) - TransactionManager->MetaIndexParser->GetValidUntil(); + if (invalid_since > 0) + { + std::string errmsg; + strprintf(errmsg, + // TRANSLATOR: The first %s is the URL of the bad Release file, the second is + // the time since then the file is invalid - formated in the same way as in + // the 
download progress display (e.g. 7d 3h 42min 1s) + _("Release file for %s is expired (invalid since %s). " + "Updates for this repository will not be applied."), + DataTarget.URI.c_str(), TimeToStr(invalid_since).c_str()); + if (ErrorText.empty()) + ErrorText = errmsg; + return _error->Error("%s", errmsg.c_str()); + } + } - /* decide if we should download patches one by one or in one go: - The first is good if the server merges patches, but many don't so client - based merging can be attempt in which case the second is better. - "bad things" will happen if patches are merged on the server, - but client side merging is attempt as well */ - bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); - if (pdiff_merge == true) + /* Did we get a file older than what we have? This is a last minute IMS hit and doubles + as a prevention of downgrading us to older (still valid) files */ + if (TransactionManager->IMSHit == false && TransactionManager->LastMetaIndexParser != NULL && + TransactionManager->LastMetaIndexParser->GetDate() > TransactionManager->MetaIndexParser->GetDate()) { - // reprepro adds this flag if it has merged patches on the server - std::string const precedence = Tags.FindS("X-Patch-Precedence"); - pdiff_merge = (precedence != "merged"); + TransactionManager->IMSHit = true; + unlink(DestFile.c_str()); + PartialFile = DestFile = GetFinalFilename(); + delete TransactionManager->MetaIndexParser; + TransactionManager->MetaIndexParser = TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; } - if (pdiff_merge == false) + if (_config->FindB("Debug::pkgAcquire::Auth", false)) { - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, ExpectedHashes, - MetaIndexParser, available_patches); + std::cerr << "Got Codename: " << TransactionManager->MetaIndexParser->GetDist() << std::endl; + std::cerr << "Expecting Dist: " << TransactionManager->MetaIndexParser->GetExpectedDist() << std::endl; + std::cerr << "Transformed Dist: " << Transformed << std::endl; } - else + + if (TransactionManager->MetaIndexParser->CheckDist(Transformed) == false) { - std::vector *diffs = new std::vector(available_patches.size()); - for(size_t i = 0; i < available_patches.size(); ++i) - (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager, - Target, - ExpectedHashes, - MetaIndexParser, - available_patches[i], - diffs); + // This might become fatal one day +// Status = StatAuthError; +// ErrorText = "Conflicting distribution; expected " +// + MetaIndexParser->GetExpectedDist() + " but got " +// + MetaIndexParser->GetDist(); +// return false; + if (!Transformed.empty()) + { + _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"), + Desc.Description.c_str(), + Transformed.c_str(), + TransactionManager->MetaIndexParser->GetDist().c_str()); + } } - Complete = false; - Status = StatDone; - Dequeue(); return true; } /*}}}*/ -void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ -{ - Item::Failed(Message,Cnf); - Status = StatDone; - - if(Debug) - std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl - << "Falling back to normal index file acquire" << std::endl; - new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); +pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire * const Owner, /*{{{*/ + IndexTarget const &ClearsignedTarget, + IndexTarget const &DetachedDataTarget, IndexTarget const &DetachedSigTarget, + const vector* const IndexTargets, + 
indexRecords * const MetaIndexParser) : + pkgAcqMetaIndex(Owner, this, ClearsignedTarget, DetachedSigTarget, IndexTargets, MetaIndexParser), + ClearsignedTarget(ClearsignedTarget), + DetachedDataTarget(DetachedDataTarget), DetachedSigTarget(DetachedSigTarget) +{ + // index targets + (worst case:) Release/Release.gpg + ExpectedAdditionalItems = IndexTargets->size() + 2; + TransactionManager->Add(this); } /*}}}*/ -bool pkgAcqDiffIndex::TransactionState(TransactionStates const state) /*{{{*/ +pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/ { - if (pkgAcquire::Item::TransactionState(state) == false) - return false; - - switch (state) - { - case TransactionCommit: - break; - case TransactionAbort: - std::string const Partial = GetPartialFileNameFromURI(RealURI); - unlink(Partial.c_str()); - break; - } - - return true; } /*}}}*/ -void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ - pkgAcquire::MethodConfig *Cnf) +// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/ +string pkgAcqMetaClearSig::Custom600Headers() const { - if(Debug) - std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl; - - Item::Done(Message, Size, Hashes, Cnf); + string Header = pkgAcqMetaBase::Custom600Headers(); + Header += "\nFail-Ignore: true"; + return Header; +} + /*}}}*/ +// pkgAcqMetaClearSig::Done - We got a file /*{{{*/ +class APT_HIDDEN DummyItem : public pkgAcquire::Item +{ + IndexTarget const * const Target; + public: + virtual std::string DescURI() const {return Target->URI;}; + virtual HashStringList GetExpectedHashes() const {return HashStringList();}; - // verify the index target - if(Target && Target->MetaKey != "" && MetaIndexParser && Hashes.usable()) + DummyItem(pkgAcquire * const Owner, IndexTarget const * const Target) : + pkgAcquire::Item(Owner), Target(Target) { - std::string IndexMetaKey = Target->MetaKey + ".diff/Index"; - indexRecords::checkSum *Record = MetaIndexParser->Lookup(IndexMetaKey); - if(Record && Record->Hashes.usable() && Hashes != Record->Hashes) - { - RenameOnError(HashSumMismatch); - printHashSumComparision(RealURI, Record->Hashes, Hashes); - Failed(Message, Cnf); - return; - } - + Status = StatDone; + DestFile = GetFinalFileNameFromURI(Target->URI); } +}; +void pkgAcqMetaClearSig::Done(std::string const &Message, + HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf) +{ + Item::Done(Message, Hashes, Cnf); - string const FinalFile = GetFinalFilename(); - if(StringToBool(LookupTag(Message,"IMS-Hit"),false)) - DestFile = FinalFile; - - if(ParseDiffIndex(DestFile) == false) + // if we expect a ClearTextSignature (InRelease), ensure that + // this is what we get and if not fail to queue a + // Release/Release.gpg, see #346386 + if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile)) { - Failed("Message: Couldn't parse pdiff index", Cnf); - // queue for final move - this should happen even if we fail - // while parsing (e.g. on sizelimit) and download the complete file. 
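
A sketch of the StartsWithGPGClearTextSignature() test used in the condition above (its body continues below), assuming only that it matches the usual OpenPGP armor header; LooksClearsigned is an invented name for illustration:

#include <cstdio>
#include <cstring>

static bool LooksClearsigned(char const * const path)
{
   static char const armor[] = "-----BEGIN PGP SIGNED MESSAGE-----";
   char buffer[sizeof(armor)] = "";
   FILE * const fp = fopen(path, "r");
   if (fp == NULL)
      return false;
   size_t const got = fread(buffer, 1, sizeof(armor) - 1, fp);
   fclose(fp);
   // enough bytes read and an exact match of the armor header
   return got == sizeof(armor) - 1 && memcmp(buffer, armor, sizeof(armor) - 1) == 0;
}
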
- TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); + pkgAcquire::Item::Failed(Message, Cnf); + RenameOnError(NotClearsigned); + TransactionManager->AbortTransaction(); return; } - TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); - - Complete = true; - Status = StatDone; - Dequeue(); - - return; + if(AuthPass == false) + { + if(CheckDownloadDone(this, Message, Hashes) == true) + QueueForSignatureVerify(this, DestFile, DestFile); + return; + } + else if(CheckAuthDone(Message) == true) + { + if (TransactionManager->IMSHit == false) + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + else if (RealFileExists(GetFinalFilename()) == false) + { + // We got an InRelease file IMSHit, but we haven't one, which means + // we had a valid Release/Release.gpg combo stepping in, which we have + // to 'acquire' now to ensure list cleanup isn't removing them + new DummyItem(Owner, &DetachedDataTarget); + new DummyItem(Owner, &DetachedSigTarget); + } + } } /*}}}*/ -// AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* The package diff is added to the queue. one object is constructed - * for each diff and the index - */ -pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser, - vector diffs) - : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser), - available_patches(diffs) +void pkgAcqMetaClearSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) /*{{{*/ { - DestFile = GetPartialFileNameFromURI(Target->URI); - - Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); + Item::Failed(Message, Cnf); - RealURI = Target->URI; - Desc.Owner = this; - Description = Target->Description; - Desc.ShortDesc = Target->ShortDesc; + // we failed, we will not get additional items from this method + ExpectedAdditionalItems = 0; - if(available_patches.empty() == true) + if (AuthPass == false) { - // we are done (yeah!), check hashes against the final file - DestFile = GetFinalFileNameFromURI(Target->URI); - Finish(true); + // Queue the 'old' InRelease file for removal if we try Release.gpg + // as otherwise the file will stay around and gives a false-auth + // impression (CVE-2012-0214) + TransactionManager->TransactionStageRemoval(this, GetFinalFilename()); + Status = StatDone; + + new pkgAcqMetaIndex(Owner, TransactionManager, DetachedDataTarget, DetachedSigTarget, IndexTargets, TransactionManager->MetaIndexParser); } else { - // patching needs to be bootstrapped with the 'old' version - std::string const PartialFile = GetPartialFileNameFromURI(RealURI); - if (RealFileExists(PartialFile) == false) + if(CheckStopAuthentication(this, Message)) + return; + + _error->Warning(_("The data from '%s' is not signed. 
Packages " + "from that repository can not be authenticated."), + ClearsignedTarget.Description.c_str()); + + // No Release file was present, or verification failed, so fall + // back to queueing Packages files without verification + // only allow going further if the users explicitely wants it + if(AllowInsecureRepositories(TransactionManager->MetaIndexParser, TransactionManager, this) == true) { - if (symlink(GetFinalFilename().c_str(), PartialFile.c_str()) != 0) + Status = StatDone; + + /* InRelease files become Release files, otherwise + * they would be considered as trusted later on */ + string const FinalRelease = GetFinalFileNameFromURI(DetachedDataTarget.URI); + string const PartialRelease = GetPartialFileNameFromURI(DetachedDataTarget.URI); + string const FinalReleasegpg = GetFinalFileNameFromURI(DetachedSigTarget.URI); + string const FinalInRelease = GetFinalFilename(); + Rename(DestFile, PartialRelease); + TransactionManager->TransactionStageCopy(this, PartialRelease, FinalRelease); + + if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) { - Failed("Link creation of " + PartialFile + " to " + GetFinalFilename() + " failed", NULL); - return; + // open the last Release if we have it + if (TransactionManager->IMSHit == false) + { + TransactionManager->LastMetaIndexParser = new indexRecords; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + TransactionManager->LastMetaIndexParser->Load(FinalInRelease); + else + TransactionManager->LastMetaIndexParser->Load(FinalRelease); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) + { + delete TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; + } + _error->RevertToStack(); + } } - } - // get the next diff - State = StateFetchDiff; - QueueNextDiff(); + // we parse the indexes here because at this point the user wanted + // a repository that may potentially harm him + if (TransactionManager->MetaIndexParser->Load(PartialRelease) == false || VerifyVendor(Message) == false) + /* expired Release files are still a problem you need extra force for */; + else + QueueIndexes(true); + } } } /*}}}*/ -void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ -{ - Item::Failed(Message,Cnf); - Status = StatDone; - if(Debug) - std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl - << "Falling back to normal index file acquire" << std::endl; - DestFile = GetPartialFileNameFromURI(Target->URI); - RenameOnError(PDiffError); - new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); - Finish(); -} - /*}}}*/ -// Finish - helper that cleans the item out of the fetcher queue /*{{{*/ -void pkgAcqIndexDiffs::Finish(bool allDone) +pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire * const Owner, /*{{{*/ + pkgAcqMetaBase * const TransactionManager, + IndexTarget const &DataTarget, + IndexTarget const &DetachedSigTarget, + vector const * const IndexTargets, + indexRecords * const MetaIndexParser) : + pkgAcqMetaBase(Owner, TransactionManager, IndexTargets, DataTarget, MetaIndexParser), + DetachedSigTarget(DetachedSigTarget) { - if(Debug) - std::clog << "pkgAcqIndexDiffs::Finish(): " - << allDone << " " - << Desc.URI << std::endl; - - // we restore the original name, this is required, otherwise - // the file will be cleaned - if(allDone) - { - if(HashSums().usable() && !HashSums().VerifyFile(DestFile)) - { - RenameOnError(HashSumMismatch); - Dequeue(); - return; - } + 
if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgAcqMetaIndex with TransactionManager " + << this->TransactionManager << std::endl; - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + DestFile = GetPartialFileNameFromURI(DataTarget.URI); - // this is for the "real" finish - Complete = true; - Status = StatDone; - Dequeue(); - if(Debug) - std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl; - return; - } + // Create the item + Desc.Description = DataTarget.Description; + Desc.Owner = this; + Desc.ShortDesc = DataTarget.ShortDesc; + Desc.URI = DataTarget.URI; - if(Debug) - std::clog << "Finishing: " << Desc.URI << std::endl; - Complete = false; - Status = StatDone; - Dequeue(); - return; + // we expect more item + ExpectedAdditionalItems = IndexTargets->size(); + QueueURI(Desc); } /*}}}*/ -bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ +void pkgAcqMetaIndex::Done(string const &Message, /*{{{*/ + HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cfg) { - // calc sha1 of the just patched file - std::string const FinalFile = GetPartialFileNameFromURI(RealURI); + Item::Done(Message,Hashes,Cfg); - if(!FileExists(FinalFile)) + if(CheckDownloadDone(this, Message, Hashes)) { - Failed("Message: No FinalFile " + FinalFile + " available", NULL); - return false; + // we have a Release file, now download the Signature, all further + // verify/queue for additional downloads will be done in the + // pkgAcqMetaSig::Done() code + new pkgAcqMetaSig(Owner, TransactionManager, &DetachedSigTarget, this); } +} + /*}}}*/ +// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/ +void pkgAcqMetaIndex::Failed(string const &Message, + pkgAcquire::MethodConfig const * const Cnf) +{ + pkgAcquire::Item::Failed(Message, Cnf); + Status = StatDone; - FileFd fd(FinalFile, FileFd::ReadOnly); - Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - - if(Debug) - std::clog << "QueueNextDiff: " << FinalFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl; + _error->Warning(_("The repository '%s' does not have a Release file. 
" + "This is deprecated, please contact the owner of the " + "repository."), DataTarget.Description.c_str()); - if (unlikely(LocalHashes.usable() == false || ExpectedHashes.usable() == false)) + // No Release file was present so fall + // back to queueing Packages files without verification + // only allow going further if the users explicitely wants it + if(AllowInsecureRepositories(TransactionManager->MetaIndexParser, TransactionManager, this) == true) { - Failed("Local/Expected hashes are not usable", NULL); - return false; - } - + // ensure old Release files are removed + TransactionManager->TransactionStageRemoval(this, GetFinalFilename()); + delete TransactionManager->MetaIndexParser; + TransactionManager->MetaIndexParser = NULL; - // final file reached before all patches are applied - if(LocalHashes == ExpectedHashes) - { - Finish(true); - return true; - } - - // remove all patches until the next matching patch is found - // this requires the Index file to be ordered - for(vector::iterator I = available_patches.begin(); - available_patches.empty() == false && - I != available_patches.end() && - I->result_hashes != LocalHashes; - ++I) - { - available_patches.erase(I); + // queue without any kind of hashsum support + QueueIndexes(false); } +} + /*}}}*/ +void pkgAcqMetaIndex::Finished() /*{{{*/ +{ + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "Finished: " << DestFile <TransactionHasError() == false) + TransactionManager->CommitTransaction(); +} + /*}}}*/ +std::string pkgAcqMetaIndex::DescURI() const /*{{{*/ +{ + return DataTarget.URI; +} + /*}}}*/ - // error checking and falling back if no patch was found - if(available_patches.empty() == true) - { - Failed("No patches left to reach target", NULL); - return false; - } +// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/ +pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + pkgAcqMetaIndex * const MetaIndex) : + pkgAcqTransactionItem(Owner, TransactionManager, Target), MetaIndex(MetaIndex) +{ + DestFile = GetPartialFileNameFromURI(Target->URI); - // queue the right diff - Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz"; - Desc.Description = Description + " " + available_patches[0].file + string(".pdiff"); - DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + available_patches[0].file); + // remove any partial downloaded sig-file in partial/. 
+ // it may confuse proxies and is too small to warrant a + // partial download anyway + unlink(DestFile.c_str()); - if(Debug) - std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl; + // set the TransactionManager + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgAcqMetaSig with TransactionManager " + << TransactionManager << std::endl; - QueueURI(Desc); + // Create the item + Desc.Description = Target->Description; + Desc.Owner = this; + Desc.ShortDesc = Target->ShortDesc; + Desc.URI = Target->URI; - return true; + // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors), + // so we skip the download step and go instantly to verification + if (TransactionManager->IMSHit == true && RealFileExists(GetFinalFilename())) + { + Complete = true; + Status = StatDone; + PartialFile = DestFile = GetFinalFilename(); + MetaIndexFileSignature = DestFile; + MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); + } + else + QueueURI(Desc); } /*}}}*/ -void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringList const &Hashes, /*{{{*/ - pkgAcquire::MethodConfig *Cnf) +pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/ { - if(Debug) - std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl; - - Item::Done(Message, Size, Hashes, Cnf); - - // FIXME: verify this download too before feeding it to rred - std::string const FinalFile = GetPartialFileNameFromURI(RealURI); - - // success in downloading a diff, enter ApplyDiff state - if(State == StateFetchDiff) +} + /*}}}*/ +// AcqMetaSig::Done - The signature was downloaded/verified /*{{{*/ +void pkgAcqMetaSig::Done(string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cfg) +{ + if (MetaIndexFileSignature.empty() == false) { - FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); - class Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + DestFile = MetaIndexFileSignature; + MetaIndexFileSignature.clear(); + } + Item::Done(Message, Hashes, Cfg); - if (fd.Size() != available_patches[0].patch_size || - available_patches[0].patch_hashes != LocalHashes) + if(MetaIndex->AuthPass == false) + { + if(MetaIndex->CheckDownloadDone(this, Message, Hashes) == true) { - // patchfiles are dated, so bad indicates a bad download, so kill it - unlink(DestFile.c_str()); - Failed("Patch has Size/Hashsum mismatch", NULL); - return; + // destfile will be modified to point to MetaIndexFile for the + // gpgv method, so we need to save it here + MetaIndexFileSignature = DestFile; + MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); + } + return; + } + else if(MetaIndex->CheckAuthDone(Message) == true) + { + if (TransactionManager->IMSHit == false) + { + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); } + } +} + /*}}}*/ +void pkgAcqMetaSig::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/ +{ + Item::Failed(Message,Cnf); - // rred excepts the patch as $FinalFile.ed - Rename(DestFile,FinalFile+".ed"); + // check if we need to fail at this point + if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message)) + return; - if(Debug) - std::clog << "Sending to rred method: " << FinalFile << std::endl; + string const FinalRelease = 
MetaIndex->GetFinalFilename(); + string const FinalReleasegpg = GetFinalFilename(); + string const FinalInRelease = TransactionManager->GetFinalFilename(); - State = StateApplyDiff; - Local = true; - Desc.URI = "rred:" + FinalFile; - QueueURI(Desc); - SetActiveSubprocess("rred"); - return; - } + if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) + { + std::string downgrade_msg; + strprintf(downgrade_msg, _("The repository '%s' is no longer signed."), + MetaIndex->DataTarget.Description.c_str()); + if(_config->FindB("Acquire::AllowDowngradeToInsecureRepositories")) + { + // meh, the users wants to take risks (we still mark the packages + // from this repository as unauthenticated) + _error->Warning("%s", downgrade_msg.c_str()); + _error->Warning(_("This is normally not allowed, but the option " + "Acquire::AllowDowngradeToInsecureRepositories was " + "given to override it.")); + Status = StatDone; + } else { + _error->Error("%s", downgrade_msg.c_str()); + if (TransactionManager->IMSHit == false) + Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED"); + Item::Failed("Message: " + downgrade_msg, Cnf); + TransactionManager->AbortTransaction(); + return; + } + } + else + _error->Warning(_("The data from '%s' is not signed. Packages " + "from that repository can not be authenticated."), + MetaIndex->DataTarget.Description.c_str()); + // ensures that a Release.gpg file in the lists/ is removed by the transaction + TransactionManager->TransactionStageRemoval(this, DestFile); - // success in download/apply a diff, queue next (if needed) - if(State == StateApplyDiff) + // only allow going further if the users explicitely wants it + if(AllowInsecureRepositories(TransactionManager->MetaIndexParser, TransactionManager, this) == true) { - // remove the just applied patch - available_patches.erase(available_patches.begin()); - unlink((FinalFile + ".ed").c_str()); - - // move into place - if(Debug) + if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) { - std::clog << "Moving patched file in place: " << std::endl - << DestFile << " -> " << FinalFile << std::endl; + // open the last Release if we have it + if (TransactionManager->IMSHit == false) + { + TransactionManager->LastMetaIndexParser = new indexRecords; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + TransactionManager->LastMetaIndexParser->Load(FinalInRelease); + else + TransactionManager->LastMetaIndexParser->Load(FinalRelease); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) + { + delete TransactionManager->LastMetaIndexParser; + TransactionManager->LastMetaIndexParser = NULL; + } + _error->RevertToStack(); + } } - Rename(DestFile,FinalFile); - chmod(FinalFile.c_str(),0644); - // see if there is more to download - if(available_patches.empty() == false) { - new pkgAcqIndexDiffs(Owner, TransactionManager, Target, - ExpectedHashes, MetaIndexParser, - available_patches); - return Finish(); - } else - // update - DestFile = FinalFile; - return Finish(true); + // we parse the indexes here because at this point the user wanted + // a repository that may potentially harm him + if (TransactionManager->MetaIndexParser->Load(MetaIndex->DestFile) == false || MetaIndex->VerifyVendor(Message) == false) + /* expired Release files are still a problem you need extra force for */; + else + MetaIndex->QueueIndexes(true); + + TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); + } + + // FIXME: this is 
used often (e.g. in pkgAcqIndexTrans) so refactor + if (Cnf->LocalOnly == true || + StringToBool(LookupTag(Message,"Transient-Failure"),false) == false) + { + // Ignore this + Status = StatDone; } } /*}}}*/ -// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/ -pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser, - DiffInfo const &patch, - std::vector const * const allPatches) - : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser), - patch(patch), allPatches(allPatches), State(StateFetchDiff) + + +// AcqBaseIndex - Constructor /*{{{*/ +pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target) +: pkgAcqTransactionItem(Owner, TransactionManager, Target) +{ +} + /*}}}*/ + +// AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/ +// --------------------------------------------------------------------- +/* Get the DiffIndex file first and see if there are patches available + * If so, create a pkgAcqIndexDiffs fetcher that will get and apply the + * patches. If anything goes wrong in that process, it will fall back to + * the original packages file + */ +pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target) + : pkgAcqBaseIndex(Owner, TransactionManager, Target) { Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - RealURI = Target->URI; Desc.Owner = this; - Description = Target->Description; + Desc.Description = Target->Description + ".diff/Index"; Desc.ShortDesc = Target->ShortDesc; + Desc.URI = Target->URI + ".diff/Index"; - Desc.URI = RealURI + ".diff/" + patch.file + ".gz"; - Desc.Description = Description + " " + patch.file + string(".pdiff"); + DestFile = GetPartialFileNameFromURI(Desc.URI); + + if(Debug) + std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl; - DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + patch.file); + // look for the current package file + CurrentPackagesFile = GetFinalFileNameFromURI(Target->URI); + + // FIXME: this file:/ check is a hack to prevent fetching + // from local sources. this is really silly, and + // should be fixed cleanly as soon as possible + if(!FileExists(CurrentPackagesFile) || + Desc.URI.substr(0,strlen("file:/")) == "file:/") + { + // we don't have a pkg file or we don't want to queue + Failed("No index file, local or canceld by user", NULL); + return; + } if(Debug) - std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl; + std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): " + << CurrentPackagesFile << std::endl; QueueURI(Desc); + } /*}}}*/ -void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ +// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +/* The only header we use is the last-modified header. 
*/ +string pkgAcqDiffIndex::Custom600Headers() const { - if(Debug) - std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl; - - Item::Failed(Message,Cnf); - Status = StatDone; + string const Final = GetFinalFilename(); - // check if we are the first to fail, otherwise we are done here - State = StateDoneDiff; - for (std::vector::const_iterator I = allPatches->begin(); - I != allPatches->end(); ++I) - if ((*I)->State == StateErrorDiff) - return; + if(Debug) + std::clog << "Custom600Header-IMS: " << Final << std::endl; - // first failure means we should fallback - State = StateErrorDiff; - if (Debug) - std::clog << "Falling back to normal index file acquire" << std::endl; - DestFile = GetPartialFileNameFromURI(Target->URI); - RenameOnError(PDiffError); - new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); + struct stat Buf; + if (stat(Final.c_str(),&Buf) != 0) + return "\nIndex-File: true"; + + return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); +} + /*}}}*/ +void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/ +{ + // list cleanup needs to know that this file as well as the already + // present index is ours, so we create an empty diff to save it for us + new pkgAcqIndexDiffs(Owner, TransactionManager, Target); } /*}}}*/ -void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ - pkgAcquire::MethodConfig *Cnf) +bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ { + // failing here is fine: our caller will take care of trying to + // get the complete file if patching fails if(Debug) - std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl; - - Item::Done(Message,Size,Hashes,Cnf); - - // FIXME: verify download before feeding it to rred - string const FinalFile = GetPartialFileNameFromURI(RealURI); - - if (State == StateFetchDiff) - { - FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); - class Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - - if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes) - { - // patchfiles are dated, so bad indicates a bad download, so kill it - unlink(DestFile.c_str()); - Failed("Patch has Size/Hashsum mismatch", NULL); - return; - } + std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile + << std::endl; - // rred expects the patch as $FinalFile.ed.$patchname.gz - Rename(DestFile, FinalFile + ".ed." 
+ patch.file + ".gz"); + FileFd Fd(IndexDiffFile,FileFd::ReadOnly); + pkgTagFile TF(&Fd); + if (_error->PendingError() == true) + return false; - // check if this is the last completed diff - State = StateDoneDiff; - for (std::vector::const_iterator I = allPatches->begin(); - I != allPatches->end(); ++I) - if ((*I)->State != StateDoneDiff) - { - if(Debug) - std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl; - return; - } + pkgTagSection Tags; + if(unlikely(TF.Step(Tags) == false)) + return false; - // this is the last completed diff, so we are ready to apply now - State = StateApplyDiff; + HashStringList ServerHashes; + unsigned long long ServerSize = 0; - // patching needs to be bootstrapped with the 'old' version - if (symlink(GetFinalFilename().c_str(), FinalFile.c_str()) != 0) - { - Failed("Link creation of " + FinalFile + " to " + GetFinalFilename() + " failed", NULL); - return; - } + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + std::string tagname = *type; + tagname.append("-Current"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; - if(Debug) - std::clog << "Sending to rred method: " << FinalFile << std::endl; + string hash; + unsigned long long size; + std::stringstream ss(tmp); + ss >> hash >> size; + if (unlikely(hash.empty() == true)) + continue; + if (unlikely(ServerSize != 0 && ServerSize != size)) + continue; + ServerHashes.push_back(HashString(*type, hash)); + ServerSize = size; + } - Local = true; - Desc.URI = "rred:" + FinalFile; - QueueURI(Desc); - SetActiveSubprocess("rred"); - return; + if (ServerHashes.usable() == false) + { + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl; + return false; } - // success in download/apply all diffs, clean up - else if (State == StateApplyDiff) + + HashStringList const TargetFileHashes = GetExpectedHashesFor(Target->MetaKey); + if (TargetFileHashes.usable() == false || ServerHashes != TargetFileHashes) { - // see if we really got the expected file - if(ExpectedHashes.usable() && ExpectedHashes != Hashes) + if (Debug == true) { - RenameOnError(HashSumMismatch); - return; + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl; + printHashSumComparision(CurrentPackagesFile, ServerHashes, TargetFileHashes); } + return false; + } - // move the result into place - std::string const Final = GetFinalFilename(); + if (ServerHashes.VerifyFile(CurrentPackagesFile) == true) + { + // we have the same sha1 as the server so we are done here if(Debug) - std::clog << "Queue patched file in place: " << std::endl - << DestFile << " -> " << Final << std::endl; + std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl; + QueueOnIMSHit(); + return true; + } - // queue for copy by the transaction manager - TransactionManager->TransactionStageCopy(this, DestFile, Final); + FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); + Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - // ensure the ed's are gone regardless of list-cleanup - for (std::vector::const_iterator I = allPatches->begin(); - I != allPatches->end(); ++I) - { - std::string const PartialFile = GetPartialFileNameFromURI(RealURI); - std::string patch = PartialFile + ".ed." 
+ (*I)->patch.file + ".gz"; - unlink(patch.c_str()); - } - unlink(FinalFile.c_str()); + if(Debug) + std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at " + << fd.Name() << " " << fd.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl; - // all set and done - Complete = true; - if(Debug) - std::clog << "allDone: " << DestFile << "\n" << std::endl; - } -} - /*}}}*/ -// AcqBaseIndex - Constructor /*{{{*/ -pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser) -: Item(Owner, ExpectedHashes, TransactionManager), Target(Target), - MetaIndexParser(MetaIndexParser) -{ -} - /*}}}*/ -// AcqBaseIndex::VerifyHashByMetaKey - verify hash for the given metakey /*{{{*/ -bool pkgAcqBaseIndex::VerifyHashByMetaKey(HashStringList const &Hashes) -{ - if(MetaKey != "" && Hashes.usable()) + // parse all of (provided) history + vector available_patches; + bool firstAcceptedHashes = true; + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) { - indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); - if(Record && Record->Hashes.usable() && Hashes != Record->Hashes) - { - printHashSumComparision(RealURI, Record->Hashes, Hashes); - return false; - } - } - return true; -} - /*}}}*/ -// AcqBaseIndex::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqBaseIndex::GetFinalFilename() const -{ - return GetFinalFileNameFromURI(RealURI); -} - /*}}}*/ -// AcqIndex::AcqIndex - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* The package file is added to the queue and a second class is - instantiated to fetch the revision file */ -pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc, - HashStringList const &ExpectedHash) - : pkgAcqBaseIndex(Owner, 0, NULL, ExpectedHash, NULL) -{ - RealURI = URI; - - AutoSelectCompression(); - Init(URI, URIDesc, ShortDesc); + if (LocalHashes.find(*type) == NULL) + continue; - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "New pkgIndex with TransactionManager " - << TransactionManager << std::endl; -} - /*}}}*/ -// AcqIndex::AcqIndex - Constructor /*{{{*/ -pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - IndexTarget const *Target, - HashStringList const &ExpectedHash, - indexRecords *MetaIndexParser) - : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHash, - MetaIndexParser) -{ - RealURI = Target->URI; + std::string tagname = *type; + tagname.append("-History"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; - // autoselect the compression method - AutoSelectCompression(); - Init(Target->URI, Target->Description, Target->ShortDesc); + string hash, filename; + unsigned long long size; + std::stringstream ss(tmp); - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "New pkgIndex with TransactionManager " - << TransactionManager << std::endl; -} - /*}}}*/ -// AcqIndex::AutoSelectCompression - Select compression /*{{{*/ -void pkgAcqIndex::AutoSelectCompression() -{ - std::vector types = APT::Configuration::getCompressionTypes(); - CompressionExtensions = ""; - if (ExpectedHashes.usable()) - { - for (std::vector::const_iterator t = types.begin(); - t != types.end(); ++t) + while (ss >> hash 
>> size >> filename) { - std::string CompressedMetaKey = string(Target->MetaKey).append(".").append(*t); - if (*t == "uncompressed" || - MetaIndexParser->Exists(CompressedMetaKey) == true) - CompressionExtensions.append(*t).append(" "); + if (unlikely(hash.empty() == true || filename.empty() == true)) + continue; + + // see if we have a record for this file already + std::vector::iterator cur = available_patches.begin(); + for (; cur != available_patches.end(); ++cur) + { + if (cur->file != filename || unlikely(cur->result_size != size)) + continue; + cur->result_hashes.push_back(HashString(*type, hash)); + break; + } + if (cur != available_patches.end()) + continue; + if (firstAcceptedHashes == true) + { + DiffInfo next; + next.file = filename; + next.result_hashes.push_back(HashString(*type, hash)); + next.result_size = size; + next.patch_size = 0; + available_patches.push_back(next); + } + else + { + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename + << " wasn't in the list for the first parsed hash! (history)" << std::endl; + break; + } } + firstAcceptedHashes = false; } - else + + if (unlikely(available_patches.empty() == true)) { - for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t) - CompressionExtensions.append(*t).append(" "); + if (Debug) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " + << "Couldn't find any patches for the patch series." << std::endl; + return false; } - if (CompressionExtensions.empty() == false) - CompressionExtensions.erase(CompressionExtensions.end()-1); -} - /*}}}*/ -// AcqIndex::Init - defered Constructor /*{{{*/ -void pkgAcqIndex::Init(string const &URI, string const &URIDesc, - string const &ShortDesc) -{ - Stage = STAGE_DOWNLOAD; - - DestFile = GetPartialFileNameFromURI(URI); - CurrentCompressionExtension = CompressionExtensions.substr(0, CompressionExtensions.find(' ')); - if (CurrentCompressionExtension == "uncompressed") + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) { - Desc.URI = URI; - if(Target) - MetaKey = string(Target->MetaKey); - } - else - { - Desc.URI = URI + '.' + CurrentCompressionExtension; - DestFile = DestFile + '.' + CurrentCompressionExtension; - if(Target) - MetaKey = string(Target->MetaKey) + '.' + CurrentCompressionExtension; - } - - // load the filesize - if(MetaIndexParser) - { - indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); - if(Record) - FileSize = Record->Size; - - InitByHashIfNeeded(MetaKey); - } - - Desc.Description = URIDesc; - Desc.Owner = this; - Desc.ShortDesc = ShortDesc; - - QueueURI(Desc); -} - /*}}}*/ -// AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/ -void pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey) -{ - // TODO: - // - (maybe?) add support for by-hash into the sources.list as flag - // - make apt-ftparchive generate the hashes (and expire?) - std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash"; - if(_config->FindB("APT::Acquire::By-Hash", false) == true || - _config->FindB(HostKnob, false) == true || - MetaIndexParser->GetSupportsAcquireByHash()) - { - indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); - if(Record) - { - // FIXME: should we really use the best hash here? or a fixed one? 
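
For reference while reading ParseDiffIndex(): the *-Current, *-History and *-Patches fields it extracts above come from a pdiff Index stanza shaped roughly like this (hashes, sizes and patch names are placeholders, not real data):

SHA256-Current: <hash of the current Packages file> 12345678
SHA256-History:
 <hash of an older Packages state> 12300000 2014-11-28-0815.31
 <hash of a newer old state> 12340000 2014-11-29-0815.42
SHA256-Patches:
 <hash of the first patch> 4321 2014-11-28-0815.31
 <hash of the second patch> 2109 2014-11-29-0815.42
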
- const HashString *TargetHash = Record->Hashes.find(""); - std::string ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue(); - size_t trailing_slash = Desc.URI.find_last_of("/"); - Desc.URI = Desc.URI.replace( - trailing_slash, - Desc.URI.substr(trailing_slash+1).size()+1, - ByHash); - } else { - _error->Warning( - "Fetching ByHash requested but can not find record for %s", - MetaKey.c_str()); - } - } -} - /*}}}*/ -// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -/* The only header we use is the last-modified header. */ -#if APT_PKG_ABI >= 413 -string pkgAcqIndex::Custom600Headers() const -#else -string pkgAcqIndex::Custom600Headers() -#endif -{ - string Final = GetFinalFilename(); - - string msg = "\nIndex-File: true"; - struct stat Buf; - if (stat(Final.c_str(),&Buf) == 0) - msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + if (LocalHashes.find(*type) == NULL) + continue; - if(Target->IsOptional()) - msg += "\nFail-Ignore: true"; + std::string tagname = *type; + tagname.append("-Patches"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; - return msg; -} - /*}}}*/ -// pkgAcqIndex::Failed - getting the indexfile failed /*{{{*/ -void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) -{ - Item::Failed(Message,Cnf); + string hash, filename; + unsigned long long size; + std::stringstream ss(tmp); - // authorisation matches will not be fixed by other compression types - if (Status != StatAuthError) - { - size_t const nextExt = CompressionExtensions.find(' '); - if (nextExt != std::string::npos) + while (ss >> hash >> size >> filename) { - CompressionExtensions = CompressionExtensions.substr(nextExt+1); - Init(RealURI, Desc.Description, Desc.ShortDesc); - Status = StatIdle; - return; - } - } - - if(Target->IsOptional() && ExpectedHashes.empty() && Stage == STAGE_DOWNLOAD) - Status = StatDone; - else - TransactionManager->AbortTransaction(); -} - /*}}}*/ -bool pkgAcqIndex::TransactionState(TransactionStates const state) /*{{{*/ -{ - if (pkgAcquire::Item::TransactionState(state) == false) - return false; + if (unlikely(hash.empty() == true || filename.empty() == true)) + continue; - switch (state) - { - case TransactionAbort: - if (Stage == STAGE_DECOMPRESS_AND_VERIFY) + // see if we have a record for this file already + std::vector::iterator cur = available_patches.begin(); + for (; cur != available_patches.end(); ++cur) { - // keep the compressed file, but drop the decompressed - EraseFileName.clear(); - if (PartialFile.empty() == false && flExtension(PartialFile) == "decomp") - unlink(PartialFile.c_str()); + if (cur->file != filename) + continue; + if (unlikely(cur->patch_size != 0 && cur->patch_size != size)) + continue; + cur->patch_hashes.push_back(HashString(*type, hash)); + cur->patch_size = size; + break; } + if (cur != available_patches.end()) + continue; + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename + << " wasn't in the list for the first parsed hash! 
(patches)" << std::endl; break; - case TransactionCommit: - if (EraseFileName.empty() == false) - unlink(EraseFileName.c_str()); - break; - } - return true; -} - /*}}}*/ -// pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqIndex::GetFinalFilename() const -{ - std::string const FinalFile = GetFinalFileNameFromURI(RealURI); - return GetCompressedFileName(RealURI, FinalFile, CurrentCompressionExtension); -} - /*}}}*/ -// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/ -void pkgAcqIndex::ReverifyAfterIMS() -{ - // update destfile to *not* include the compression extension when doing - // a reverify (as its uncompressed on disk already) - DestFile = GetCompressedFileName(RealURI, GetPartialFileNameFromURI(RealURI), CurrentCompressionExtension); - - // copy FinalFile into partial/ so that we check the hash again - string FinalFile = GetFinalFilename(); - Stage = STAGE_DECOMPRESS_AND_VERIFY; - Desc.URI = "copy:" + FinalFile; - QueueURI(Desc); -} - /*}}}*/ -// AcqIndex::ValidateFile - Validate the content of the downloaded file /*{{{*/ -bool pkgAcqIndex::ValidateFile(const std::string &FileName) -{ - // FIXME: this can go away once we only ever download stuff that - // has a valid hash and we never do GET based probing - // FIXME2: this also leaks debian-isms into the code and should go therefore - - /* Always validate the index file for correctness (all indexes must - * have a Package field) (LP: #346386) (Closes: #627642) - */ - FileFd fd(FileName, FileFd::ReadOnly, FileFd::Extension); - // Only test for correctness if the content of the file is not empty - // (empty is ok) - if (fd.Size() > 0) - { - pkgTagSection sec; - pkgTagFile tag(&fd); - - // all our current indexes have a field 'Package' in each section - if (_error->PendingError() == true || - tag.Step(sec) == false || - sec.Exists("Package") == false) - return false; - } - return true; -} - /*}}}*/ -// AcqIndex::Done - Finished a fetch /*{{{*/ -// --------------------------------------------------------------------- -/* This goes through a number of states.. On the initial fetch the - method could possibly return an alternate filename which points - to the uncompressed version of the file. If this is so the file - is copied into the partial directory. In all other cases the file - is decompressed with a compressed uri. 
*/ -void pkgAcqIndex::Done(string Message, - unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) -{ - Item::Done(Message,Size,Hashes,Cfg); - - switch(Stage) - { - case STAGE_DOWNLOAD: - StageDownloadDone(Message, Hashes, Cfg); - break; - case STAGE_DECOMPRESS_AND_VERIFY: - StageDecompressDone(Message, Hashes, Cfg); - break; - } -} - /*}}}*/ -// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/ -void pkgAcqIndex::StageDownloadDone(string Message, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) -{ - // First check if the calculcated Hash of the (compressed) downloaded - // file matches the hash we have in the MetaIndexRecords for this file - if(VerifyHashByMetaKey(Hashes) == false) - { - RenameOnError(HashSumMismatch); - Failed(Message, Cfg); - return; - } - - Complete = true; - - // Handle the unzipd case - string FileName = LookupTag(Message,"Alt-Filename"); - if (FileName.empty() == false) - { - Stage = STAGE_DECOMPRESS_AND_VERIFY; - Local = true; - DestFile += ".decomp"; - Desc.URI = "copy:" + FileName; - QueueURI(Desc); - SetActiveSubprocess("copy"); - return; - } - - FileName = LookupTag(Message,"Filename"); - if (FileName.empty() == true) - { - Status = StatError; - ErrorText = "Method gave a blank filename"; - } - - // Methods like e.g. "file:" will give us a (compressed) FileName that is - // not the "DestFile" we set, in this case we uncompress from the local file - if (FileName != DestFile) - Local = true; - else - EraseFileName = FileName; - - // we need to verify the file against the current Release file again - // on if-modfied-since hit to avoid a stale attack against us - if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) - { - // The files timestamp matches, reverify by copy into partial/ - EraseFileName = ""; - ReverifyAfterIMS(); - return; - } - - // If we have compressed indexes enabled, queue for hash verification - if (_config->FindB("Acquire::GzipIndexes",false)) - { - DestFile = GetPartialFileNameFromURI(RealURI + '.' 
+ CurrentCompressionExtension); - EraseFileName = ""; - Stage = STAGE_DECOMPRESS_AND_VERIFY; - Desc.URI = "copy:" + FileName; - QueueURI(Desc); - SetActiveSubprocess("copy"); - return; - } - - // get the binary name for your used compression type - string decompProg; - if(CurrentCompressionExtension == "uncompressed") - decompProg = "copy"; - else - decompProg = _config->Find(string("Acquire::CompressionTypes::").append(CurrentCompressionExtension),""); - if(decompProg.empty() == true) - { - _error->Error("Unsupported extension: %s", CurrentCompressionExtension.c_str()); - return; - } - - // queue uri for the next stage - Stage = STAGE_DECOMPRESS_AND_VERIFY; - DestFile += ".decomp"; - Desc.URI = decompProg + ":" + FileName; - QueueURI(Desc); - SetActiveSubprocess(decompProg); -} - /*}}}*/ -// pkgAcqIndex::StageDecompressDone - Final verification /*{{{*/ -void pkgAcqIndex::StageDecompressDone(string Message, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) -{ - if (ExpectedHashes.usable() && ExpectedHashes != Hashes) - { - Desc.URI = RealURI; - RenameOnError(HashSumMismatch); - printHashSumComparision(RealURI, ExpectedHashes, Hashes); - Failed(Message, Cfg); - return; - } - - if(!ValidateFile(DestFile)) - { - RenameOnError(InvalidFormat); - Failed(Message, Cfg); - return; - } - - // Done, queue for rename on transaction finished - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); - - return; -} - /*}}}*/ -// AcqMetaBase - Constructor /*{{{*/ -pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire *Owner, - const std::vector* IndexTargets, - indexRecords* MetaIndexParser, - std::string const &RealURI, - HashStringList const &ExpectedHashes, - pkgAcqMetaBase *TransactionManager) -: Item(Owner, ExpectedHashes, TransactionManager), - MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL), IndexTargets(IndexTargets), - AuthPass(false), RealURI(RealURI), IMSHit(false) -{ -} - /*}}}*/ -// AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/ -void pkgAcqMetaBase::Add(Item *I) -{ - Transaction.push_back(I); -} - /*}}}*/ -// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/ -void pkgAcqMetaBase::AbortTransaction() -{ - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "AbortTransaction: " << TransactionManager << std::endl; - - // ensure the toplevel is in error state too - for (std::vector::iterator I = Transaction.begin(); - I != Transaction.end(); ++I) - { - (*I)->TransactionState(TransactionAbort); - } - Transaction.clear(); -} - /*}}}*/ -// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/ -bool pkgAcqMetaBase::TransactionHasError() -{ - for (pkgAcquire::ItemIterator I = Transaction.begin(); - I != Transaction.end(); ++I) - { - switch((*I)->Status) { - case StatDone: break; - case StatIdle: break; - case StatAuthError: return true; - case StatError: return true; - case StatTransientNetworkError: return true; - case StatFetching: break; } } - return false; -} - /*}}}*/ -// AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/ -void pkgAcqMetaBase::CommitTransaction() -{ - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "CommitTransaction: " << this << std::endl; - - // move new files into place *and* remove files that are not - // part of the transaction but are still on disk - for (std::vector::iterator I = Transaction.begin(); - I != Transaction.end(); ++I) - { - (*I)->TransactionState(TransactionCommit); - } - Transaction.clear(); -} 
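
The TransactionStage helpers shuffled around below encode staging purely through an item's two filename members; restated compactly (the same assignments their definitions below perform):

// stage a copy: on commit, From is renamed into place as To
I->PartialFile = From;
I->DestFile = To;

// stage a removal: no partial file, so on commit To is deleted instead
I->PartialFile = "";
I->DestFile = FinalFile;
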
- /*}}}*/ -bool pkgAcqMetaBase::TransactionState(TransactionStates const state) /*{{{*/ -{ - // Do not remove InRelease on IMSHit of Release.gpg [yes, this is very edgecasey] - if (TransactionManager->IMSHit == false) - return pkgAcquire::Item::TransactionState(state); - return true; -} - /*}}}*/ -// AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/ -void pkgAcqMetaBase::TransactionStageCopy(Item *I, - const std::string &From, - const std::string &To) -{ - I->PartialFile = From; - I->DestFile = To; -} - /*}}}*/ -// AcqMetaBase::TransactionStageRemoval - Stage a file for removal /*{{{*/ -void pkgAcqMetaBase::TransactionStageRemoval(Item *I, - const std::string &FinalFile) -{ - I->PartialFile = ""; - I->DestFile = FinalFile; -} - /*}}}*/ -// AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/ -bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message) -{ - // FIXME: this entire function can do now that we disallow going to - // a unauthenticated state and can cleanly rollback - string const Final = I->GetFinalFilename(); - if(FileExists(Final)) - { - I->Status = StatTransientNetworkError; - _error->Warning(_("An error occurred during the signature " - "verification. The repository is not updated " - "and the previous index files will be used. " - "GPG error: %s: %s\n"), - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); - RunScripts("APT::Update::Auth-Failure"); - return true; - } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) { - /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */ - _error->Error(_("GPG error: %s: %s"), - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); - I->Status = StatError; - return true; - } else { - _error->Warning(_("GPG error: %s: %s"), - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); + bool foundStart = false; + for (std::vector::iterator cur = available_patches.begin(); + cur != available_patches.end(); ++cur) + { + if (LocalHashes != cur->result_hashes) + continue; + + available_patches.erase(available_patches.begin(), cur); + foundStart = true; + break; } - // gpgv method failed - ReportMirrorFailure("GPGFailure"); - return false; -} - /*}}}*/ -// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/ -pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - string const &URI, string const &URIDesc,string const &ShortDesc, - pkgAcqMetaIndex * const MetaIndex) : - pkgAcquire::Item(Owner, HashStringList(), TransactionManager), MetaIndex(MetaIndex), - URIDesc(URIDesc), RealURI(URI) -{ - DestFile = GetPartialFileNameFromURI(URI); - // remove any partial downloaded sig-file in partial/. - // it may confuse proxies and is too small to warrant a - // partial download anyway - unlink(DestFile.c_str()); + if (foundStart == false || unlikely(available_patches.empty() == true)) + { + if (Debug) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " + << "Couldn't find the start of the patch series." 
<< std::endl; + return false; + } - // set the TransactionManager - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "New pkgAcqMetaSig with TransactionManager " - << TransactionManager << std::endl; + // patching with too many files is rather slow compared to a fast download + unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0); + if (fileLimit != 0 && fileLimit < available_patches.size()) + { + if (Debug) + std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit + << ") so fallback to complete download" << std::endl; + return false; + } - // Create the item - Desc.Description = URIDesc; - Desc.Owner = this; - Desc.ShortDesc = ShortDesc; - Desc.URI = URI; + // calculate the size of all patches we have to get + // note that all sizes are uncompressed, while we download compressed files + unsigned long long patchesSize = 0; + for (std::vector::const_iterator cur = available_patches.begin(); + cur != available_patches.end(); ++cur) + patchesSize += cur->patch_size; + unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); + if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) + { + if (Debug) + std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100 + << ") so fallback to complete download" << std::endl; + return false; + } - // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors), - // so we skip the download step and go instantly to verification - if (TransactionManager->IMSHit == true && RealFileExists(GetFinalFilename())) + // we have something, queue the diffs + string::size_type const last_space = Description.rfind(" "); + if(last_space != string::npos) + Description.erase(last_space, Description.size()-last_space); + + /* decide if we should download patches one by one or in one go: + The first is good if the server merges patches, but many don't so client + based merging can be attempt in which case the second is better. 
+ "bad things" will happen if patches are merged on the server, + but client side merging is attempt as well */ + bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); + if (pdiff_merge == true) { - Complete = true; - Status = StatDone; - PartialFile = DestFile = GetFinalFilename(); - MetaIndexFileSignature = DestFile; - MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); + // reprepro adds this flag if it has merged patches on the server + std::string const precedence = Tags.FindS("X-Patch-Precedence"); + pdiff_merge = (precedence != "merged"); } + + if (pdiff_merge == false) + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, available_patches); else - QueueURI(Desc); -} - /*}}}*/ -pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/ -{ + { + std::vector *diffs = new std::vector(available_patches.size()); + for(size_t i = 0; i < available_patches.size(); ++i) + (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager, + Target, + available_patches[i], + diffs); + } + + Complete = false; + Status = StatDone; + Dequeue(); + return true; } /*}}}*/ -// pkgAcqMetaSig::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqMetaSig::GetFinalFilename() const +void pkgAcqDiffIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/ { - return GetFinalFileNameFromURI(RealURI); + Item::Failed(Message,Cnf); + Status = StatDone; + + if(Debug) + std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl + << "Falling back to normal index file acquire" << std::endl; + + new pkgAcqIndex(Owner, TransactionManager, Target); } /*}}}*/ -// pkgAcqMetaSig::Done - The signature was downloaded/verified /*{{{*/ -// --------------------------------------------------------------------- -/* The only header we use is the last-modified header. */ -void pkgAcqMetaSig::Done(string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) +void pkgAcqDiffIndex::Done(string const &Message,HashStringList const &Hashes, /*{{{*/ + pkgAcquire::MethodConfig const * const Cnf) { - if (MetaIndexFileSignature.empty() == false) - { - DestFile = MetaIndexFileSignature; - MetaIndexFileSignature.clear(); - } - Item::Done(Message, Size, Hashes, Cfg); + if(Debug) + std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl; - if(MetaIndex->AuthPass == false) + Item::Done(Message, Hashes, Cnf); + + string const FinalFile = GetFinalFilename(); + if(StringToBool(LookupTag(Message,"IMS-Hit"),false)) + DestFile = FinalFile; + + if(ParseDiffIndex(DestFile) == false) { - if(MetaIndex->CheckDownloadDone(this, Message, Hashes) == true) - { - // destfile will be modified to point to MetaIndexFile for the - // gpgv method, so we need to save it here - MetaIndexFileSignature = DestFile; - MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); - } + Failed("Message: Couldn't parse pdiff index", Cnf); + // queue for final move - this should happen even if we fail + // while parsing (e.g. on sizelimit) and download the complete file. 
+ TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); return; } - else if(MetaIndex->CheckAuthDone(Message) == true) - { - if (TransactionManager->IMSHit == false) - { - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); - TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); - } - } + + TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); + + Complete = true; + Status = StatDone; + Dequeue(); + + return; } /*}}}*/ -void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ + +// AcqIndexDiffs::AcqIndexDiffs - Constructor /*{{{*/ +// --------------------------------------------------------------------- +/* The package diff is added to the queue. one object is constructed + * for each diff and the index + */ +pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + vector const &diffs) + : pkgAcqBaseIndex(Owner, TransactionManager, Target), + available_patches(diffs) { - Item::Failed(Message,Cnf); + DestFile = GetPartialFileNameFromURI(Target->URI); - // check if we need to fail at this point - if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message)) - return; + Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - string const FinalRelease = MetaIndex->GetFinalFilename(); - string const FinalReleasegpg = GetFinalFilename(); - string const FinalInRelease = TransactionManager->GetFinalFilename(); + Desc.Owner = this; + Description = Target->Description; + Desc.ShortDesc = Target->ShortDesc; - if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) + if(available_patches.empty() == true) { - std::string downgrade_msg; - strprintf(downgrade_msg, _("The repository '%s' is no longer signed."), - MetaIndex->URIDesc.c_str()); - if(_config->FindB("Acquire::AllowDowngradeToInsecureRepositories")) - { - // meh, the users wants to take risks (we still mark the packages - // from this repository as unauthenticated) - _error->Warning("%s", downgrade_msg.c_str()); - _error->Warning(_("This is normally not allowed, but the option " - "Acquire::AllowDowngradeToInsecureRepositories was " - "given to override it.")); - Status = StatDone; - } else { - _error->Error("%s", downgrade_msg.c_str()); - if (TransactionManager->IMSHit == false) - Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED"); - Item::Failed("Message: " + downgrade_msg, Cnf); - TransactionManager->AbortTransaction(); - return; - } + // we are done (yeah!), check hashes against the final file + DestFile = GetFinalFileNameFromURI(Target->URI); + Finish(true); } else - _error->Warning(_("The data from '%s' is not signed. 
Packages " - "from that repository can not be authenticated."), - MetaIndex->URIDesc.c_str()); - - // ensures that a Release.gpg file in the lists/ is removed by the transaction - TransactionManager->TransactionStageRemoval(this, DestFile); - - // only allow going further if the users explicitely wants it - if(AllowInsecureRepositories(MetaIndex->MetaIndexParser, TransactionManager, this) == true) { - if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) + // patching needs to be bootstrapped with the 'old' version + std::string const PartialFile = GetPartialFileNameFromURI(Target->URI); + if (RealFileExists(PartialFile) == false) { - // open the last Release if we have it - if (TransactionManager->IMSHit == false) + if (symlink(GetFinalFilename().c_str(), PartialFile.c_str()) != 0) { - MetaIndex->LastMetaIndexParser = new indexRecords; - _error->PushToStack(); - if (RealFileExists(FinalInRelease)) - MetaIndex->LastMetaIndexParser->Load(FinalInRelease); - else - MetaIndex->LastMetaIndexParser->Load(FinalRelease); - // its unlikely to happen, but if what we have is bad ignore it - if (_error->PendingError()) - { - delete MetaIndex->LastMetaIndexParser; - MetaIndex->LastMetaIndexParser = NULL; - } - _error->RevertToStack(); + Failed("Link creation of " + PartialFile + " to " + GetFinalFilename() + " failed", NULL); + return; } } - // we parse the indexes here because at this point the user wanted - // a repository that may potentially harm him - MetaIndex->MetaIndexParser->Load(MetaIndex->DestFile); - if (MetaIndex->VerifyVendor(Message) == false) - /* expired Release files are still a problem you need extra force for */; - else - MetaIndex->QueueIndexes(true); - - TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); + // get the next diff + State = StateFetchDiff; + QueueNextDiff(); } +} + /*}}}*/ +void pkgAcqIndexDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/ +{ + Item::Failed(Message,Cnf); + Status = StatDone; - // FIXME: this is used often (e.g. 
in pkgAcqIndexTrans) so refactor - if (Cnf->LocalOnly == true || - StringToBool(LookupTag(Message,"Transient-Failure"),false) == false) + if(Debug) + std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl + << "Falling back to normal index file acquire" << std::endl; + DestFile = GetPartialFileNameFromURI(Target->URI); + RenameOnError(PDiffError); + new pkgAcqIndex(Owner, TransactionManager, Target); + Finish(); +} + /*}}}*/ +// Finish - helper that cleans the item out of the fetcher queue /*{{{*/ +void pkgAcqIndexDiffs::Finish(bool allDone) +{ + if(Debug) + std::clog << "pkgAcqIndexDiffs::Finish(): " + << allDone << " " + << Desc.URI << std::endl; + + // we restore the original name, this is required, otherwise + // the file will be cleaned + if(allDone) { - // Ignore this + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + + // this is for the "real" finish + Complete = true; Status = StatDone; + Dequeue(); + if(Debug) + std::clog << "\n\nallDone: " << DestFile << "\n" << std::endl; + return; } + + if(Debug) + std::clog << "Finishing: " << Desc.URI << std::endl; + Complete = false; + Status = StatDone; + Dequeue(); + return; } /*}}}*/ -pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/ - pkgAcqMetaBase *TransactionManager, - string URI,string URIDesc,string ShortDesc, - string MetaIndexSigURI,string MetaIndexSigURIDesc, string MetaIndexSigShortDesc, - const vector* IndexTargets, - indexRecords* MetaIndexParser) : - pkgAcqMetaBase(Owner, IndexTargets, MetaIndexParser, URI, HashStringList(), - TransactionManager), - URIDesc(URIDesc), ShortDesc(ShortDesc), - MetaIndexSigURI(MetaIndexSigURI), MetaIndexSigURIDesc(MetaIndexSigURIDesc), - MetaIndexSigShortDesc(MetaIndexSigShortDesc) +bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ { - if(TransactionManager == NULL) + // calc sha1 of the just patched file + std::string const FinalFile = GetPartialFileNameFromURI(Target->URI); + + if(!FileExists(FinalFile)) { - this->TransactionManager = this; - this->TransactionManager->Add(this); + Failed("Message: No FinalFile " + FinalFile + " available", NULL); + return false; } - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "New pkgAcqMetaIndex with TransactionManager " - << this->TransactionManager << std::endl; - - - Init(URIDesc, ShortDesc); -} - /*}}}*/ -// pkgAcqMetaIndex::Init - Delayed constructor /*{{{*/ -void pkgAcqMetaIndex::Init(std::string URIDesc, std::string ShortDesc) -{ - DestFile = GetPartialFileNameFromURI(RealURI); - - // Create the item - Desc.Description = URIDesc; - Desc.Owner = this; - Desc.ShortDesc = ShortDesc; - Desc.URI = RealURI; + FileFd fd(FinalFile, FileFd::ReadOnly); + Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - // we expect more item - ExpectedAdditionalItems = IndexTargets->size(); - QueueURI(Desc); -} - /*}}}*/ -void pkgAcqMetaIndex::Done(string Message,unsigned long long Size, /*{{{*/ - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg) -{ - Item::Done(Message,Size,Hashes,Cfg); + if(Debug) + std::clog << "QueueNextDiff: " << FinalFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl; - if(CheckDownloadDone(this, Message, Hashes)) + HashStringList const TargetFileHashes = GetExpectedHashesFor(Target->MetaKey); + if (unlikely(LocalHashes.usable() == false || TargetFileHashes.usable() == false)) { - // we have a Release file, now download the Signature, all 
further - // verify/queue for additional downloads will be done in the - // pkgAcqMetaSig::Done() code - new pkgAcqMetaSig(Owner, TransactionManager, - MetaIndexSigURI, MetaIndexSigURIDesc, - MetaIndexSigShortDesc, this); + Failed("Local/Expected hashes are not usable", NULL); + return false; } -} - /*}}}*/ -bool pkgAcqMetaBase::CheckAuthDone(string Message) /*{{{*/ -{ - // At this point, the gpgv method has succeeded, so there is a - // valid signature from a key in the trusted keyring. We - // perform additional verification of its contents, and use them - // to verify the indexes we are about to download - if (TransactionManager->IMSHit == false) + + // final file reached before all patches are applied + if(LocalHashes == TargetFileHashes) { - // open the last (In)Release if we have it - std::string const FinalFile = GetFinalFilename(); - std::string FinalRelease; - std::string FinalInRelease; - if (APT::String::Endswith(FinalFile, "InRelease")) - { - FinalInRelease = FinalFile; - FinalRelease = FinalFile.substr(0, FinalFile.length() - strlen("InRelease")) + "Release"; - } - else - { - FinalInRelease = FinalFile.substr(0, FinalFile.length() - strlen("Release")) + "InRelease"; - FinalRelease = FinalFile; - } - if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease)) - { - LastMetaIndexParser = new indexRecords; - _error->PushToStack(); - if (RealFileExists(FinalInRelease)) - LastMetaIndexParser->Load(FinalInRelease); - else - LastMetaIndexParser->Load(FinalRelease); - // its unlikely to happen, but if what we have is bad ignore it - if (_error->PendingError()) - { - delete LastMetaIndexParser; - LastMetaIndexParser = NULL; - } - _error->RevertToStack(); - } + Finish(true); + return true; } - if (!MetaIndexParser->Load(DestFile)) + // remove all patches until the next matching patch is found + // this requires the Index file to be ordered + for(vector::iterator I = available_patches.begin(); + available_patches.empty() == false && + I != available_patches.end() && + I->result_hashes != LocalHashes; + ++I) { - Status = StatAuthError; - ErrorText = MetaIndexParser->ErrorText; - return false; + available_patches.erase(I); } - if (!VerifyVendor(Message)) + // error checking and falling back if no patch was found + if(available_patches.empty() == true) { - Status = StatAuthError; + Failed("No patches left to reach target", NULL); return false; } - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - std::cerr << "Signature verification succeeded: " - << DestFile << std::endl; + // queue the right diff + Desc.URI = Target->URI + ".diff/" + available_patches[0].file + ".gz"; + Desc.Description = Description + " " + available_patches[0].file + string(".pdiff"); + DestFile = GetPartialFileNameFromURI(Target->URI + ".diff/" + available_patches[0].file); - // Download further indexes with verification - QueueIndexes(true); + if(Debug) + std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl; + + QueueURI(Desc); return true; } /*}}}*/ -// pkgAcqMetaBase::Custom600Headers - Get header for AcqMetaBase /*{{{*/ -// --------------------------------------------------------------------- -#if APT_PKG_ABI >= 413 -string pkgAcqMetaBase::Custom600Headers() const -#else -string pkgAcqMetaBase::Custom600Headers() -#endif +void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/ + pkgAcquire::MethodConfig const * const Cnf) { - std::string Header = "\nIndex-File: true"; - std::string MaximumSize; - strprintf(MaximumSize, "\nMaximum-Size: %i", - 
_config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000)); - Header += MaximumSize; - - string const FinalFile = GetFinalFilename(); + if(Debug) + std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl; - struct stat Buf; - if (stat(FinalFile.c_str(),&Buf) == 0) - Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + Item::Done(Message, Hashes, Cnf); - return Header; -} - /*}}}*/ -// pkgAcqMetaBase::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqMetaBase::GetFinalFilename() const -{ - return GetFinalFileNameFromURI(RealURI); -} - /*}}}*/ -// pkgAcqMetaBase::QueueForSignatureVerify /*{{{*/ -void pkgAcqMetaBase::QueueForSignatureVerify(pkgAcquire::Item * const I, std::string const &File, std::string const &Signature) -{ - AuthPass = true; - I->Desc.URI = "gpgv:" + Signature; - I->DestFile = File; - QueueURI(I->Desc); - I->SetActiveSubprocess("gpgv"); -} - /*}}}*/ -// pkgAcqMetaBase::CheckDownloadDone /*{{{*/ -bool pkgAcqMetaBase::CheckDownloadDone(pkgAcquire::Item * const I, const std::string &Message, HashStringList const &Hashes) const -{ - // We have just finished downloading a Release file (it is not - // verified yet) + // FIXME: verify this download too before feeding it to rred + std::string const FinalFile = GetPartialFileNameFromURI(Target->URI); - string const FileName = LookupTag(Message,"Filename"); - if (FileName.empty() == true) + // success in downloading a diff, enter ApplyDiff state + if(State == StateFetchDiff) { - I->Status = StatError; - I->ErrorText = "Method gave a blank filename"; - return false; - } + FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); + class Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - if (FileName != I->DestFile) - { - I->Local = true; - I->Desc.URI = "copy:" + FileName; - I->QueueURI(I->Desc); - return false; - } + if (fd.Size() != available_patches[0].patch_size || + available_patches[0].patch_hashes != LocalHashes) + { + // patchfiles are dated, so bad indicates a bad download, so kill it + unlink(DestFile.c_str()); + Failed("Patch has Size/Hashsum mismatch", NULL); + return; + } - // make sure to verify against the right file on I-M-S hit - bool IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"), false); - if (IMSHit == false && Hashes.usable()) + // rred excepts the patch as $FinalFile.ed + Rename(DestFile,FinalFile+".ed"); + + if(Debug) + std::clog << "Sending to rred method: " << FinalFile << std::endl; + + State = StateApplyDiff; + Local = true; + Desc.URI = "rred:" + FinalFile; + QueueURI(Desc); + SetActiveSubprocess("rred"); + return; + } + + + // success in download/apply a diff, queue next (if needed) + if(State == StateApplyDiff) { - // detect IMS-Hits servers haven't detected by Hash comparison - std::string const FinalFile = I->GetFinalFilename(); - if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true) + // remove the just applied patch + available_patches.erase(available_patches.begin()); + unlink((FinalFile + ".ed").c_str()); + + // move into place + if(Debug) { - IMSHit = true; - unlink(I->DestFile.c_str()); + std::clog << "Moving patched file in place: " << std::endl + << DestFile << " -> " << FinalFile << std::endl; } - } + Rename(DestFile,FinalFile); + chmod(FinalFile.c_str(),0644); - if(IMSHit == true) - { - // for simplicity, the transaction manager is always InRelease - // even if it doesn't exist. 
- if (TransactionManager != NULL) - TransactionManager->IMSHit = true; - I->PartialFile = I->DestFile = I->GetFinalFilename(); + // see if there is more to download + if(available_patches.empty() == false) { + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, + available_patches); + return Finish(); + } else + // update + DestFile = FinalFile; + return Finish(true); } +} + /*}}}*/ + +// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/ +pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + DiffInfo const &patch, + std::vector const * const allPatches) + : pkgAcqBaseIndex(Owner, TransactionManager, Target), + patch(patch), allPatches(allPatches), State(StateFetchDiff) +{ + Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); + + Desc.Owner = this; + Description = Target->Description; + Desc.ShortDesc = Target->ShortDesc; + + Desc.URI = Target->URI + ".diff/" + patch.file + ".gz"; + Desc.Description = Description + " " + patch.file + string(".pdiff"); + + DestFile = GetPartialFileNameFromURI(Target->URI + ".diff/" + patch.file); + + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl; + + QueueURI(Desc); +} + /*}}}*/ +void pkgAcqIndexMergeDiffs::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf)/*{{{*/ +{ + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl; - // set Item to complete as the remaining work is all local (verify etc) - I->Complete = true; + Item::Failed(Message,Cnf); + Status = StatDone; - return true; + // check if we are the first to fail, otherwise we are done here + State = StateDoneDiff; + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + if ((*I)->State == StateErrorDiff) + return; + + // first failure means we should fallback + State = StateErrorDiff; + if (Debug) + std::clog << "Falling back to normal index file acquire" << std::endl; + DestFile = GetPartialFileNameFromURI(Target->URI); + RenameOnError(PDiffError); + new pkgAcqIndex(Owner, TransactionManager, Target); } /*}}}*/ -void pkgAcqMetaBase::QueueIndexes(bool verify) /*{{{*/ +void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Hashes, /*{{{*/ + pkgAcquire::MethodConfig const * const Cnf) { - // at this point the real Items are loaded in the fetcher - ExpectedAdditionalItems = 0; + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl; - vector ::const_iterator Target; - for (Target = IndexTargets->begin(); - Target != IndexTargets->end(); - ++Target) - { - HashStringList ExpectedIndexHashes; - const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey); + Item::Done(Message, Hashes, Cnf); - // optional target that we do not have in the Release file are - // skipped - if (verify == true && Record == NULL && (*Target)->IsOptional()) - continue; + // FIXME: verify download before feeding it to rred + string const FinalFile = GetPartialFileNameFromURI(Target->URI); + + if (State == StateFetchDiff) + { + FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); + class Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - // targets without a hash record are a error when verify is required - if (verify == true && Record == NULL) + if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes) { - Status 
= StatAuthError; - strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str()); - return; + // patchfiles are dated, so bad indicates a bad download, so kill it + unlink(DestFile.c_str()); + Failed("Patch has Size/Hashsum mismatch", NULL); + return; } - if (Record) - ExpectedIndexHashes = Record->Hashes; - - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - { - std::cerr << "Queueing: " << (*Target)->URI << std::endl - << "Expected Hash:" << std::endl; - for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs) - std::cerr << "\t- " << hs->toStr() << std::endl; - std::cerr << "For: " << ((Record == NULL) ? "" : Record->MetaKeyFilename) << std::endl; + // rred expects the patch as $FinalFile.ed.$patchname.gz + Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz"); + + // check if this is the last completed diff + State = StateDoneDiff; + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + if ((*I)->State != StateDoneDiff) + { + if(Debug) + std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl; + return; + } + + // this is the last completed diff, so we are ready to apply now + State = StateApplyDiff; + // patching needs to be bootstrapped with the 'old' version + if (symlink(GetFinalFilename().c_str(), FinalFile.c_str()) != 0) + { + Failed("Link creation of " + FinalFile + " to " + GetFinalFilename() + " failed", NULL); + return; } - if (verify == true && ExpectedIndexHashes.empty() == true) + + if(Debug) + std::clog << "Sending to rred method: " << FinalFile << std::endl; + + Local = true; + Desc.URI = "rred:" + FinalFile; + QueueURI(Desc); + SetActiveSubprocess("rred"); + return; + } + // success in download/apply all diffs, clean up + else if (State == StateApplyDiff) + { + // move the result into place + std::string const Final = GetFinalFilename(); + if(Debug) + std::clog << "Queue patched file in place: " << std::endl + << DestFile << " -> " << Final << std::endl; + + // queue for copy by the transaction manager + TransactionManager->TransactionStageCopy(this, DestFile, Final); + + // ensure the ed's are gone regardless of list-cleanup + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) { - Status = StatAuthError; - strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str()); - return; + std::string const PartialFile = GetPartialFileNameFromURI(Target->URI); + std::string patch = PartialFile + ".ed." + (*I)->patch.file + ".gz"; + unlink(patch.c_str()); } + unlink(FinalFile.c_str()); - /* Queue the Index file (Packages, Sources, Translation-$foo - (either diff or full packages files, depending - on the users option) - we also check if the PDiff Index file is listed - in the Meta-Index file. 
Ideal would be if pkgAcqDiffIndex would test this - instead, but passing the required info to it is to much hassle */ - if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false || - MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)) - new pkgAcqDiffIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser); - else - new pkgAcqIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser); + // all set and done + Complete = true; + if(Debug) + std::clog << "allDone: " << DestFile << "\n" << std::endl; } } /*}}}*/ -bool pkgAcqMetaBase::VerifyVendor(string Message) /*{{{*/ + +// AcqIndex::AcqIndex - Constructor /*{{{*/ +pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner, + pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target) + : pkgAcqBaseIndex(Owner, TransactionManager, Target) { - string::size_type pos; + // autoselect the compression method + AutoSelectCompression(); + Init(Target->URI, Target->Description, Target->ShortDesc); - // check for missing sigs (that where not fatal because otherwise we had - // bombed earlier) - string missingkeys; - string msg = _("There is no public key available for the " - "following key IDs:\n"); - pos = Message.find("NO_PUBKEY "); - if (pos != std::string::npos) + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgIndex with TransactionManager " + << TransactionManager << std::endl; +} + /*}}}*/ +// AcqIndex::AutoSelectCompression - Select compression /*{{{*/ +void pkgAcqIndex::AutoSelectCompression() +{ + std::vector types = APT::Configuration::getCompressionTypes(); + CompressionExtensions = ""; + if (TransactionManager->MetaIndexParser != NULL && TransactionManager->MetaIndexParser->Exists(Target->MetaKey)) { - string::size_type start = pos+strlen("NO_PUBKEY "); - string Fingerprint = Message.substr(start, Message.find("\n")-start); - missingkeys += (Fingerprint); + for (std::vector::const_iterator t = types.begin(); + t != types.end(); ++t) + { + std::string CompressedMetaKey = string(Target->MetaKey).append(".").append(*t); + if (*t == "uncompressed" || + TransactionManager->MetaIndexParser->Exists(CompressedMetaKey) == true) + CompressionExtensions.append(*t).append(" "); + } } - if(!missingkeys.empty()) - _error->Warning("%s", (msg + missingkeys).c_str()); - - string Transformed = MetaIndexParser->GetExpectedDist(); - - if (Transformed == "../project/experimental") + else { - Transformed = "experimental"; + for (std::vector::const_iterator t = types.begin(); t != types.end(); ++t) + CompressionExtensions.append(*t).append(" "); } + if (CompressionExtensions.empty() == false) + CompressionExtensions.erase(CompressionExtensions.end()-1); +} + /*}}}*/ +// AcqIndex::Init - defered Constructor /*{{{*/ +void pkgAcqIndex::Init(string const &URI, string const &URIDesc, + string const &ShortDesc) +{ + Stage = STAGE_DOWNLOAD; - pos = Transformed.rfind('/'); - if (pos != string::npos) - { - Transformed = Transformed.substr(0, pos); - } + DestFile = GetPartialFileNameFromURI(URI); - if (Transformed == ".") + size_t const nextExt = CompressionExtensions.find(' '); + if (nextExt == std::string::npos) { - Transformed = ""; + CurrentCompressionExtension = CompressionExtensions; + CompressionExtensions.clear(); } - - if (_config->FindB("Acquire::Check-Valid-Until", true) == true && - MetaIndexParser->GetValidUntil() > 0) { - time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil(); - if (invalid_since > 0) - { - 
std::string errmsg; - strprintf(errmsg, - // TRANSLATOR: The first %s is the URL of the bad Release file, the second is - // the time since then the file is invalid - formated in the same way as in - // the download progress display (e.g. 7d 3h 42min 1s) - _("Release file for %s is expired (invalid since %s). " - "Updates for this repository will not be applied."), - RealURI.c_str(), TimeToStr(invalid_since).c_str()); - if (ErrorText.empty()) - ErrorText = errmsg; - return _error->Error("%s", errmsg.c_str()); - } + else + { + CurrentCompressionExtension = CompressionExtensions.substr(0, nextExt); + CompressionExtensions = CompressionExtensions.substr(nextExt+1); } - /* Did we get a file older than what we have? This is a last minute IMS hit and doubles - as a prevention of downgrading us to older (still valid) files */ - if (TransactionManager->IMSHit == false && LastMetaIndexParser != NULL && - LastMetaIndexParser->GetDate() > MetaIndexParser->GetDate()) + if (CurrentCompressionExtension == "uncompressed") { - TransactionManager->IMSHit = true; - unlink(DestFile.c_str()); - PartialFile = DestFile = GetFinalFilename(); - delete MetaIndexParser; - MetaIndexParser = LastMetaIndexParser; - LastMetaIndexParser = NULL; + Desc.URI = URI; } - - if (_config->FindB("Debug::pkgAcquire::Auth", false)) + else if (unlikely(CurrentCompressionExtension.empty())) + return; + else { - std::cerr << "Got Codename: " << MetaIndexParser->GetDist() << std::endl; - std::cerr << "Expecting Dist: " << MetaIndexParser->GetExpectedDist() << std::endl; - std::cerr << "Transformed Dist: " << Transformed << std::endl; + Desc.URI = URI + '.' + CurrentCompressionExtension; + DestFile = DestFile + '.' + CurrentCompressionExtension; } - if (MetaIndexParser->CheckDist(Transformed) == false) + if(TransactionManager->MetaIndexParser != NULL) + InitByHashIfNeeded(); + + Desc.Description = URIDesc; + Desc.Owner = this; + Desc.ShortDesc = ShortDesc; + + QueueURI(Desc); +} + /*}}}*/ +// AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/ +void pkgAcqIndex::InitByHashIfNeeded() +{ + // TODO: + // - (maybe?) add support for by-hash into the sources.list as flag + // - make apt-ftparchive generate the hashes (and expire?) + std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash"; + if(_config->FindB("APT::Acquire::By-Hash", false) == true || + _config->FindB(HostKnob, false) == true || + TransactionManager->MetaIndexParser->GetSupportsAcquireByHash()) { - // This might become fatal one day -// Status = StatAuthError; -// ErrorText = "Conflicting distribution; expected " -// + MetaIndexParser->GetExpectedDist() + " but got " -// + MetaIndexParser->GetDist(); -// return false; - if (!Transformed.empty()) + HashStringList const Hashes = GetExpectedHashes(); + if(Hashes.usable()) { - _error->Warning(_("Conflicting distribution: %s (expected %s but got %s)"), - Desc.Description.c_str(), - Transformed.c_str(), - MetaIndexParser->GetDist().c_str()); + // FIXME: should we really use the best hash here? or a fixed one? 
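
The std::string::replace() dance just below is easier to read pulled out into a helper (illustrative, not part of apt): the last path component of the index URI is swapped for a by-hash path, so .../binary-amd64/Packages.xz becomes .../binary-amd64/by-hash/SHA256/<hexdigest>.

    #include <apt-pkg/hashes.h>
    #include <string>

    std::string ToByHashURI(std::string URI, HashString const &Hash)
    {
       std::string const ByHash = "/by-hash/" + Hash.HashType() + "/" + Hash.HashValue();
       std::string::size_type const trailing_slash = URI.find_last_of('/');
       // replacing through the end of the string drops the old filename
       return URI.replace(trailing_slash, std::string::npos, ByHash);
    }
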
+ HashString const * const TargetHash = Hashes.find(""); + std::string const ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue(); + size_t const trailing_slash = Desc.URI.find_last_of("/"); + Desc.URI = Desc.URI.replace( + trailing_slash, + Desc.URI.substr(trailing_slash+1).size()+1, + ByHash); + } else { + _error->Warning( + "Fetching ByHash requested but can not find record for %s", + GetMetaKey().c_str()); } } - - return true; } /*}}}*/ -// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/ -void pkgAcqMetaIndex::Failed(string Message, - pkgAcquire::MethodConfig * Cnf) +// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ +// --------------------------------------------------------------------- +/* The only header we use is the last-modified header. */ +string pkgAcqIndex::Custom600Headers() const { - pkgAcquire::Item::Failed(Message, Cnf); - Status = StatDone; - - _error->Warning(_("The repository '%s' does not have a Release file. " - "This is deprecated, please contact the owner of the " - "repository."), URIDesc.c_str()); + string Final = GetFinalFilename(); - // No Release file was present so fall - // back to queueing Packages files without verification - // only allow going further if the users explicitely wants it - if(AllowInsecureRepositories(MetaIndexParser, TransactionManager, this) == true) - { - // Done, queue for rename on transaction finished - if (FileExists(DestFile)) - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + string msg = "\nIndex-File: true"; + struct stat Buf; + if (stat(Final.c_str(),&Buf) == 0) + msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); - // queue without any kind of hashsum support - QueueIndexes(false); - } + if(Target->IsOptional()) + msg += "\nFail-Ignore: true"; + + return msg; } /*}}}*/ -void pkgAcqMetaIndex::Finished() /*{{{*/ +// AcqIndex::Failed - getting the indexfile failed /*{{{*/ +void pkgAcqIndex::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) { - if(_config->FindB("Debug::Acquire::Transaction", false) == true) - std::clog << "Finished: " << DestFile <TransactionHasError() == false) - TransactionManager->CommitTransaction(); + Item::Failed(Message,Cnf); + + // authorisation matches will not be fixed by other compression types + if (Status != StatAuthError) + { + if (CompressionExtensions.empty() == false) + { + Init(Target->URI, Desc.Description, Desc.ShortDesc); + Status = StatIdle; + return; + } + } + + if(Target->IsOptional() && GetExpectedHashes().empty() && Stage == STAGE_DOWNLOAD) + Status = StatDone; + else + TransactionManager->AbortTransaction(); } /*}}}*/ -pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/ - string const &URI, string const &URIDesc, string const &ShortDesc, - string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc, - string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc, - const vector* IndexTargets, - indexRecords* MetaIndexParser) : - pkgAcqMetaIndex(Owner, NULL, URI, URIDesc, ShortDesc, MetaSigURI, MetaSigURIDesc,MetaSigShortDesc, IndexTargets, MetaIndexParser), - MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc), - MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc) +// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/ +void pkgAcqIndex::ReverifyAfterIMS() { - // index targets + (worst case:) 
Release/Release.gpg - ExpectedAdditionalItems = IndexTargets->size() + 2; + // update destfile to *not* include the compression extension when doing + // a reverify (as its uncompressed on disk already) + DestFile = GetCompressedFileName(Target->URI, GetPartialFileNameFromURI(Target->URI), CurrentCompressionExtension); + + // copy FinalFile into partial/ so that we check the hash again + string FinalFile = GetFinalFilename(); + Stage = STAGE_DECOMPRESS_AND_VERIFY; + Desc.URI = "copy:" + FinalFile; + QueueURI(Desc); } /*}}}*/ -pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/ +// AcqIndex::ValidateFile - Validate the content of the downloaded file /*{{{*/ +bool pkgAcqIndex::ValidateFile(const std::string &FileName) { + // FIXME: this can go away once we only ever download stuff that + // has a valid hash and we never do GET based probing + // FIXME2: this also leaks debian-isms into the code and should go therefore + + /* Always validate the index file for correctness (all indexes must + * have a Package field) (LP: #346386) (Closes: #627642) + */ + FileFd fd(FileName, FileFd::ReadOnly, FileFd::Extension); + // Only test for correctness if the content of the file is not empty + // (empty is ok) + if (fd.Size() > 0) + { + pkgTagSection sec; + pkgTagFile tag(&fd); + + // all our current indexes have a field 'Package' in each section + if (_error->PendingError() == true || + tag.Step(sec) == false || + sec.Exists("Package") == false) + return false; + } + return true; } /*}}}*/ -// pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/ -#if APT_PKG_ABI >= 413 -string pkgAcqMetaClearSig::Custom600Headers() const -#else -string pkgAcqMetaClearSig::Custom600Headers() -#endif +// AcqIndex::Done - Finished a fetch /*{{{*/ +// --------------------------------------------------------------------- +/* This goes through a number of states.. On the initial fetch the + method could possibly return an alternate filename which points + to the uncompressed version of the file. If this is so the file + is copied into the partial directory. In all other cases the file + is decompressed with a compressed uri. 
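
How the compressed case finds its decompressor is worth spelling out: StageDownloadDone() further down maps the compression extension to a helper method through the Acquire::CompressionTypes configuration tree (e.g. "gz" selects the gzip method), while "uncompressed" needs only a plain copy. An illustrative restatement of that lookup:

    std::string DecompressorFor(std::string const &ext)
    {
       if (ext == "uncompressed")
          return "copy";
       // an empty result means the extension is unsupported and the
       // item fails with an error, exactly as in the hunk below
       return _config->Find(std::string("Acquire::CompressionTypes::").append(ext), "");
    }
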
*/ +void pkgAcqIndex::Done(string const &Message, + HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cfg) { - string Header = pkgAcqMetaBase::Custom600Headers(); - Header += "\nFail-Ignore: true"; - return Header; + Item::Done(Message,Hashes,Cfg); + + switch(Stage) + { + case STAGE_DOWNLOAD: + StageDownloadDone(Message, Hashes, Cfg); + break; + case STAGE_DECOMPRESS_AND_VERIFY: + StageDecompressDone(Message, Hashes, Cfg); + break; + } } /*}}}*/ -// pkgAcqMetaClearSig::Done - We got a file /*{{{*/ -class APT_HIDDEN DummyItem : public pkgAcquire::Item +// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/ +void pkgAcqIndex::StageDownloadDone(string const &Message, HashStringList const &, + pkgAcquire::MethodConfig const * const) { - std::string URI; - public: - virtual std::string DescURI() {return URI;}; + Complete = true; - DummyItem(pkgAcquire *Owner, std::string const &URI) : pkgAcquire::Item(Owner), URI(URI) + // Handle the unzipd case + string FileName = LookupTag(Message,"Alt-Filename"); + if (FileName.empty() == false) { - Status = StatDone; - DestFile = GetFinalFileNameFromURI(URI); + Stage = STAGE_DECOMPRESS_AND_VERIFY; + Local = true; + DestFile += ".decomp"; + Desc.URI = "copy:" + FileName; + QueueURI(Desc); + SetActiveSubprocess("copy"); + return; } -}; -void pkgAcqMetaClearSig::Done(std::string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf) -{ - Item::Done(Message, Size, Hashes, Cnf); - // if we expect a ClearTextSignature (InRelease), ensure that - // this is what we get and if not fail to queue a - // Release/Release.gpg, see #346386 - if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile)) + FileName = LookupTag(Message,"Filename"); + if (FileName.empty() == true) { - pkgAcquire::Item::Failed(Message, Cnf); - RenameOnError(NotClearsigned); - TransactionManager->AbortTransaction(); - return; + Status = StatError; + ErrorText = "Method gave a blank filename"; } - if(AuthPass == false) + // Methods like e.g. "file:" will give us a (compressed) FileName that is + // not the "DestFile" we set, in this case we uncompress from the local file + if (FileName != DestFile) + Local = true; + else + EraseFileName = FileName; + + // we need to verify the file against the current Release file again + // on if-modfied-since hit to avoid a stale attack against us + if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) { - if(CheckDownloadDone(this, Message, Hashes) == true) - QueueForSignatureVerify(this, DestFile, DestFile); + // The files timestamp matches, reverify by copy into partial/ + EraseFileName = ""; + ReverifyAfterIMS(); return; } - else if(CheckAuthDone(Message) == true) + + // If we have compressed indexes enabled, queue for hash verification + if (_config->FindB("Acquire::GzipIndexes",false)) { - if (TransactionManager->IMSHit == false) - TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); - else if (RealFileExists(GetFinalFilename()) == false) - { - // We got an InRelease file IMSHit, but we haven't one, which means - // we had a valid Release/Release.gpg combo stepping in, which we have - // to 'acquire' now to ensure list cleanup isn't removing them - new DummyItem(Owner, MetaIndexURI); - new DummyItem(Owner, MetaSigURI); - } + DestFile = GetPartialFileNameFromURI(Target->URI + '.' 
+ CurrentCompressionExtension); + EraseFileName = ""; + Stage = STAGE_DECOMPRESS_AND_VERIFY; + Desc.URI = "copy:" + FileName; + QueueURI(Desc); + SetActiveSubprocess("copy"); + return; + } + + // get the binary name for your used compression type + string decompProg; + if(CurrentCompressionExtension == "uncompressed") + decompProg = "copy"; + else + decompProg = _config->Find(string("Acquire::CompressionTypes::").append(CurrentCompressionExtension),""); + if(decompProg.empty() == true) + { + _error->Error("Unsupported extension: %s", CurrentCompressionExtension.c_str()); + return; } + + // queue uri for the next stage + Stage = STAGE_DECOMPRESS_AND_VERIFY; + DestFile += ".decomp"; + Desc.URI = decompProg + ":" + FileName; + QueueURI(Desc); + SetActiveSubprocess(decompProg); } /*}}}*/ -void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +// AcqIndex::StageDecompressDone - Final verification /*{{{*/ +void pkgAcqIndex::StageDecompressDone(string const &Message, + HashStringList const &, + pkgAcquire::MethodConfig const * const Cfg) { - Item::Failed(Message, Cnf); - - // we failed, we will not get additional items from this method - ExpectedAdditionalItems = 0; - - if (AuthPass == false) + if(!ValidateFile(DestFile)) { - // Queue the 'old' InRelease file for removal if we try Release.gpg - // as otherwise the file will stay around and gives a false-auth - // impression (CVE-2012-0214) - TransactionManager->TransactionStageRemoval(this, GetFinalFilename()); - Status = StatDone; - - new pkgAcqMetaIndex(Owner, TransactionManager, - MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc, - MetaSigURI, MetaSigURIDesc, MetaSigShortDesc, - IndexTargets, MetaIndexParser); + RenameOnError(InvalidFormat); + Failed(Message, Cfg); + return; } - else - { - if(CheckStopAuthentication(this, Message)) - return; - _error->Warning(_("The data from '%s' is not signed. Packages " - "from that repository can not be authenticated."), - URIDesc.c_str()); - - // No Release file was present, or verification failed, so fall - // back to queueing Packages files without verification - // only allow going further if the users explicitely wants it - if(AllowInsecureRepositories(MetaIndexParser, TransactionManager, this) == true) - { - Status = StatDone; + // Done, queue for rename on transaction finished + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); - /* Always move the meta index, even if gpgv failed. 
This ensures - * that PackageFile objects are correctly filled in */ - if (FileExists(DestFile)) - { - string FinalFile = GetFinalFilename(); - /* InRelease files become Release files, otherwise - * they would be considered as trusted later on */ - RealURI = RealURI.replace(RealURI.rfind("InRelease"), 9, - "Release"); - FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9, - "Release"); - - // Done, queue for rename on transaction finished - TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); - } - QueueIndexes(false); - } - } + return; } /*}}}*/ + + // AcqArchive::AcqArchive - Constructor /*{{{*/ // --------------------------------------------------------------------- /* This just sets up the initial fetch environment and queues the first possibilitiy */ -pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, - pkgRecords *Recs,pkgCache::VerIterator const &Version, +pkgAcqArchive::pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sources, + pkgRecords * const Recs,pkgCache::VerIterator const &Version, string &StoreFilename) : - Item(Owner, HashStringList()), Version(Version), Sources(Sources), Recs(Recs), - StoreFilename(StoreFilename), Vf(Version.FileList()), + Item(Owner), LocalSource(false), Version(Version), Sources(Sources), Recs(Recs), + StoreFilename(StoreFilename), Vf(Version.FileList()), Trusted(false) { Retries = _config->FindI("Acquire::Retries",0); @@ -2573,15 +2577,17 @@ bool pkgAcqArchive::QueueNext() { for (; Vf.end() == false; ++Vf) { + pkgCache::PkgFileIterator const PkgF = Vf.File(); // Ignore not source sources - if ((Vf.File()->Flags & pkgCache::Flag::NotSource) != 0) + if ((PkgF->Flags & pkgCache::Flag::NotSource) != 0) continue; // Try to cross match against the source list pkgIndexFile *Index; - if (Sources->FindIndex(Vf.File(),Index) == false) + if (Sources->FindIndex(PkgF, Index) == false) continue; - + LocalSource = (PkgF->Flags & pkgCache::Flag::LocalSource) == pkgCache::Flag::LocalSource; + // only try to get a trusted package from another source if that source // is also trusted if(Trusted && !Index->IsTrusted()) @@ -2681,25 +2687,10 @@ bool pkgAcqArchive::QueueNext() // AcqArchive::Done - Finished fetching /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList const &CalcHashes, - pkgAcquire::MethodConfig *Cfg) +void pkgAcqArchive::Done(string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cfg) { - Item::Done(Message, Size, CalcHashes, Cfg); - - // Check the size - if (Size != Version->Size) - { - RenameOnError(SizeMismatch); - return; - } - - // FIXME: could this empty() check impose *any* sort of security issue? 
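
These deleted checks do not simply vanish: per the header changes further down, verification moves behind GetExpectedHashes() and HashesRequired(), so the base class can enforce hashes once for every download instead of each subclass repeating the size/hash comparison in its Done() method. A simplified sketch of that shape, not the literal apt implementation:

    #include <apt-pkg/hashes.h>

    struct VerifyingItem
    {
       virtual HashStringList GetExpectedHashes() const = 0;
       virtual bool HashesRequired() const { return true; }
       virtual ~VerifyingItem() {}

       bool DownloadIsGood(HashStringList const &CalcHashes) const
       {
          HashStringList const Expected = GetExpectedHashes();
          if (Expected.usable() == false)       // no hashes known at this stage
             return HashesRequired() == false;  // acceptable only if optional
          return Expected == CalcHashes;        // otherwise they must match
       }
    };
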
- if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes) - { - RenameOnError(HashSumMismatch); - printHashSumComparision(DestFile, ExpectedHashes, CalcHashes); - return; - } + Item::Done(Message, Hashes, Cfg); // Grab the output filename string FileName = LookupTag(Message,"Filename"); @@ -2726,23 +2717,17 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList Complete = true; } /*}}}*/ -// Acquire::Item::GetFinalFilename - Return the full final file path /*{{{*/ -std::string pkgAcqArchive::GetFinalFilename() const -{ - return _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename); -} - /*}}}*/ // AcqArchive::Failed - Failure handler /*{{{*/ // --------------------------------------------------------------------- /* Here we try other sources */ -void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +void pkgAcqArchive::Failed(string const &Message,pkgAcquire::MethodConfig const * const Cnf) { Item::Failed(Message,Cnf); - /* We don't really want to retry on failed media swaps, this prevents + /* We don't really want to retry on failed media swaps, this prevents that. An interesting observation is that permanent failures are not recorded. */ - if (Cnf->Removable == true && + if (Cnf->Removable == true && StringToBool(LookupTag(Message,"Transient-Failure"),false) == true) { // Vf = Version.FileList(); @@ -2770,21 +2755,12 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf) } } /*}}}*/ -// AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/ -// --------------------------------------------------------------------- -#if APT_PKG_ABI >= 413 -APT_PURE bool pkgAcqArchive::IsTrusted() const -#else -APT_PURE bool pkgAcqArchive::IsTrusted() -#endif +APT_PURE bool pkgAcqArchive::IsTrusted() const /*{{{*/ { return Trusted; } /*}}}*/ -// AcqArchive::Finished - Fetching has finished, tidy up /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcqArchive::Finished() +void pkgAcqArchive::Finished() /*{{{*/ { if (Status == pkgAcquire::Item::StatDone && Complete == true) @@ -2792,17 +2768,26 @@ void pkgAcqArchive::Finished() StoreFilename = string(); } /*}}}*/ +std::string pkgAcqArchive::DescURI() const /*{{{*/ +{ + return Desc.URI; +} + /*}}}*/ +std::string pkgAcqArchive::ShortDesc() const /*{{{*/ +{ + return Desc.ShortDesc; +} + /*}}}*/ + // AcqFile::pkgAcqFile - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* The file is added to the queue */ -pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashes, - unsigned long long Size,string Dsc,string ShortDesc, +pkgAcqFile::pkgAcqFile(pkgAcquire * const Owner,string const &URI, HashStringList const &Hashes, + unsigned long long const Size,string const &Dsc,string const &ShortDesc, const string &DestDir, const string &DestFilename, - bool IsIndexFile) : - Item(Owner, Hashes), IsIndexFile(IsIndexFile) + bool const IsIndexFile) : + Item(Owner), IsIndexFile(IsIndexFile), ExpectedHashes(Hashes) { Retries = _config->FindI("Acquire::Retries",0); - + if(!DestFilename.empty()) DestFile = DestFilename; else if(!DestDir.empty()) @@ -2817,7 +2802,7 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashe // Set the short description to the archive component Desc.ShortDesc = ShortDesc; - + // Get the transfer sizes FileSize = Size; struct stat Buf; @@ -2834,21 +2819,11 @@ pkgAcqFile::pkgAcqFile(pkgAcquire 
*Owner,string URI, HashStringList const &Hashe } /*}}}*/ // AcqFile::Done - Item downloaded OK /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList const &CalcHashes, - pkgAcquire::MethodConfig *Cnf) +void pkgAcqFile::Done(string const &Message,HashStringList const &CalcHashes, + pkgAcquire::MethodConfig const * const Cnf) { - Item::Done(Message,Size,CalcHashes,Cnf); + Item::Done(Message,CalcHashes,Cnf); - // Check the hash - if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes) - { - RenameOnError(HashSumMismatch); - printHashSumComparision(DestFile, ExpectedHashes, CalcHashes); - return; - } - string FileName = LookupTag(Message,"Filename"); if (FileName.empty() == true) { @@ -2858,11 +2833,11 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons } Complete = true; - + // The files timestamp matches if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) return; - + // We have to copy it into place if (FileName != DestFile) { @@ -2874,7 +2849,7 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons QueueURI(Desc); return; } - + // Erase the file if it is a symlink so we can overwrite it struct stat St; if (lstat(DestFile.c_str(),&St) == 0) @@ -2882,7 +2857,7 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons if (S_ISLNK(St.st_mode) != 0) unlink(DestFile.c_str()); } - + // Symlink the file if (symlink(FileName.c_str(),DestFile.c_str()) != 0) { @@ -2894,14 +2869,14 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList cons ErrorText = msg.str(); Status = StatError; Complete = false; - } + } } } /*}}}*/ // AcqFile::Failed - Failure handler /*{{{*/ // --------------------------------------------------------------------- /* Here we try other sources */ -void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +void pkgAcqFile::Failed(string const &Message, pkgAcquire::MethodConfig const * const Cnf) { Item::Failed(Message,Cnf); @@ -2918,14 +2893,7 @@ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf) } /*}}}*/ -// AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -/* The only header we use is the last-modified header. */ -#if APT_PKG_ABI >= 413 -string pkgAcqFile::Custom600Headers() const -#else -string pkgAcqFile::Custom600Headers() -#endif +string pkgAcqFile::Custom600Headers() const /*{{{*/ { if (IsIndexFile) return "\nIndex-File: true"; diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 07c86f31b..97d5ea1dd 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -6,15 +6,15 @@ Acquire Item - Item to acquire When an item is instantiated it will add it self to the local list in - the Owner Acquire class. Derived classes will then call QueueURI to - register all the URI's they wish to fetch at the initial moment. - + the Owner Acquire class. Derived classes will then call QueueURI to + register all the URI's they wish to fetch at the initial moment. + Three item classes are provided to provide functionality for downloading of Index, Translation and Packages files. - + A Archive class is provided for downloading .deb files. It does Hash checking and source location as well as a retry algorithm. 
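
As a concrete illustration of how clients drive these classes (simplified: real callers install a pkgAcquireStatus for progress reporting and inspect _error afterwards):

    #include <apt-pkg/acquire.h>
    #include <apt-pkg/acquire-item.h>
    #include <apt-pkg/hashes.h>
    #include <string>

    bool FetchOneFile(std::string const &uri, std::string const &dest)
    {
       pkgAcquire Fetcher;
       // the item adds itself to its owner's list; the owner frees it
       new pkgAcqFile(&Fetcher, uri, HashStringList(), 0,
                      uri /* description */, dest /* short description */,
                      "" /* DestDir */, dest /* DestFilename */, false);
       return Fetcher.Run() == pkgAcquire::Continue;
    }
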
- + ##################################################################### */ /*}}}*/ #ifndef PKGLIB_ACQUIRE_ITEM_H @@ -49,7 +49,48 @@ class pkgSourceList; class IndexTarget; class pkgAcqMetaBase; -/** \brief Represents the process by which a pkgAcquire object should {{{ +class APT_HIDDEN IndexTarget /*{{{*/ +/** \brief Information about an index file. */ +{ + public: + /** \brief A URI from which the index file can be downloaded. */ + std::string const URI; + + /** \brief A description of the index file. */ + std::string const Description; + + /** \brief A shorter description of the index file. */ + std::string const ShortDesc; + + /** \brief The key by which this index file should be + * looked up within the meta signature file. + */ + std::string const MetaKey; + + virtual bool IsOptional() const { + return false; + } + + IndexTarget(std::string const &MetaKey, std::string const &ShortDesc, + std::string const &LongDesc, std::string const &URI) : + URI(URI), Description(LongDesc), ShortDesc(ShortDesc), MetaKey(MetaKey) {} +}; + /*}}}*/ +class APT_HIDDEN OptionalIndexTarget : public IndexTarget /*{{{*/ +/** \brief Information about an optional index file. */ +{ + public: + virtual bool IsOptional() const { + return true; + } + + OptionalIndexTarget(std::string const &MetaKey, std::string const &ShortDesc, + std::string const &LongDesc, std::string const &URI) : + IndexTarget(MetaKey, ShortDesc, LongDesc, URI) {} +}; + /*}}}*/ +class pkgAcquire::Item : public WeakPointable /*{{{*/ +/** \brief Represents the process by which a pkgAcquire object should * retrieve a file or a collection of files. * * By convention, Item subclasses should insert themselves into the @@ -61,46 +102,7 @@ class pkgAcqMetaBase; * * \see pkgAcquire */ -class pkgAcquire::Item : public WeakPointable -{ - friend class pkgAcqMetaBase; - - void *d; - - protected: - - /** \brief The acquire object with which this item is associated. */ - pkgAcquire *Owner; - - /** \brief Insert this item into its owner's queue. - * - * The method is designed to check if the request would end - * in an IMSHit and if it determines that it would, it isn't - * queueing the Item and instead sets it to completion instantly. - * - * \param Item Metadata about this item (its URI and - * description). - * \return true if the item was inserted, false if IMSHit was detected - */ - virtual bool QueueURI(ItemDesc &Item); - - /** \brief Remove this item from its owner's queue. */ - void Dequeue(); - - /** \brief Rename a file without modifying its timestamp. - * - * Many item methods call this as their final action. - * - * \param From The file to be renamed. - * - * \param To The new name of \a From. If \a To exists it will be - * overwritten. - */ - bool Rename(std::string From,std::string To); - - /** \brief Get the full pathname of the final file for the current URI */ - virtual std::string GetFinalFilename() const; - +{ public: /** \brief The current status of this item. */ @@ -125,7 +127,7 @@ class pkgAcquire::Item : public WeakPointable */ StatAuthError, - /** \brief The item was could not be downloaded because of + /** \brief The item was could not be downloaded because of * a transient network error (e.g. network down) */ StatTransientNetworkError, @@ -153,11 +155,11 @@ class pkgAcquire::Item : public WeakPointable std::string ActiveSubprocess; /** \brief A client-supplied unique identifier. - * + * * This field is initalized to 0; it is meant to be filled in by * clients that wish to use it to uniquely identify items. 
* - * \todo it's unused in apt itself + * APT progress reporting will store an ID there as shown in "Get:42 …" */ unsigned long ID; @@ -173,6 +175,7 @@ class pkgAcquire::Item : public WeakPointable * download progress indicator's overall statistics. */ bool Local; + std::string UsedMirror; /** \brief The number of fetch queues into which this item has been @@ -185,9 +188,6 @@ class pkgAcquire::Item : public WeakPointable */ unsigned int QueueCounter; - /** \brief TransactionManager */ - pkgAcqMetaBase *TransactionManager; - /** \brief The number of additional fetch items that are expected * once this item is done. * @@ -197,15 +197,12 @@ class pkgAcquire::Item : public WeakPointable * progress. */ unsigned int ExpectedAdditionalItems; - + /** \brief The name of the file into which the retrieved object * will be written. */ std::string DestFile; - /** \brief storge name until a transaction is finished */ - std::string PartialFile; - /** \brief Invoked by the acquire worker when the object couldn't * be fetched. * @@ -219,7 +216,7 @@ class pkgAcquire::Item : public WeakPointable * * \sa pkgAcqMethod */ - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); /** \brief Invoked by the acquire worker when the object was * fetched successfully. @@ -234,25 +231,24 @@ class pkgAcquire::Item : public WeakPointable * * \param Message Data from the acquire method. Use LookupTag() * to parse it. - * \param Size The size of the object that was fetched. * \param Hashes The HashSums of the object that was fetched. * \param Cnf The method via which the object was fetched. * * \sa pkgAcqMethod */ - virtual void Done(std::string Message, unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); /** \brief Invoked when the worker starts to fetch this object. * * \param Message RFC822-formatted data from the worker process. * Use LookupTag() to parse it. * - * \param Size The size of the object being fetched. + * \param Hashes The expected hashes of the object being fetched. * * \sa pkgAcqMethod */ - virtual void Start(std::string Message,unsigned long long Size); + virtual void Start(std::string const &Message, unsigned long long const Size); /** \brief Custom headers to be sent to the fetch process. * @@ -262,61 +258,55 @@ class pkgAcquire::Item : public WeakPointable * line, so they should (if nonempty) have a leading newline and * no trailing newline. */ -#if APT_PKG_ABI >= 413 - virtual std::string Custom600Headers() const {return std::string();}; -#else - virtual std::string Custom600Headers() {return std::string();}; -#endif + virtual std::string Custom600Headers() const; /** \brief A "descriptive" URI-like string. * * \return a URI that should be used to describe what is being fetched. */ - virtual std::string DescURI() = 0; + virtual std::string DescURI() const = 0; /** \brief Short item description. * * \return a brief description of the object being fetched. */ - virtual std::string ShortDesc() {return DescURI();} + virtual std::string ShortDesc() const; /** \brief Invoked by the worker when the download is completely done. 
*/ - virtual void Finished() {}; - - /** \brief HashSums + virtual void Finished(); + + /** \return HashSums the DestFile is supposed to have in this stage */ + virtual HashStringList GetExpectedHashes() const = 0; + /** \return the 'best' hash for display proposes like --print-uris */ + std::string HashSum() const; + + /** \return if having no hashes is a hard failure or not * - * \return the HashSums of this object, if applicable; otherwise, an - * empty list. + * Idealy this is always \b true for every subclass, but thanks to + * historical grow we don't have hashes for all files in all cases + * in all steps, so it is slightly more complicated than it should be. */ - HashStringList HashSums() const {return ExpectedHashes;}; - std::string HashSum() const {HashStringList const hashes = HashSums(); HashString const * const hs = hashes.find(NULL); return hs != NULL ? hs->toStr() : ""; }; + virtual bool HashesRequired() const { return true; } /** \return the acquire process with which this item is associated. */ - pkgAcquire *GetOwner() const {return Owner;}; -#if APT_PKG_ABI < 413 - pkgAcquire *GetOwner() {return Owner;}; -#endif + pkgAcquire *GetOwner() const; /** \return \b true if this object is being fetched from a trusted source. */ -#if APT_PKG_ABI >= 413 - virtual bool IsTrusted() const {return false;}; -#else - virtual bool IsTrusted() {return false;}; -#endif - + virtual bool IsTrusted() const; + /** \brief Report mirror problem - * + * * This allows reporting mirror failures back to a centralized * server. The apt-report-mirror-failure script is called for this - * + * * \param FailCode A short failure string that is send */ - void ReportMirrorFailure(std::string FailCode); + void ReportMirrorFailure(std::string const &FailCode); /** \brief Set the name of the current active subprocess * * See also #ActiveSubprocess */ - void SetActiveSubprocess(const std::string &subprocess); + void SetActiveSubprocess(std::string const &subprocess); /** \brief Initialize an item. * @@ -325,11 +315,8 @@ class pkgAcquire::Item : public WeakPointable * manually invoke QueueURI() to do so). * * \param Owner The new owner of this item. - * \param ExpectedHashes of the file represented by this item */ - Item(pkgAcquire *Owner, - HashStringList const &ExpectedHashes=HashStringList(), - pkgAcqMetaBase *TransactionManager=NULL); + Item(pkgAcquire * const Owner); /** \brief Remove this item from its owner's queue by invoking * pkgAcquire::Remove. @@ -337,6 +324,11 @@ class pkgAcquire::Item : public WeakPointable virtual ~Item(); protected: + /** \brief The acquire object with which this item is associated. */ + pkgAcquire * const Owner; + + /** \brief The item that is currently being downloaded. */ + pkgAcquire::ItemDesc Desc; enum RenameOnErrorState { HashSumMismatch, @@ -354,63 +346,99 @@ class pkgAcquire::Item : public WeakPointable */ bool RenameOnError(RenameOnErrorState const state); + /** \brief Insert this item into its owner's queue. + * + * The method is designed to check if the request would end + * in an IMSHit and if it determines that it would, it isn't + * queueing the Item and instead sets it to completion instantly. + * + * \param Item Metadata about this item (its URI and + * description). + * \return true if the item was inserted, false if IMSHit was detected + */ + virtual bool QueueURI(ItemDesc &Item); + + /** \brief Remove this item from its owner's queue. */ + void Dequeue(); + + /** \brief Rename a file without modifying its timestamp. 
+   /** \brief Rename a file without modifying its timestamp.
+    *
+    * Many item methods call this as their final action.
+    *
+    * \param From The file to be renamed.
+    *
+    * \param To The new name of \a From. If \a To exists it will be
+    * overwritten. If \a From and \a To are equal nothing happens.
+    */
+   bool Rename(std::string const &From, std::string const &To);
+
+   /** \brief Get the full pathname of the final file for the current URI */
+   virtual std::string GetFinalFilename() const;
+
+   private:
+   void *d;
+
+   friend class pkgAcqMetaBase;
+};
									/*}}}*/
+class APT_HIDDEN pkgAcqTransactionItem: public pkgAcquire::Item	/*{{{*/
+/** \brief baseclass for the index files to manage them all together */
+{
+   protected:
+   IndexTarget const * const Target;
+   HashStringList GetExpectedHashesFor(std::string const MetaKey) const;
+
+   bool QueueURI(pkgAcquire::ItemDesc &Item);
+
+   public:
+   /** \brief storage name until a transaction is finished */
+   std::string PartialFile;
+
+   /** \brief TransactionManager */
+   pkgAcqMetaBase * const TransactionManager;
+
    enum TransactionStates {
       TransactionCommit,
       TransactionAbort,
    };
    virtual bool TransactionState(TransactionStates const state);

-   /** \brief The HashSums of the item is supposed to have than done */
-   HashStringList ExpectedHashes;
+   virtual std::string DescURI() const { return Target->URI; }
+   virtual HashStringList GetExpectedHashes() const;
+   virtual std::string GetMetaKey() const;
+   virtual bool HashesRequired() const;

-   /** \brief The item that is currently being downloaded. */
-   pkgAcquire::ItemDesc Desc;
-};
-									/*}}}*/
-/** \brief Information about an index patch (aka diff). */		/*{{{*/
-struct APT_HIDDEN DiffInfo {
-   /** The filename of the diff. */
-   std::string file;

-   /** The hashes of the diff */
-   HashStringList result_hashes;
-
-   /** The hashes of the file after the diff is applied */
-   HashStringList patch_hashes;
+   pkgAcqTransactionItem(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, IndexTarget const * const Target);
+   virtual ~pkgAcqTransactionItem();

-   /** The size of the file after the diff is applied */
-   unsigned long long result_size;
-
-   /** The size of the diff itself */
-   unsigned long long patch_size;
+   friend class pkgAcqMetaBase;
 };
 									/*}}}*/
-class pkgAcqMetaBase : public pkgAcquire::Item				/*{{{*/
+class APT_HIDDEN pkgAcqMetaBase : public pkgAcqTransactionItem		/*{{{*/
+/** \brief the manager of a transaction */
 {
    void *d;

    protected:
-   std::vector<Item*> Transaction;
+   std::vector<pkgAcqTransactionItem*> Transaction;

+   IndexTarget const DataTarget;
+
+   public:
    /** \brief A package-system-specific parser for the meta-index file. */
    indexRecords *MetaIndexParser;
    indexRecords *LastMetaIndexParser;
+   protected:

    /** \brief The index files which should be looked up in the meta-index
     * and then downloaded.
     */
-   const std::vector<IndexTarget*>* IndexTargets;
+   const std::vector<IndexTarget*>* const IndexTargets;

    /** \brief If \b true, the index's signature is currently being verified.
     */
    bool AuthPass;

-   /** \brief The URI of the signature file.  Unlike Desc.URI, this is
-    * never modified; it is used to determine the file that is being
-    * downloaded.
-    */
-   std::string RealURI;
-
    /** \brief Starts downloading the individual index files.
     *
     * \param verify If \b true, only indices whose expected hashsum
@@ -419,7 +447,7 @@ class pkgAcqMetaBase : public pkgAcquire::Item			/*{{{*/
     * #StatAuthError if there is a mismatch).  If verify is \b false,
     * no hashsum checking will be performed.
     */
-   void QueueIndexes(bool verify);
+   void QueueIndexes(bool const verify);

    /** \brief Called when a file is finished being retrieved.
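+    *
+    * Illustrative call pattern only (simplified, not the literal
+    * implementation; the subclass name is hypothetical):
+    * \code
+    * void pkgAcqExample::Done(std::string const &Msg, HashStringList const &Hashes,
+    *                          pkgAcquire::MethodConfig const * const Cnf)
+    * {
+    *    if (TransactionManager->CheckDownloadDone(this, Msg, Hashes) == false)
+    *       return;
+    *    TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename());
+    * }
+    * \endcode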
* @@ -430,16 +458,12 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ * \param Message The message block received from the fetch * subprocess. */ - bool CheckDownloadDone(pkgAcquire::Item * const I, const std::string &Message, HashStringList const &Hashes) const; + bool CheckDownloadDone(pkgAcqTransactionItem * const I, const std::string &Message, HashStringList const &Hashes) const; /** \brief Queue the downloaded Signature for verification */ - void QueueForSignatureVerify(pkgAcquire::Item * const I, std::string const &File, std::string const &Signature); + void QueueForSignatureVerify(pkgAcqTransactionItem * const I, std::string const &File, std::string const &Signature); -#if APT_PKG_ABI >= 413 virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif /** \brief Called when authentication succeeded. * @@ -450,7 +474,7 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ * \param Message The message block received from the fetch * subprocess. */ - bool CheckAuthDone(std::string Message); + bool CheckAuthDone(std::string const &Message); /** Check if the current item should fail at this point */ bool CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message); @@ -460,7 +484,7 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ * * \return \b true if no fatal errors were encountered. */ - bool VerifyVendor(std::string Message); + bool VerifyVendor(std::string const &Message); virtual bool TransactionState(TransactionStates const state); @@ -468,33 +492,32 @@ class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ // This refers more to the Transaction-Manager than the actual file bool IMSHit; - virtual std::string DescURI() {return RealURI; }; virtual bool QueueURI(pkgAcquire::ItemDesc &Item); + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; // transaction code - void Add(Item *I); + void Add(pkgAcqTransactionItem * const I); void AbortTransaction(); - bool TransactionHasError() APT_PURE; + bool TransactionHasError() const; void CommitTransaction(); /** \brief Stage (queue) a copy action when the transaction is committed */ - void TransactionStageCopy(Item *I, - const std::string &From, + void TransactionStageCopy(pkgAcqTransactionItem * const I, + const std::string &From, const std::string &To); /** \brief Stage (queue) a removal action when the transaction is committed */ - void TransactionStageRemoval(Item *I, const std::string &FinalFile); + void TransactionStageRemoval(pkgAcqTransactionItem * const I, const std::string &FinalFile); /** \brief Get the full pathname of the final file for the current URI */ virtual std::string GetFinalFilename() const; - pkgAcqMetaBase(pkgAcquire *Owner, - const std::vector* IndexTargets, - indexRecords* MetaIndexParser, - std::string const &RealURI, - HashStringList const &ExpectedHashes=HashStringList(), - pkgAcqMetaBase *TransactionManager=NULL); + pkgAcqMetaBase(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + std::vector const * const IndexTargets, + IndexTarget const &DataTarget, + indexRecords* const MetaIndexParser); }; /*}}}*/ /** \brief An item that is responsible for downloading the meta-index {{{ @@ -512,36 +535,24 @@ class APT_HIDDEN pkgAcqMetaIndex : public pkgAcqMetaBase void *d; protected: - std::string URIDesc; - std::string ShortDesc; - - /** \brief The URI of the meta-index file for the detached signature */ - std::string MetaIndexSigURI; - - /** \brief A "URI-style" description of the 
meta-index file */ - std::string MetaIndexSigURIDesc; - - /** \brief A brief description of the meta-index file */ - std::string MetaIndexSigShortDesc; + IndexTarget const DetachedSigTarget; /** \brief delayed constructor */ - void Init(std::string URIDesc, std::string ShortDesc); - + void Init(std::string const &URIDesc, std::string const &ShortDesc); + public: + virtual std::string DescURI() const; // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); virtual void Finished(); /** \brief Create a new pkgAcqMetaIndex. */ - pkgAcqMetaIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - std::string URI,std::string URIDesc, std::string ShortDesc, - std::string MetaIndexSigURI, std::string MetaIndexSigURIDesc, std::string MetaIndexSigShortDesc, - const std::vector* IndexTargets, - indexRecords* MetaIndexParser); + pkgAcqMetaIndex(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const &DataTarget, IndexTarget const &DetachedSigTarget, + const std::vector* const IndexTargets, indexRecords * const MetaIndexParser); friend class pkgAcqMetaSig; }; @@ -554,7 +565,7 @@ class APT_HIDDEN pkgAcqMetaIndex : public pkgAcqMetaBase * * \sa pkgAcqMetaIndex */ -class APT_HIDDEN pkgAcqMetaSig : public pkgAcquire::Item +class APT_HIDDEN pkgAcqMetaSig : public pkgAcqTransactionItem { void *d; @@ -565,29 +576,20 @@ class APT_HIDDEN pkgAcqMetaSig : public pkgAcquire::Item protected: - /** \brief Long URI description used in the acquire system */ - std::string URIDesc; - - /** \brief URI used to get the file */ - std::string RealURI; - /** \brief Get the full pathname of the final file for the current URI */ virtual std::string GetFinalFilename() const; public: - virtual std::string DescURI() {return RealURI;}; + virtual bool HashesRequired() const { return false; } // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); /** \brief Create a new pkgAcqMetaSig. 
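+    *
+    * Normally not created directly: the meta-index item queues its own
+    * detached signature, roughly (sketch, not the literal code):
+    * \code
+    * new pkgAcqMetaSig(GetOwner(), TransactionManager, &DetachedSigTarget, this);
+    * \endcode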
*/ - pkgAcqMetaSig(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - std::string const &URI,std::string const &URIDesc, - std::string const &ShortDesc, pkgAcqMetaIndex * const MetaIndex); + pkgAcqMetaSig(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, IndexTarget const * const Target, + pkgAcqMetaIndex * const MetaIndex); virtual ~pkgAcqMetaSig(); }; /*}}}*/ @@ -596,78 +598,37 @@ class APT_HIDDEN pkgAcqMetaClearSig : public pkgAcqMetaIndex { void *d; - /** \brief The URI of the meta-index file for the detached signature */ - std::string MetaIndexURI; - - /** \brief A "URI-style" description of the meta-index file */ - std::string MetaIndexURIDesc; - - /** \brief A brief description of the meta-index file */ - std::string MetaIndexShortDesc; - - /** \brief The URI of the detached meta-signature file if the clearsigned one failed. */ - std::string MetaSigURI; - - /** \brief A "URI-style" description of the meta-signature file */ - std::string MetaSigURIDesc; - - /** \brief A brief description of the meta-signature file */ - std::string MetaSigShortDesc; + IndexTarget const ClearsignedTarget; + IndexTarget const DetachedDataTarget; + IndexTarget const DetachedSigTarget; public: - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); -#if APT_PKG_ABI >= 413 + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif - virtual void Done(std::string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); /** \brief Create a new pkgAcqMetaClearSig. */ - pkgAcqMetaClearSig(pkgAcquire *Owner, - std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc, - std::string const &MetaIndexURI, std::string const &MetaIndexURIDesc, std::string const &MetaIndexShortDesc, - std::string const &MetaSigURI, std::string const &MetaSigURIDesc, std::string const &MetaSigShortDesc, - const std::vector* IndexTargets, - indexRecords* MetaIndexParser); + pkgAcqMetaClearSig(pkgAcquire * const Owner, + IndexTarget const &ClearsignedTarget, + IndexTarget const &DetachedDataTarget, + IndexTarget const &DetachedSigTarget, + std::vector const * const IndexTargets, + indexRecords * const MetaIndexParser); virtual ~pkgAcqMetaClearSig(); }; /*}}}*/ -/** \brief Common base class for all classes that deal with fetching {{{ - indexes - */ -class pkgAcqBaseIndex : public pkgAcquire::Item +/** \brief Common base class for all classes that deal with fetching indexes {{{*/ +class APT_HIDDEN pkgAcqBaseIndex : public pkgAcqTransactionItem { void *d; - protected: - /** \brief Pointer to the IndexTarget data - */ - const struct IndexTarget * Target; - - /** \brief Pointer to the indexRecords parser */ - indexRecords *MetaIndexParser; - - /** \brief The MetaIndex Key */ - std::string MetaKey; - - /** \brief The URI of the index file to recreate at our end (either - * by downloading it or by applying partial patches). 
- */ - std::string RealURI; - - bool VerifyHashByMetaKey(HashStringList const &Hashes); - + public: /** \brief Get the full pathname of the final file for the current URI */ virtual std::string GetFinalFilename() const; - pkgAcqBaseIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser); + pkgAcqBaseIndex(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target); }; /*}}}*/ /** \brief An item that is responsible for fetching an index file of {{{ @@ -705,15 +666,12 @@ class APT_HIDDEN pkgAcqDiffIndex : public pkgAcqBaseIndex virtual bool TransactionState(TransactionStates const state); public: // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return RealURI + "Index";}; -#if APT_PKG_ABI >= 413 + virtual void Failed(std::string const &Message, pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const {return Target->URI + "Index";}; virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif + virtual std::string GetMetaKey() const; /** \brief Parse the Index file for a set of Packages diffs. * @@ -725,7 +683,7 @@ class APT_HIDDEN pkgAcqDiffIndex : public pkgAcqBaseIndex * \return \b true if the Index file was successfully parsed, \b * false otherwise. */ - bool ParseDiffIndex(std::string IndexDiffFile); + bool ParseDiffIndex(std::string const &IndexDiffFile); /** \brief Create a new pkgAcqDiffIndex. * @@ -736,18 +694,30 @@ class APT_HIDDEN pkgAcqDiffIndex : public pkgAcqBaseIndex * \param URIDesc A long description of the list file to download. * * \param ShortDesc A short description of the list file to download. - * - * \param ExpectedHashes The list file's hashsums which are expected. */ - pkgAcqDiffIndex(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHashes, - indexRecords *MetaIndexParser); + pkgAcqDiffIndex(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target); private: APT_HIDDEN void QueueOnIMSHit() const; }; /*}}}*/ +struct APT_HIDDEN DiffInfo { /*{{{*/ + /** The filename of the diff. */ + std::string file; + + /** The hashes of the diff */ + HashStringList result_hashes; + + /** The hashes of the file after the diff is applied */ + HashStringList patch_hashes; + + /** The size of the file after the diff is applied */ + unsigned long long result_size; + + /** The size of the diff itself */ + unsigned long long patch_size; +}; + /*}}}*/ /** \brief An item that is responsible for fetching client-merge patches {{{ * that need to be applied to a given package index file. * @@ -801,10 +771,12 @@ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex * This method will fall back to downloading the whole index file * outright; its arguments are ignored. 
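+    *
+    * In effect (simplified sketch, not the literal code) the fallback
+    * boils down to queueing a complete download instead:
+    * \code
+    * new pkgAcqIndex(GetOwner(), TransactionManager, Target);
+    * \endcode
+    * so the user still ends up with a current index, just less efficiently.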
*/ - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return RealURI + "Index";}; + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const {return Target->URI + "Index";}; + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; /** \brief Create an index merge-diff item. * @@ -817,22 +789,15 @@ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex * * \param ShortDesc A brief description of this item. * - * \param ExpectedHashes The expected md5sum of the completely - * reconstructed package index file; the index file will be tested - * against this value when it is entirely reconstructed. - * * \param patch contains infos about the patch this item is supposed * to download which were read from the index * * \param allPatches contains all related items so that each item can * check if it was the last one to complete the download step */ - pkgAcqIndexMergeDiffs(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHash, - indexRecords *MetaIndexParser, - DiffInfo const &patch, + pkgAcqIndexMergeDiffs(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + DiffInfo const &patch, std::vector const * const allPatches); }; /*}}}*/ @@ -875,7 +840,7 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex * \param allDone If \b true, the file was entirely reconstructed, * and its md5sum is verified. */ - APT_HIDDEN void Finish(bool allDone=false); + APT_HIDDEN void Finish(bool const allDone=false); protected: @@ -905,26 +870,25 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex /** \brief The diff is currently being fetched. */ StateFetchDiff, - - /** \brief The diff is currently being uncompressed. */ - StateUnzipDiff, // FIXME: No longer used /** \brief The diff is currently being applied. */ StateApplyDiff } State; public: - + /** \brief Called when the patch file failed to be downloaded. * * This method will fall back to downloading the whole index file * outright; its arguments are ignored. */ - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return RealURI + "IndexDiffs";}; + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const {return Target->URI + "IndexDiffs";}; + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; /** \brief Create an index diff item. * @@ -940,20 +904,13 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex * * \param ShortDesc A brief description of this item. * - * \param ExpectedHashes The expected hashsums of the completely - * reconstructed package index file; the index file will be tested - * against this value when it is entirely reconstructed. 
- * * \param diffs The remaining diffs from the index of diffs. They * should be ordered so that each diff appears before any diff * that depends on it. */ - pkgAcqIndexDiffs(pkgAcquire *Owner, - pkgAcqMetaBase *TransactionManager, - struct IndexTarget const * const Target, - HashStringList const &ExpectedHash, - indexRecords *MetaIndexParser, - std::vector diffs=std::vector()); + pkgAcqIndexDiffs(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target, + std::vector const &diffs=std::vector()); }; /*}}}*/ /** \brief An acquire item that is responsible for fetching an index {{{ @@ -981,16 +938,16 @@ class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex AllStages Stage; /** \brief Handle what needs to be done when the download is done */ - void StageDownloadDone(std::string Message, + void StageDownloadDone(std::string const &Message, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg); + pkgAcquire::MethodConfig const * const Cfg); /** \brief Handle what needs to be done when the decompression/copy is * done */ - void StageDecompressDone(std::string Message, + void StageDecompressDone(std::string const &Message, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cfg); + pkgAcquire::MethodConfig const * const Cfg); /** \brief If \b set, this partially downloaded file will be * removed when the download completes. @@ -1006,7 +963,7 @@ class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex std::string CurrentCompressionExtension; /** \brief Do the changes needed to fetch via AptByHash (if needed) */ - void InitByHashIfNeeded(const std::string MetaKey); + void InitByHashIfNeeded(); /** \brief Auto select the right compression to use */ void AutoSelectCompression(); @@ -1015,7 +972,7 @@ class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex void ReverifyAfterIMS(); /** \brief Validate the downloaded index file */ - bool ValidateFile(const std::string &FileName); + bool ValidateFile(std::string const &FileName); /** \brief Get the full pathname of the final file for the current URI */ virtual std::string GetFinalFilename() const; @@ -1024,82 +981,20 @@ class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex public: // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, - HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); -#if APT_PKG_ABI >= 413 + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif - virtual std::string DescURI() {return Desc.URI;}; + virtual std::string DescURI() const {return Desc.URI;}; + virtual std::string GetMetaKey() const; + + pkgAcqIndex(pkgAcquire * const Owner, pkgAcqMetaBase * const TransactionManager, + IndexTarget const * const Target); - /** \brief Create a pkgAcqIndex. - * - * \param Owner The pkgAcquire object with which this item is - * associated. - * - * \param URI The URI of the index file that is to be downloaded. - * - * \param URIDesc A "URI-style" description of this index file. - * - * \param ShortDesc A brief description of this index file. - * - * \param ExpectedHashes The expected hashsum of this index file. 
- * - * \param compressExt The compression-related extension with which - * this index file should be downloaded, or "" to autodetect - * Compression types can be set with config Acquire::CompressionTypes, - * default is ".lzma" or ".bz2" (if the needed binaries are present) - * fallback is ".gz" or none. - */ - pkgAcqIndex(pkgAcquire *Owner,std::string URI,std::string URIDesc, - std::string ShortDesc, HashStringList const &ExpectedHashes); - pkgAcqIndex(pkgAcquire *Owner, pkgAcqMetaBase *TransactionManager, - IndexTarget const * const Target, - HashStringList const &ExpectedHash, - indexRecords *MetaIndexParser); - void Init(std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc); }; /*}}}*/ -/** \brief Information about an index file. */ /*{{{*/ -class APT_HIDDEN IndexTarget -{ - void *d; - - public: - /** \brief A URI from which the index file can be downloaded. */ - std::string URI; - - /** \brief A description of the index file. */ - std::string Description; - - /** \brief A shorter description of the index file. */ - std::string ShortDesc; - - /** \brief The key by which this index file should be - * looked up within the meta signature file. - */ - std::string MetaKey; - - virtual bool IsOptional() const { - return false; - } -}; - /*}}}*/ -/** \brief Information about an optional index file. */ /*{{{*/ -class APT_HIDDEN OptionalIndexTarget : public IndexTarget -{ - void *d; - - virtual bool IsOptional() const { - return true; - } -}; - /*}}}*/ /** \brief An item that is responsible for fetching a package file. {{{ * * If the package file already exists in the cache, nothing will be @@ -1109,6 +1004,9 @@ class pkgAcqArchive : public pkgAcquire::Item { void *d; + bool LocalSource; + HashStringList ExpectedHashes; + protected: /** \brief The package version being fetched. */ pkgCache::VerIterator Version; @@ -1141,7 +1039,7 @@ class pkgAcqArchive : public pkgAcquire::Item /** \brief \b true if this version file is being downloaded from a * trusted source. */ - bool Trusted; + bool Trusted; /** \brief Queue up the next available file for this version. */ bool QueueNext(); @@ -1151,17 +1049,15 @@ class pkgAcqArchive : public pkgAcquire::Item public: - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return Desc.URI;}; - virtual std::string ShortDesc() {return Desc.ShortDesc;}; + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &Hashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const; + virtual std::string ShortDesc() const; virtual void Finished(); -#if APT_PKG_ABI >= 413 virtual bool IsTrusted() const; -#else - virtual bool IsTrusted(); -#endif + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; /** \brief Create a new pkgAcqArchive. * @@ -1181,8 +1077,8 @@ class pkgAcqArchive : public pkgAcquire::Item * basename in the constructor, and filled in with a fully * qualified filename once the download finishes. 
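+    *
+    * Rough usage sketch (variable names are illustrative, error handling
+    * omitted):
+    * \code
+    * std::string StoreFilename;
+    * new pkgAcqArchive(&Fetcher, SrcList, &Recs, Ver, StoreFilename);
+    * \endcode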
*/ - pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, - pkgRecords *Recs,pkgCache::VerIterator const &Version, + pkgAcqArchive(pkgAcquire * const Owner,pkgSourceList * const Sources, + pkgRecords * const Recs,pkgCache::VerIterator const &Version, std::string &StoreFilename); }; /*}}}*/ @@ -1200,22 +1096,21 @@ class pkgAcqFile : public pkgAcquire::Item * Acquire::Retries. */ unsigned int Retries; - + /** \brief Should this file be considered a index file */ bool IsIndexFile; + HashStringList const ExpectedHashes; public: - + virtual HashStringList GetExpectedHashes() const; + virtual bool HashesRequired() const; + // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, HashStringList const &CalcHashes, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return Desc.URI;}; -#if APT_PKG_ABI >= 413 + virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); + virtual void Done(std::string const &Message, HashStringList const &CalcHashes, + pkgAcquire::MethodConfig const * const Cnf); + virtual std::string DescURI() const {return Desc.URI;}; virtual std::string Custom600Headers() const; -#else - virtual std::string Custom600Headers(); -#endif /** \brief Create a new pkgAcqFile object. * @@ -1248,10 +1143,10 @@ class pkgAcqFile : public pkgAcquire::Item * is the absolute name to which the file should be downloaded. */ - pkgAcqFile(pkgAcquire *Owner, std::string URI, HashStringList const &Hashes, unsigned long long Size, - std::string Desc, std::string ShortDesc, - const std::string &DestDir="", const std::string &DestFilename="", - bool IsIndexFile=false); + pkgAcqFile(pkgAcquire * const Owner, std::string const &URI, HashStringList const &Hashes, unsigned long long const Size, + std::string const &Desc, std::string const &ShortDesc, + std::string const &DestDir="", std::string const &DestFilename="", + bool const IsIndexFile=false); }; /*}}}*/ /** @} */ diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc index c29ef469e..b77096efd 100644 --- a/apt-pkg/acquire-method.cc +++ b/apt-pkg/acquire-method.cc @@ -376,7 +376,10 @@ int pkgAcqMethod::Run(bool Single) Tmp->ExpectedHashes.push_back(HashString(*t, hash)); } char *End; - Tmp->MaximumSize = strtoll(LookupTag(Message, "Maximum-Size", "0").c_str(), &End, 10); + if (Tmp->ExpectedHashes.FileSize() > 0) + Tmp->MaximumSize = Tmp->ExpectedHashes.FileSize(); + else + Tmp->MaximumSize = strtoll(LookupTag(Message, "Maximum-Size", "0").c_str(), &End, 10); Tmp->Next = 0; // Append it to the list diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc index 9254e20a3..099a1f87d 100644 --- a/apt-pkg/acquire-worker.cc +++ b/apt-pkg/acquire-worker.cc @@ -55,8 +55,8 @@ pkgAcquire::Worker::Worker(Queue *Q,MethodConfig *Cnf, CurrentItem = 0; TotalSize = 0; CurrentSize = 0; - - Construct(); + + Construct(); } /*}}}*/ // Worker::Worker - Constructor for method config startup /*{{{*/ @@ -70,8 +70,8 @@ pkgAcquire::Worker::Worker(MethodConfig *Cnf) CurrentItem = 0; TotalSize = 0; CurrentSize = 0; - - Construct(); + + Construct(); } /*}}}*/ // Worker::Construct - Constructor helper /*{{{*/ @@ -136,7 +136,7 @@ bool pkgAcquire::Worker::Start() } for (int I = 0; I != 4; I++) SetCloseExec(Pipes[I],true); - + // Fork off the process Process = ExecFork(); if (Process == 0) @@ -145,9 +145,9 @@ bool pkgAcquire::Worker::Start() dup2(Pipes[1],STDOUT_FILENO); dup2(Pipes[2],STDIN_FILENO); 
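+      // child side: stdout and stdin are now wired to the parent's pipe
+      // pair, while stderr stays shared so errors still reach the user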
SetCloseExec(STDOUT_FILENO,false); - SetCloseExec(STDIN_FILENO,false); + SetCloseExec(STDIN_FILENO,false); SetCloseExec(STDERR_FILENO,false); - + const char *Args[2]; Args[0] = Method.c_str(); Args[1] = 0; @@ -165,7 +165,7 @@ bool pkgAcquire::Worker::Start() close(Pipes[2]); OutReady = false; InReady = true; - + // Read the configuration data if (WaitFd(InFd) == false || ReadMessages() == false) @@ -174,7 +174,7 @@ bool pkgAcquire::Worker::Start() RunMessages(); if (OwnerQ != 0) SendConfiguration(); - + return true; } /*}}}*/ @@ -201,7 +201,7 @@ bool pkgAcquire::Worker::RunMessages() if (Debug == true) clog << " <- " << Access << ':' << QuoteString(Message,"\n") << endl; - + // Fetch the message number char *End; int Number = strtol(Message.c_str(),&End,10); @@ -215,15 +215,15 @@ bool pkgAcquire::Worker::RunMessages() // update used mirror string UsedMirror = LookupTag(Message,"UsedMirror", ""); - if (!UsedMirror.empty() && + if (!UsedMirror.empty() && Itm && - Itm->Description.find(" ") != string::npos) + Itm->Description.find(" ") != string::npos) { Itm->Description.replace(0, Itm->Description.find(" "), UsedMirror); // FIXME: will we need this as well? //Itm->ShortDesc = UsedMirror; } - + // Determine the message number and dispatch switch (Number) { @@ -232,18 +232,18 @@ bool pkgAcquire::Worker::RunMessages() if (Capabilities(Message) == false) return _error->Error("Unable to process Capabilities message from %s",Access.c_str()); break; - + // 101 Log case 101: if (Debug == true) clog << " <- (log) " << LookupTag(Message,"Message") << endl; break; - + // 102 Status case 102: Status = LookupTag(Message,"Message"); break; - + // 103 Redirect case 103: { @@ -252,7 +252,7 @@ bool pkgAcquire::Worker::RunMessages() _error->Error("Method gave invalid 103 Redirect message"); break; } - + string NewURI = LookupTag(Message,"New-URI",URI.c_str()); Itm->URI = NewURI; @@ -272,7 +272,7 @@ bool pkgAcquire::Worker::RunMessages() Log->Done(Desc); break; } - + // 200 URI Start case 200: { @@ -281,23 +281,23 @@ bool pkgAcquire::Worker::RunMessages() _error->Error("Method gave invalid 200 URI Start message"); break; } - + CurrentItem = Itm; CurrentSize = 0; TotalSize = strtoull(LookupTag(Message,"Size","0").c_str(), NULL, 10); ResumePoint = strtoull(LookupTag(Message,"Resume-Point","0").c_str(), NULL, 10); - Itm->Owner->Start(Message,strtoull(LookupTag(Message,"Size","0").c_str(), NULL, 10)); + Itm->Owner->Start(Message, TotalSize); // Display update before completion if (Log != 0 && Log->MorePulses == true) Log->Pulse(Itm->Owner->GetOwner()); - + if (Log != 0) Log->Fetch(*Itm); break; } - + // 201 URI Done case 201: { @@ -306,7 +306,7 @@ bool pkgAcquire::Worker::RunMessages() _error->Error("Method gave invalid 201 URI Done message"); break; } - + pkgAcquire::Item *Owner = Itm->Owner; pkgAcquire::ItemDesc Desc = *Itm; @@ -316,22 +316,11 @@ bool pkgAcquire::Worker::RunMessages() // Display update before completion if (Log != 0 && Log->MorePulses == true) Log->Pulse(Owner->GetOwner()); - + OwnerQ->ItemDone(Itm); - unsigned long long const ServerSize = strtoull(LookupTag(Message,"Size","0").c_str(), NULL, 10); - bool isHit = StringToBool(LookupTag(Message,"IMS-Hit"),false) || - StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false); - // Using the https method the server might return 200, but the - // If-Modified-Since condition is not satsified, libcurl will - // discard the download. In this case, however, TotalSize will be - // set to the actual size of the file, while ServerSize will be set - // to 0. 
Therefore, if the item is marked as a hit and the
-      // downloaded size (ServerSize) is 0, we ignore TotalSize.
-      if (TotalSize != 0 && (!isHit || ServerSize != 0) && ServerSize != TotalSize)
-         _error->Warning("Size of file %s is not what the server reported %s %llu",
-                         Owner->DestFile.c_str(), LookupTag(Message,"Size","0").c_str(),TotalSize);
-
-      // see if there is a hash to verify
+
+      HashStringList const ExpectedHashes = Owner->GetExpectedHashes();
+      // see if we got hashes to verify
       HashStringList ReceivedHashes;
       for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
       {
@@ -340,6 +329,18 @@
          if (hashsum.empty() == false)
            ReceivedHashes.push_back(HashString(*type, hashsum));
       }
+      // not all methods always send Hashes our way
+      if (ExpectedHashes.usable() == true && ReceivedHashes.usable() == false)
+      {
+         std::string const filename = LookupTag(Message, "Filename", Owner->DestFile.c_str());
+         if (filename.empty() == false && RealFileExists(filename))
+         {
+            Hashes calc(ExpectedHashes);
+            FileFd file(filename, FileFd::ReadOnly, FileFd::None);
+            calc.AddFD(file);
+            ReceivedHashes = calc.GetHashStringList();
+         }
+      }

       if(_config->FindB("Debug::pkgAcquire::Auth", false) == true)
       {
@@ -348,30 +349,66 @@ bool pkgAcquire::Worker::RunMessages()
         for (HashStringList::const_iterator hs = ReceivedHashes.begin(); hs != ReceivedHashes.end(); ++hs)
            std::clog <<  "\t- " << hs->toStr() << std::endl;
         std::clog << "ExpectedHash:" << endl;
-        HashStringList expectedHashes = Owner->HashSums();
-        for (HashStringList::const_iterator hs = expectedHashes.begin(); hs != expectedHashes.end(); ++hs)
+        for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs)
            std::clog <<  "\t- " << hs->toStr() << std::endl;
         std::clog << endl;
       }

-      Owner->Done(Message, ServerSize, ReceivedHashes, Config);
-      ItemDone();
-
-      // Log that we are done
-      if (Log != 0)
+      // decide if what we got is what we expected
+      bool consideredOkay = false;
+      bool const isIMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false) ||
+         StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false);
+      if (ExpectedHashes.usable())
       {
-        if (isHit)
+        if (ReceivedHashes.usable() == false)
         {
-           /* Hide 'hits' for local only sources - we also manage to
-              hide gets */
-           if (Config->LocalOnly == false)
-              Log->IMSHit(Desc);
-        }
+           /* IMS-Hits can't be checked here as we will have the uncompressed file,
              but the hashes for the compressed file. What we have was good though
              so all we have to ensure later is that we are not stalled.
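+
+              In short (illustrative summary of the checks below):
+               - expected and received hashes  -> compare the lists
+               - expected only, but an IMS hit -> accept it for now
+               - nothing expected              -> defer to HashesRequired()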
*/ + consideredOkay = isIMSHit; + } + else if (ReceivedHashes == ExpectedHashes) + consideredOkay = true; else - Log->Done(Desc); + consideredOkay = false; + + } + else if (Owner->HashesRequired() == true) + consideredOkay = false; + else + consideredOkay = true; + + if (consideredOkay == true) + { + Owner->Done(Message, ReceivedHashes, Config); + ItemDone(); + + // Log that we are done + if (Log != 0) + { + if (isIMSHit) + { + /* Hide 'hits' for local only sources - we also manage to + hide gets */ + if (Config->LocalOnly == false) + Log->IMSHit(Desc); + } + else + Log->Done(Desc); + } + } + else + { + Owner->Status = pkgAcquire::Item::StatAuthError; + Owner->Failed(Message,Config); + ItemDone(); + + if (Log != 0) + Log->Fail(Desc); } break; - } - + } + // 400 URI Failure case 400: { @@ -408,18 +445,18 @@ bool pkgAcquire::Worker::RunMessages() Log->Fail(Desc); break; - } - + } + // 401 General Failure case 401: _error->Error("Method %s General failure: %s",Access.c_str(),LookupTag(Message,"Message").c_str()); break; - + // 403 Media Change case 403: - MediaChange(Message); + MediaChange(Message); break; - } + } } return true; } @@ -432,7 +469,7 @@ bool pkgAcquire::Worker::Capabilities(string Message) { if (Config == 0) return true; - + Config->Version = LookupTag(Message,"Version"); Config->SingleInstance = StringToBool(LookupTag(Message,"Single-Instance"),false); Config->Pipeline = StringToBool(LookupTag(Message,"Pipeline"),false); @@ -447,13 +484,13 @@ bool pkgAcquire::Worker::Capabilities(string Message) clog << "Configured access method " << Config->Access << endl; clog << "Version:" << Config->Version << " SingleInstance:" << Config->SingleInstance << - " Pipeline:" << Config->Pipeline << - " SendConfig:" << Config->SendConfig << - " LocalOnly: " << Config->LocalOnly << - " NeedsCleanup: " << Config->NeedsCleanup << + " Pipeline:" << Config->Pipeline << + " SendConfig:" << Config->SendConfig << + " LocalOnly: " << Config->LocalOnly << + " NeedsCleanup: " << Config->NeedsCleanup << " Removable: " << Config->Removable << endl; } - + return true; } /*}}}*/ @@ -463,10 +500,10 @@ bool pkgAcquire::Worker::Capabilities(string Message) bool pkgAcquire::Worker::MediaChange(string Message) { int status_fd = _config->FindI("APT::Status-Fd",-1); - if(status_fd > 0) + if(status_fd > 0) { string Media = LookupTag(Message,"Media"); - string Drive = LookupTag(Message,"Drive"); + string Drive = LookupTag(Message,"Drive"); ostringstream msg,status; ioprintf(msg,_("Please insert the disc labeled: " "'%s' " @@ -536,12 +573,12 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item) { if (OutFd == -1) return false; - + string Message = "600 URI Acquire\n"; Message.reserve(300); Message += "URI: " + Item->URI; Message += "\nFilename: " + Item->Owner->DestFile; - HashStringList const hsl = Item->Owner->HashSums(); + HashStringList const hsl = Item->Owner->GetExpectedHashes(); for (HashStringList::const_iterator hs = hsl.begin(); hs != hsl.end(); ++hs) Message += "\nExpected-" + hs->HashType() + ": " + hs->HashValue(); if(Item->Owner->FileSize > 0) @@ -564,7 +601,7 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item) clog << " -> " << Access << ':' << QuoteString(Message,"\n") << endl; OutQueue += Message; OutReady = true; - + return true; } /*}}}*/ @@ -586,7 +623,7 @@ bool pkgAcquire::Worker::OutFdReady() OutQueue.erase(0,Res); if (OutQueue.empty() == true) OutReady = false; - + return true; } /*}}}*/ @@ -608,7 +645,7 @@ bool pkgAcquire::Worker::InFdReady() bool 
pkgAcquire::Worker::MethodFailure()
 {
    _error->Error("Method %s has died unexpectedly!",Access.c_str());
-
+
    // do not reap the child here to show meaningfull error to the user
    ExecWait(Process,Access.c_str(),false);
    Process = -1;
@@ -620,26 +657,22 @@ bool pkgAcquire::Worker::MethodFailure()
    InReady = false;
    OutQueue = string();
    MessageQueue.erase(MessageQueue.begin(),MessageQueue.end());
-
+
    return false;
 }
 									/*}}}*/
-// Worker::Pulse - Called periodically 					/*{{{*/
+// Worker::Pulse - Called periodically					/*{{{*/
 // ---------------------------------------------------------------------
 /* */
 void pkgAcquire::Worker::Pulse()
 {
    if (CurrentItem == 0)
       return;
-
+
    struct stat Buf;
    if (stat(CurrentItem->Owner->DestFile.c_str(),&Buf) != 0)
       return;
    CurrentSize = Buf.st_size;
-
-   // Hmm? Should not happen...
-   if (CurrentSize > TotalSize && TotalSize != 0)
-      TotalSize = CurrentSize;
 }
 									/*}}}*/
 // Worker::ItemDone - Called when the current item is finished	/*{{{*/
diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc
index 0fa443b4a..11a7e479b 100644
--- a/apt-pkg/contrib/hashes.cc
+++ b/apt-pkg/contrib/hashes.cc
@@ -23,6 +23,7 @@
 #include
 #include
 #include
+#include
 #include
 #include
 									/*}}}*/
@@ -178,6 +179,15 @@ HashString const * HashStringList::find(char const * const type) const /*{{{*/
    return NULL;
 }
 									/*}}}*/
+unsigned long long HashStringList::FileSize() const			/*{{{*/
+{
+   HashString const * const hsf = find("Checksum-FileSize");
+   if (hsf == NULL)
+      return 0;
+   std::string const hv = hsf->HashValue();
+   return strtoull(hv.c_str(), NULL, 10);
+}
+									/*}}}*/
 bool HashStringList::supported(char const * const type)		/*{{{*/
 {
    for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t)
diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h
index ac13c8ace..176ce4faa 100644
--- a/apt-pkg/contrib/hashes.h
+++ b/apt-pkg/contrib/hashes.h
@@ -87,6 +87,15 @@ class HashStringList
     */
    HashString const * find(char const * const type) const;
    HashString const * find(std::string const &type) const { return find(type.c_str()); }
+
+   /** finds the filesize hash and returns it as a number
+    *
+    * @return beware: if the size isn't known we return \b 0 here,
+    * just like we would do for an empty file. If that is a problem
+    * for you, you have to get the size manually out of the list.
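+    *
+    * A minimal illustration:
+    * \code
+    * HashStringList hsl;
+    * hsl.push_back(HashString("Checksum-FileSize", "42"));
+    * // hsl.FileSize() == 42, while a list without the size yields 0
+    * \endcode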
+ */ + unsigned long long FileSize() const; + /** check if the given hash type is supported * * @param type to check diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc index d672b4fd8..185248619 100644 --- a/apt-pkg/deb/debindexfile.cc +++ b/apt-pkg/deb/debindexfile.cc @@ -742,13 +742,13 @@ bool debDebPkgFileIndex::Merge(pkgCacheGenerator& Gen, OpProgress* Prog) const // and give it to the list parser debDebFileParser Parser(DebControl, DebFile); - if(Gen.SelectFile(DebFile, "local", *this) == false) + if(Gen.SelectFile(DebFile, "local", *this, pkgCache::Flag::LocalSource) == false) return _error->Error("Problem with SelectFile %s", DebFile.c_str()); pkgCache::PkgFileIterator File = Gen.GetCurFile(); File->Size = DebControl->Size(); File->mtime = DebControl->ModificationTime(); - + if (Gen.MergeList(Parser) == false) return _error->Error("Problem with MergeLister for %s", DebFile.c_str()); diff --git a/apt-pkg/deb/debmetaindex.cc b/apt-pkg/deb/debmetaindex.cc index aa2db8149..eb5e78e3b 100644 --- a/apt-pkg/deb/debmetaindex.cc +++ b/apt-pkg/deb/debmetaindex.cc @@ -192,11 +192,13 @@ vector * debReleaseIndex::ComputeIndexTargets() const { vector const SectionEntries = src->second; for (vector::const_iterator I = SectionEntries.begin(); I != SectionEntries.end(); ++I) { - IndexTarget * Target = new IndexTarget(); - Target->ShortDesc = "Sources"; - Target->MetaKey = SourceIndexURISuffix(Target->ShortDesc.c_str(), (*I)->Section); - Target->URI = SourceIndexURI(Target->ShortDesc.c_str(), (*I)->Section); - Target->Description = Info (Target->ShortDesc.c_str(), (*I)->Section); + char const * const ShortDesc = "Sources"; + IndexTarget * const Target = new IndexTarget( + SourceIndexURISuffix(ShortDesc, (*I)->Section), + ShortDesc, + Info(ShortDesc, (*I)->Section), + SourceIndexURI(ShortDesc, (*I)->Section) + ); IndexTargets->push_back (Target); } } @@ -212,11 +214,13 @@ vector * debReleaseIndex::ComputeIndexTargets() const { continue; for (vector ::const_iterator I = a->second.begin(); I != a->second.end(); ++I) { - IndexTarget * Target = new IndexTarget(); - Target->ShortDesc = "Packages"; - Target->MetaKey = IndexURISuffix(Target->ShortDesc.c_str(), (*I)->Section, a->first); - Target->URI = IndexURI(Target->ShortDesc.c_str(), (*I)->Section, a->first); - Target->Description = Info (Target->ShortDesc.c_str(), (*I)->Section, a->first); + char const * const ShortDesc = "Packages"; + IndexTarget * const Target = new IndexTarget( + IndexURISuffix(ShortDesc, (*I)->Section, a->first), + ShortDesc, + Info (ShortDesc, (*I)->Section, a->first), + IndexURI(ShortDesc, (*I)->Section, a->first) + ); IndexTargets->push_back (Target); sections.insert((*I)->Section); } @@ -235,11 +239,13 @@ vector * debReleaseIndex::ComputeIndexTargets() const { s != sections.end(); ++s) { for (std::vector::const_iterator l = lang.begin(); l != lang.end(); ++l) { - IndexTarget * Target = new OptionalIndexTarget(); - Target->ShortDesc = "Translation-" + *l; - Target->MetaKey = TranslationIndexURISuffix(l->c_str(), *s); - Target->URI = TranslationIndexURI(l->c_str(), *s); - Target->Description = Info (Target->ShortDesc.c_str(), *s); + std::string const ShortDesc = "Translation-" + *l; + IndexTarget * const Target = new OptionalIndexTarget( + TranslationIndexURISuffix(l->c_str(), *s), + ShortDesc, + Info (ShortDesc.c_str(), *s), + TranslationIndexURI(l->c_str(), *s) + ); IndexTargets->push_back(Target); } } @@ -249,8 +255,6 @@ vector * debReleaseIndex::ComputeIndexTargets() const { /*}}}*/ bool 
debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool const &GetAll) const { - bool const tryInRelease = _config->FindB("Acquire::TryInRelease", true); - indexRecords * const iR = new indexRecords(Dist); if (Trusted == ALWAYS_TRUSTED) iR->SetTrusted(true); @@ -258,37 +262,17 @@ bool debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool const &GetAll) const iR->SetTrusted(false); // special case for --print-uris - if (GetAll) { - vector *targets = ComputeIndexTargets(); - for (vector ::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) { - new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description, - (*Target)->ShortDesc, HashStringList()); - } - delete targets; - - // this is normally created in pkgAcqMetaSig, but if we run - // in --print-uris mode, we add it here - if (tryInRelease == false) - new pkgAcqMetaIndex(Owner, NULL, - MetaIndexURI("Release"), - MetaIndexInfo("Release"), "Release", - MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg", - ComputeIndexTargets(), - iR); + vector const * const targets = ComputeIndexTargets(); +#define APT_TARGET(X) IndexTarget("", X, MetaIndexInfo(X), MetaIndexURI(X)) + pkgAcqMetaBase * const TransactionManager = new pkgAcqMetaClearSig(Owner, + APT_TARGET("InRelease"), APT_TARGET("Release"), APT_TARGET("Release.gpg"), + targets, iR); +#undef APT_TARGET + if (GetAll) + { + for (vector ::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) + new pkgAcqIndex(Owner, TransactionManager, *Target); } - if (tryInRelease == true) - new pkgAcqMetaClearSig(Owner, - MetaIndexURI("InRelease"), MetaIndexInfo("InRelease"), "InRelease", - MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release", - MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg", - ComputeIndexTargets(), - iR); - else - new pkgAcqMetaIndex(Owner, NULL, - MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release", - MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg", - ComputeIndexTargets(), - iR); return true; } diff --git a/apt-pkg/indexrecords.cc b/apt-pkg/indexrecords.cc index de2617833..c26868cac 100644 --- a/apt-pkg/indexrecords.cc +++ b/apt-pkg/indexrecords.cc @@ -73,7 +73,7 @@ APT_PURE indexRecords::checkSum *indexRecords::Lookup(const string MetaKey) APT_PURE bool indexRecords::Exists(string const &MetaKey) const { - return Entries.count(MetaKey) == 1; + return Entries.find(MetaKey) != Entries.end(); } bool indexRecords::Load(const string Filename) /*{{{*/ diff --git a/apt-pkg/pkgcache.h b/apt-pkg/pkgcache.h index 2ba23c5c0..b4d56611a 100644 --- a/apt-pkg/pkgcache.h +++ b/apt-pkg/pkgcache.h @@ -190,7 +190,12 @@ class pkgCache /*{{{*/ struct Flag { enum PkgFlags {Auto=(1<<0),Essential=(1<<3),Important=(1<<4)}; - enum PkgFFlags {NotSource=(1<<0),NotAutomatic=(1<<1),ButAutomaticUpgrades=(1<<2)}; + enum PkgFFlags { + NotSource=(1<<0), /*!< packages can't be fetched from here, e.g. 
dpkg/status file */ + NotAutomatic=(1<<1), /*!< archive has a default pin of 1 */ + ButAutomaticUpgrades=(1<<2), /*!< (together with the previous) archive has a default pin of 100 */ + LocalSource=(1<<3), /*!< local sources can't and will not be verified by hashes */ + }; }; protected: diff --git a/methods/file.cc b/methods/file.cc index 043ab04b8..353e54bd5 100644 --- a/methods/file.cc +++ b/methods/file.cc @@ -57,7 +57,11 @@ bool FileMethod::Fetch(FetchItem *Itm) Res.LastModified = Buf.st_mtime; Res.IMSHit = false; if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0) - Res.IMSHit = true; + { + unsigned long long const filesize = Itm->ExpectedHashes.FileSize(); + if (filesize != 0 && filesize == Res.Size) + Res.IMSHit = true; + } } // See if the uncompressed file exists and reuse it diff --git a/test/integration/test-apt-get-source-authenticated b/test/integration/test-apt-get-source-authenticated index 685bc566b..da63f7cb3 100755 --- a/test/integration/test-apt-get-source-authenticated +++ b/test/integration/test-apt-get-source-authenticated @@ -1,7 +1,7 @@ #!/bin/sh # # Regression test for debian bug #749795. Ensure that we fail with -# a error if apt-get source foo will download a source that comes +# an error if apt-get source foo will download a source that comes # from a unauthenticated repository # set -e diff --git a/test/integration/test-apt-sources-deb822 b/test/integration/test-apt-sources-deb822 index d8b2334ad..51fe7bcfe 100755 --- a/test/integration/test-apt-sources-deb822 +++ b/test/integration/test-apt-sources-deb822 @@ -23,46 +23,45 @@ Description: summay msgtest 'Test sources.list' 'old style' echo "deb http://ftp.debian.org/debian stable main" > $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris msgtest 'Test sources.list' 'simple deb822' echo "$BASE" > $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris - +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris msgtest 'Test deb822 with' 'two entries' # Two entries echo "$BASE" > 
$SOURCES echo "" >> $SOURCES echo "$BASE" | sed s/stable/unstable/ >> $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 'http://ftp.debian.org/debian/dists/unstable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_unstable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 " aptget update --print-uris # two suite entries msgtest 'Test deb822 with' 'two Suite entries' echo "$BASE" | sed -e "s/stable/stable unstable/" > $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 'http://ftp.debian.org/debian/dists/unstable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_unstable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/unstable/InRelease' ftp.debian.org_debian_dists_unstable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/unstable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_unstable_main_i18n_Translation-en 0 " aptget update --print-uris msgtest 'Test deb822' 'architecture option' echo "$BASE" > $SOURCES echo "Architectures: amd64 armel" >> $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/binary-amd64/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-amd64_Packages 0 +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/binary-amd64/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-amd64_Packages 0 'http://ftp.debian.org/debian/dists/stable/main/binary-armel/Packages.bz2' 
ftp.debian.org_debian_dists_stable_main_binary-armel_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris msgtest 'Test old-style sources.list file which has' 'malformed dist' @@ -85,20 +84,20 @@ testempty aptget update --print-uris # multiple URIs msgtest 'Test deb822 sources.list file which has' 'Multiple URIs work' echo "$BASE" | sed -e 's#http://ftp.debian.org/debian#http://ftp.debian.org/debian http://ftp.de.debian.org/debian#' > $SOURCES -testequal --nomsg "'http://ftp.de.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.de.debian.org_debian_dists_stable_main_binary-i386_Packages 0 +testequal --nomsg "'http://ftp.de.debian.org/debian/dists/stable/InRelease' ftp.de.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.de.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.de.debian.org_debian_dists_stable_main_binary-i386_Packages 0 'http://ftp.de.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.de.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.de.debian.org/debian/dists/stable/InRelease' ftp.de.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris # multiple Type in one field msgtest 'Test deb822 sources.list file which has' 'Multiple Types work' echo "$BASE" | sed -e 's#Types: deb#Types: deb deb-src#' > $SOURCES -testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/main/source/Sources.bz2' ftp.debian.org_debian_dists_stable_main_source_Sources 0 +testequal --nomsg "'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 +'http://ftp.debian.org/debian/dists/stable/main/source/Sources.bz2' ftp.debian.org_debian_dists_stable_main_source_Sources 0 'http://ftp.debian.org/debian/dists/stable/main/binary-i386/Packages.bz2' ftp.debian.org_debian_dists_stable_main_binary-i386_Packages 0 -'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 -'http://ftp.debian.org/debian/dists/stable/InRelease' ftp.debian.org_debian_dists_stable_InRelease 0 " aptget update --print-uris +'http://ftp.debian.org/debian/dists/stable/main/i18n/Translation-en.bz2' ftp.debian.org_debian_dists_stable_main_i18n_Translation-en 0 " aptget update --print-uris # a Suite msgtest 'Test deb822 sources.list file which has' 'a exact path and no sections' @@ -107,6 +106,6 @@ Types: deb URIs: http://emacs.naquadah.org Suites: stable/ EOF -testequal --nomsg 
"'http://emacs.naquadah.org/stable/Packages.bz2' emacs.naquadah.org_stable_Packages 0 -'http://emacs.naquadah.org/stable/en.bz2' emacs.naquadah.org_stable_en 0 -'http://emacs.naquadah.org/stable/InRelease' emacs.naquadah.org_stable_InRelease 0 " aptget update --print-uris +testequal --nomsg "'http://emacs.naquadah.org/stable/InRelease' emacs.naquadah.org_stable_InRelease 0 +'http://emacs.naquadah.org/stable/Packages.bz2' emacs.naquadah.org_stable_Packages 0 +'http://emacs.naquadah.org/stable/en.bz2' emacs.naquadah.org_stable_en 0 " aptget update --print-uris diff --git a/test/integration/test-apt-update-expected-size b/test/integration/test-apt-update-expected-size index 55a5da848..55bba8188 100755 --- a/test/integration/test-apt-update-expected-size +++ b/test/integration/test-apt-update-expected-size @@ -35,7 +35,7 @@ test_packagestoobig() { done NEW_SIZE="$(stat --printf=%s aptarchive/dists/unstable/main/binary-i386/Packages)" testfailuremsg "W: Failed to fetch ${1}/dists/unstable/main/binary-i386/Packages Writing more data than expected ($NEW_SIZE > $SIZE) -E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -o Debug::pkgAcquire::Worker=0 -o Debug::Acquire::Transaction=0 +E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::Transaction=0 } methodtest() { diff --git a/test/integration/test-apt-update-file b/test/integration/test-apt-update-file index 1ecf9a38a..665f94fa5 100755 --- a/test/integration/test-apt-update-file +++ b/test/integration/test-apt-update-file @@ -22,6 +22,10 @@ addtrap 'prefix' 'chmod 750 aptarchive/dists/unstable/main/binary-amd64;' chmod 550 aptarchive/dists/unstable/main/binary-amd64 testsuccess aptget update + +# the release files aren't an IMS-hit, but the indexes are +redatereleasefiles '+1 hour' + testsuccess aptget update -o Debug::pkgAcquire::Auth=1 cp -a rootdir/tmp/testsuccess.output rootdir/tmp/update.output diff --git a/test/integration/test-apt-update-nofallback b/test/integration/test-apt-update-nofallback index db4430ea3..f132bcf8e 100755 --- a/test/integration/test-apt-update-nofallback +++ b/test/integration/test-apt-update-nofallback @@ -28,6 +28,7 @@ Description: an autogenerated evil package EOF # avoid ims hit touch -d '+1hour' aptarchive/dists/unstable/main/binary-i386/Packages + compressfile aptarchive/dists/unstable/main/binary-i386/Packages } assert_update_is_refused_and_last_good_state_used() @@ -87,16 +88,16 @@ test_from_inrelease_to_unsigned_with_override() { # setup archive with InRelease file setupaptarchive_with_lists_clean - # FIXME: is not what the server reported 4104 4106 - testsuccess aptget update #-o Debug::pkgAcquire::Worker=1 + testsuccess aptget update # simulate moving to a unsigned but otherwise valid repo simulate_mitm_and_inject_evil_package - generatereleasefiles + generatereleasefiles '+2 hours' + find $APTARCHIVE -name '*Packages*' -exec touch -d '+2 hours' {} \; # and ensure we can update to it (with enough force) testwarning aptget update --allow-insecure-repositories \ - -o Acquire::AllowDowngradeToInsecureRepositories=1 + -o Acquire::AllowDowngradeToInsecureRepositories=1 -o Debug::pkgAcquire::Worker=1 -o Debug::pkgAcquire::Auth=1 # but that the individual packages are still considered untrusted testfailureequal "WARNING: The following packages cannot be authenticated! 
evil @@ -167,7 +168,7 @@ test_inrelease_to_invalid_inrelease() listcurrentlistsdirectory > lists.before # now remove InRelease and subvert Release do no longer verify - sed -i 's/Codename.*/Codename: evil!'/ $APTARCHIVE/dists/unstable/InRelease + sed -i 's/^Codename:.*/Codename: evil!/' $APTARCHIVE/dists/unstable/InRelease inject_evil_package testwarningequal "W: An error occurred during the signature verification. The repository is not updated and the previous index files will be used. GPG error: file: unstable InRelease: The following signatures were invalid: BADSIG 5A90D141DBAC8DAE Joe Sixpack (APT Testcases Dummy) diff --git a/test/integration/test-apt-update-not-modified b/test/integration/test-apt-update-not-modified index bac33d531..a490f00de 100755 --- a/test/integration/test-apt-update-not-modified +++ b/test/integration/test-apt-update-not-modified @@ -43,7 +43,9 @@ Version: 1 EOF compressfile aptarchive/dists/unstable/main/binary-amd64/Packages testfailureequal "Hit $1 unstable InRelease -Get:1 $1 unstable/main amd64 Packages [$(stat -c '%s' 'aptarchive/dists.good/unstable/main/binary-amd64/Packages.gz') B] +Get:1 $1 unstable/main amd64 Packages [$(stat -c '%s' 'aptarchive/dists/unstable/main/binary-amd64/Packages.gz') B] +Err $1 unstable/main amd64 Packages + Hash Sum mismatch W: Failed to fetch $1/dists/unstable/main/binary-amd64/Packages.gz Hash Sum mismatch E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update @@ -87,8 +89,32 @@ Hit $1 unstable Release Reading package lists..." aptget update testfileequal 'listsdir-without-amd64.lst' "$(listcurrentlistsdirectory)" - # readd arch so its downloaded again + # readd arch so its downloaded again… configarchitecture 'amd64' 'i386' + # … but oh noes, hashsum mismatch! + find aptarchive/dists/unstable/main/binary-amd64/ -type f -delete + cat >> aptarchive/dists/unstable/main/binary-amd64/Packages < rootdir/etc/apt/apt.conf.d/00nolanguages -testsuccess aptget update +testsuccess aptget update -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 listcurrentlistsdirectory > lists.before # insert new version @@ -26,7 +26,7 @@ mkdir aptarchive/dists/unstable/main/binary-i386/saved cp -p aptarchive/dists/unstable/main/binary-i386/Packages* \ aptarchive/dists/unstable/main/binary-i386/saved insertpackage 'unstable' 'foo' 'all' '2.0' - +touch -d '+1 hour' aptarchive/dists/unstable/main/binary-i386/Packages compressfile aptarchive/dists/unstable/main/binary-i386/Packages # ensure that we do not get a I-M-S hit for the Release file @@ -39,7 +39,6 @@ cp -p aptarchive/dists/unstable/main/binary-i386/saved/Packages* \ aptarchive/dists/unstable/main/binary-i386/ # ensure this raises an error -testfailureequal "W: Failed to fetch http://localhost:8080/dists/unstable/main/binary-i386/Packages Hash Sum mismatch - -E: Some index files failed to download. They have been ignored, or old ones used instead." aptget update -qq +testfailuremsg "W: Failed to fetch copy:$(readlink -f ./rootdir)/var/lib/apt/lists/localhost:8080_dists_unstable_main_binary-i386_Packages Hash Sum mismatch +E: Some index files failed to download. They have been ignored, or old ones used instead." 
aptget update -o Debug::pkgAcquire::Worker=1 -o Debug::Acquire::http=1 testfileequal lists.before "$(listcurrentlistsdirectory)" diff --git a/test/integration/test-bug-595691-empty-and-broken-archive-files b/test/integration/test-bug-595691-empty-and-broken-archive-files index bca07268c..486b8ba02 100755 --- a/test/integration/test-bug-595691-empty-and-broken-archive-files +++ b/test/integration/test-bug-595691-empty-and-broken-archive-files @@ -12,7 +12,7 @@ setupflataptarchive testaptgetupdate() { rm -rf rootdir/var/lib/apt - aptget update 2>> testaptgetupdate.diff >> testaptgetupdate.diff || true + aptget update >testaptgetupdate.diff 2>&1 || true sed -i -e '/Ign /,+1d' -e '/Release/ d' -e 's#Get:[0-9]\+ #Get: #' -e 's#\[[0-9]* [kMGTPY]*B\]#\[\]#' testaptgetupdate.diff GIVEN="$1" shift diff --git a/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum b/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum index ec74a750b..555d8fcaa 100755 --- a/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum +++ b/test/integration/test-ubuntu-bug-1098738-apt-get-source-md5sum @@ -176,7 +176,11 @@ testmismatch() { Building dependency tree... Need to get 6 B of source archives. Get:1 http://localhost:8080/ $1 1.0 (dsc) [3 B] +Err http://localhost:8080/ $1 1.0 (dsc) + Hash Sum mismatch Get:2 http://localhost:8080/ $1 1.0 (tar) [3 B] +Err http://localhost:8080/ $1 1.0 (tar) + Hash Sum mismatch E: Failed to fetch http://localhost:8080/${1}_1.0.dsc Hash Sum mismatch E: Failed to fetch http://localhost:8080/${1}_1.0.tar.gz Hash Sum mismatch @@ -238,6 +242,8 @@ Building dependency tree... Need to get 6 B of source archives. Get:1 http://localhost:8080/ pkg-mixed-sha1-bad 1.0 (tar) [3 B] Get:2 http://localhost:8080/ pkg-mixed-sha1-bad 1.0 (dsc) [3 B] +Err http://localhost:8080/ pkg-mixed-sha1-bad 1.0 (dsc) + Hash Sum mismatch E: Failed to fetch http://localhost:8080/pkg-mixed-sha1-bad_1.0.dsc Hash Sum mismatch E: Failed to fetch some archives.' aptget source -d pkg-mixed-sha1-bad @@ -247,6 +253,8 @@ testfailureequal 'Reading package lists... Building dependency tree... Need to get 6 B of source archives. 
Get:1 http://localhost:8080/ pkg-mixed-sha2-bad 1.0 (tar) [3 B] +Err http://localhost:8080/ pkg-mixed-sha2-bad 1.0 (tar) + Hash Sum mismatch Get:2 http://localhost:8080/ pkg-mixed-sha2-bad 1.0 (dsc) [3 B] E: Failed to fetch http://localhost:8080/pkg-mixed-sha2-bad_1.0.tar.gz Hash Sum mismatch diff --git a/test/libapt/acqprogress_test.cc b/test/libapt/acqprogress_test.cc index 288e05aca..c634733d4 100644 --- a/test/libapt/acqprogress_test.cc +++ b/test/libapt/acqprogress_test.cc @@ -1,4 +1,5 @@ #include +#include #include #include #include @@ -10,9 +11,10 @@ class TestItem: public pkgAcquire::Item { public: - TestItem(pkgAcquire * const Acq) : pkgAcquire::Item(Acq, "", NULL) {} + TestItem(pkgAcquire * const Acq) : pkgAcquire::Item(Acq) {} - virtual std::string DescURI() { return ""; } + virtual std::string DescURI() const { return ""; } + virtual HashStringList GetExpectedHashes() const { return HashStringList(); } }; diff --git a/test/libapt/hashsums_test.cc b/test/libapt/hashsums_test.cc index edcd8a11a..63c63ecd3 100644 --- a/test/libapt/hashsums_test.cc +++ b/test/libapt/hashsums_test.cc @@ -306,6 +306,7 @@ TEST(HashSumsTest, HashStringList) EXPECT_EQ(NULL, list.find(NULL)); EXPECT_EQ(NULL, list.find("")); EXPECT_EQ(NULL, list.find("MD5Sum")); + EXPECT_EQ(0, list.FileSize()); // empty lists aren't equal HashStringList list2; @@ -316,6 +317,8 @@ TEST(HashSumsTest, HashStringList) list.push_back(HashString("Checksum-FileSize", "29")); EXPECT_FALSE(list.empty()); EXPECT_FALSE(list.usable()); + EXPECT_EQ(1, list.size()); + EXPECT_EQ(29, list.FileSize()); Hashes hashes; hashes.Add("The quick brown fox jumps over the lazy dog"); -- cgit v1.2.3-70-g09d2 From 3679515479136179e0d95325a6559fcc6d0af7f8 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sat, 6 Jun 2015 19:16:45 +0200 Subject: check patch hashes in rred worker instead of in the handler MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit rred is responsible for unpacking and reading the patch files in one go, but we currently only have hashes for the uncompressed patch files, so the handler read the entire patch file before dispatching it to the worker which would read it again – both with an implicit uncompress. Worse, while the workers operate in parallel the handler is the central orchestration unit, so having it busy with work means the workers do (potentially) nothing. This means rred is working with 'untrusted' data, which is bad. Yet, having the unpack in the handler meant that the untrusted uncompress was done as root, which isn't better either. Now, we have it at least contained in a binary which we can harden a bit better. In the long run, though, we want hashes for the compressed patch files to be safe.
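As a minimal illustration of the single-pass idea this commit moves into the worker (a self-contained sketch, not APT's actual Hashes/HashStringList machinery, which the patch below wires up): hash each line at the moment it is parsed, so the patch is read exactly once. FNV-1a and the command-line interface are stand-ins chosen for the sketch only.

    // sketch: single-pass "hash while parsing" over a patch file;
    // FNV-1a stands in for APT's Hashes class, and the expected digest
    // would come from the Patch-*-Hash fields introduced below.
    #include <cstdint>
    #include <fstream>
    #include <iostream>
    #include <string>

    int main(int argc, char **argv)
    {
       if (argc != 3)
       {
          std::cerr << "usage: " << argv[0] << " patchfile expected-fnv1a\n";
          return 2;
       }
       std::ifstream patch(argv[1]);
       uint64_t hash = 14695981039346656037ULL;   // FNV-1a offset basis
       std::string line;
       while (std::getline(patch, line))
       {
          line += '\n';
          for (unsigned char c : line)
          {                                       // hash the line ...
             hash ^= c;
             hash *= 1099511628211ULL;            // ... with the FNV-1a prime
          }
          // ... and parse the very same buffer right here (ed command or
          // data line) instead of hashing the file in a separate pass.
       }
       if (std::to_string(hash) != argv[2])       // mismatch: reject patch
       {
          std::cerr << "patch doesn't have the expected hashsum\n";
          return 1;
       }
       return 0;
    }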
--- apt-pkg/acquire-item.cc | 95 +++++++++++++++++++++++---------------- apt-pkg/acquire-item.h | 2 + apt-pkg/acquire-method.cc | 14 +++--- apt-pkg/acquire-method.h | 5 ++- methods/rred.cc | 62 ++++++++++++++++++++----- test/integration/test-pdiff-usage | 2 + 6 files changed, 121 insertions(+), 59 deletions(-) (limited to 'methods') diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index ec6ec6e84..7b69ee993 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -95,6 +95,19 @@ static std::string GetCompressedFileName(std::string const &URI, std::string con return Name; } /*}}}*/ +static std::string GetMergeDiffsPatchFileName(std::string const &Final, std::string const &Patch)/*{{{*/ +{ + // rred expects the patch as $FinalFile.ed.$patchname.gz + return Final + ".ed." + Patch + ".gz"; +} + /*}}}*/ +static std::string GetDiffsPatchFileName(std::string const &Final) /*{{{*/ +{ + // rred expects the patch as $FinalFile.ed + return Final + ".ed"; +} + /*}}}*/ + static bool AllowInsecureRepositories(indexRecords const * const MetaIndexParser, pkgAcqMetaBase * const TransactionManager, pkgAcquire::Item * const I) /*{{{*/ { if(MetaIndexParser->IsAlwaysTrusted() || _config->FindB("Acquire::AllowInsecureRepositories") == true) @@ -1860,6 +1873,9 @@ void pkgAcqIndexDiffs::Failed(string const &Message,pkgAcquire::MethodConfig con << "Falling back to normal index file acquire" << std::endl; DestFile = GetPartialFileNameFromURI(Target->URI); RenameOnError(PDiffError); + std::string const patchname = GetDiffsPatchFileName(DestFile); + if (RealFileExists(patchname)) + rename(patchname.c_str(), std::string(patchname + ".FAILED").c_str()); new pkgAcqIndex(Owner, TransactionManager, Target); Finish(); } @@ -1968,28 +1984,13 @@ void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, Item::Done(Message, Hashes, Cnf); - // FIXME: verify this download too before feeding it to rred std::string const FinalFile = GetPartialFileNameFromURI(Target->URI); + std::string const PatchFile = GetDiffsPatchFileName(FinalFile); // success in downloading a diff, enter ApplyDiff state if(State == StateFetchDiff) { - FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); - class Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - - if (fd.Size() != available_patches[0].patch_size || - available_patches[0].patch_hashes != LocalHashes) - { - // patchfiles are dated, so bad indicates a bad download, so kill it - unlink(DestFile.c_str()); - Failed("Patch has Size/Hashsum mismatch", NULL); - return; - } - - // rred excepts the patch as $FinalFile.ed - Rename(DestFile,FinalFile+".ed"); + Rename(DestFile, PatchFile); if(Debug) std::clog << "Sending to rred method: " << FinalFile << std::endl; @@ -2000,18 +2001,17 @@ void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, QueueURI(Desc); SetActiveSubprocess("rred"); return; - } - + } // success in download/apply a diff, queue next (if needed) if(State == StateApplyDiff) { // remove the just applied patch available_patches.erase(available_patches.begin()); - unlink((FinalFile + ".ed").c_str()); + unlink(PatchFile.c_str()); // move into place - if(Debug) + if(Debug) { std::clog << "Moving patched file in place: " << std::endl << DestFile << " -> " << FinalFile << std::endl; @@ -2031,6 +2031,18 @@ void pkgAcqIndexDiffs::Done(string const &Message, HashStringList const &Hashes, } } /*}}}*/ +std::string pkgAcqIndexDiffs::Custom600Headers() 
const /*{{{*/ +{ + if(State != StateApplyDiff) + return pkgAcqBaseIndex::Custom600Headers(); + std::ostringstream patchhashes; + HashStringList const ExpectedHashes = available_patches[0].patch_hashes; + for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs) + patchhashes << "\nPatch-0-" << hs->HashType() << "-Hash: " << hs->HashValue(); + patchhashes << pkgAcqBaseIndex::Custom600Headers(); + return patchhashes.str(); +} + /*}}}*/ // AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/ pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire * const Owner, @@ -2079,6 +2091,9 @@ void pkgAcqIndexMergeDiffs::Failed(string const &Message,pkgAcquire::MethodConfi std::clog << "Falling back to normal index file acquire" << std::endl; DestFile = GetPartialFileNameFromURI(Target->URI); RenameOnError(PDiffError); + std::string const patchname = GetMergeDiffsPatchFileName(DestFile, patch.file); + if (RealFileExists(patchname)) + rename(patchname.c_str(), std::string(patchname + ".FAILED").c_str()); new pkgAcqIndex(Owner, TransactionManager, Target); } /*}}}*/ @@ -2090,26 +2105,10 @@ void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Ha Item::Done(Message, Hashes, Cnf); - // FIXME: verify download before feeding it to rred string const FinalFile = GetPartialFileNameFromURI(Target->URI); - if (State == StateFetchDiff) { - FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); - class Hashes LocalHashesCalc; - LocalHashesCalc.AddFD(fd); - HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - - if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes) - { - // patchfiles are dated, so bad indicates a bad download, so kill it - unlink(DestFile.c_str()); - Failed("Patch has Size/Hashsum mismatch", NULL); - return; - } - - // rred expects the patch as $FinalFile.ed.$patchname.gz - Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz"); + Rename(DestFile, GetMergeDiffsPatchFileName(FinalFile, patch.file)); // check if this is the last completed diff State = StateDoneDiff; @@ -2158,7 +2157,7 @@ void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Ha I != allPatches->end(); ++I) { std::string const PartialFile = GetPartialFileNameFromURI(Target->URI); - std::string patch = PartialFile + ".ed." 
+ (*I)->patch.file + ".gz"; + std::string const patch = GetMergeDiffsPatchFileName(PartialFile, (*I)->patch.file); unlink(patch.c_str()); } unlink(FinalFile.c_str()); @@ -2170,6 +2169,24 @@ void pkgAcqIndexMergeDiffs::Done(string const &Message, HashStringList const &Ha } } /*}}}*/ +std::string pkgAcqIndexMergeDiffs::Custom600Headers() const /*{{{*/ +{ + if(State != StateApplyDiff) + return pkgAcqBaseIndex::Custom600Headers(); + std::ostringstream patchhashes; + unsigned int seen_patches = 0; + for (std::vector::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + { + HashStringList const ExpectedHashes = (*I)->patch.patch_hashes; + for (HashStringList::const_iterator hs = ExpectedHashes.begin(); hs != ExpectedHashes.end(); ++hs) + patchhashes << "\nPatch-" << seen_patches << "-" << hs->HashType() << "-Hash: " << hs->HashValue(); + ++seen_patches; + } + patchhashes << pkgAcqBaseIndex::Custom600Headers(); + return patchhashes.str(); +} + /*}}}*/ // AcqIndex::AcqIndex - Constructor /*{{{*/ pkgAcqIndex::pkgAcqIndex(pkgAcquire * const Owner, diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 97d5ea1dd..f24af1aec 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -774,6 +774,7 @@ class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex virtual void Failed(std::string const &Message,pkgAcquire::MethodConfig const * const Cnf); virtual void Done(std::string const &Message, HashStringList const &Hashes, pkgAcquire::MethodConfig const * const Cnf); + virtual std::string Custom600Headers() const; virtual std::string DescURI() const {return Target->URI + "Index";}; virtual HashStringList GetExpectedHashes() const; virtual bool HashesRequired() const; @@ -886,6 +887,7 @@ class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex virtual void Done(std::string const &Message, HashStringList const &Hashes, pkgAcquire::MethodConfig const * const Cnf); + virtual std::string Custom600Headers() const; virtual std::string DescURI() const {return Target->URI + "IndexDiffs";}; virtual HashStringList GetExpectedHashes() const; virtual bool HashesRequired() const; diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc index b77096efd..a8fc75f8e 100644 --- a/apt-pkg/acquire-method.cc +++ b/apt-pkg/acquire-method.cc @@ -388,14 +388,14 @@ int pkgAcqMethod::Run(bool Single) *I = Tmp; if (QueueBack == 0) QueueBack = Tmp; - + // Notify that this item is to be fetched. 
- if (Fetch(Tmp) == false) + if (URIAcquire(Message, Tmp) == false) Fail(); - - break; - } - } + + break; + } + } } Exit(); @@ -403,8 +403,6 @@ int pkgAcqMethod::Run(bool Single) } /*}}}*/ // AcqMethod::PrintStatus - privately really send a log/status message /*{{{*/ -// --------------------------------------------------------------------- -/* */ void pkgAcqMethod::PrintStatus(char const * const header, const char* Format, va_list &args) const { diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h index 399454892..6480eb4b5 100644 --- a/apt-pkg/acquire-method.h +++ b/apt-pkg/acquire-method.h @@ -76,11 +76,12 @@ class pkgAcqMethod std::string FailReason; std::string UsedMirror; std::string IP; - + // Handlers for messages virtual bool Configuration(std::string Message); virtual bool Fetch(FetchItem * /*Item*/) {return true;}; - + virtual bool URIAcquire(std::string const &/*Message*/, FetchItem *Itm) { return Fetch(Itm); }; + // Outgoing messages void Fail(bool Transient = false); inline void Fail(const char *Why, bool Transient = false) {Fail(std::string(Why),Transient);}; diff --git a/methods/rred.cc b/methods/rred.cc index 554ac99b4..3da33c126 100644 --- a/methods/rred.cc +++ b/methods/rred.cc @@ -388,7 +388,7 @@ class Patch { public: - void read_diff(FileFd &f) + void read_diff(FileFd &f, Hashes * const h) { char buffer[BLOCK_SIZE]; bool cmdwanted = true; @@ -396,6 +396,8 @@ class Patch { Change ch(0); while(f.ReadLine(buffer, sizeof(buffer))) { + if (h != NULL) + h->Add(buffer); if (cmdwanted) { char *m, *c; size_t s, e; @@ -519,8 +521,29 @@ class RredMethod : public pkgAcqMethod { private: bool Debug; + struct PDiffFile { + std::string FileName; + HashStringList ExpectedHashes; + PDiffFile(std::string const &FileName, HashStringList const &ExpectedHashes) : + FileName(FileName), ExpectedHashes(ExpectedHashes) {} + }; + + HashStringList ReadExpectedHashesForPatch(unsigned int const patch, std::string const &Message) + { + HashStringList ExpectedHashes; + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + std::string tagname; + strprintf(tagname, "Patch-%d-%s-Hash", patch, *type); + std::string const hashsum = LookupTag(Message, tagname.c_str()); + if (hashsum.empty() == false) + ExpectedHashes.push_back(HashString(*type, hashsum)); + } + return ExpectedHashes; + } + protected: - virtual bool Fetch(FetchItem *Itm) { + virtual bool URIAcquire(std::string const &Message, FetchItem *Itm) { Debug = _config->FindB("Debug::pkgAcquire::RRed", false); URI Get = Itm->Uri; std::string Path = Get.Host + Get.Path; // rred:/path - no host @@ -534,11 +557,17 @@ class RredMethod : public pkgAcqMethod { } else URIStart(Res); - std::vector patchpaths; + std::vector patchfiles; Patch patch; if (FileExists(Path + ".ed") == true) - patchpaths.push_back(Path + ".ed"); + { + HashStringList const ExpectedHashes = ReadExpectedHashesForPatch(0, Message); + std::string const FileName = Path + ".ed"; + if (ExpectedHashes.usable() == false) + return _error->Error("No hashes found for uncompressed patch: %s", FileName.c_str()); + patchfiles.push_back(PDiffFile(FileName, ExpectedHashes)); + } else { _error->PushToStack(); @@ -546,18 +575,27 @@ class RredMethod : public pkgAcqMethod { _error->RevertToStack(); std::string const baseName = Path + ".ed."; + unsigned int seen_patches = 0; for (std::vector::const_iterator p = patches.begin(); p != patches.end(); ++p) + { if (p->compare(0, baseName.length(), baseName) == 0) - patchpaths.push_back(*p); + { + 
HashStringList const ExpectedHashes = ReadExpectedHashesForPatch(seen_patches, Message); + if (ExpectedHashes.usable() == false) + return _error->Error("No hashes found for uncompressed patch %d: %s", seen_patches, p->c_str()); + patchfiles.push_back(PDiffFile(*p, ExpectedHashes)); + ++seen_patches; + } + } } std::string patch_name; - for (std::vector::iterator I = patchpaths.begin(); - I != patchpaths.end(); + for (std::vector::iterator I = patchfiles.begin(); + I != patchfiles.end(); ++I) { - patch_name = *I; + patch_name = I->FileName; if (Debug == true) std::clog << "Patching " << Path << " with " << patch_name << std::endl; @@ -569,8 +607,12 @@ class RredMethod : public pkgAcqMethod { _error->DumpErrors(std::cerr); abort(); } - patch.read_diff(p); + Hashes patch_hash(I->ExpectedHashes); + patch.read_diff(p, &patch_hash); p.Close(); + HashStringList const hsl = patch_hash.GetHashStringList(); + if (hsl != I->ExpectedHashes) + return _error->Error("Patch %s doesn't have the expected hashsum", patch_name.c_str()); } if (Debug == true) @@ -643,7 +685,7 @@ int main(int argc, char **argv) _error->DumpErrors(std::cerr); exit(1); } - patch.read_diff(p); + patch.read_diff(p, NULL); } if (just_diff) { diff --git a/test/integration/test-pdiff-usage b/test/integration/test-pdiff-usage index 7d72a6944..73df61895 100755 --- a/test/integration/test-pdiff-usage +++ b/test/integration/test-pdiff-usage @@ -170,6 +170,8 @@ SHA256-Patches: generatereleasefiles '+1hour' signreleasefiles testsuccess aptget update "$@" + cp -f rootdir/tmp/testsuccess.output rootdir/tmp/aptgetupdate.output + testsuccess grep 'have the expected hashsum' rootdir/tmp/aptgetupdate.output testnopackage oldstuff testsuccessequal "$(cat ${PKGFILE}-new) " aptcache show apt newstuff -- cgit v1.2.3-70-g09d2 From 6d3e5bd8e08564c5eb12ecd869de5bd71e25f59d Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Sun, 7 Jun 2015 02:17:15 +0200 Subject: add more parsing error checking for rred The rred parser is very accepting regarding 'invalid' files. Given that we can't trust the input it might be a bit too relaxed. In any case, checking for more errors can't hurt given that we support only a very specific subset of ed commands. 
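For reference, the subset of ed the parser accepts is tiny: line-addressed a(ppend), c(hange) and d(elete) commands with addresses in strictly descending order, and every a/c block closed by a line containing only '.'. A made-up script in that style (mirroring the new test file below, not an excerpt from it):

    19a
    Multi-Arch: foreign
    .
    5,7d
    1c
    Package: supercoolstuff
    .

Everything else - empty or whitespace-only lines between commands, ascending addresses, ranges ending before they start, or a delete/change of line zero - is now rejected with an error instead of being silently tolerated.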
--- methods/rred.cc | 70 +++++++++----- test/integration/test-method-rred | 194 ++++++++++++++++++++++++++++++++++++++ test/integration/test-pdiff-usage | 3 +- 3 files changed, 245 insertions(+), 22 deletions(-) create mode 100755 test/integration/test-method-rred (limited to 'methods') diff --git a/methods/rred.cc b/methods/rred.cc index 3da33c126..81ecf8553 100644 --- a/methods/rred.cc +++ b/methods/rred.cc @@ -21,6 +21,7 @@ #include #include +#include #include #include #include @@ -35,7 +36,7 @@ class MemBlock { char *start; size_t size; char *free; - struct MemBlock *next; + MemBlock *next; MemBlock(size_t size) : size(size), next(NULL) { @@ -116,7 +117,7 @@ struct Change { size_t add_len; /* bytes */ char *add; - Change(int off) + Change(size_t off) { offset = off; del_cnt = add_cnt = add_len = 0; @@ -388,30 +389,37 @@ class Patch { public: - void read_diff(FileFd &f, Hashes * const h) + bool read_diff(FileFd &f, Hashes * const h) { char buffer[BLOCK_SIZE]; bool cmdwanted = true; - Change ch(0); - while(f.ReadLine(buffer, sizeof(buffer))) - { + Change ch(std::numeric_limits::max()); + if (f.ReadLine(buffer, sizeof(buffer)) == NULL) + return _error->Error("Reading first line of patchfile %s failed", f.Name().c_str()); + do { if (h != NULL) h->Add(buffer); if (cmdwanted) { char *m, *c; size_t s, e; - s = strtol(buffer, &m, 10); - if (m == buffer) { - s = e = ch.offset + ch.add_cnt; - c = buffer; - } else if (*m == ',') { - m++; + errno = 0; + s = strtoul(buffer, &m, 10); + if (unlikely(m == buffer || s == ULONG_MAX || errno != 0)) + return _error->Error("Parsing patchfile %s failed: Expected an effected line start", f.Name().c_str()); + else if (*m == ',') { + ++m; e = strtol(m, &c, 10); + if (unlikely(m == c || e == ULONG_MAX || errno != 0)) + return _error->Error("Parsing patchfile %s failed: Expected an effected line end", f.Name().c_str()); + if (unlikely(e < s)) + return _error->Error("Parsing patchfile %s failed: Effected lines end %lu is before start %lu", f.Name().c_str(), e, s); } else { e = s; c = m; } + if (s > ch.offset) + return _error->Error("Parsing patchfile %s failed: Effected line is after previous effected line", f.Name().c_str()); switch(*c) { case 'a': cmdwanted = false; @@ -422,6 +430,8 @@ class Patch { ch.del_cnt = 0; break; case 'c': + if (unlikely(s == 0)) + return _error->Error("Parsing patchfile %s failed: Change command can't effect line zero", f.Name().c_str()); cmdwanted = false; ch.add = NULL; ch.add_cnt = 0; @@ -430,6 +440,8 @@ class Patch { ch.del_cnt = e - s + 1; break; case 'd': + if (unlikely(s == 0)) + return _error->Error("Parsing patchfile %s failed: Delete command can't effect line zero", f.Name().c_str()); ch.offset = s - 1; ch.del_cnt = e - s + 1; ch.add = NULL; @@ -437,9 +449,11 @@ class Patch { ch.add_len = 0; filechanges.add_change(ch); break; + default: + return _error->Error("Parsing patchfile %s failed: Unknown command", f.Name().c_str()); } } else { /* !cmdwanted */ - if (buffer[0] == '.' 
&& buffer[1] == '\n') { + if (strcmp(buffer, ".\n") == 0) { cmdwanted = true; filechanges.add_change(ch); } else { @@ -465,7 +479,8 @@ class Patch { } } } - } + } while(f.ReadLine(buffer, sizeof(buffer))); + return true; } void write_diff(FILE *f) @@ -601,14 +616,14 @@ class RredMethod : public pkgAcqMethod { << std::endl; FileFd p; + Hashes patch_hash(I->ExpectedHashes); // all patches are compressed, even if the name doesn't reflect it - if (p.Open(patch_name, FileFd::ReadOnly, FileFd::Gzip) == false) { - std::cerr << "Could not open patch file " << patch_name << std::endl; + if (p.Open(patch_name, FileFd::ReadOnly, FileFd::Gzip) == false || + patch.read_diff(p, &patch_hash) == false) + { _error->DumpErrors(std::cerr); - abort(); + return false; } - Hashes patch_hash(I->ExpectedHashes); - patch.read_diff(p, &patch_hash); p.Close(); HashStringList const hsl = patch_hash.GetHashStringList(); if (hsl != I->ExpectedHashes) @@ -624,7 +639,6 @@ class RredMethod : public pkgAcqMethod { FILE *out = fopen(Itm->DestFile.c_str(), "w"); Hashes hash(Itm->ExpectedHashes); - patch.apply_against_file(out, inp, &hash); fclose(out); @@ -657,6 +671,16 @@ class RredMethod : public pkgAcqMethod { return true; } + bool Configuration(std::string Message) + { + if (pkgAcqMethod::Configuration(Message) == false) + return false; + + DropPrivsOrDie(); + + return true; + } + public: RredMethod() : pkgAcqMethod("2.0",SingleInstance | SendConfig), Debug(false) {} }; @@ -685,7 +709,11 @@ int main(int argc, char **argv) _error->DumpErrors(std::cerr); exit(1); } - patch.read_diff(p, NULL); + if (patch.read_diff(p, NULL) == false) + { + _error->DumpErrors(std::cerr); + exit(2); + } } if (just_diff) { diff --git a/test/integration/test-method-rred b/test/integration/test-method-rred new file mode 100755 index 000000000..a8de3ea28 --- /dev/null +++ b/test/integration/test-method-rred @@ -0,0 +1,194 @@ +#!/bin/sh +set -e + +TESTDIR=$(readlink -f $(dirname $0)) +. $TESTDIR/framework + +setupenvironment +configarchitecture 'i386' + +echo 'Package: coolstuff +Version: 0.8.15 +Description: collection of stuff + A lot, too much to iterate all, but at least this: + - stuff + - more stuff + - even more stuff + . + And a cow. + +Package: oldstuff +Version: 0-1 +Description: collection of outdated stuff + A lot, but of no use nowadays, but at least this: + - stuff + - more stuff + - even more stuff + . + And a dog.' > Packages + +testrred() { + msgtest "$1" "$2" + if [ -z "$3" ]; then + echo -n '' > Packages.ed + else + echo "$3" > Packages.ed + fi + rred() { + cat Packages | runapt "${METHODSDIR}/rred" "$@" + } + testsuccessequal "$4" --nomsg rred -f Packages.ed +} + +testrred 'Remove' 'first line' '1d' "$(tail -n +2 ./Packages)" +testrred 'Remove' 'empty line' '10d' "$(head -n 9 ./Packages) +$(tail -n 9 ./Packages)" +testrred 'Remove' 'line in a paragraph' '5d' "$(head -n 4 ./Packages) +$(tail -n 14 ./Packages)" +testrred 'Remove' 'last line' '19d' "$(head -n -1 ./Packages)" +testrred 'Remove' 'multiple single lines' '17d +7d' "$(sed -e '/^ - even more stuff$/ d' ./Packages)" +testrred 'Remove' 'first paragraph' '1,10d' "$(tail -n 9 ./Packages)" +testrred 'Remove' 'a few lines in the middle' '5,14d' "$(head -n 4 ./Packages) +$(tail -n 5 ./Packages)" +testrred 'Remove' 'second paragraph' '10,19d' "$(head -n 9 ./Packages)" +testrred 'Mass Remove' 'all stuff lines' '15,17d +13d +11d +5,7d +3d +1d' "$(sed '/stuff/ d' ./Packages)" + +testrred 'Single line add' 'first line' '0a +Format: 3.0 (native) +.' 
"Format: 3.0 (native) +$(cat ./Packages)" +testrred 'Single line add' 'last line' '19a +Multi-Arch: foreign +.' "$(cat ./Packages) +Multi-Arch: foreign" +testrred 'Single line add' 'middle' '9a +Multi-Arch: foreign +.' "$(head -n 9 ./Packages) +Multi-Arch: foreign +$(tail -n 10 ./Packages)" + +testrred 'Multi line add' 'first line' '0a +Format: 3.0 (native) +Source: apt +.' "Format: 3.0 (native) +Source: apt +$(cat ./Packages)" +testrred 'Multi line add' 'last line' '19a +Multi-Arch: foreign +Homepage: https://debian.org +.' "$(cat ./Packages) +Multi-Arch: foreign +Homepage: https://debian.org" +testrred 'Multi line add' 'middle' '9a +Multi-Arch: foreign +Homepage: https://debian.org +.' "$(head -n 9 ./Packages) +Multi-Arch: foreign +Homepage: https://debian.org +$(tail -n 10 ./Packages)" + +testrred 'Single line change' 'first line' '1c +Package: supercoolstuff +.' "Package: supercoolstuff +$(tail -n +2 ./Packages)" +testrred 'Single line change' 'in the middle' '9c + And a super cow. +.' "$(head -n 8 ./Packages) + And a super cow. +$(tail -n 10 ./Packages)" +testrred 'Single line change' 'an empty line' '10c + +.' "$(head -n 9 ./Packages) + +$(tail -n 9 ./Packages)" +testrred 'Single line change' 'a spacy line' '10c + +.' "$(head -n 9 ./Packages) + +$(tail -n 9 ./Packages)" +testrred 'Single line change' 'last line' '19c + And a cat. +.' "$(head -n -1 ./Packages) + And a cat." + +testrred 'Multi line change' 'exchange' '5,7c + - good stuff + - more good stuff + - even more good stuff +.' "$(head -n 4 ./Packages) + - good stuff + - more good stuff + - even more good stuff +$(tail -n 12 ./Packages)" +testrred 'Multi line change' 'less' '5,7c + - good stuff + - more good stuff +.' "$(head -n 4 ./Packages) + - good stuff + - more good stuff +$(tail -n 12 ./Packages)" +testrred 'Multi line change' 'more' '5,7c + - good stuff + - more good stuff + - even more good stuff + - bonus good stuff +.' "$(head -n 4 ./Packages) + - good stuff + - more good stuff + - even more good stuff + - bonus good stuff +$(tail -n 12 ./Packages)" + +failrred() { + msgtest 'Failure caused by' "$1" + echo "$2" > Packages.ed + rred() { + cat Packages | runapt "${METHODSDIR}/rred" "$@" + } + testfailure --nomsg rred -f Packages.ed +} + +failrred 'Bogus content' ' +' + +# not a problem per-se, but we want our parser to be really strict +failrred 'Empty patch file' '' +failrred 'Empty line patch file' ' +' +failrred 'Empty line before command' ' +1d' +failrred 'Empty line after command' '1d +' +failrred 'Empty line between commands' '17d + +7d' +failrred 'Empty spaces lines before command' ' +1d' +failrred 'Empty spaces lines after command' '1d + ' +failrred 'Empty spaces lines between commands' '17d + +7d' + +# the line before the first one can't be deleted/changed +failrred 'zero line delete' '0d' +failrred 'zero line change' '0c +Package: supercoolstuff +.' +# and this makes no sense at all +failrred 'negative line delete' '-1d' +failrred 'negative line change' '-1c +Package: supercoolstuff +.' +failrred 'negative line add' '-1a +Package: supercoolstuff +.' 
+failrred 'Wrong order of commands' '7d +17d' +failrred 'End before start' '7,6d' diff --git a/test/integration/test-pdiff-usage b/test/integration/test-pdiff-usage index 73df61895..7a9f6496b 100755 --- a/test/integration/test-pdiff-usage +++ b/test/integration/test-pdiff-usage @@ -165,7 +165,8 @@ SHA256-History: SHA256-Patches: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28 $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX - echo 'I am Mallory and I change files' >> $PATCHFILE + # needs to look like a valid command, otherwise the parser will fail before hashes are checked + echo '1d' >> $PATCHFILE cat $PATCHFILE | gzip > ${PATCHFILE}.gz generatereleasefiles '+1hour' signreleasefiles -- cgit v1.2.3-70-g09d2 From 4f51fd8636592a96aecf17c8bf4cfdb3ea2207cc Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 8 Jun 2015 00:06:41 +0200 Subject: support hashes for compressed pdiff files MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit At the moment we only have hashes for the uncompressed pdiff files, but via the new '$HASH-Download' field in the .diff/Index hashes can be provided for the .gz compressed pdiff file, which apt will now pick up and use to verify the download. Now, we "just" need buy-in from the creators of repositories… --- apt-pkg/acquire-item.cc | 72 ++++++++++++++++++++++++++++++++------- apt-pkg/acquire-item.h | 11 +++--- apt-pkg/contrib/hashes.cc | 11 ++++-- apt-pkg/contrib/hashes.h | 7 ++++ apt-pkg/indexrecords.cc | 4 +-- methods/rred.cc | 2 +- test/integration/test-pdiff-usage | 46 +++++++++++++++++++++++-- 7 files changed, 125 insertions(+), 28 deletions(-) (limited to 'methods') diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index 7b69ee993..a3f47242f 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -152,14 +152,18 @@ HashStringList pkgAcqMetaBase::GetExpectedHashes() const APT_CONST bool pkgAcqIndexDiffs::HashesRequired() const { - /* FIXME: We have only hashes for uncompressed pdiffs. rred uncompresses them on the fly while parsing. In StateFetchDiff state we also uncompress on the fly for hash check. Hashes are checked while searching for (next) patch to apply. */ + /* We don't always have the diff of the downloaded pdiff file. What we have for sure is hashes for the uncompressed file, but rred uncompresses them on the fly while parsing, so not handled here. Hashes are (also) checked while searching for (next) patch to apply.
*/ + if (State == StateFetchDiff) + return available_patches[0].download_hashes.empty() == false; return false; } HashStringList pkgAcqIndexDiffs::GetExpectedHashes() const { + if (State == StateFetchDiff) + return available_patches[0].download_hashes; return HashStringList(); } @@ -168,11 +172,15 @@ APT_CONST bool pkgAcqIndexMergeDiffs::HashesRequired() const /* @see #pkgAcqIndexDiffs::HashesRequired, with the difference that we can check the rred result after all patches are applied as we know the expected result rather than potentially apply more patches */ + if (State == StateFetchDiff) + return patch.download_hashes.empty() == false; return State == StateApplyDiff; } HashStringList pkgAcqIndexMergeDiffs::GetExpectedHashes() const { - if (State == StateApplyDiff) + if (State == StateFetchDiff) + return patch.download_hashes; + else if (State == StateApplyDiff) return GetExpectedHashesFor(Target->MetaKey); return HashStringList(); } @@ -1618,7 +1626,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ std::vector::iterator cur = available_patches.begin(); for (; cur != available_patches.end(); ++cur) { - if (cur->file != filename || unlikely(cur->result_size != size)) + if (cur->file != filename) continue; cur->result_hashes.push_back(HashString(*type, hash)); break; @@ -1630,8 +1638,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ DiffInfo next; next.file = filename; next.result_hashes.push_back(HashString(*type, hash)); - next.result_size = size; - next.patch_size = 0; + next.result_hashes.FileSize(size); available_patches.push_back(next); } else @@ -1679,10 +1686,9 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ { if (cur->file != filename) continue; - if (unlikely(cur->patch_size != 0 && cur->patch_size != size)) - continue; + if (cur->patch_hashes.empty()) + cur->patch_hashes.FileSize(size); cur->patch_hashes.push_back(HashString(*type, hash)); - cur->patch_size = size; break; } if (cur != available_patches.end()) @@ -1694,6 +1700,48 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ } } + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + std::string tagname = *type; + tagname.append("-Download"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; + + string hash, filename; + unsigned long long size; + std::stringstream ss(tmp); + + // FIXME: all of pdiff supports only .gz compressed patches + while (ss >> hash >> size >> filename) + { + if (unlikely(hash.empty() == true || filename.empty() == true)) + continue; + if (unlikely(APT::String::Endswith(filename, ".gz") == false)) + continue; + filename.erase(filename.length() - 3); + + // see if we have a record for this file already + std::vector::iterator cur = available_patches.begin(); + for (; cur != available_patches.end(); ++cur) + { + if (cur->file != filename) + continue; + if (cur->download_hashes.empty()) + cur->download_hashes.FileSize(size); + cur->download_hashes.push_back(HashString(*type, hash)); + break; + } + if (cur != available_patches.end()) + continue; + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename + << " wasn't in the list for the first parsed hash! 
(download)" << std::endl; + break; + } + } + + bool foundStart = false; for (std::vector::iterator cur = available_patches.begin(); cur != available_patches.end(); ++cur) @@ -1729,7 +1777,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string const &IndexDiffFile) /*{{{*/ unsigned long long patchesSize = 0; for (std::vector::const_iterator cur = available_patches.begin(); cur != available_patches.end(); ++cur) - patchesSize += cur->patch_size; + patchesSize += cur->patch_hashes.FileSize(); unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) { diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index f24af1aec..910e4131b 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -705,17 +705,14 @@ struct APT_HIDDEN DiffInfo { /*{{{*/ /** The filename of the diff. */ std::string file; - /** The hashes of the diff */ + /** The hashes of the file after the diff is applied */ HashStringList result_hashes; - /** The hashes of the file after the diff is applied */ + /** The hashes of the diff */ HashStringList patch_hashes; - /** The size of the file after the diff is applied */ - unsigned long long result_size; - - /** The size of the diff itself */ - unsigned long long patch_size; + /** The hashes of the compressed diff */ + HashStringList download_hashes; }; /*}}}*/ /** \brief An item that is responsible for fetching client-merge patches {{{ diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc index 11a7e479b..46cf0ba08 100644 --- a/apt-pkg/contrib/hashes.cc +++ b/apt-pkg/contrib/hashes.cc @@ -188,6 +188,13 @@ unsigned long long HashStringList::FileSize() const /*{{{*/ return strtoull(hv.c_str(), NULL, 10); } /*}}}*/ +bool HashStringList::FileSize(unsigned long long const Size) /*{{{*/ +{ + std::string size; + strprintf(size, "%llu", Size); + return push_back(HashString("Checksum-FileSize", size)); +} + /*}}}*/ bool HashStringList::supported(char const * const type) /*{{{*/ { for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t) @@ -361,9 +368,7 @@ APT_IGNORE_DEPRECATED_PUSH if ((d->CalcHashes & SHA512SUM) == SHA512SUM) hashes.push_back(HashString("SHA512", SHA512.Result().Value())); APT_IGNORE_DEPRECATED_POP - std::string SizeStr; - strprintf(SizeStr, "%llu", d->FileSize); - hashes.push_back(HashString("Checksum-FileSize", SizeStr)); + hashes.FileSize(d->FileSize); return hashes; } APT_IGNORE_DEPRECATED_PUSH diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h index 176ce4faa..e8d84da9e 100644 --- a/apt-pkg/contrib/hashes.h +++ b/apt-pkg/contrib/hashes.h @@ -96,6 +96,13 @@ class HashStringList */ unsigned long long FileSize() const; + /** sets the filesize hash + * + * @param Size of the file + * @return @see #push_back + */ + bool FileSize(unsigned long long const Size); + /** check if the given hash type is supported * * @param type to check diff --git a/apt-pkg/indexrecords.cc b/apt-pkg/indexrecords.cc index c26868cac..7e6da9558 100644 --- a/apt-pkg/indexrecords.cc +++ b/apt-pkg/indexrecords.cc @@ -121,9 +121,7 @@ bool indexRecords::Load(const string Filename) /*{{{*/ indexRecords::checkSum *Sum = new indexRecords::checkSum; Sum->MetaKeyFilename = Name; Sum->Size = Size; - std::string SizeStr; - strprintf(SizeStr, "%llu", Size); - Sum->Hashes.push_back(HashString("Checksum-FileSize", SizeStr)); + Sum->Hashes.FileSize(Size); APT_IGNORE_DEPRECATED(Sum->Hash = HashString(HashString::SupportedHashes()[i],Hash);) Entries[Name] = Sum; 
} diff --git a/methods/rred.cc b/methods/rred.cc index 81ecf8553..12cf2b4a5 100644 --- a/methods/rred.cc +++ b/methods/rred.cc @@ -627,7 +627,7 @@ class RredMethod : public pkgAcqMethod { p.Close(); HashStringList const hsl = patch_hash.GetHashStringList(); if (hsl != I->ExpectedHashes) - return _error->Error("Patch %s doesn't have the expected hashsum", patch_name.c_str()); + return _error->Error("Hash Sum mismatch for uncompressed patch %s", patch_name.c_str()); } if (Debug == true) diff --git a/test/integration/test-pdiff-usage b/test/integration/test-pdiff-usage index 7a9f6496b..3295d5497 100755 --- a/test/integration/test-pdiff-usage +++ b/test/integration/test-pdiff-usage @@ -42,6 +42,8 @@ wasmergeused() { testrun() { msgmsg "Testcase: setup the base with: $*" + local DOWNLOADHASH=true + if [ "$1" = 'nohash' ]; then DOWNLOADHASH=false; shift; fi find aptarchive -name 'Packages*' -type f -delete cp ${PKGFILE} aptarchive/Packages compressfile 'aptarchive/Packages' @@ -76,6 +78,15 @@ SHA256-History: SHA256-Patches: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28 $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX + if $DOWNLOADHASH; then + echo "SHA1-Download: + 2365ac0ac57cde3d43c63145e8251a3bd5410213 197 2010-08-18-2013.28.gz + $(sha1sum ${PATCHFILE}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE}.gz) $(basename ${PATCHFILE}.gz) +SHA256-Download: + d2a1b33187ed2d248eeae3b1223ea71791ea35f2138a713ed371332a6421f467 197 2010-08-18-2013.28.gz + $(sha256sum ${PATCHFILE}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE}.gz) $(basename ${PATCHFILE}.gz)" >> $PATCHINDEX + fi + generatereleasefiles '+1hour' signreleasefiles find aptarchive -name 'Packages*' -type f -delete @@ -131,6 +142,17 @@ SHA256-Patches: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28 $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE) $(sha256sum ${PATCHFILE2} | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}) $(basename ${PATCHFILE2})" > $PATCHINDEX + if $DOWNLOADHASH; then + echo "SHA1-Download: + 2365ac0ac57cde3d43c63145e8251a3bd5410213 197 2010-08-18-2013.28.gz + $(sha1sum ${PATCHFILE}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE}.gz) $(basename ${PATCHFILE}.gz) + $(sha1sum ${PATCHFILE2}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}.gz) $(basename ${PATCHFILE2}.gz) +SHA256-Download: + d2a1b33187ed2d248eeae3b1223ea71791ea35f2138a713ed371332a6421f467 197 2010-08-18-2013.28.gz + $(sha256sum ${PATCHFILE}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE}.gz) $(basename ${PATCHFILE}.gz) + $(sha256sum ${PATCHFILE2}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}.gz) $(basename ${PATCHFILE2}.gz)" >> $PATCHINDEX + fi + generatereleasefiles '+2hour' signreleasefiles cp -a aptarchive/Packages Packages-future @@ -150,6 +172,7 @@ SHA256-Patches: mkdir -p aptarchive/Packages.diff PATCHFILE="aptarchive/Packages.diff/$(date +%Y-%m-%d-%H%M.%S)" diff -e ${PKGFILE} ${PKGFILE}-new > ${PATCHFILE} || true + cat $PATCHFILE | gzip > ${PATCHFILE}.gz PATCHINDEX='aptarchive/Packages.diff/Index' echo "SHA1-Current: $(sha1sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new) SHA1-History: @@ -165,14 +188,22 @@ SHA256-History: SHA256-Patches: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28 $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX + if $DOWNLOADHASH; then + echo "SHA1-Download: + 
2365ac0ac57cde3d43c63145e8251a3bd5410213 197 2010-08-18-2013.28.gz + $(sha1sum ${PATCHFILE}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE}.gz) $(basename ${PATCHFILE}.gz) +SHA256-Download: + d2a1b33187ed2d248eeae3b1223ea71791ea35f2138a713ed371332a6421f467 197 2010-08-18-2013.28.gz + $(sha256sum ${PATCHFILE}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE}.gz) $(basename ${PATCHFILE}.gz)" >> $PATCHINDEX + fi # needs to look like a valid command, otherwise the parser will fail before hashes are checked - echo '1d' >> $PATCHFILE + echo '1d' > $PATCHFILE cat $PATCHFILE | gzip > ${PATCHFILE}.gz generatereleasefiles '+1hour' signreleasefiles testsuccess aptget update "$@" cp -f rootdir/tmp/testsuccess.output rootdir/tmp/aptgetupdate.output - testsuccess grep 'have the expected hashsum' rootdir/tmp/aptgetupdate.output + testsuccess grep 'Hash Sum mismatch' rootdir/tmp/aptgetupdate.output testnopackage oldstuff testsuccessequal "$(cat ${PKGFILE}-new) " aptcache show apt newstuff @@ -201,6 +232,14 @@ SHA256-History: SHA256-Patches: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28 $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE)000 $(basename $PATCHFILE)" > $PATCHINDEX + if $DOWNLOADHASH; then + echo "SHA1-Download: + 2365ac0ac57cde3d43c63145e8251a3bd5410213 197 2010-08-18-2013.28.gz + $(sha1sum ${PATCHFILE}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE}.gz)000 $(basename ${PATCHFILE}.gz) +SHA256-Download: + d2a1b33187ed2d248eeae3b1223ea71791ea35f2138a713ed371332a6421f467 197 2010-08-18-2013.28.gz + $(sha256sum ${PATCHFILE}.gz | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE}.gz)000 $(basename ${PATCHFILE}.gz)" >> $PATCHINDEX + fi generatereleasefiles '+1hour' signreleasefiles #find aptarchive -name 'Packages*' -type f -delete @@ -215,6 +254,9 @@ echo 'Debug::pkgAcquire::Diffs "true"; Debug::Acquire::Transaction "true"; Debug::pkgAcquire::rred "true";' > rootdir/etc/apt/apt.conf.d/rreddebug.conf +testrun nohash -o Acquire::PDiffs::Merge=0 -o APT::Get::List-Cleanup=1 +testrun nohash -o Acquire::PDiffs::Merge=1 -o APT::Get::List-Cleanup=1 + testrun -o Acquire::PDiffs::Merge=0 -o APT::Get::List-Cleanup=1 testrun -o Acquire::PDiffs::Merge=1 -o APT::Get::List-Cleanup=1 testrun -o Acquire::PDiffs::Merge=0 -o APT::Get::List-Cleanup=0 -- cgit v1.2.3-70-g09d2 From c69e8370947d765dd94f142d18dc11d5a76af443 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Tue, 9 Jun 2015 15:15:33 +0200 Subject: replace ULONG_MAX with c++ style std::numeric_limits MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit For some reason travis seems to be unhappy about it claiming it is not defined. 
Well, let's not think too deeply about it… Git-Dch: Ignore --- methods/rred.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'methods') diff --git a/methods/rred.cc b/methods/rred.cc index 12cf2b4a5..54123ab9c 100644 --- a/methods/rred.cc +++ b/methods/rred.cc @@ -405,12 +405,12 @@ class Patch { size_t s, e; errno = 0; s = strtoul(buffer, &m, 10); - if (unlikely(m == buffer || s == ULONG_MAX || errno != 0)) + if (unlikely(m == buffer || s == std::numeric_limits::max() || errno != 0)) return _error->Error("Parsing patchfile %s failed: Expected an effected line start", f.Name().c_str()); else if (*m == ',') { ++m; e = strtol(m, &c, 10); - if (unlikely(m == c || e == ULONG_MAX || errno != 0)) + if (unlikely(m == c || e == std::numeric_limits::max() || errno != 0)) return _error->Error("Parsing patchfile %s failed: Expected an effected line end", f.Name().c_str()); if (unlikely(e < s)) return _error->Error("Parsing patchfile %s failed: Effected lines end %lu is before start %lu", f.Name().c_str(), e, s); -- cgit v1.2.3-70-g09d2 From 9f697f69cf1adaced476598cfe08ab03c76c5d18 Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 15 Jun 2015 12:51:22 +0200 Subject: ensure valid or remove destination file in file method 'file' isn't using the destination file per se, but returns another name via the "Filename" header. It still should deal with destination files as they could exist (pkgAcqFile e.g. creates links in that location) and are potentially bogus. --- methods/file.cc | 44 +++++++++++++++++++++++++++++++++----------- 1 file changed, 33 insertions(+), 11 deletions(-) (limited to 'methods') diff --git a/methods/file.cc b/methods/file.cc index 353e54bd5..5d5fffa67 100644 --- a/methods/file.cc +++ b/methods/file.cc @@ -48,8 +48,24 @@ bool FileMethod::Fetch(FetchItem *Itm) if (Get.Host.empty() == false) return _error->Error(_("Invalid URI, local URIS must not start with //")); - // See if the file exists struct stat Buf; + // deal with destination files which might linger around + if (lstat(Itm->DestFile.c_str(), &Buf) == 0) + { + if ((Buf.st_mode & S_IFREG) != 0) + { + if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0) + { + HashStringList const hsl = Itm->ExpectedHashes; + if (Itm->ExpectedHashes.VerifyFile(File)) + Res.IMSHit = true; + } + } + } + if (Res.IMSHit != true) + unlink(Itm->DestFile.c_str()); + + // See if the file exists if (stat(File.c_str(),&Buf) == 0) { Res.Size = Buf.st_size; @@ -65,6 +81,8 @@ bool FileMethod::Fetch(FetchItem *Itm) } // See if the uncompressed file exists and reuse it + FetchResult AltRes; + AltRes.Filename.clear(); std::vector extensions = APT::Configuration::getCompressorExtensions(); for (std::vector::const_iterator ext = extensions.begin(); ext != extensions.end(); ++ext) { @@ -73,29 +91,33 @@ bool FileMethod::Fetch(FetchItem *Itm) std::string const unfile = File.substr(0, File.length() - ext->length() - 1); if (stat(unfile.c_str(),&Buf) == 0) { - FetchResult AltRes; AltRes.Size = Buf.st_size; AltRes.Filename = unfile; AltRes.LastModified = Buf.st_mtime; AltRes.IMSHit = false; if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0) AltRes.IMSHit = true; - - URIDone(Res,&AltRes); - return true; + break; } // no break here as we could have situations similar to '.gz' vs '.tar.gz' here } } - if (Res.Filename.empty() == true) + if (Res.Filename.empty() == false) + { + Hashes Hash(Itm->ExpectedHashes); + FileFd Fd(Res.Filename, FileFd::ReadOnly); + Hash.AddFD(Fd); + Res.TakeHashes(Hash); + } + + if
(AltRes.Filename.empty() == false) + URIDone(Res,&AltRes); + else if (Res.Filename.empty() == false) + URIDone(Res); + else return _error->Error(_("File not found")); - Hashes Hash(Itm->ExpectedHashes); - FileFd Fd(Res.Filename, FileFd::ReadOnly); - Hash.AddFD(Fd); - Res.TakeHashes(Hash); - URIDone(Res); return true; } /*}}}*/ -- cgit v1.2.3-70-g09d2 From ff86d7df6a53ff6283de4b9a858c1dad98ed887f Mon Sep 17 00:00:00 2001 From: David Kalnischkies Date: Mon, 15 Jun 2015 13:36:11 +0200 Subject: call URIStart in cdrom and file method MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit All other methods call it, so they should follow along even if the work they do afterwards is hardly breathtaking and usually results in a URIDone pretty soon, but the acquire system tells the individual item about this via a virtual method call, so even though none of our existing items contain any critical code in these, maybe one day they might. Consistency at least once… Which is also why this has a good side effect: file: and cdrom: requests now appear in the 'apt-get update' output. Finally - it never made sense to me to hide them. Okay, I guess it made sense before the new hit behavior, but now that you can actually see the difference in an update it makes sense to see if a file: repository changed or not as well. (A rough sketch of the method protocol this relies on follows after the patch.) --- apt-pkg/acquire-item.cc | 13 ++++--------- apt-pkg/acquire-worker.cc | 1 - methods/cdrom.cc | 3 ++- methods/file.cc | 18 ++++++++++-------- test/integration/test-apt-cdrom | 6 ++++-- test/integration/test-apt-get-update-unauth-warning | 7 +++++++ test/integration/test-apt-progress-fd | 6 ++++-- .../test-bug-595691-empty-and-broken-archive-files | 20 ++++---------------- .../test-bug-596498-trusted-unsigned-repo | 8 +++++++- 9 files changed, 42 insertions(+), 40 deletions(-) (limited to 'methods') diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index dc4f61b56..50936b627 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -519,19 +519,14 @@ void pkgAcquire::Item::Done(string const &Message, HashStringList const &Hashes, pkgAcquire::MethodConfig const * const /*Cnf*/) { // We just downloaded something..
- string FileName = LookupTag(Message,"Filename"); UsedMirror = LookupTag(Message,"UsedMirror"); - unsigned long long const downloadedSize = Hashes.FileSize(); - if (downloadedSize != 0) + if (FileSize == 0) { - if (Complete == false && !Local && FileName == DestFile) + unsigned long long const downloadedSize = Hashes.FileSize(); + if (downloadedSize != 0) { - if (Owner->Log != 0) - Owner->Log->Fetched(Hashes.FileSize(),atoi(LookupTag(Message,"Resume-Point","0").c_str())); + FileSize = downloadedSize; } - - if (FileSize == 0) - FileSize= downloadedSize; } Status = StatDone; ErrorText = string(); diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc index d6318a21b..ef195d44b 100644 --- a/apt-pkg/acquire-worker.cc +++ b/apt-pkg/acquire-worker.cc @@ -362,7 +362,6 @@ bool pkgAcquire::Worker::RunMessages() bool const isIMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false) || StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false); - for (pkgAcquire::Queue::QItem::owner_iterator O = ItmOwners.begin(); O != ItmOwners.end(); ++O) { pkgAcquire::Item * const Owner = *O; diff --git a/methods/cdrom.cc b/methods/cdrom.cc index 10cb29f66..67265cfa3 100644 --- a/methods/cdrom.cc +++ b/methods/cdrom.cc @@ -260,7 +260,8 @@ bool CDROMMethod::Fetch(FetchItem *Itm) struct stat Buf; if (stat(Res.Filename.c_str(),&Buf) != 0) return _error->Error(_("File not found")); - + + URIStart(Res); if (NewID.empty() == false) CurrentID = NewID; Res.LastModified = Buf.st_mtime; diff --git a/methods/file.cc b/methods/file.cc index 5d5fffa67..5c76ec122 100644 --- a/methods/file.cc +++ b/methods/file.cc @@ -58,7 +58,10 @@ bool FileMethod::Fetch(FetchItem *Itm) { HashStringList const hsl = Itm->ExpectedHashes; if (Itm->ExpectedHashes.VerifyFile(File)) + { + Res.Filename = Itm->DestFile; Res.IMSHit = true; + } } } } @@ -78,7 +81,14 @@ bool FileMethod::Fetch(FetchItem *Itm) if (filesize != 0 && filesize == Res.Size) Res.IMSHit = true; } + + Hashes Hash(Itm->ExpectedHashes); + FileFd Fd(File, FileFd::ReadOnly); + Hash.AddFD(Fd); + Res.TakeHashes(Hash); } + if (Res.IMSHit == false) + URIStart(Res); // See if the uncompressed file exists and reuse it FetchResult AltRes; @@ -103,14 +113,6 @@ bool FileMethod::Fetch(FetchItem *Itm) } } - if (Res.Filename.empty() == false) - { - Hashes Hash(Itm->ExpectedHashes); - FileFd Fd(Res.Filename, FileFd::ReadOnly); - Hash.AddFD(Fd); - Res.TakeHashes(Hash); - } - if (AltRes.Filename.empty() == false) URIDone(Res,&AltRes); else if (Res.Filename.empty() == false) diff --git a/test/integration/test-apt-cdrom b/test/integration/test-apt-cdrom index 34b35f745..6a218ffb8 100755 --- a/test/integration/test-apt-cdrom +++ b/test/integration/test-apt-cdrom @@ -136,13 +136,15 @@ aptcache show testing -o Acquire::Languages=en | grep -q '^Description-en: ' && mv rootdir/media/cdrom-unmounted rootdir/media/cdrom-ejected msgmsg "ensure an update doesn't mess with cdrom sources" testsuccess aptget update -testfileequal rootdir/tmp/testsuccess.output 'Reading package lists...' +testfileequal rootdir/tmp/testsuccess.output 'Hit cdrom://Debian APT Testdisk 0.8.15 stable InRelease +Reading package lists...' mv rootdir/media/cdrom-ejected rootdir/media/cdrom-unmounted testcdromusage msgmsg 'and again to check that it withstands the temptation even if it could mount' testsuccess aptget update -testfileequal rootdir/tmp/testsuccess.output 'Reading package lists...' +testfileequal rootdir/tmp/testsuccess.output 'Hit cdrom://Debian APT Testdisk 0.8.15 stable InRelease +Reading package lists...' 
 testcdromusage
 
 msgmsg 'Check that nothing touched our' 'CD-ROM'
diff --git a/test/integration/test-apt-get-update-unauth-warning b/test/integration/test-apt-get-update-unauth-warning
index ada7f7a26..1f4a14e23 100755
--- a/test/integration/test-apt-get-update-unauth-warning
+++ b/test/integration/test-apt-get-update-unauth-warning
@@ -9,6 +9,7 @@ TESTDIR=$(readlink -f $(dirname $0))
 setupenvironment
 configarchitecture "i386"
+configcompression '.' 'gz'
 
 # a "normal" package with source and binary
 buildsimplenativepackage 'foo' 'all' '2.0'
@@ -31,11 +32,17 @@ testsuccessequal 'Listing...' apt list foo
 testequal 'lock partial' ls rootdir/var/lib/apt/lists
 
+filesize() {
+	stat -c%s "$(aptget files --no-release-info --format '$(URI)' "Created-By: $1" | cut -d'/' -f 3- ).gz"
+}
 # allow override
 testwarningequal "Ign file:$APTARCHIVE unstable InRelease
   File not found
 Ign file:$APTARCHIVE unstable Release
   File not found
+Get:1 file:$APTARCHIVE unstable/main Sources [$(filesize 'Sources') B]
+Get:2 file:$APTARCHIVE unstable/main i386 Packages [$(filesize 'Packages') B]
+Get:3 file:$APTARCHIVE unstable/main Translation-en [$(filesize 'Translations') B]
 Reading package lists...
 W: The repository 'file:$APTARCHIVE unstable Release' does not have a Release file.
 This is deprecated, please contact the owner of the repository." aptget update --allow-insecure-repositories
 
 # ensure we can not install the package
diff --git a/test/integration/test-apt-progress-fd b/test/integration/test-apt-progress-fd
index 0c11aba7e..99b4ea050 100755
--- a/test/integration/test-apt-progress-fd
+++ b/test/integration/test-apt-progress-fd
@@ -16,6 +16,7 @@ setupaptarchive
 exec 3> apt-progress.log
 testsuccess aptget install testing=0.1 -y -o APT::Status-Fd=3
 testfileequal './apt-progress.log' 'dlstatus:1:0:Retrieving file 1 of 1
+dlstatus:1:0:Retrieving file 1 of 1
 dlstatus:1:20:Retrieving file 1 of 1
 pmstatus:dpkg-exec:0:Running dpkg
 pmstatus:testing:0:Installing testing (amd64)
@@ -32,6 +33,7 @@ pmstatus:dpkg-exec:83.3333:Running dpkg'
 exec 3> apt-progress.log
 testsuccess aptget install testing=0.8.15 -y -o APT::Status-Fd=3
 testfileequal './apt-progress.log' 'dlstatus:1:0:Retrieving file 1 of 1
+dlstatus:1:0:Retrieving file 1 of 1
 dlstatus:1:20:Retrieving file 1 of 1
 pmstatus:dpkg-exec:0:Running dpkg
 pmstatus:testing:0:Installing testing (amd64)
@@ -48,6 +50,7 @@ pmstatus:dpkg-exec:83.3333:Running dpkg'
 exec 3> apt-progress.log
 testsuccess aptget install testing=0.8.15 --reinstall -y -o APT::Status-Fd=3
 testfileequal './apt-progress.log' 'dlstatus:1:0:Retrieving file 1 of 1
+dlstatus:1:0:Retrieving file 1 of 1
 dlstatus:1:20:Retrieving file 1 of 1
 pmstatus:dpkg-exec:0:Running dpkg
 pmstatus:testing:0:Installing testing (amd64)
@@ -73,9 +76,8 @@ pmstatus:dpkg-exec:75:Running dpkg'
 # install non-native and ensure we get proper progress info
 exec 3> apt-progress.log
 testsuccess aptget install testing2:i386 -y -o APT::Status-Fd=3
-
-# and compare
 testfileequal './apt-progress.log' 'dlstatus:1:0:Retrieving file 1 of 1
+dlstatus:1:0:Retrieving file 1 of 1
 dlstatus:1:20:Retrieving file 1 of 1
 pmstatus:dpkg-exec:0:Running dpkg
 pmstatus:testing2:0:Installing testing2 (i386)
diff --git a/test/integration/test-bug-595691-empty-and-broken-archive-files b/test/integration/test-bug-595691-empty-and-broken-archive-files
index b42212f5e..3042d116d 100755
--- a/test/integration/test-bug-595691-empty-and-broken-archive-files
+++ b/test/integration/test-bug-595691-empty-and-broken-archive-files
@@ -27,9 +27,6 @@ testaptgetupdate() {
 createemptyarchive() {
 	find aptarchive/ \( -name "Packages*" -o -name "en*" \) -type f -delete
-	if [ "en" = "$1" ]; then
-		echo -n "" | $COMPRESSOR_CMD > aptarchive/Packages.$COMPRESS
-	fi
 	touch aptarchive/Packages
 	echo -n "" | $COMPRESSOR_CMD > aptarchive/${1}.$COMPRESS
 	generatereleasefiles
@@ -39,9 +36,6 @@ createemptyarchive() {
 
 createemptyfile() {
 	find aptarchive/ \( -name "Packages*" -o -name "en*" \) -type f -delete
-	if [ "en" = "$1" ]; then
-		echo -n "" | $COMPRESSOR_CMD > aptarchive/Packages.$COMPRESS
-	fi
 	touch aptarchive/Packages aptarchive/${1}.$COMPRESS
 	generatereleasefiles
 	signreleasefiles
@@ -52,19 +46,13 @@ testoverfile() {
 	local APTARCHIVE="$(readlink -f ./aptarchive)"
 	forcecompressor "$1"
 
-	createemptyfile 'en'
-	testaptgetupdate 'Reading package lists...' "empty file en.$COMPRESS over file"
-
-	createemptyarchive 'en'
-	testaptgetupdate 'Reading package lists...' "empty archive en.$COMPRESS over file"
-
 	createemptyarchive 'Packages'
-	# FIXME: Why omits the file transport the Packages Get line?
-	#Get:3 file: Packages []
-	testaptgetupdate 'Reading package lists...' "empty archive Packages.$COMPRESS over file"
+	testaptgetupdate "Get: file:$APTARCHIVE Packages []
+Reading package lists..." "empty archive Packages.$COMPRESS over file"
 
 	createemptyfile 'Packages'
-	testaptgetupdate "Err file:$APTARCHIVE Packages
+	testaptgetupdate "Get: file:$APTARCHIVE Packages
+Err file:$APTARCHIVE Packages
   Empty files can't be valid archives
 W: Failed to fetch ${COMPRESSOR}:${APTARCHIVE}/Packages.$COMPRESS  Empty files can't be valid archives
diff --git a/test/integration/test-bug-596498-trusted-unsigned-repo b/test/integration/test-bug-596498-trusted-unsigned-repo
index 4eb77b9a4..1ff0f1d8d 100755
--- a/test/integration/test-bug-596498-trusted-unsigned-repo
+++ b/test/integration/test-bug-596498-trusted-unsigned-repo
@@ -15,13 +15,17 @@ aptgetupdate() {
 	${1:-testwarning} aptget update --allow-insecure-repositories
 }
 
-PKGTEXT="$(aptget install cool --assume-no -d | head -n 7)"
+PKGTEXT="$(aptget install cool --assume-no -d | head -n 8)"
+DOWNLOG="$(echo "$PKGTEXT" | tail -n 1)"
+PKGTEXT="$(echo "$PKGTEXT" | head -n 7)"
 DEBFILE='rootdir/etc/apt/sources.list.d/apt-test-unstable-deb.list'
 
 testsuccessequal "$PKGTEXT
+$DOWNLOG
 Download complete and in download only mode" aptget install cool --assume-no -d
 
 testsuccessequal "$PKGTEXT
+$DOWNLOG
 Download complete and in download only mode" aptget install cool --assume-no -d --allow-unauthenticated
 
 sed -i -e 's#deb#deb [trusted=no]#' $DEBFILE
@@ -47,10 +51,12 @@ testsuccessequal "$PKGTEXT
 WARNING: The following packages cannot be authenticated!
   cool
 Authentication warning overridden.
+$DOWNLOG
 Download complete and in download only mode" aptget install cool --assume-no -d --allow-unauthenticated
 
 sed -i -e 's#deb#deb [trusted=yes]#' $DEBFILE
 aptgetupdate
 
 testsuccessequal "$PKGTEXT
+$DOWNLOG
 Download complete and in download only mode" aptget install cool --assume-no -d
-- cgit v1.2.3-70-g09d2
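
The calling convention the last commit settles on can be summarised in a
short sketch. This is illustrative only and not code from the repository:
the hypothetical 'ExampleMethod' stands in for file.cc and cdrom.cc, it
assumes APT's internal pkgAcqMethod interface exactly as it appears in the
diffs above, and it builds only inside APT's source tree.

#include <apt-pkg/acquire-method.h>
#include <apt-pkg/error.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>
#include <apt-pkg/strutl.h>

#include <sys/stat.h>

// Hypothetical local-file method showing the URIStart/URIDone pattern.
class ExampleMethod : public pkgAcqMethod
{
   virtual bool Fetch(FetchItem *Itm);
   public:
   ExampleMethod() : pkgAcqMethod("1.0", SingleInstance | SendConfig) {};
};

bool ExampleMethod::Fetch(FetchItem *Itm)
{
   URI Get(Itm->Uri);
   FetchResult Res;
   Res.Filename = Get.Path;

   struct stat Buf;
   if (stat(Res.Filename.c_str(), &Buf) != 0)
      return _error->Error("File not found");
   Res.Size = Buf.st_size;
   Res.LastModified = Buf.st_mtime;
   Res.IMSHit = (Itm->LastModified == Buf.st_mtime);

   // Announce the transfer to the acquire system before doing the work;
   // the owning item is notified via a virtual method call, and this is
   // what produces the 'Get:' line in the 'apt-get update' output.
   if (Res.IMSHit == false)
      URIStart(Res);

   // Hash the file so the owning item can verify what it received.
   Hashes Hash(Itm->ExpectedHashes);
   FileFd Fd(Res.Filename, FileFd::ReadOnly);
   Hash.AddFD(Fd);
   Res.TakeHashes(Hash);

   URIDone(Res);
   return true;
}

Gating URIStart() on Res.IMSHit mirrors the file.cc hunk above: a hit is
reported by URIDone() alone as 'Hit', so signalling a start first would
announce a download that never actually happens.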