author | Michael Vogt <mvo@debian.org> | 2015-08-18 11:54:05 +0200
committer | Michael Vogt <mvo@debian.org> | 2015-08-18 11:54:05 +0200
commit | 21248c0f00ee71412dbadc6ebf84011cf974346d (patch)
tree | 7dc1f5904399482d2128765b5b86d57a4ac5b3e1 /apt-pkg
parent | e5f34ad3b043abf033c1626eb8449b75955d6760 (diff)
parent | 4fc6b7570c3e97b65c118b58cdf6729fa94c9b03 (diff)
Merge branch 'debian/experimental' into feature/srv-records
Conflicts:
cmdline/apt-helper.cc
cmdline/makefile
Diffstat (limited to 'apt-pkg')
94 files changed, 7519 insertions, 3957 deletions
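
The central API change running through this merge is the switch from a single hash string (SHA1/MD5) to HashStringList, so a downloaded index is checked against all supported hash types at once. A rough standalone sketch of that pattern follows; it uses only calls that appear in the diff below (HashStringList, Hashes::AddFD, GetHashStringList, usable, find, toStr), but the helper function itself and the header names are illustrative assumptions, not code from this commit.

    // Sketch only: verify a local index file against a set of expected hashes,
    // mirroring the HashStringList usage introduced throughout acquire-item.cc.
    #include <apt-pkg/hashes.h>   // HashString, HashStringList, Hashes (assumed header)
    #include <apt-pkg/fileutl.h>  // FileFd (assumed header)
    #include <iostream>
    #include <string>

    static bool VerifyIndex(std::string const &Path, HashStringList const &Expected)
    {
       // no trusted hashes available -> nothing we can check
       if (Expected.usable() == false)
          return true;

       // hash the local file with every supported algorithm in one pass
       FileFd fd(Path, FileFd::ReadOnly);
       Hashes calc;
       calc.AddFD(fd);
       HashStringList const Local = calc.GetHashStringList();

       // comparison succeeds only if all hash types common to both lists agree
       if (Local == Expected)
          return true;

       std::cerr << Path << ": expected " << Expected.find(NULL)->toStr()
                 << " but got " << Local.find(NULL)->toStr() << std::endl;
       return false;
    }

This is also the shape of the new debug output: with Debug::Acquire::HashSumMismatch enabled, the printHashSumComparision helper added at the top of the diff dumps the expected and actual lists side by side.
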
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index 30743addf..8155b9bfe 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -50,16 +50,79 @@ using namespace std; +static void printHashSumComparision(std::string const &URI, HashStringList const &Expected, HashStringList const &Actual) /*{{{*/ +{ + if (_config->FindB("Debug::Acquire::HashSumMismatch", false) == false) + return; + std::cerr << std::endl << URI << ":" << std::endl << " Expected Hash: " << std::endl; + for (HashStringList::const_iterator hs = Expected.begin(); hs != Expected.end(); ++hs) + std::cerr << "\t- " << hs->toStr() << std::endl; + std::cerr << " Actual Hash: " << std::endl; + for (HashStringList::const_iterator hs = Actual.begin(); hs != Actual.end(); ++hs) + std::cerr << "\t- " << hs->toStr() << std::endl; +} + /*}}}*/ +static std::string GetPartialFileName(std::string const &file) /*{{{*/ +{ + std::string DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += file; + return DestFile; +} + /*}}}*/ +static std::string GetPartialFileNameFromURI(std::string const &uri) /*{{{*/ +{ + return GetPartialFileName(URItoFileName(uri)); +} + /*}}}*/ +static std::string GetFinalFileNameFromURI(std::string const &uri) /*{{{*/ +{ + return _config->FindDir("Dir::State::lists") + URItoFileName(uri); +} + /*}}}*/ +static std::string GetCompressedFileName(std::string const &URI, std::string const &Name, std::string const &Ext) /*{{{*/ +{ + if (Ext.empty() || Ext == "uncompressed") + return Name; + + // do not reverify cdrom sources as apt-cdrom may rewrite the Packages + // file when its doing the indexcopy + if (URI.substr(0,6) == "cdrom:") + return Name; + + // adjust DestFile if its compressed on disk + if (_config->FindB("Acquire::GzipIndexes",false) == true) + return Name + '.' 
+ Ext; + return Name; +} + /*}}}*/ +static bool AllowInsecureRepositories(indexRecords const * const MetaIndexParser, pkgAcqMetaBase * const TransactionManager, pkgAcquire::Item * const I) /*{{{*/ +{ + if(MetaIndexParser->IsAlwaysTrusted() || _config->FindB("Acquire::AllowInsecureRepositories") == true) + return true; + + _error->Error(_("Use --allow-insecure-repositories to force the update")); + TransactionManager->AbortTransaction(); + I->Status = pkgAcquire::Item::StatError; + return false; +} + /*}}}*/ + + // Acquire::Item::Item - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* */ -pkgAcquire::Item::Item(pkgAcquire *Owner) : Owner(Owner), FileSize(0), - PartialSize(0), Mode(0), ID(0), Complete(false), - Local(false), QueueCounter(0) +APT_IGNORE_DEPRECATED_PUSH +pkgAcquire::Item::Item(pkgAcquire *Owner, + HashStringList const &ExpectedHashes, + pkgAcqMetaBase *TransactionManager) + : Owner(Owner), FileSize(0), PartialSize(0), Mode(0), ID(0), Complete(false), + Local(false), QueueCounter(0), TransactionManager(TransactionManager), + ExpectedAdditionalItems(0), ExpectedHashes(ExpectedHashes) { Owner->Add(this); Status = StatIdle; + if(TransactionManager != NULL) + TransactionManager->Add(this); } +APT_IGNORE_DEPRECATED_POP /*}}}*/ // Acquire::Item::~Item - Destructor /*{{{*/ // --------------------------------------------------------------------- @@ -75,15 +138,15 @@ pkgAcquire::Item::~Item() fetch this object */ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) { - Status = StatIdle; - ErrorText = LookupTag(Message,"Message"); + if(ErrorText.empty()) + ErrorText = LookupTag(Message,"Message"); UsedMirror = LookupTag(Message,"UsedMirror"); if (QueueCounter <= 1) { /* This indicates that the file is not available right now but might be sometime later. 
If we do a retry cycle then this should be retried [CDROMs] */ - if (Cnf->LocalOnly == true && + if (Cnf != NULL && Cnf->LocalOnly == true && StringToBool(LookupTag(Message,"Transient-Failure"),false) == true) { Status = StatIdle; @@ -92,17 +155,57 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) } Status = StatError; + Complete = false; Dequeue(); - } + } + else + Status = StatIdle; + + // check fail reason + string const FailReason = LookupTag(Message, "FailReason"); + if(FailReason == "MaximumSizeExceeded") + RenameOnError(MaximumSizeExceeded); // report mirror failure back to LP if we actually use a mirror - string FailReason = LookupTag(Message, "FailReason"); if(FailReason.size() != 0) ReportMirrorFailure(FailReason); else ReportMirrorFailure(ErrorText); } /*}}}*/ +bool pkgAcquire::Item::TransactionState(TransactionStates const state) /*{{{*/ +{ + bool const Debug = _config->FindB("Debug::Acquire::Transaction", false); + switch(state) + { + case TransactionAbort: + if(Debug == true) + std::clog << " Cancel: " << DestFile << std::endl; + if (Status == pkgAcquire::Item::StatIdle) + { + Status = pkgAcquire::Item::StatDone; + Dequeue(); + } + break; + case TransactionCommit: + if(PartialFile != "") + { + if(Debug == true) + std::clog << "mv " << PartialFile << " -> "<< DestFile << " # " << DescURI() << std::endl; + + Rename(PartialFile, DestFile); + } else { + if(Debug == true) + std::clog << "rm " << DestFile << " # " << DescURI() << std::endl; + unlink(DestFile.c_str()); + } + // mark that this transaction is finished + TransactionManager = 0; + break; + } + return true; +} + /*}}}*/ // Acquire::Item::Start - Item has begun to download /*{{{*/ // --------------------------------------------------------------------- /* Stash status and the file size. Note that setting Complete means @@ -110,6 +213,7 @@ void pkgAcquire::Item::Failed(string Message,pkgAcquire::MethodConfig *Cnf) void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size) { Status = StatFetching; + ErrorText.clear(); if (FileSize == 0 && Complete == false) FileSize = Size; } @@ -117,12 +221,12 @@ void pkgAcquire::Item::Start(string /*Message*/,unsigned long long Size) // Acquire::Item::Done - Item downloaded OK /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcquire::Item::Done(string Message,unsigned long long Size,string /*Hash*/, +void pkgAcquire::Item::Done(string Message,unsigned long long Size,HashStringList const &/*Hash*/, pkgAcquire::MethodConfig * /*Cnf*/) { // We just downloaded something.. 
string FileName = LookupTag(Message,"Filename"); - UsedMirror = LookupTag(Message,"UsedMirror"); + UsedMirror = LookupTag(Message,"UsedMirror"); if (Complete == false && !Local && FileName == DestFile) { if (Owner->Log != 0) @@ -140,21 +244,67 @@ void pkgAcquire::Item::Done(string Message,unsigned long long Size,string /*Hash // --------------------------------------------------------------------- /* This helper function is used by a lot of item methods as their final step */ -void pkgAcquire::Item::Rename(string From,string To) +bool pkgAcquire::Item::Rename(string From,string To) { - if (rename(From.c_str(),To.c_str()) != 0) - { - char S[300]; - snprintf(S,sizeof(S),_("rename failed, %s (%s -> %s)."),strerror(errno), - From.c_str(),To.c_str()); - Status = StatError; + if (From == To || rename(From.c_str(),To.c_str()) == 0) + return true; + + std::string S; + strprintf(S, _("rename failed, %s (%s -> %s)."), strerror(errno), + From.c_str(),To.c_str()); + Status = StatError; + if (ErrorText.empty()) ErrorText = S; - } + else + ErrorText = ErrorText + ": " + S; + return false; +} + /*}}}*/ +// Acquire::Item::QueueURI and specialisations from child classes /*{{{*/ +/* The idea here is that an item isn't queued if it exists on disk and the + transition manager was a hit as this means that the files it contains + the checksums for can't be updated either (or they are and we are asking + for a hashsum mismatch to happen which helps nobody) */ +bool pkgAcquire::Item::QueueURI(ItemDesc &Item) +{ + std::string const FinalFile = GetFinalFilename(); + if (TransactionManager != NULL && TransactionManager->IMSHit == true && + FileExists(FinalFile) == true) + { + PartialFile = DestFile = FinalFile; + Status = StatDone; + return false; + } + + Owner->Enqueue(Item); + return true; +} +/* The transition manager InRelease itself (or its older sisters-in-law + Release & Release.gpg) is always queued as this allows us to rerun gpgv + on it to verify that we aren't stalled with old files */ +bool pkgAcqMetaBase::QueueURI(pkgAcquire::ItemDesc &Item) +{ + Owner->Enqueue(Item); + return true; +} +/* the Diff/Index needs to queue also the up-to-date complete index file + to ensure that the list cleaner isn't eating it */ +bool pkgAcqDiffIndex::QueueURI(pkgAcquire::ItemDesc &Item) +{ + if (pkgAcquire::Item::QueueURI(Item) == true) + return true; + QueueOnIMSHit(); + return false; +} + /*}}}*/ +void pkgAcquire::Item::Dequeue() /*{{{*/ +{ + Owner->Dequeue(this); } /*}}}*/ bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const error)/*{{{*/ { - if(FileExists(DestFile)) + if (RealFileExists(DestFile)) Rename(DestFile, DestFile + ".FAILED"); switch (error) @@ -174,10 +324,37 @@ bool pkgAcquire::Item::RenameOnError(pkgAcquire::Item::RenameOnErrorState const Status = StatError; // do not report as usually its not the mirrors fault, but Portal/Proxy break; + case SignatureError: + ErrorText = _("Signature error"); + Status = StatError; + break; + case NotClearsigned: + ErrorText = _("Does not start with a cleartext signature"); + Status = StatError; + break; + case MaximumSizeExceeded: + // the method is expected to report a good error for this + Status = StatError; + break; + case PDiffError: + // no handling here, done by callers + break; } return false; } /*}}}*/ +void pkgAcquire::Item::SetActiveSubprocess(const std::string &subprocess)/*{{{*/ +{ + ActiveSubprocess = subprocess; + APT_IGNORE_DEPRECATED(Mode = ActiveSubprocess.c_str();) +} + /*}}}*/ +// Acquire::Item::GetFinalFilename - Return 
the full final file path /*{{{*/ +std::string pkgAcquire::Item::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(Desc.URI); +} + /*}}}*/ // Acquire::Item::ReportMirrorFailure /*{{{*/ // --------------------------------------------------------------------- void pkgAcquire::Item::ReportMirrorFailure(string FailCode) @@ -192,148 +369,37 @@ void pkgAcquire::Item::ReportMirrorFailure(string FailCode) << " FailCode: " << FailCode << std::endl; #endif - const char *Args[40]; - unsigned int i = 0; string report = _config->Find("Methods::Mirror::ProblemReporting", "/usr/lib/apt/apt-report-mirror-failure"); if(!FileExists(report)) return; - Args[i++] = report.c_str(); - Args[i++] = UsedMirror.c_str(); - Args[i++] = DescURI().c_str(); - Args[i++] = FailCode.c_str(); - Args[i++] = NULL; + + std::vector<char const*> Args; + Args.push_back(report.c_str()); + Args.push_back(UsedMirror.c_str()); + Args.push_back(DescURI().c_str()); + Args.push_back(FailCode.c_str()); + Args.push_back(NULL); + pid_t pid = ExecFork(); - if(pid < 0) + if(pid < 0) { _error->Error("ReportMirrorFailure Fork failed"); return; } - else if(pid == 0) + else if(pid == 0) { - execvp(Args[0], (char**)Args); + execvp(Args[0], (char**)Args.data()); std::cerr << "Could not exec " << Args[0] << std::endl; _exit(100); } - if(!ExecWait(pid, "report-mirror-failure")) + if(!ExecWait(pid, "report-mirror-failure")) { _error->Warning("Couldn't report problem to '%s'", _config->Find("Methods::Mirror::ProblemReporting").c_str()); } } /*}}}*/ -// AcqSubIndex::AcqSubIndex - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* Get a sub-index file based on checksums from a 'master' file and - possibly query additional files */ -pkgAcqSubIndex::pkgAcqSubIndex(pkgAcquire *Owner, string const &URI, - string const &URIDesc, string const &ShortDesc, - HashString const &ExpectedHash) - : Item(Owner), ExpectedHash(ExpectedHash) -{ - /* XXX: Beware: Currently this class does nothing (of value) anymore ! */ - Debug = _config->FindB("Debug::pkgAcquire::SubIndex",false); - - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI); - - Desc.URI = URI; - Desc.Description = URIDesc; - Desc.Owner = this; - Desc.ShortDesc = ShortDesc; - - QueueURI(Desc); - - if(Debug) - std::clog << "pkgAcqSubIndex: " << Desc.URI << std::endl; -} - /*}}}*/ -// AcqSubIndex::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -/* The only header we use is the last-modified header. 
*/ -string pkgAcqSubIndex::Custom600Headers() -{ - string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(Desc.URI); - - struct stat Buf; - if (stat(Final.c_str(),&Buf) != 0) - return "\nIndex-File: true\nFail-Ignore: true\n"; - return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); -} - /*}}}*/ -void pkgAcqSubIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ -{ - if(Debug) - std::clog << "pkgAcqSubIndex failed: " << Desc.URI << " with " << Message << std::endl; - - Complete = false; - Status = StatDone; - Dequeue(); - - // No good Index is provided -} - /*}}}*/ -void pkgAcqSubIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/ - pkgAcquire::MethodConfig *Cnf) -{ - if(Debug) - std::clog << "pkgAcqSubIndex::Done(): " << Desc.URI << std::endl; - - string FileName = LookupTag(Message,"Filename"); - if (FileName.empty() == true) - { - Status = StatError; - ErrorText = "Method gave a blank filename"; - return; - } - - if (FileName != DestFile) - { - Local = true; - Desc.URI = "copy:" + FileName; - QueueURI(Desc); - return; - } - - Item::Done(Message,Size,Md5Hash,Cnf); - - string FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(Desc.URI); - - /* Downloaded invalid transindex => Error (LP: #346386) (Closes: #627642) */ - indexRecords SubIndexParser; - if (FileExists(DestFile) == true && !SubIndexParser.Load(DestFile)) { - Status = StatError; - ErrorText = SubIndexParser.ErrorText; - return; - } - - // success in downloading the index - // rename the index - if(Debug) - std::clog << "Renaming: " << DestFile << " -> " << FinalFile << std::endl; - Rename(DestFile,FinalFile); - chmod(FinalFile.c_str(),0644); - DestFile = FinalFile; - - if(ParseIndex(DestFile) == false) - return Failed("", NULL); - - Complete = true; - Status = StatDone; - Dequeue(); - return; -} - /*}}}*/ -bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/ -{ - indexRecords SubIndexParser; - if (FileExists(IndexFile) == false || SubIndexParser.Load(IndexFile) == false) - return false; - // so something with the downloaded index - return true; -} - /*}}}*/ // AcqDiffIndex::AcqDiffIndex - Constructor /*{{{*/ // --------------------------------------------------------------------- /* Get the DiffIndex file first and see if there are patches available @@ -342,28 +408,29 @@ bool pkgAcqSubIndex::ParseIndex(string const &IndexFile) /*{{{*/ * the original packages file */ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc, - HashString ExpectedHash) - : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), - Description(URIDesc) + pkgAcqMetaBase *TransactionManager, + IndexTarget const * const Target, + HashStringList const &ExpectedHashes, + indexRecords *MetaIndexParser) + : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, + MetaIndexParser) { Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - Desc.Description = URIDesc + "/DiffIndex"; + RealURI = Target->URI; Desc.Owner = this; - Desc.ShortDesc = ShortDesc; - Desc.URI = URI + ".diff/Index"; + Desc.Description = Target->Description + ".diff/Index"; + Desc.ShortDesc = Target->ShortDesc; + Desc.URI = Target->URI + ".diff/Index"; - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI) + string(".DiffIndex"); + DestFile = GetPartialFileNameFromURI(Desc.URI); if(Debug) std::clog << "pkgAcqDiffIndex: " << Desc.URI << std::endl; // look for the 
current package file - CurrentPackagesFile = _config->FindDir("Dir::State::lists"); - CurrentPackagesFile += URItoFileName(RealURI); + CurrentPackagesFile = GetFinalFileNameFromURI(RealURI); // FIXME: this file:/ check is a hack to prevent fetching // from local sources. this is really silly, and @@ -372,9 +439,7 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, Desc.URI.substr(0,strlen("file:/")) == "file:/") { // we don't have a pkg file or we don't want to queue - if(Debug) - std::clog << "No index file, local or canceld by user" << std::endl; - Failed("", NULL); + Failed("No index file, local or canceld by user", NULL); return; } @@ -386,14 +451,24 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner, } /*}}}*/ +// Acquire::Item::GetFinalFilename - Return the full final file path /*{{{*/ +std::string pkgAcqDiffIndex::GetFinalFilename() const +{ + // the logic we inherent from pkgAcqBaseIndex isn't what we need here + return pkgAcquire::Item::GetFinalFilename(); +} + /*}}}*/ // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. */ +#if APT_PKG_ABI >= 413 +string pkgAcqDiffIndex::Custom600Headers() const +#else string pkgAcqDiffIndex::Custom600Headers() +#endif { - string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI) + string(".IndexDiff"); - + string const Final = GetFinalFilename(); + if(Debug) std::clog << "Custom600Header-IMS: " << Final << std::endl; @@ -404,196 +479,350 @@ string pkgAcqDiffIndex::Custom600Headers() return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); } /*}}}*/ +void pkgAcqDiffIndex::QueueOnIMSHit() const /*{{{*/ +{ + // list cleanup needs to know that this file as well as the already + // present index is ours, so we create an empty diff to save it for us + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, + ExpectedHashes, MetaIndexParser); +} + /*}}}*/ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ { + // failing here is fine: our caller will take care of trying to + // get the complete file if patching fails if(Debug) std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile << std::endl; - pkgTagSection Tags; - string ServerSha1; - vector<DiffInfo> available_patches; - FileFd Fd(IndexDiffFile,FileFd::ReadOnly); pkgTagFile TF(&Fd); if (_error->PendingError() == true) return false; - if(TF.Step(Tags) == true) + pkgTagSection Tags; + if(unlikely(TF.Step(Tags) == false)) + return false; + + HashStringList ServerHashes; + unsigned long long ServerSize = 0; + + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) { - bool found = false; - DiffInfo d; - string size; + std::string tagname = *type; + tagname.append("-Current"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; - string const tmp = Tags.FindS("SHA1-Current"); + string hash; + unsigned long long size; std::stringstream ss(tmp); - ss >> ServerSha1 >> size; - unsigned long const ServerSize = atol(size.c_str()); + ss >> hash >> size; + if (unlikely(hash.empty() == true)) + continue; + if (unlikely(ServerSize != 0 && ServerSize != size)) + continue; + ServerHashes.push_back(HashString(*type, hash)); + ServerSize = size; + } - FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); - SHA1Summation SHA1; - SHA1.AddFD(fd); - string const local_sha1 = SHA1.Result(); + if (ServerHashes.usable() == false) + 
{ + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl; + return false; + } - if(local_sha1 == ServerSha1) + if (ServerHashes != HashSums()) + { + if (Debug == true) { - // we have the same sha1 as the server so we are done here - if(Debug) - std::clog << "Package file is up-to-date" << std::endl; - // list cleanup needs to know that this file as well as the already - // present index is ours, so we create an empty diff to save it for us - new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash, ServerSha1, available_patches); - return true; + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl; + printHashSumComparision(CurrentPackagesFile, ServerHashes, HashSums()); } - else - { - if(Debug) - std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl; + return false; + } - // check the historie and see what patches we need - string const history = Tags.FindS("SHA1-History"); - std::stringstream hist(history); - while(hist >> d.sha1 >> size >> d.file) - { - // read until the first match is found - // from that point on, we probably need all diffs - if(d.sha1 == local_sha1) - found=true; - else if (found == false) - continue; + if (ServerHashes.VerifyFile(CurrentPackagesFile) == true) + { + // we have the same sha1 as the server so we are done here + if(Debug) + std::clog << "pkgAcqDiffIndex: Package file " << CurrentPackagesFile << " is up-to-date" << std::endl; + QueueOnIMSHit(); + return true; + } - if(Debug) - std::clog << "Need to get diff: " << d.file << std::endl; - available_patches.push_back(d); - } + FileFd fd(CurrentPackagesFile, FileFd::ReadOnly); + Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); - if (available_patches.empty() == false) - { - // patching with too many files is rather slow compared to a fast download - unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0); - if (fileLimit != 0 && fileLimit < available_patches.size()) - { - if (Debug) - std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit - << ") so fallback to complete download" << std::endl; - return false; - } + if(Debug) + std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at " + << fd.Name() << " " << fd.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl; - // see if the patches are too big - found = false; // it was true and it will be true again at the end - d = *available_patches.begin(); - string const firstPatch = d.file; - unsigned long patchesSize = 0; - std::stringstream patches(Tags.FindS("SHA1-Patches")); - while(patches >> d.sha1 >> size >> d.file) - { - if (firstPatch == d.file) - found = true; - else if (found == false) - continue; + // parse all of (provided) history + vector<DiffInfo> available_patches; + bool firstAcceptedHashes = true; + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + if (LocalHashes.find(*type) == NULL) + continue; - patchesSize += atol(size.c_str()); - } - unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); - if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) - { - if (Debug) - std::clog << "Need " << patchesSize << " bytes (Limit is 
" << sizeLimit/100 - << ") so fallback to complete download" << std::endl; - return false; - } - } - } + std::string tagname = *type; + tagname.append("-History"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; + + string hash, filename; + unsigned long long size; + std::stringstream ss(tmp); - // we have something, queue the next diff - if(found) + while (ss >> hash >> size >> filename) { - // queue the diffs - string::size_type const last_space = Description.rfind(" "); - if(last_space != string::npos) - Description.erase(last_space, Description.size()-last_space); - - /* decide if we should download patches one by one or in one go: - The first is good if the server merges patches, but many don't so client - based merging can be attempt in which case the second is better. - "bad things" will happen if patches are merged on the server, - but client side merging is attempt as well */ - bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); - if (pdiff_merge == true) + if (unlikely(hash.empty() == true || filename.empty() == true)) + continue; + + // see if we have a record for this file already + std::vector<DiffInfo>::iterator cur = available_patches.begin(); + for (; cur != available_patches.end(); ++cur) + { + if (cur->file != filename || unlikely(cur->result_size != size)) + continue; + cur->result_hashes.push_back(HashString(*type, hash)); + break; + } + if (cur != available_patches.end()) + continue; + if (firstAcceptedHashes == true) { - // reprepro adds this flag if it has merged patches on the server - std::string const precedence = Tags.FindS("X-Patch-Precedence"); - pdiff_merge = (precedence != "merged"); + DiffInfo next; + next.file = filename; + next.result_hashes.push_back(HashString(*type, hash)); + next.result_size = size; + next.patch_size = 0; + available_patches.push_back(next); } - - if (pdiff_merge == false) - new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash, ServerSha1, available_patches); else { - std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size()); - for(size_t i = 0; i < available_patches.size(); ++i) - (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, RealURI, Description, Desc.ShortDesc, ExpectedHash, - available_patches[i], diffs); + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename + << " wasn't in the list for the first parsed hash! (history)" << std::endl; + break; } + } + firstAcceptedHashes = false; + } - Complete = false; - Status = StatDone; - Dequeue(); - return true; + if (unlikely(available_patches.empty() == true)) + { + if (Debug) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " + << "Couldn't find any patches for the patch series." 
<< std::endl; + return false; + } + + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + if (LocalHashes.find(*type) == NULL) + continue; + + std::string tagname = *type; + tagname.append("-Patches"); + std::string const tmp = Tags.FindS(tagname.c_str()); + if (tmp.empty() == true) + continue; + + string hash, filename; + unsigned long long size; + std::stringstream ss(tmp); + + while (ss >> hash >> size >> filename) + { + if (unlikely(hash.empty() == true || filename.empty() == true)) + continue; + + // see if we have a record for this file already + std::vector<DiffInfo>::iterator cur = available_patches.begin(); + for (; cur != available_patches.end(); ++cur) + { + if (cur->file != filename) + continue; + if (unlikely(cur->patch_size != 0 && cur->patch_size != size)) + continue; + cur->patch_hashes.push_back(HashString(*type, hash)); + cur->patch_size = size; + break; + } + if (cur != available_patches.end()) + continue; + if (Debug == true) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename + << " wasn't in the list for the first parsed hash! (patches)" << std::endl; + break; } } - - // Nothing found, report and return false - // Failing here is ok, if we return false later, the full - // IndexFile is queued - if(Debug) - std::clog << "Can't find a patch in the index file" << std::endl; - return false; + + bool foundStart = false; + for (std::vector<DiffInfo>::iterator cur = available_patches.begin(); + cur != available_patches.end(); ++cur) + { + if (LocalHashes != cur->result_hashes) + continue; + + available_patches.erase(available_patches.begin(), cur); + foundStart = true; + break; + } + + if (foundStart == false || unlikely(available_patches.empty() == true)) + { + if (Debug) + std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": " + << "Couldn't find the start of the patch series." << std::endl; + return false; + } + + // patching with too many files is rather slow compared to a fast download + unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0); + if (fileLimit != 0 && fileLimit < available_patches.size()) + { + if (Debug) + std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit + << ") so fallback to complete download" << std::endl; + return false; + } + + // calculate the size of all patches we have to get + // note that all sizes are uncompressed, while we download compressed files + unsigned long long patchesSize = 0; + for (std::vector<DiffInfo>::const_iterator cur = available_patches.begin(); + cur != available_patches.end(); ++cur) + patchesSize += cur->patch_size; + unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100); + if (sizeLimit > 0 && (sizeLimit/100) < patchesSize) + { + if (Debug) + std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100 + << ") so fallback to complete download" << std::endl; + return false; + } + + // we have something, queue the diffs + string::size_type const last_space = Description.rfind(" "); + if(last_space != string::npos) + Description.erase(last_space, Description.size()-last_space); + + /* decide if we should download patches one by one or in one go: + The first is good if the server merges patches, but many don't so client + based merging can be attempt in which case the second is better. 
+ "bad things" will happen if patches are merged on the server, + but client side merging is attempt as well */ + bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); + if (pdiff_merge == true) + { + // reprepro adds this flag if it has merged patches on the server + std::string const precedence = Tags.FindS("X-Patch-Precedence"); + pdiff_merge = (precedence != "merged"); + } + + if (pdiff_merge == false) + { + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, ExpectedHashes, + MetaIndexParser, available_patches); + } + else + { + std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size()); + for(size_t i = 0; i < available_patches.size(); ++i) + (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, TransactionManager, + Target, + ExpectedHashes, + MetaIndexParser, + available_patches[i], + diffs); + } + + Complete = false; + Status = StatDone; + Dequeue(); + return true; } /*}}}*/ -void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ +void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ { + Item::Failed(Message,Cnf); + Status = StatDone; + if(Debug) std::clog << "pkgAcqDiffIndex failed: " << Desc.URI << " with " << Message << std::endl << "Falling back to normal index file acquire" << std::endl; - new pkgAcqIndex(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash); + new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); +} + /*}}}*/ +bool pkgAcqDiffIndex::TransactionState(TransactionStates const state) /*{{{*/ +{ + if (pkgAcquire::Item::TransactionState(state) == false) + return false; - Complete = false; - Status = StatDone; - Dequeue(); + switch (state) + { + case TransactionCommit: + break; + case TransactionAbort: + std::string const Partial = GetPartialFileNameFromURI(RealURI); + unlink(Partial.c_str()); + break; + } + + return true; } /*}}}*/ -void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/ +void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ pkgAcquire::MethodConfig *Cnf) { if(Debug) std::clog << "pkgAcqDiffIndex::Done(): " << Desc.URI << std::endl; - Item::Done(Message,Size,Md5Hash,Cnf); + Item::Done(Message, Size, Hashes, Cnf); - string FinalFile; - FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI); + // verify the index target + if(Target && Target->MetaKey != "" && MetaIndexParser && Hashes.usable()) + { + std::string IndexMetaKey = Target->MetaKey + ".diff/Index"; + indexRecords::checkSum *Record = MetaIndexParser->Lookup(IndexMetaKey); + if(Record && Record->Hashes.usable() && Hashes != Record->Hashes) + { + RenameOnError(HashSumMismatch); + printHashSumComparision(RealURI, Record->Hashes, Hashes); + Failed(Message, Cnf); + return; + } - // success in downloading the index - // rename the index - FinalFile += string(".IndexDiff"); - if(Debug) - std::clog << "Renaming: " << DestFile << " -> " << FinalFile - << std::endl; - Rename(DestFile,FinalFile); - chmod(FinalFile.c_str(),0644); - DestFile = FinalFile; + } - if(!ParseDiffIndex(DestFile)) - return Failed("", NULL); + string const FinalFile = GetFinalFilename(); + if(StringToBool(LookupTag(Message,"IMS-Hit"),false)) + DestFile = FinalFile; + + if(ParseDiffIndex(DestFile) == false) + { + Failed("Message: Couldn't parse pdiff index", Cnf); + // queue for final move - this should happen even if we fail + // while parsing (e.g. 
on sizelimit) and download the complete file. + TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); + return; + } + + TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); Complete = true; Status = StatDone; Dequeue(); + return; } /*}}}*/ @@ -603,63 +832,83 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,string Md5Hash * for each diff and the index */ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc, - HashString ExpectedHash, - string ServerSha1, + pkgAcqMetaBase *TransactionManager, + struct IndexTarget const * const Target, + HashStringList const &ExpectedHashes, + indexRecords *MetaIndexParser, vector<DiffInfo> diffs) - : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), - available_patches(diffs), ServerSha1(ServerSha1) + : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser), + available_patches(diffs) { - - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI); + DestFile = GetPartialFileNameFromURI(Target->URI); Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - Description = URIDesc; + RealURI = Target->URI; Desc.Owner = this; - Desc.ShortDesc = ShortDesc; + Description = Target->Description; + Desc.ShortDesc = Target->ShortDesc; if(available_patches.empty() == true) { - // we are done (yeah!) + // we are done (yeah!), check hashes against the final file + DestFile = GetFinalFileNameFromURI(Target->URI); Finish(true); } else { + // patching needs to be bootstrapped with the 'old' version + std::string const PartialFile = GetPartialFileNameFromURI(RealURI); + if (RealFileExists(PartialFile) == false) + { + if (symlink(GetFinalFilename().c_str(), PartialFile.c_str()) != 0) + { + Failed("Link creation of " + PartialFile + " to " + GetFinalFilename() + " failed", NULL); + return; + } + } + // get the next diff State = StateFetchDiff; QueueNextDiff(); } } /*}}}*/ -void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ +void pkgAcqIndexDiffs::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ { + Item::Failed(Message,Cnf); + Status = StatDone; + if(Debug) std::clog << "pkgAcqIndexDiffs failed: " << Desc.URI << " with " << Message << std::endl << "Falling back to normal index file acquire" << std::endl; - new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc, - ExpectedHash); + DestFile = GetPartialFileNameFromURI(Target->URI); + RenameOnError(PDiffError); + new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); Finish(); } /*}}}*/ // Finish - helper that cleans the item out of the fetcher queue /*{{{*/ void pkgAcqIndexDiffs::Finish(bool allDone) { + if(Debug) + std::clog << "pkgAcqIndexDiffs::Finish(): " + << allDone << " " + << Desc.URI << std::endl; + // we restore the original name, this is required, otherwise // the file will be cleaned if(allDone) { - DestFile = _config->FindDir("Dir::State::lists"); - DestFile += URItoFileName(RealURI); - - if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile)) + if(HashSums().usable() && !HashSums().VerifyFile(DestFile)) { RenameOnError(HashSumMismatch); Dequeue(); return; } + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + // this is for the "real" finish Complete = true; Status = StatDone; @@ -679,21 +928,32 @@ void pkgAcqIndexDiffs::Finish(bool allDone) /*}}}*/ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ { - // calc sha1 of the just 
patched file - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI); + std::string const FinalFile = GetPartialFileNameFromURI(RealURI); + + if(!FileExists(FinalFile)) + { + Failed("Message: No FinalFile " + FinalFile + " available", NULL); + return false; + } FileFd fd(FinalFile, FileFd::ReadOnly); - SHA1Summation SHA1; - SHA1.AddFD(fd); - string local_sha1 = string(SHA1.Result()); + Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + if(Debug) - std::clog << "QueueNextDiff: " - << FinalFile << " (" << local_sha1 << ")"<<std::endl; + std::clog << "QueueNextDiff: " << FinalFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl; + + if (unlikely(LocalHashes.usable() == false || ExpectedHashes.usable() == false)) + { + Failed("Local/Expected hashes are not usable", NULL); + return false; + } + // final file reached before all patches are applied - if(local_sha1 == ServerSha1) + if(LocalHashes == ExpectedHashes) { Finish(true); return true; @@ -701,10 +961,10 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ // remove all patches until the next matching patch is found // this requires the Index file to be ordered - for(vector<DiffInfo>::iterator I=available_patches.begin(); + for(vector<DiffInfo>::iterator I = available_patches.begin(); available_patches.empty() == false && I != available_patches.end() && - I->sha1 != local_sha1; + I->result_hashes != LocalHashes; ++I) { available_patches.erase(I); @@ -713,38 +973,50 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ // error checking and falling back if no patch was found if(available_patches.empty() == true) { - Failed("", NULL); + Failed("No patches left to reach target", NULL); return false; } // queue the right diff Desc.URI = RealURI + ".diff/" + available_patches[0].file + ".gz"; Desc.Description = Description + " " + available_patches[0].file + string(".pdiff"); - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI + ".diff/" + available_patches[0].file); + DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + available_patches[0].file); if(Debug) std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl; - + QueueURI(Desc); return true; } /*}}}*/ -void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/ +void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringList const &Hashes, /*{{{*/ pkgAcquire::MethodConfig *Cnf) { if(Debug) std::clog << "pkgAcqIndexDiffs::Done(): " << Desc.URI << std::endl; - Item::Done(Message,Size,Md5Hash,Cnf); + Item::Done(Message, Size, Hashes, Cnf); - string FinalFile; - FinalFile = _config->FindDir("Dir::State::lists")+URItoFileName(RealURI); + // FIXME: verify this download too before feeding it to rred + std::string const FinalFile = GetPartialFileNameFromURI(RealURI); // success in downloading a diff, enter ApplyDiff state if(State == StateFetchDiff) { + FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); + class Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + + if (fd.Size() != available_patches[0].patch_size || + available_patches[0].patch_hashes != LocalHashes) + { + // patchfiles are dated, so bad indicates a bad download, so kill it + unlink(DestFile.c_str()); + Failed("Patch has Size/Hashsum mismatch", NULL); + return; + } // rred excepts the patch as $FinalFile.ed 
Rename(DestFile,FinalFile+".ed"); @@ -756,7 +1028,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Has Local = true; Desc.URI = "rred:" + FinalFile; QueueURI(Desc); - Mode = "rred"; + SetActiveSubprocess("rred"); return; } @@ -779,37 +1051,39 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Has // see if there is more to download if(available_patches.empty() == false) { - new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash, ServerSha1, available_patches); + new pkgAcqIndexDiffs(Owner, TransactionManager, Target, + ExpectedHashes, MetaIndexParser, + available_patches); return Finish(); } else + // update + DestFile = FinalFile; return Finish(true); } } /*}}}*/ // AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/ pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner, - string const &URI, string const &URIDesc, - string const &ShortDesc, HashString const &ExpectedHash, - DiffInfo const &patch, - std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches) - : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), - patch(patch),allPatches(allPatches), State(StateFetchDiff) + pkgAcqMetaBase *TransactionManager, + struct IndexTarget const * const Target, + HashStringList const &ExpectedHashes, + indexRecords *MetaIndexParser, + DiffInfo const &patch, + std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches) + : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser), + patch(patch), allPatches(allPatches), State(StateFetchDiff) { - - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI); - Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); - Description = URIDesc; + RealURI = Target->URI; Desc.Owner = this; - Desc.ShortDesc = ShortDesc; + Description = Target->Description; + Desc.ShortDesc = Target->ShortDesc; Desc.URI = RealURI + ".diff/" + patch.file + ".gz"; Desc.Description = Description + " " + patch.file + string(".pdiff"); - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI + ".diff/" + patch.file); + + DestFile = GetPartialFileNameFromURI(RealURI + ".diff/" + patch.file); if(Debug) std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl; @@ -817,13 +1091,13 @@ pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner, QueueURI(Desc); } /*}}}*/ -void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/ +void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * Cnf)/*{{{*/ { if(Debug) std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl; - Complete = false; + + Item::Failed(Message,Cnf); Status = StatDone; - Dequeue(); // check if we are the first to fail, otherwise we are done here State = StateDoneDiff; @@ -834,23 +1108,39 @@ void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig * /*C // first failure means we should fallback State = StateErrorDiff; - std::clog << "Falling back to normal index file acquire" << std::endl; - new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc, - ExpectedHash); + if (Debug) + std::clog << "Falling back to normal index file acquire" << std::endl; + DestFile = GetPartialFileNameFromURI(Target->URI); + RenameOnError(PDiffError); + new pkgAcqIndex(Owner, TransactionManager, Target, ExpectedHashes, MetaIndexParser); } /*}}}*/ -void pkgAcqIndexMergeDiffs::Done(string Message,unsigned 
long long Size,string Md5Hash, /*{{{*/ +void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStringList const &Hashes, /*{{{*/ pkgAcquire::MethodConfig *Cnf) { if(Debug) std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl; - Item::Done(Message,Size,Md5Hash,Cnf); + Item::Done(Message,Size,Hashes,Cnf); - string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + // FIXME: verify download before feeding it to rred + string const FinalFile = GetPartialFileNameFromURI(RealURI); if (State == StateFetchDiff) { + FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip); + class Hashes LocalHashesCalc; + LocalHashesCalc.AddFD(fd); + HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList(); + + if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes) + { + // patchfiles are dated, so bad indicates a bad download, so kill it + unlink(DestFile.c_str()); + Failed("Patch has Size/Hashsum mismatch", NULL); + return; + } + // rred expects the patch as $FinalFile.ed.$patchname.gz Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz"); @@ -868,42 +1158,50 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string M // this is the last completed diff, so we are ready to apply now State = StateApplyDiff; + // patching needs to be bootstrapped with the 'old' version + if (symlink(GetFinalFilename().c_str(), FinalFile.c_str()) != 0) + { + Failed("Link creation of " + FinalFile + " to " + GetFinalFilename() + " failed", NULL); + return; + } + if(Debug) std::clog << "Sending to rred method: " << FinalFile << std::endl; Local = true; Desc.URI = "rred:" + FinalFile; QueueURI(Desc); - Mode = "rred"; + SetActiveSubprocess("rred"); return; } // success in download/apply all diffs, clean up else if (State == StateApplyDiff) { // see if we really got the expected file - if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile)) + if(ExpectedHashes.usable() && ExpectedHashes != Hashes) { RenameOnError(HashSumMismatch); return; } // move the result into place + std::string const Final = GetFinalFilename(); if(Debug) - std::clog << "Moving patched file in place: " << std::endl - << DestFile << " -> " << FinalFile << std::endl; - Rename(DestFile, FinalFile); - chmod(FinalFile.c_str(), 0644); + std::clog << "Queue patched file in place: " << std::endl + << DestFile << " -> " << Final << std::endl; - // otherwise lists cleanup will eat the file - DestFile = FinalFile; + // queue for copy by the transaction manager + TransactionManager->TransactionStageCopy(this, DestFile, Final); // ensure the ed's are gone regardless of list-cleanup for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin(); I != allPatches->end(); ++I) { - std::string patch = FinalFile + ".ed." + (*I)->patch.file + ".gz"; - unlink(patch.c_str()); + std::string const PartialFile = GetPartialFileNameFromURI(RealURI); + std::string patch = PartialFile + ".ed." 
+ (*I)->patch.file + ".gz"; + unlink(patch.c_str()); } + unlink(FinalFile.c_str()); // all set and done Complete = true; @@ -912,72 +1210,134 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string M } } /*}}}*/ +// AcqBaseIndex - Constructor /*{{{*/ +pkgAcqBaseIndex::pkgAcqBaseIndex(pkgAcquire *Owner, + pkgAcqMetaBase *TransactionManager, + struct IndexTarget const * const Target, + HashStringList const &ExpectedHashes, + indexRecords *MetaIndexParser) +: Item(Owner, ExpectedHashes, TransactionManager), Target(Target), + MetaIndexParser(MetaIndexParser) +{ +} + /*}}}*/ +// AcqBaseIndex::VerifyHashByMetaKey - verify hash for the given metakey /*{{{*/ +bool pkgAcqBaseIndex::VerifyHashByMetaKey(HashStringList const &Hashes) +{ + if(MetaKey != "" && Hashes.usable()) + { + indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); + if(Record && Record->Hashes.usable() && Hashes != Record->Hashes) + { + printHashSumComparision(RealURI, Record->Hashes, Hashes); + return false; + } + } + return true; +} + /*}}}*/ +// AcqBaseIndex::GetFinalFilename - Return the full final file path /*{{{*/ +std::string pkgAcqBaseIndex::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(RealURI); +} + /*}}}*/ // AcqIndex::AcqIndex - Constructor /*{{{*/ // --------------------------------------------------------------------- -/* The package file is added to the queue and a second class is - instantiated to fetch the revision file */ +/* The package file is added to the queue and a second class is + instantiated to fetch the revision file */ pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, string URI,string URIDesc,string ShortDesc, - HashString ExpectedHash, string comprExt) - : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash) + HashStringList const &ExpectedHash) + : pkgAcqBaseIndex(Owner, 0, NULL, ExpectedHash, NULL) { - if(comprExt.empty() == true) - { - // autoselect the compression method - std::vector<std::string> types = APT::Configuration::getCompressionTypes(); - for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t) - comprExt.append(*t).append(" "); - if (comprExt.empty() == false) - comprExt.erase(comprExt.end()-1); - } - CompressionExtension = comprExt; + RealURI = URI; + AutoSelectCompression(); Init(URI, URIDesc, ShortDesc); + + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgIndex with TransactionManager " + << TransactionManager << std::endl; } -pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, IndexTarget const *Target, - HashString const &ExpectedHash, indexRecords const *MetaIndexParser) - : Item(Owner), RealURI(Target->URI), ExpectedHash(ExpectedHash) + /*}}}*/ +// AcqIndex::AcqIndex - Constructor /*{{{*/ +pkgAcqIndex::pkgAcqIndex(pkgAcquire *Owner, + pkgAcqMetaBase *TransactionManager, + IndexTarget const *Target, + HashStringList const &ExpectedHash, + indexRecords *MetaIndexParser) + : pkgAcqBaseIndex(Owner, TransactionManager, Target, ExpectedHash, + MetaIndexParser) { + RealURI = Target->URI; + // autoselect the compression method + AutoSelectCompression(); + Init(Target->URI, Target->Description, Target->ShortDesc); + + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgIndex with TransactionManager " + << TransactionManager << std::endl; +} + /*}}}*/ +// AcqIndex::AutoSelectCompression - Select compression /*{{{*/ +void pkgAcqIndex::AutoSelectCompression() +{ std::vector<std::string> types = APT::Configuration::getCompressionTypes(); - 
CompressionExtension = ""; - if (ExpectedHash.empty() == false) + CompressionExtensions = ""; + if (ExpectedHashes.usable()) { - for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t) - if (*t == "uncompressed" || MetaIndexParser->Exists(string(Target->MetaKey).append(".").append(*t)) == true) - CompressionExtension.append(*t).append(" "); + for (std::vector<std::string>::const_iterator t = types.begin(); + t != types.end(); ++t) + { + std::string CompressedMetaKey = string(Target->MetaKey).append(".").append(*t); + if (*t == "uncompressed" || + MetaIndexParser->Exists(CompressedMetaKey) == true) + CompressionExtensions.append(*t).append(" "); + } } else { for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t) - CompressionExtension.append(*t).append(" "); + CompressionExtensions.append(*t).append(" "); } - if (CompressionExtension.empty() == false) - CompressionExtension.erase(CompressionExtension.end()-1); - - // only verify non-optional targets, see acquire-item.h for a FIXME - // to make this more flexible - if (Target->IsOptional()) - Verify = false; - else - Verify = true; - - Init(Target->URI, Target->Description, Target->ShortDesc); + if (CompressionExtensions.empty() == false) + CompressionExtensions.erase(CompressionExtensions.end()-1); } /*}}}*/ // AcqIndex::Init - defered Constructor /*{{{*/ -void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &ShortDesc) { - Decompression = false; - Erase = false; +void pkgAcqIndex::Init(string const &URI, string const &URIDesc, + string const &ShortDesc) +{ + Stage = STAGE_DOWNLOAD; - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI); + DestFile = GetPartialFileNameFromURI(URI); - std::string const comprExt = CompressionExtension.substr(0, CompressionExtension.find(' ')); - if (comprExt == "uncompressed") + CurrentCompressionExtension = CompressionExtensions.substr(0, CompressionExtensions.find(' ')); + if (CurrentCompressionExtension == "uncompressed") + { Desc.URI = URI; + if(Target) + MetaKey = string(Target->MetaKey); + } else - Desc.URI = URI + '.' + comprExt; + { + Desc.URI = URI + '.' + CurrentCompressionExtension; + DestFile = DestFile + '.' + CurrentCompressionExtension; + if(Target) + MetaKey = string(Target->MetaKey) + '.' + CurrentCompressionExtension; + } + + // load the filesize + if(MetaIndexParser) + { + indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); + if(Record) + FileSize = Record->Size; + + InitByHashIfNeeded(MetaKey); + } Desc.Description = URIDesc; Desc.Owner = this; @@ -986,46 +1346,150 @@ void pkgAcqIndex::Init(string const &URI, string const &URIDesc, string const &S QueueURI(Desc); } /*}}}*/ +// AcqIndex::AdjustForByHash - modify URI for by-hash support /*{{{*/ +void pkgAcqIndex::InitByHashIfNeeded(const std::string MetaKey) +{ + // TODO: + // - (maybe?) add support for by-hash into the sources.list as flag + // - make apt-ftparchive generate the hashes (and expire?) + std::string HostKnob = "APT::Acquire::" + ::URI(Desc.URI).Host + "::By-Hash"; + if(_config->FindB("APT::Acquire::By-Hash", false) == true || + _config->FindB(HostKnob, false) == true || + MetaIndexParser->GetSupportsAcquireByHash()) + { + indexRecords::checkSum *Record = MetaIndexParser->Lookup(MetaKey); + if(Record) + { + // FIXME: should we really use the best hash here? or a fixed one? 
+ const HashString *TargetHash = Record->Hashes.find(""); + std::string ByHash = "/by-hash/" + TargetHash->HashType() + "/" + TargetHash->HashValue(); + size_t trailing_slash = Desc.URI.find_last_of("/"); + Desc.URI = Desc.URI.replace( + trailing_slash, + Desc.URI.substr(trailing_slash+1).size()+1, + ByHash); + } else { + _error->Warning( + "Fetching ByHash requested but can not find record for %s", + MetaKey.c_str()); + } + } +} + /*}}}*/ // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. */ +#if APT_PKG_ABI >= 413 +string pkgAcqIndex::Custom600Headers() const +#else string pkgAcqIndex::Custom600Headers() +#endif { - string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI); - if (_config->FindB("Acquire::GzipIndexes",false)) - Final += ".gz"; - + string Final = GetFinalFilename(); + string msg = "\nIndex-File: true"; - // FIXME: this really should use "IndexTarget::IsOptional()" but that - // seems to be difficult without breaking ABI - if (ShortDesc().find("Translation") != 0) - msg += "\nFail-Ignore: true"; struct stat Buf; if (stat(Final.c_str(),&Buf) == 0) msg += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + if(Target->IsOptional()) + msg += "\nFail-Ignore: true"; + return msg; } /*}}}*/ -void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ +// pkgAcqIndex::Failed - getting the indexfile failed /*{{{*/ +void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) { - size_t const nextExt = CompressionExtension.find(' '); + Item::Failed(Message,Cnf); + + size_t const nextExt = CompressionExtensions.find(' '); if (nextExt != std::string::npos) { - CompressionExtension = CompressionExtension.substr(nextExt+1); + CompressionExtensions = CompressionExtensions.substr(nextExt+1); Init(RealURI, Desc.Description, Desc.ShortDesc); + Status = StatIdle; return; } - // on decompression failure, remove bad versions in partial/ - if (Decompression && Erase) { - string s = _config->FindDir("Dir::State::lists") + "partial/"; - s.append(URItoFileName(RealURI)); - unlink(s.c_str()); - } - Item::Failed(Message,Cnf); + + if(Target->IsOptional() && ExpectedHashes.empty() && Stage == STAGE_DOWNLOAD) + Status = StatDone; + else + TransactionManager->AbortTransaction(); +} + /*}}}*/ +bool pkgAcqIndex::TransactionState(TransactionStates const state) /*{{{*/ +{ + if (pkgAcquire::Item::TransactionState(state) == false) + return false; + + switch (state) + { + case TransactionAbort: + if (Stage == STAGE_DECOMPRESS_AND_VERIFY) + { + // keep the compressed file, but drop the decompressed + EraseFileName.clear(); + if (PartialFile.empty() == false && flExtension(PartialFile) == "decomp") + unlink(PartialFile.c_str()); + } + break; + case TransactionCommit: + if (EraseFileName.empty() == false) + unlink(EraseFileName.c_str()); + break; + } + return true; +} + /*}}}*/ +// pkgAcqIndex::GetFinalFilename - Return the full final file path /*{{{*/ +std::string pkgAcqIndex::GetFinalFilename() const +{ + std::string const FinalFile = GetFinalFileNameFromURI(RealURI); + return GetCompressedFileName(RealURI, FinalFile, CurrentCompressionExtension); +} + /*}}}*/ +// AcqIndex::ReverifyAfterIMS - Reverify index after an ims-hit /*{{{*/ +void pkgAcqIndex::ReverifyAfterIMS() +{ + // update destfile to *not* include the compression extension when doing + // a reverify (as its uncompressed on disk already) + DestFile = 
GetCompressedFileName(RealURI, GetPartialFileNameFromURI(RealURI), CurrentCompressionExtension); + + // copy FinalFile into partial/ so that we check the hash again + string FinalFile = GetFinalFilename(); + Stage = STAGE_DECOMPRESS_AND_VERIFY; + Desc.URI = "copy:" + FinalFile; + QueueURI(Desc); +} + /*}}}*/ +// AcqIndex::ValidateFile - Validate the content of the downloaded file /*{{{*/ +bool pkgAcqIndex::ValidateFile(const std::string &FileName) +{ + // FIXME: this can go away once we only ever download stuff that + // has a valid hash and we never do GET based probing + // FIXME2: this also leaks debian-isms into the code and should go therefore + + /* Always validate the index file for correctness (all indexes must + * have a Package field) (LP: #346386) (Closes: #627642) + */ + FileFd fd(FileName, FileFd::ReadOnly, FileFd::Extension); + // Only test for correctness if the content of the file is not empty + // (empty is ok) + if (fd.Size() > 0) + { + pkgTagSection sec; + pkgTagFile tag(&fd); + + // all our current indexes have a field 'Package' in each section + if (_error->PendingError() == true || + tag.Step(sec) == false || + sec.Exists("Package") == false) + return false; + } + return true; } /*}}}*/ // AcqIndex::Done - Finished a fetch /*{{{*/ @@ -1034,79 +1498,51 @@ void pkgAcqIndex::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ method could possibly return an alternate filename which points to the uncompressed version of the file. If this is so the file is copied into the partial directory. In all other cases the file - is decompressed with a gzip uri. */ -void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash, + is decompressed with a compressed uri. */ +void pkgAcqIndex::Done(string Message, + unsigned long long Size, + HashStringList const &Hashes, pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,Hash,Cfg); + Item::Done(Message,Size,Hashes,Cfg); - if (Decompression == true) + switch(Stage) { - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - { - std::cerr << std::endl << RealURI << ": Computed Hash: " << Hash; - std::cerr << " Expected Hash: " << ExpectedHash.toStr() << std::endl; - } - - if (!ExpectedHash.empty() && ExpectedHash.toStr() != Hash) - { - RenameOnError(HashSumMismatch); - return; - } - - /* Verify the index file for correctness (all indexes must - * have a Package field) (LP: #346386) (Closes: #627642) */ - if (Verify == true) - { - FileFd fd(DestFile, FileFd::ReadOnly); - // Only test for correctness if the file is not empty (empty is ok) - if (fd.FileSize() > 0) - { - pkgTagSection sec; - pkgTagFile tag(&fd); - - // all our current indexes have a field 'Package' in each section - if (_error->PendingError() == true || tag.Step(sec) == false || sec.Exists("Package") == false) - { - RenameOnError(InvalidFormat); - return; - } - } - } - - // Done, move it into position - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI); - Rename(DestFile,FinalFile); - chmod(FinalFile.c_str(),0644); - - /* We restore the original name to DestFile so that the clean operation - will work OK */ - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI); - - // Remove the compressed version. 
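// ---------------------------------------------------------------------
// Illustrative aside (not part of this patch): a minimal, self-contained
// sketch of the sanity check that pkgAcqIndex::ValidateFile (added above)
// performs. It opens the downloaded index via FileFd/pkgTagFile and
// requires a 'Package' field in the first stanza; an empty index is
// accepted. The helper name below is hypothetical.
// ---------------------------------------------------------------------
#include <apt-pkg/error.h>     // _error
#include <apt-pkg/fileutl.h>   // FileFd
#include <apt-pkg/tagfile.h>   // pkgTagFile, pkgTagSection
#include <string>

static bool LooksLikeValidIndex(std::string const &FileName)
{
   FileFd fd(FileName, FileFd::ReadOnly, FileFd::Extension);
   if (fd.Size() == 0)
      return true;                        // an empty index is not an error
   pkgTagSection sec;
   pkgTagFile tag(&fd);
   return _error->PendingError() == false &&
          tag.Step(sec) == true &&        // parse the first stanza
          sec.Exists("Package") == true;  // all current indexes carry 'Package'
}
// ---------------------------------------------------------------------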
- if (Erase == true) - unlink(DestFile.c_str()); + case STAGE_DOWNLOAD: + StageDownloadDone(Message, Hashes, Cfg); + break; + case STAGE_DECOMPRESS_AND_VERIFY: + StageDecompressDone(Message, Hashes, Cfg); + break; + } +} + /*}}}*/ +// AcqIndex::StageDownloadDone - Queue for decompress and verify /*{{{*/ +void pkgAcqIndex::StageDownloadDone(string Message, + HashStringList const &Hashes, + pkgAcquire::MethodConfig *Cfg) +{ + // First check if the calculcated Hash of the (compressed) downloaded + // file matches the hash we have in the MetaIndexRecords for this file + if(VerifyHashByMetaKey(Hashes) == false) + { + RenameOnError(HashSumMismatch); + Failed(Message, Cfg); return; } - Erase = false; Complete = true; - + // Handle the unzipd case string FileName = LookupTag(Message,"Alt-Filename"); if (FileName.empty() == false) { - // The files timestamp matches - if (StringToBool(LookupTag(Message,"Alt-IMS-Hit"),false) == true) - return; - Decompression = true; + Stage = STAGE_DECOMPRESS_AND_VERIFY; Local = true; DestFile += ".decomp"; Desc.URI = "copy:" + FileName; QueueURI(Desc); - Mode = "copy"; + SetActiveSubprocess("copy"); return; } @@ -1117,517 +1553,653 @@ void pkgAcqIndex::Done(string Message,unsigned long long Size,string Hash, ErrorText = "Method gave a blank filename"; } - std::string const compExt = CompressionExtension.substr(0, CompressionExtension.find(' ')); + // Methods like e.g. "file:" will give us a (compressed) FileName that is + // not the "DestFile" we set, in this case we uncompress from the local file + if (FileName != DestFile) + Local = true; + else + EraseFileName = FileName; - // The files timestamp matches - if (StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) { - if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz") - // Update DestFile for .gz suffix so that the clean operation keeps it - DestFile += ".gz"; + // we need to verify the file against the current Release file again + // on if-modfied-since hit to avoid a stale attack against us + if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) + { + // The files timestamp matches, reverify by copy into partial/ + EraseFileName = ""; + ReverifyAfterIMS(); return; - } - - if (FileName == DestFile) - Erase = true; - else - Local = true; - - string decompProg; + } - // If we enable compressed indexes and already have gzip, keep it - if (_config->FindB("Acquire::GzipIndexes",false) && compExt == "gz" && !Local) { - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI) + ".gz"; - Rename(DestFile,FinalFile); - chmod(FinalFile.c_str(),0644); - - // Update DestFile for .gz suffix so that the clean operation keeps it - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(RealURI) + ".gz"; + // If we have compressed indexes enabled, queue for hash verification + if (_config->FindB("Acquire::GzipIndexes",false)) + { + DestFile = GetPartialFileNameFromURI(RealURI + '.' 
+ CurrentCompressionExtension); + EraseFileName = ""; + Stage = STAGE_DECOMPRESS_AND_VERIFY; + Desc.URI = "copy:" + FileName; + QueueURI(Desc); + SetActiveSubprocess("copy"); return; } // get the binary name for your used compression type - decompProg = _config->Find(string("Acquire::CompressionTypes::").append(compExt),""); - if(decompProg.empty() == false); - else if(compExt == "uncompressed") + string decompProg; + if(CurrentCompressionExtension == "uncompressed") decompProg = "copy"; - else { - _error->Error("Unsupported extension: %s", compExt.c_str()); + else + decompProg = _config->Find(string("Acquire::CompressionTypes::").append(CurrentCompressionExtension),""); + if(decompProg.empty() == true) + { + _error->Error("Unsupported extension: %s", CurrentCompressionExtension.c_str()); return; } - Decompression = true; + // queue uri for the next stage + Stage = STAGE_DECOMPRESS_AND_VERIFY; DestFile += ".decomp"; Desc.URI = decompProg + ":" + FileName; QueueURI(Desc); + SetActiveSubprocess(decompProg); +} + /*}}}*/ +// pkgAcqIndex::StageDecompressDone - Final verification /*{{{*/ +void pkgAcqIndex::StageDecompressDone(string Message, + HashStringList const &Hashes, + pkgAcquire::MethodConfig *Cfg) +{ + if (ExpectedHashes.usable() && ExpectedHashes != Hashes) + { + Desc.URI = RealURI; + RenameOnError(HashSumMismatch); + printHashSumComparision(RealURI, ExpectedHashes, Hashes); + Failed(Message, Cfg); + return; + } + + if(!ValidateFile(DestFile)) + { + RenameOnError(InvalidFormat); + Failed(Message, Cfg); + return; + } - // FIXME: this points to a c++ string that goes out of scope - Mode = decompProg.c_str(); + // Done, queue for rename on transaction finished + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + + return; } /*}}}*/ -// AcqIndexTrans::pkgAcqIndexTrans - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* The Translation file is added to the queue */ -pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, - string URI,string URIDesc,string ShortDesc) - : pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, HashString(), "") +// AcqMetaBase - Constructor /*{{{*/ +pkgAcqMetaBase::pkgAcqMetaBase(pkgAcquire *Owner, + const std::vector<IndexTarget*>* IndexTargets, + indexRecords* MetaIndexParser, + std::string const &RealURI, + HashStringList const &ExpectedHashes, + pkgAcqMetaBase *TransactionManager) +: Item(Owner, ExpectedHashes, TransactionManager), + MetaIndexParser(MetaIndexParser), LastMetaIndexParser(NULL), IndexTargets(IndexTargets), + AuthPass(false), RealURI(RealURI), IMSHit(false) { } -pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner, IndexTarget const *Target, - HashString const &ExpectedHash, indexRecords const *MetaIndexParser) - : pkgAcqIndex(Owner, Target, ExpectedHash, MetaIndexParser) + /*}}}*/ +// AcqMetaBase::Add - Add a item to the current Transaction /*{{{*/ +void pkgAcqMetaBase::Add(Item *I) { + Transaction.push_back(I); } /*}}}*/ -// AcqIndexTrans::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -string pkgAcqIndexTrans::Custom600Headers() +// AcqMetaBase::AbortTransaction - Abort the current Transaction /*{{{*/ +void pkgAcqMetaBase::AbortTransaction() { - string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI); + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "AbortTransaction: " << TransactionManager << std::endl; - struct stat Buf; - if 
(stat(Final.c_str(),&Buf) != 0) - return "\nFail-Ignore: true\nIndex-File: true"; - return "\nFail-Ignore: true\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + // ensure the toplevel is in error state too + for (std::vector<Item*>::iterator I = Transaction.begin(); + I != Transaction.end(); ++I) + { + (*I)->TransactionState(TransactionAbort); + } + Transaction.clear(); } /*}}}*/ -// AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf) +// AcqMetaBase::TransactionHasError - Check for errors in Transaction /*{{{*/ +bool pkgAcqMetaBase::TransactionHasError() { - size_t const nextExt = CompressionExtension.find(' '); - if (nextExt != std::string::npos) - { - CompressionExtension = CompressionExtension.substr(nextExt+1); - Init(RealURI, Desc.Description, Desc.ShortDesc); - Status = StatIdle; - return; + for (pkgAcquire::ItemIterator I = Transaction.begin(); + I != Transaction.end(); ++I) + { + switch((*I)->Status) { + case StatDone: break; + case StatIdle: break; + case StatAuthError: return true; + case StatError: return true; + case StatTransientNetworkError: return true; + case StatFetching: break; + } } + return false; +} + /*}}}*/ +// AcqMetaBase::CommitTransaction - Commit a transaction /*{{{*/ +void pkgAcqMetaBase::CommitTransaction() +{ + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "CommitTransaction: " << this << std::endl; - if (Cnf->LocalOnly == true || - StringToBool(LookupTag(Message,"Transient-Failure"),false) == false) - { - // Ignore this - Status = StatDone; - Complete = false; - Dequeue(); - return; + // move new files into place *and* remove files that are not + // part of the transaction but are still on disk + for (std::vector<Item*>::iterator I = Transaction.begin(); + I != Transaction.end(); ++I) + { + (*I)->TransactionState(TransactionCommit); } - - Item::Failed(Message,Cnf); + Transaction.clear(); +} + /*}}}*/ +bool pkgAcqMetaBase::TransactionState(TransactionStates const state) /*{{{*/ +{ + // Do not remove InRelease on IMSHit of Release.gpg [yes, this is very edgecasey] + if (TransactionManager->IMSHit == false) + return pkgAcquire::Item::TransactionState(state); + return true; +} + /*}}}*/ +// AcqMetaBase::TransactionStageCopy - Stage a file for copying /*{{{*/ +void pkgAcqMetaBase::TransactionStageCopy(Item *I, + const std::string &From, + const std::string &To) +{ + I->PartialFile = From; + I->DestFile = To; } /*}}}*/ -pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, /*{{{*/ - string URI,string URIDesc,string ShortDesc, - string MetaIndexURI, string MetaIndexURIDesc, - string MetaIndexShortDesc, - const vector<IndexTarget*>* IndexTargets, - indexRecords* MetaIndexParser) : - Item(Owner), RealURI(URI), MetaIndexURI(MetaIndexURI), - MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc), - MetaIndexParser(MetaIndexParser), IndexTargets(IndexTargets) +// AcqMetaBase::TransactionStageRemoval - Stage a file for removal /*{{{*/ +void pkgAcqMetaBase::TransactionStageRemoval(Item *I, + const std::string &FinalFile) { - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI); + I->PartialFile = ""; + I->DestFile = FinalFile; +} + /*}}}*/ +// AcqMetaBase::GenerateAuthWarning - Check gpg authentication error /*{{{*/ +bool pkgAcqMetaBase::CheckStopAuthentication(pkgAcquire::Item * const 
I, const std::string &Message) +{ + // FIXME: this entire function can do now that we disallow going to + // a unauthenticated state and can cleanly rollback + + string const Final = I->GetFinalFilename(); + if(FileExists(Final)) + { + I->Status = StatTransientNetworkError; + _error->Warning(_("An error occurred during the signature " + "verification. The repository is not updated " + "and the previous index files will be used. " + "GPG error: %s: %s\n"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + RunScripts("APT::Update::Auth-Failure"); + return true; + } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) { + /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */ + _error->Error(_("GPG error: %s: %s"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + I->Status = StatError; + return true; + } else { + _error->Warning(_("GPG error: %s: %s"), + Desc.Description.c_str(), + LookupTag(Message,"Message").c_str()); + } + // gpgv method failed + ReportMirrorFailure("GPGFailure"); + return false; +} + /*}}}*/ +// AcqMetaSig::AcqMetaSig - Constructor /*{{{*/ +pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner, + pkgAcqMetaBase *TransactionManager, + string const &URI, string const &URIDesc,string const &ShortDesc, + pkgAcqMetaIndex * const MetaIndex) : + pkgAcquire::Item(Owner, HashStringList(), TransactionManager), MetaIndex(MetaIndex), + URIDesc(URIDesc), RealURI(URI) +{ + DestFile = GetPartialFileNameFromURI(URI); - // remove any partial downloaded sig-file in partial/. - // it may confuse proxies and is too small to warrant a + // remove any partial downloaded sig-file in partial/. + // it may confuse proxies and is too small to warrant a // partial download anyway unlink(DestFile.c_str()); + // set the TransactionManager + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgAcqMetaSig with TransactionManager " + << TransactionManager << std::endl; + // Create the item Desc.Description = URIDesc; Desc.Owner = this; Desc.ShortDesc = ShortDesc; Desc.URI = URI; - - string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI); - if (RealFileExists(Final) == true) + + // If we got a hit for Release, we will get one for Release.gpg too (or obscure errors), + // so we skip the download step and go instantly to verification + if (TransactionManager->IMSHit == true && RealFileExists(GetFinalFilename())) { - // File was already in place. 
It needs to be re-downloaded/verified - // because Release might have changed, we do give it a different - // name than DestFile because otherwise the http method will - // send If-Range requests and there are too many broken servers - // out there that do not understand them - LastGoodSig = DestFile+".reverify"; - Rename(Final,LastGoodSig); + Complete = true; + Status = StatDone; + PartialFile = DestFile = GetFinalFilename(); + MetaIndexFileSignature = DestFile; + MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); } - - QueueURI(Desc); + else + QueueURI(Desc); } /*}}}*/ pkgAcqMetaSig::~pkgAcqMetaSig() /*{{{*/ { - // if the file was never queued undo file-changes done in the constructor - if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false && - LastGoodSig.empty() == false) - { - string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); - if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true) - Rename(LastGoodSig, Final); - } - } /*}}}*/ -// pkgAcqMetaSig::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -/* The only header we use is the last-modified header. */ -string pkgAcqMetaSig::Custom600Headers() +// pkgAcqMetaSig::GetFinalFilename - Return the full final file path /*{{{*/ +std::string pkgAcqMetaSig::GetFinalFilename() const { - struct stat Buf; - if (stat(LastGoodSig.c_str(),&Buf) != 0) - return "\nIndex-File: true"; - - return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + return GetFinalFileNameFromURI(RealURI); } - -void pkgAcqMetaSig::Done(string Message,unsigned long long Size,string MD5, + /*}}}*/ +// pkgAcqMetaSig::Done - The signature was downloaded/verified /*{{{*/ +// --------------------------------------------------------------------- +/* The only header we use is the last-modified header. 
*/ +void pkgAcqMetaSig::Done(string Message,unsigned long long Size, + HashStringList const &Hashes, pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,MD5,Cfg); - - string FileName = LookupTag(Message,"Filename"); - if (FileName.empty() == true) + if (MetaIndexFileSignature.empty() == false) { - Status = StatError; - ErrorText = "Method gave a blank filename"; - return; + DestFile = MetaIndexFileSignature; + MetaIndexFileSignature.clear(); } + Item::Done(Message, Size, Hashes, Cfg); - if (FileName != DestFile) + if(MetaIndex->AuthPass == false) { - // We have to copy it into place - Local = true; - Desc.URI = "copy:" + FileName; - QueueURI(Desc); + if(MetaIndex->CheckDownloadDone(this, Message, Hashes) == true) + { + // destfile will be modified to point to MetaIndexFile for the + // gpgv method, so we need to save it here + MetaIndexFileSignature = DestFile; + MetaIndex->QueueForSignatureVerify(this, MetaIndex->DestFile, DestFile); + } return; } - - Complete = true; - - // put the last known good file back on i-m-s hit (it will - // be re-verified again) - // Else do nothing, we have the new file in DestFile then - if(StringToBool(LookupTag(Message,"IMS-Hit"),false) == true) - Rename(LastGoodSig, DestFile); - - // queue a pkgAcqMetaIndex to be verified against the sig we just retrieved - new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, - MetaIndexShortDesc, DestFile, IndexTargets, - MetaIndexParser); - + else if(MetaIndex->CheckAuthDone(Message) == true) + { + if (TransactionManager->IMSHit == false) + { + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); + } + } } /*}}}*/ void pkgAcqMetaSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ { - string Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + Item::Failed(Message,Cnf); - // if we get a network error we fail gracefully - if(Status == StatTransientNetworkError) - { - Item::Failed(Message,Cnf); - // move the sigfile back on transient network failures - if(FileExists(LastGoodSig)) - Rename(LastGoodSig,Final); + // check if we need to fail at this point + if (MetaIndex->AuthPass == true && MetaIndex->CheckStopAuthentication(this, Message)) + return; - // set the status back to , Item::Failed likes to reset it - Status = pkgAcquire::Item::StatTransientNetworkError; - return; + string const FinalRelease = MetaIndex->GetFinalFilename(); + string const FinalReleasegpg = GetFinalFilename(); + string const FinalInRelease = TransactionManager->GetFinalFilename(); + + if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) + { + std::string downgrade_msg; + strprintf(downgrade_msg, _("The repository '%s' is no longer signed."), + MetaIndex->URIDesc.c_str()); + if(_config->FindB("Acquire::AllowDowngradeToInsecureRepositories")) + { + // meh, the users wants to take risks (we still mark the packages + // from this repository as unauthenticated) + _error->Warning("%s", downgrade_msg.c_str()); + _error->Warning(_("This is normally not allowed, but the option " + "Acquire::AllowDowngradeToInsecureRepositories was " + "given to override it.")); + Status = StatDone; + } else { + _error->Error("%s", downgrade_msg.c_str()); + if (TransactionManager->IMSHit == false) + Rename(MetaIndex->DestFile, MetaIndex->DestFile + ".FAILED"); + Item::Failed("Message: " + downgrade_msg, Cnf); + TransactionManager->AbortTransaction(); + return; + } } + else + 
_error->Warning(_("The data from '%s' is not signed. Packages " + "from that repository can not be authenticated."), + MetaIndex->URIDesc.c_str()); + + // ensures that a Release.gpg file in the lists/ is removed by the transaction + TransactionManager->TransactionStageRemoval(this, DestFile); + + // only allow going further if the users explicitely wants it + if(AllowInsecureRepositories(MetaIndex->MetaIndexParser, TransactionManager, this) == true) + { + if (RealFileExists(FinalReleasegpg) || RealFileExists(FinalInRelease)) + { + // open the last Release if we have it + if (TransactionManager->IMSHit == false) + { + MetaIndex->LastMetaIndexParser = new indexRecords; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + MetaIndex->LastMetaIndexParser->Load(FinalInRelease); + else + MetaIndex->LastMetaIndexParser->Load(FinalRelease); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) + { + delete MetaIndex->LastMetaIndexParser; + MetaIndex->LastMetaIndexParser = NULL; + } + _error->RevertToStack(); + } + } - // Delete any existing sigfile when the acquire failed - unlink(Final.c_str()); + // we parse the indexes here because at this point the user wanted + // a repository that may potentially harm him + MetaIndex->MetaIndexParser->Load(MetaIndex->DestFile); + if (MetaIndex->VerifyVendor(Message) == false) + /* expired Release files are still a problem you need extra force for */; + else + MetaIndex->QueueIndexes(true); - // queue a pkgAcqMetaIndex with no sigfile - new pkgAcqMetaIndex(Owner, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc, - "", IndexTargets, MetaIndexParser); + TransactionManager->TransactionStageCopy(MetaIndex, MetaIndex->DestFile, MetaIndex->GetFinalFilename()); + } - if (Cnf->LocalOnly == true || + // FIXME: this is used often (e.g. 
in pkgAcqIndexTrans) so refactor + if (Cnf->LocalOnly == true || StringToBool(LookupTag(Message,"Transient-Failure"),false) == false) - { + { // Ignore this Status = StatDone; - Complete = false; - Dequeue(); - return; } - - Item::Failed(Message,Cnf); } /*}}}*/ pkgAcqMetaIndex::pkgAcqMetaIndex(pkgAcquire *Owner, /*{{{*/ + pkgAcqMetaBase *TransactionManager, string URI,string URIDesc,string ShortDesc, - string SigFile, - const vector<struct IndexTarget*>* IndexTargets, + string MetaIndexSigURI,string MetaIndexSigURIDesc, string MetaIndexSigShortDesc, + const vector<IndexTarget*>* IndexTargets, indexRecords* MetaIndexParser) : - Item(Owner), RealURI(URI), SigFile(SigFile), IndexTargets(IndexTargets), - MetaIndexParser(MetaIndexParser), AuthPass(false), IMSHit(false) + pkgAcqMetaBase(Owner, IndexTargets, MetaIndexParser, URI, HashStringList(), + TransactionManager), + URIDesc(URIDesc), ShortDesc(ShortDesc), + MetaIndexSigURI(MetaIndexSigURI), MetaIndexSigURIDesc(MetaIndexSigURIDesc), + MetaIndexSigShortDesc(MetaIndexSigShortDesc) +{ + if(TransactionManager == NULL) + { + this->TransactionManager = this; + this->TransactionManager->Add(this); + } + + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "New pkgAcqMetaIndex with TransactionManager " + << this->TransactionManager << std::endl; + + + Init(URIDesc, ShortDesc); +} + /*}}}*/ +// pkgAcqMetaIndex::Init - Delayed constructor /*{{{*/ +void pkgAcqMetaIndex::Init(std::string URIDesc, std::string ShortDesc) { - DestFile = _config->FindDir("Dir::State::lists") + "partial/"; - DestFile += URItoFileName(URI); + DestFile = GetPartialFileNameFromURI(RealURI); // Create the item Desc.Description = URIDesc; Desc.Owner = this; Desc.ShortDesc = ShortDesc; - Desc.URI = URI; + Desc.URI = RealURI; + // we expect more item + ExpectedAdditionalItems = IndexTargets->size(); QueueURI(Desc); } /*}}}*/ -// pkgAcqMetaIndex::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -/* The only header we use is the last-modified header. 
*/ -string pkgAcqMetaIndex::Custom600Headers() -{ - string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI); - - struct stat Buf; - if (stat(Final.c_str(),&Buf) != 0) - return "\nIndex-File: true"; - - return "\nIndex-File: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); -} - /*}}}*/ -void pkgAcqMetaIndex::Done(string Message,unsigned long long Size,string Hash, /*{{{*/ +void pkgAcqMetaIndex::Done(string Message,unsigned long long Size, /*{{{*/ + HashStringList const &Hashes, pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,Hash,Cfg); - - // MetaIndexes are done in two passes: one to download the - // metaindex with an appropriate method, and a second to verify it - // with the gpgv method + Item::Done(Message,Size,Hashes,Cfg); - if (AuthPass == true) + if(CheckDownloadDone(this, Message, Hashes)) { - AuthDone(Message); - - // all cool, move Release file into place - Complete = true; + // we have a Release file, now download the Signature, all further + // verify/queue for additional downloads will be done in the + // pkgAcqMetaSig::Done() code + new pkgAcqMetaSig(Owner, TransactionManager, + MetaIndexSigURI, MetaIndexSigURIDesc, + MetaIndexSigShortDesc, this); } - else - { - RetrievalDone(Message); - if (!Complete) - // Still more retrieving to do - return; +} + /*}}}*/ +bool pkgAcqMetaBase::CheckAuthDone(string Message) /*{{{*/ +{ + // At this point, the gpgv method has succeeded, so there is a + // valid signature from a key in the trusted keyring. We + // perform additional verification of its contents, and use them + // to verify the indexes we are about to download - if (SigFile == "") + if (TransactionManager->IMSHit == false) + { + // open the last (In)Release if we have it + std::string const FinalFile = GetFinalFilename(); + std::string FinalRelease; + std::string FinalInRelease; + if (APT::String::Endswith(FinalFile, "InRelease")) { - // There was no signature file, so we are finished. 
Download - // the indexes and do only hashsum verification if possible - MetaIndexParser->Load(DestFile); - QueueIndexes(false); + FinalInRelease = FinalFile; + FinalRelease = FinalFile.substr(0, FinalFile.length() - strlen("InRelease")) + "Release"; } else { - // There was a signature file, so pass it to gpgv for - // verification - - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - std::cerr << "Metaindex acquired, queueing gpg verification (" - << SigFile << "," << DestFile << ")\n"; - AuthPass = true; - Desc.URI = "gpgv:" + SigFile; - QueueURI(Desc); - Mode = "gpgv"; - return; + FinalInRelease = FinalFile.substr(0, FinalFile.length() - strlen("Release")) + "InRelease"; + FinalRelease = FinalFile; + } + if (RealFileExists(FinalInRelease) || RealFileExists(FinalRelease)) + { + LastMetaIndexParser = new indexRecords; + _error->PushToStack(); + if (RealFileExists(FinalInRelease)) + LastMetaIndexParser->Load(FinalInRelease); + else + LastMetaIndexParser->Load(FinalRelease); + // its unlikely to happen, but if what we have is bad ignore it + if (_error->PendingError()) + { + delete LastMetaIndexParser; + LastMetaIndexParser = NULL; + } + _error->RevertToStack(); } } - if (Complete == true) + if (!MetaIndexParser->Load(DestFile)) { - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI); - if (SigFile == DestFile) - SigFile = FinalFile; - Rename(DestFile,FinalFile); - chmod(FinalFile.c_str(),0644); - DestFile = FinalFile; + Status = StatAuthError; + ErrorText = MetaIndexParser->ErrorText; + return false; } + + if (!VerifyVendor(Message)) + { + Status = StatAuthError; + return false; + } + + if (_config->FindB("Debug::pkgAcquire::Auth", false)) + std::cerr << "Signature verification succeeded: " + << DestFile << std::endl; + + // Download further indexes with verification + QueueIndexes(true); + + return true; } /*}}}*/ -void pkgAcqMetaIndex::RetrievalDone(string Message) /*{{{*/ +// pkgAcqMetaBase::Custom600Headers - Get header for AcqMetaBase /*{{{*/ +// --------------------------------------------------------------------- +#if APT_PKG_ABI >= 413 +string pkgAcqMetaBase::Custom600Headers() const +#else +string pkgAcqMetaBase::Custom600Headers() +#endif +{ + std::string Header = "\nIndex-File: true"; + std::string MaximumSize; + strprintf(MaximumSize, "\nMaximum-Size: %i", + _config->FindI("Acquire::MaxReleaseFileSize", 10*1000*1000)); + Header += MaximumSize; + + string const FinalFile = GetFinalFilename(); + + struct stat Buf; + if (stat(FinalFile.c_str(),&Buf) == 0) + Header += "\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + + return Header; +} + /*}}}*/ +// pkgAcqMetaBase::GetFinalFilename - Return the full final file path /*{{{*/ +std::string pkgAcqMetaBase::GetFinalFilename() const +{ + return GetFinalFileNameFromURI(RealURI); +} + /*}}}*/ +// pkgAcqMetaBase::QueueForSignatureVerify /*{{{*/ +void pkgAcqMetaBase::QueueForSignatureVerify(pkgAcquire::Item * const I, std::string const &File, std::string const &Signature) +{ + AuthPass = true; + I->Desc.URI = "gpgv:" + Signature; + I->DestFile = File; + QueueURI(I->Desc); + I->SetActiveSubprocess("gpgv"); +} + /*}}}*/ +// pkgAcqMetaBase::CheckDownloadDone /*{{{*/ +bool pkgAcqMetaBase::CheckDownloadDone(pkgAcquire::Item * const I, const std::string &Message, HashStringList const &Hashes) const { // We have just finished downloading a Release file (it is not // verified yet) - string FileName = LookupTag(Message,"Filename"); + string const FileName = LookupTag(Message,"Filename"); if 
(FileName.empty() == true) { - Status = StatError; - ErrorText = "Method gave a blank filename"; - return; + I->Status = StatError; + I->ErrorText = "Method gave a blank filename"; + return false; } - if (FileName != DestFile) + if (FileName != I->DestFile) { - Local = true; - Desc.URI = "copy:" + FileName; - QueueURI(Desc); - return; + I->Local = true; + I->Desc.URI = "copy:" + FileName; + I->QueueURI(I->Desc); + return false; } // make sure to verify against the right file on I-M-S hit - IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"),false); - if(IMSHit) + bool IMSHit = StringToBool(LookupTag(Message,"IMS-Hit"), false); + if (IMSHit == false && Hashes.usable()) { - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI); - if (SigFile == DestFile) + // detect IMS-Hits servers haven't detected by Hash comparison + std::string const FinalFile = I->GetFinalFilename(); + if (RealFileExists(FinalFile) && Hashes.VerifyFile(FinalFile) == true) { - SigFile = FinalFile; - // constructor of pkgAcqMetaClearSig moved it out of the way, - // now move it back in on IMS hit for the 'old' file - string const OldClearSig = DestFile + ".reverify"; - if (RealFileExists(OldClearSig) == true) - Rename(OldClearSig, FinalFile); + IMSHit = true; + unlink(I->DestFile.c_str()); } - DestFile = FinalFile; - } - Complete = true; -} - /*}}}*/ -void pkgAcqMetaIndex::AuthDone(string Message) /*{{{*/ -{ - // At this point, the gpgv method has succeeded, so there is a - // valid signature from a key in the trusted keyring. We - // perform additional verification of its contents, and use them - // to verify the indexes we are about to download - - if (!MetaIndexParser->Load(DestFile)) - { - Status = StatAuthError; - ErrorText = MetaIndexParser->ErrorText; - return; } - if (!VerifyVendor(Message)) + if(IMSHit == true) { - return; + // for simplicity, the transaction manager is always InRelease + // even if it doesn't exist. + if (TransactionManager != NULL) + TransactionManager->IMSHit = true; + I->PartialFile = I->DestFile = I->GetFinalFilename(); } - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - std::cerr << "Signature verification succeeded: " - << DestFile << std::endl; - - // Download further indexes with verification - QueueIndexes(true); - - // is it a clearsigned MetaIndex file? 
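// ---------------------------------------------------------------------
// Illustrative aside (not part of this patch): a condensed sketch of the
// "hidden" IMS-hit detection done in pkgAcqMetaBase::CheckDownloadDone
// above. Even when the server did not answer with "IMS-Hit: true", an
// unchanged Release file can be recognised by checking whether the file
// already in lists/ carries the same hashes as the fresh download; in that
// case the partial copy is simply dropped. Function name and parameters
// are hypothetical.
// ---------------------------------------------------------------------
#include <apt-pkg/fileutl.h>   // RealFileExists
#include <apt-pkg/hashes.h>    // HashStringList
#include <apt-pkg/strutl.h>    // LookupTag, StringToBool
#include <string>
#include <unistd.h>            // unlink

static bool DetectHiddenIMSHit(std::string const &Message,
                               std::string const &PartialFile,
                               std::string const &FinalFile,
                               HashStringList const &DownloadHashes)
{
   // the acquire method may already have flagged the file as unchanged
   if (StringToBool(LookupTag(Message, "IMS-Hit"), false) == true)
      return true;
   // otherwise compare the file already on disk against the hashes of the
   // fresh download; identical content means nothing has changed
   if (DownloadHashes.usable() && RealFileExists(FinalFile) &&
       DownloadHashes.VerifyFile(FinalFile) == true)
   {
      unlink(PartialFile.c_str());   // fresh copy is identical, discard it
      return true;
   }
   return false;
}
// ---------------------------------------------------------------------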
- if (DestFile == SigFile) - return; + // set Item to complete as the remaining work is all local (verify etc) + I->Complete = true; - // Done, move signature file into position - string VerifiedSigFile = _config->FindDir("Dir::State::lists") + - URItoFileName(RealURI) + ".gpg"; - Rename(SigFile,VerifiedSigFile); - chmod(VerifiedSigFile.c_str(),0644); + return true; } /*}}}*/ -void pkgAcqMetaIndex::QueueIndexes(bool verify) /*{{{*/ +void pkgAcqMetaBase::QueueIndexes(bool verify) /*{{{*/ { -#if 0 - /* Reject invalid, existing Release files (LP: #346386) (Closes: #627642) - * FIXME: Disabled; it breaks unsigned repositories without hashes */ - if (!verify && FileExists(DestFile) && !MetaIndexParser->Load(DestFile)) - { - Status = StatError; - ErrorText = MetaIndexParser->ErrorText; - return; - } -#endif - bool transInRelease = false; - { - std::vector<std::string> const keys = MetaIndexParser->MetaKeys(); - for (std::vector<std::string>::const_iterator k = keys.begin(); k != keys.end(); ++k) - // FIXME: Feels wrong to check for hardcoded string here, but what should we do else… - if (k->find("Translation-") != std::string::npos) - { - transInRelease = true; - break; - } - } + // at this point the real Items are loaded in the fetcher + ExpectedAdditionalItems = 0; - for (vector <struct IndexTarget*>::const_iterator Target = IndexTargets->begin(); + vector <struct IndexTarget*>::const_iterator Target; + for (Target = IndexTargets->begin(); Target != IndexTargets->end(); ++Target) { - HashString ExpectedIndexHash; + HashStringList ExpectedIndexHashes; const indexRecords::checkSum *Record = MetaIndexParser->Lookup((*Target)->MetaKey); - bool compressedAvailable = false; - if (Record == NULL) + + // optional target that we do not have in the Release file are + // skipped + if (verify == true && Record == NULL && (*Target)->IsOptional()) + continue; + + // targets without a hash record are a error when verify is required + if (verify == true && Record == NULL) { - if ((*Target)->IsOptional() == true) - { - std::vector<std::string> types = APT::Configuration::getCompressionTypes(); - for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t) - if (MetaIndexParser->Exists((*Target)->MetaKey + "." 
+ *t) == true) - { - compressedAvailable = true; - break; - } - } - else if (verify == true) - { - Status = StatAuthError; - strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str()); - return; - } + Status = StatAuthError; + strprintf(ErrorText, _("Unable to find expected entry '%s' in Release file (Wrong sources.list entry or malformed file)"), (*Target)->MetaKey.c_str()); + return; } - else + + if (Record) + ExpectedIndexHashes = Record->Hashes; + + if (_config->FindB("Debug::pkgAcquire::Auth", false)) { - ExpectedIndexHash = Record->Hash; - if (_config->FindB("Debug::pkgAcquire::Auth", false)) - { - std::cerr << "Queueing: " << (*Target)->URI << std::endl; - std::cerr << "Expected Hash: " << ExpectedIndexHash.toStr() << std::endl; - std::cerr << "For: " << Record->MetaKeyFilename << std::endl; - } - if (verify == true && ExpectedIndexHash.empty() == true && (*Target)->IsOptional() == false) - { - Status = StatAuthError; - strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str()); - return; - } - } + std::cerr << "Queueing: " << (*Target)->URI << std::endl + << "Expected Hash:" << std::endl; + for (HashStringList::const_iterator hs = ExpectedIndexHashes.begin(); hs != ExpectedIndexHashes.end(); ++hs) + std::cerr << "\t- " << hs->toStr() << std::endl; + std::cerr << "For: " << ((Record == NULL) ? "<NULL>" : Record->MetaKeyFilename) << std::endl; - if ((*Target)->IsOptional() == true) + } + if (verify == true && ExpectedIndexHashes.empty() == true) { - if ((*Target)->IsSubIndex() == true) - new pkgAcqSubIndex(Owner, (*Target)->URI, (*Target)->Description, - (*Target)->ShortDesc, ExpectedIndexHash); - else if (transInRelease == false || Record != NULL || compressedAvailable == true) - { - if (_config->FindB("Acquire::PDiffs",true) == true && transInRelease == true && - MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true) - new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description, - (*Target)->ShortDesc, ExpectedIndexHash); - else - new pkgAcqIndexTrans(Owner, *Target, ExpectedIndexHash, MetaIndexParser); - } - continue; + Status = StatAuthError; + strprintf(ErrorText, _("Unable to find hash sum for '%s' in Release file"), (*Target)->MetaKey.c_str()); + return; } - /* Queue Packages file (either diff or full packages files, depending + /* Queue the Index file (Packages, Sources, Translation-$foo + (either diff or full packages files, depending on the users option) - we also check if the PDiff Index file is listed in the Meta-Index file. 
Ideal would be if pkgAcqDiffIndex would test this instead, but passing the required info to it is to much hassle */ if(_config->FindB("Acquire::PDiffs",true) == true && (verify == false || - MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)) - new pkgAcqDiffIndex(Owner, (*Target)->URI, (*Target)->Description, - (*Target)->ShortDesc, ExpectedIndexHash); + MetaIndexParser->Exists((*Target)->MetaKey + ".diff/Index") == true)) + new pkgAcqDiffIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser); else - new pkgAcqIndex(Owner, *Target, ExpectedIndexHash, MetaIndexParser); + new pkgAcqIndex(Owner, TransactionManager, *Target, ExpectedIndexHashes, MetaIndexParser); } } /*}}}*/ -bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/ +bool pkgAcqMetaBase::VerifyVendor(string Message) /*{{{*/ { string::size_type pos; @@ -1668,13 +2240,32 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/ MetaIndexParser->GetValidUntil() > 0) { time_t const invalid_since = time(NULL) - MetaIndexParser->GetValidUntil(); if (invalid_since > 0) - // TRANSLATOR: The first %s is the URL of the bad Release file, the second is - // the time since then the file is invalid - formated in the same way as in - // the download progress display (e.g. 7d 3h 42min 1s) - return _error->Error( - _("Release file for %s is expired (invalid since %s). " - "Updates for this repository will not be applied."), - RealURI.c_str(), TimeToStr(invalid_since).c_str()); + { + std::string errmsg; + strprintf(errmsg, + // TRANSLATOR: The first %s is the URL of the bad Release file, the second is + // the time since then the file is invalid - formated in the same way as in + // the download progress display (e.g. 7d 3h 42min 1s) + _("Release file for %s is expired (invalid since %s). " + "Updates for this repository will not be applied."), + RealURI.c_str(), TimeToStr(invalid_since).c_str()); + if (ErrorText.empty()) + ErrorText = errmsg; + return _error->Error("%s", errmsg.c_str()); + } + } + + /* Did we get a file older than what we have? 
This is a last minute IMS hit and doubles + as a prevention of downgrading us to older (still valid) files */ + if (TransactionManager->IMSHit == false && LastMetaIndexParser != NULL && + LastMetaIndexParser->GetDate() > MetaIndexParser->GetDate()) + { + TransactionManager->IMSHit = true; + unlink(DestFile.c_str()); + PartialFile = DestFile = GetFinalFilename(); + delete MetaIndexParser; + MetaIndexParser = LastMetaIndexParser; + LastMetaIndexParser = NULL; } if (_config->FindB("Debug::pkgAcquire::Auth", false)) @@ -1704,147 +2295,175 @@ bool pkgAcqMetaIndex::VerifyVendor(string Message) /*{{{*/ return true; } /*}}}*/ -// pkgAcqMetaIndex::Failed - no Release file present or no signature file present /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void pkgAcqMetaIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/) +// pkgAcqMetaIndex::Failed - no Release file present /*{{{*/ +void pkgAcqMetaIndex::Failed(string Message, + pkgAcquire::MethodConfig * Cnf) { - if (AuthPass == true) - { - // gpgv method failed, if we have a good signature - string LastGoodSigFile = _config->FindDir("Dir::State::lists").append("partial/").append(URItoFileName(RealURI)); - if (DestFile != SigFile) - LastGoodSigFile.append(".gpg"); - LastGoodSigFile.append(".reverify"); - - if(FileExists(LastGoodSigFile)) - { - string VerifiedSigFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); - if (DestFile != SigFile) - VerifiedSigFile.append(".gpg"); - Rename(LastGoodSigFile, VerifiedSigFile); - Status = StatTransientNetworkError; - _error->Warning(_("An error occurred during the signature " - "verification. The repository is not updated " - "and the previous index files will be used. " - "GPG error: %s: %s\n"), - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); - RunScripts("APT::Update::Auth-Failure"); - return; - } else if (LookupTag(Message,"Message").find("NODATA") != string::npos) { - /* Invalid signature file, reject (LP: #346386) (Closes: #627642) */ - _error->Error(_("GPG error: %s: %s"), - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); - return; - } else { - _error->Warning(_("GPG error: %s: %s"), - Desc.Description.c_str(), - LookupTag(Message,"Message").c_str()); - } - // gpgv method failed - ReportMirrorFailure("GPGFailure"); - } - - /* Always move the meta index, even if gpgv failed. This ensures - * that PackageFile objects are correctly filled in */ - if (FileExists(DestFile)) { - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile += URItoFileName(RealURI); - /* InRelease files become Release files, otherwise - * they would be considered as trusted later on */ - if (SigFile == DestFile) { - RealURI = RealURI.replace(RealURI.rfind("InRelease"), 9, - "Release"); - FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9, - "Release"); - SigFile = FinalFile; - } - Rename(DestFile,FinalFile); - chmod(FinalFile.c_str(),0644); + pkgAcquire::Item::Failed(Message, Cnf); + Status = StatDone; - DestFile = FinalFile; - } + _error->Warning(_("The repository '%s' does not have a Release file. 
" + "This is deprecated, please contact the owner of the " + "repository."), URIDesc.c_str()); - // No Release file was present, or verification failed, so fall + // No Release file was present so fall // back to queueing Packages files without verification - QueueIndexes(false); + // only allow going further if the users explicitely wants it + if(AllowInsecureRepositories(MetaIndexParser, TransactionManager, this) == true) + { + // Done, queue for rename on transaction finished + if (FileExists(DestFile)) + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + + // queue without any kind of hashsum support + QueueIndexes(false); + } +} + /*}}}*/ +void pkgAcqMetaIndex::Finished() /*{{{*/ +{ + if(_config->FindB("Debug::Acquire::Transaction", false) == true) + std::clog << "Finished: " << DestFile <<std::endl; + if(TransactionManager != NULL && + TransactionManager->TransactionHasError() == false) + TransactionManager->CommitTransaction(); } /*}}}*/ pkgAcqMetaClearSig::pkgAcqMetaClearSig(pkgAcquire *Owner, /*{{{*/ string const &URI, string const &URIDesc, string const &ShortDesc, string const &MetaIndexURI, string const &MetaIndexURIDesc, string const &MetaIndexShortDesc, string const &MetaSigURI, string const &MetaSigURIDesc, string const &MetaSigShortDesc, - const vector<struct IndexTarget*>* IndexTargets, + const vector<IndexTarget*>* IndexTargets, indexRecords* MetaIndexParser) : - pkgAcqMetaIndex(Owner, URI, URIDesc, ShortDesc, "", IndexTargets, MetaIndexParser), - MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc), - MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc) + pkgAcqMetaIndex(Owner, NULL, URI, URIDesc, ShortDesc, MetaSigURI, MetaSigURIDesc,MetaSigShortDesc, IndexTargets, MetaIndexParser), + MetaIndexURI(MetaIndexURI), MetaIndexURIDesc(MetaIndexURIDesc), MetaIndexShortDesc(MetaIndexShortDesc), + MetaSigURI(MetaSigURI), MetaSigURIDesc(MetaSigURIDesc), MetaSigShortDesc(MetaSigShortDesc) { - SigFile = DestFile; - - // keep the old InRelease around in case of transistent network errors - string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); - if (RealFileExists(Final) == true) - { - string const LastGoodSig = DestFile + ".reverify"; - Rename(Final,LastGoodSig); - } + // index targets + (worst case:) Release/Release.gpg + ExpectedAdditionalItems = IndexTargets->size() + 2; } /*}}}*/ pkgAcqMetaClearSig::~pkgAcqMetaClearSig() /*{{{*/ { - // if the file was never queued undo file-changes done in the constructor - if (QueueCounter == 1 && Status == StatIdle && FileSize == 0 && Complete == false) - { - string const Final = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); - string const LastGoodSig = DestFile + ".reverify"; - if (RealFileExists(Final) == false && RealFileExists(LastGoodSig) == true) - Rename(LastGoodSig, Final); - } } /*}}}*/ // pkgAcqMetaClearSig::Custom600Headers - Insert custom request headers /*{{{*/ -// --------------------------------------------------------------------- -// FIXME: this can go away once the InRelease file is used widely +#if APT_PKG_ABI >= 413 +string pkgAcqMetaClearSig::Custom600Headers() const +#else string pkgAcqMetaClearSig::Custom600Headers() +#endif +{ + string Header = pkgAcqMetaBase::Custom600Headers(); + Header += "\nFail-Ignore: true"; + return Header; +} + /*}}}*/ +// pkgAcqMetaClearSig::Done - We got a file /*{{{*/ +class APT_HIDDEN DummyItem : public pkgAcquire::Item { - 
string Final = _config->FindDir("Dir::State::lists"); - Final += URItoFileName(RealURI); + std::string URI; + public: + virtual std::string DescURI() {return URI;}; - struct stat Buf; - if (stat(Final.c_str(),&Buf) != 0) + DummyItem(pkgAcquire *Owner, std::string const &URI) : pkgAcquire::Item(Owner), URI(URI) + { + Status = StatDone; + DestFile = GetFinalFileNameFromURI(URI); + } +}; +void pkgAcqMetaClearSig::Done(std::string Message,unsigned long long Size, + HashStringList const &Hashes, + pkgAcquire::MethodConfig *Cnf) +{ + Item::Done(Message, Size, Hashes, Cnf); + + // if we expect a ClearTextSignature (InRelease), ensure that + // this is what we get and if not fail to queue a + // Release/Release.gpg, see #346386 + if (FileExists(DestFile) && !StartsWithGPGClearTextSignature(DestFile)) { - Final = DestFile + ".reverify"; - if (stat(Final.c_str(),&Buf) != 0) - return "\nIndex-File: true\nFail-Ignore: true\n"; + pkgAcquire::Item::Failed(Message, Cnf); + RenameOnError(NotClearsigned); + TransactionManager->AbortTransaction(); + return; } - return "\nIndex-File: true\nFail-Ignore: true\nLast-Modified: " + TimeRFC1123(Buf.st_mtime); + if(AuthPass == false) + { + if(CheckDownloadDone(this, Message, Hashes) == true) + QueueForSignatureVerify(this, DestFile, DestFile); + return; + } + else if(CheckAuthDone(Message) == true) + { + if (TransactionManager->IMSHit == false) + TransactionManager->TransactionStageCopy(this, DestFile, GetFinalFilename()); + else if (RealFileExists(GetFinalFilename()) == false) + { + // We got an InRelease file IMSHit, but we haven't one, which means + // we had a valid Release/Release.gpg combo stepping in, which we have + // to 'acquire' now to ensure list cleanup isn't removing them + new DummyItem(Owner, MetaIndexURI); + new DummyItem(Owner, MetaSigURI); + } + } } /*}}}*/ void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /*{{{*/ { + Item::Failed(Message, Cnf); + + // we failed, we will not get additional items from this method + ExpectedAdditionalItems = 0; + if (AuthPass == false) { - // Remove the 'old' InRelease file if we try Release.gpg now as otherwise - // the file will stay around and gives a false-auth impression (CVE-2012-0214) - string FinalFile = _config->FindDir("Dir::State::lists"); - FinalFile.append(URItoFileName(RealURI)); - if (FileExists(FinalFile)) - unlink(FinalFile.c_str()); + // Queue the 'old' InRelease file for removal if we try Release.gpg + // as otherwise the file will stay around and gives a false-auth + // impression (CVE-2012-0214) + TransactionManager->TransactionStageRemoval(this, GetFinalFilename()); + Status = StatDone; - new pkgAcqMetaSig(Owner, - MetaSigURI, MetaSigURIDesc, MetaSigShortDesc, + new pkgAcqMetaIndex(Owner, TransactionManager, MetaIndexURI, MetaIndexURIDesc, MetaIndexShortDesc, + MetaSigURI, MetaSigURIDesc, MetaSigShortDesc, IndexTargets, MetaIndexParser); - if (Cnf->LocalOnly == true || - StringToBool(LookupTag(Message, "Transient-Failure"), false) == false) - Dequeue(); } else - pkgAcqMetaIndex::Failed(Message, Cnf); + { + if(CheckStopAuthentication(this, Message)) + return; + + _error->Warning(_("The data from '%s' is not signed. 
Packages " + "from that repository can not be authenticated."), + URIDesc.c_str()); + + // No Release file was present, or verification failed, so fall + // back to queueing Packages files without verification + // only allow going further if the users explicitely wants it + if(AllowInsecureRepositories(MetaIndexParser, TransactionManager, this) == true) + { + Status = StatDone; + + /* Always move the meta index, even if gpgv failed. This ensures + * that PackageFile objects are correctly filled in */ + if (FileExists(DestFile)) + { + string FinalFile = GetFinalFilename(); + /* InRelease files become Release files, otherwise + * they would be considered as trusted later on */ + RealURI = RealURI.replace(RealURI.rfind("InRelease"), 9, + "Release"); + FinalFile = FinalFile.replace(FinalFile.rfind("InRelease"), 9, + "Release"); + + // Done, queue for rename on transaction finished + TransactionManager->TransactionStageCopy(this, DestFile, FinalFile); + } + QueueIndexes(false); + } + } } /*}}}*/ // AcqArchive::AcqArchive - Constructor /*{{{*/ @@ -1854,7 +2473,7 @@ void pkgAcqMetaClearSig::Failed(string Message,pkgAcquire::MethodConfig *Cnf) /* pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, pkgRecords *Recs,pkgCache::VerIterator const &Version, string &StoreFilename) : - Item(Owner), Version(Version), Sources(Sources), Recs(Recs), + Item(Owner, HashStringList()), Version(Version), Sources(Sources), Recs(Recs), StoreFilename(StoreFilename), Vf(Version.FileList()), Trusted(false) { @@ -1939,7 +2558,6 @@ pkgAcqArchive::pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources, checking later. */ bool pkgAcqArchive::QueueNext() { - string const ForceHash = _config->Find("Acquire::ForceHash"); for (; Vf.end() == false; ++Vf) { // Ignore not source sources @@ -1960,31 +2578,10 @@ bool pkgAcqArchive::QueueNext() pkgRecords::Parser &Parse = Recs->Lookup(Vf); if (_error->PendingError() == true) return false; - + string PkgFile = Parse.FileName(); - if (ForceHash.empty() == false) - { - if(stringcasecmp(ForceHash, "sha512") == 0) - ExpectedHash = HashString("SHA512", Parse.SHA512Hash()); - else if(stringcasecmp(ForceHash, "sha256") == 0) - ExpectedHash = HashString("SHA256", Parse.SHA256Hash()); - else if (stringcasecmp(ForceHash, "sha1") == 0) - ExpectedHash = HashString("SHA1", Parse.SHA1Hash()); - else - ExpectedHash = HashString("MD5Sum", Parse.MD5Hash()); - } - else - { - string Hash; - if ((Hash = Parse.SHA512Hash()).empty() == false) - ExpectedHash = HashString("SHA512", Hash); - else if ((Hash = Parse.SHA256Hash()).empty() == false) - ExpectedHash = HashString("SHA256", Hash); - else if ((Hash = Parse.SHA1Hash()).empty() == false) - ExpectedHash = HashString("SHA1", Hash); - else - ExpectedHash = HashString("MD5Sum", Parse.MD5Hash()); - } + ExpectedHashes = Parse.Hashes(); + if (PkgFile.empty() == true) return _error->Error(_("The package index files are corrupted. 
No Filename: " "field for package %s."), @@ -2071,10 +2668,10 @@ bool pkgAcqArchive::QueueNext() // AcqArchive::Done - Finished fetching /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash, +void pkgAcqArchive::Done(string Message,unsigned long long Size, HashStringList const &CalcHashes, pkgAcquire::MethodConfig *Cfg) { - Item::Done(Message,Size,CalcHash,Cfg); + Item::Done(Message, Size, CalcHashes, Cfg); // Check the size if (Size != Version->Size) @@ -2082,11 +2679,12 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash, RenameOnError(SizeMismatch); return; } - - // Check the hash - if(ExpectedHash.toStr() != CalcHash) + + // FIXME: could this empty() check impose *any* sort of security issue? + if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes) { RenameOnError(HashSumMismatch); + printHashSumComparision(DestFile, ExpectedHashes, CalcHashes); return; } @@ -2099,32 +2697,35 @@ void pkgAcqArchive::Done(string Message,unsigned long long Size,string CalcHash, return; } - Complete = true; - // Reference filename if (FileName != DestFile) { StoreFilename = DestFile = FileName; Local = true; + Complete = true; return; } - + // Done, move it into position - string FinalFile = _config->FindDir("Dir::Cache::Archives"); - FinalFile += flNotDir(StoreFilename); + string const FinalFile = GetFinalFilename(); Rename(DestFile,FinalFile); - StoreFilename = DestFile = FinalFile; Complete = true; } /*}}}*/ +// Acquire::Item::GetFinalFilename - Return the full final file path /*{{{*/ +std::string pkgAcqArchive::GetFinalFilename() const +{ + return _config->FindDir("Dir::Cache::Archives") + flNotDir(StoreFilename); +} + /*}}}*/ // AcqArchive::Failed - Failure handler /*{{{*/ // --------------------------------------------------------------------- /* Here we try other sources */ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf) { - ErrorText = LookupTag(Message,"Message"); - + Item::Failed(Message,Cnf); + /* We don't really want to retry on failed media swaps, this prevents that. An interesting observation is that permanent failures are not recorded. 
*/ @@ -2134,10 +2735,10 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf) // Vf = Version.FileList(); while (Vf.end() == false) ++Vf; StoreFilename = string(); - Item::Failed(Message,Cnf); return; } - + + Status = StatIdle; if (QueueNext() == false) { // This is the retry counter @@ -2150,15 +2751,19 @@ void pkgAcqArchive::Failed(string Message,pkgAcquire::MethodConfig *Cnf) if (QueueNext() == true) return; } - + StoreFilename = string(); - Item::Failed(Message,Cnf); + Status = StatError; } } /*}}}*/ // AcqArchive::IsTrusted - Determine whether this archive comes from a trusted source /*{{{*/ // --------------------------------------------------------------------- +#if APT_PKG_ABI >= 413 +APT_PURE bool pkgAcqArchive::IsTrusted() const +#else APT_PURE bool pkgAcqArchive::IsTrusted() +#endif { return Trusted; } @@ -2177,11 +2782,11 @@ void pkgAcqArchive::Finished() // AcqFile::pkgAcqFile - Constructor /*{{{*/ // --------------------------------------------------------------------- /* The file is added to the queue */ -pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash, +pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI, HashStringList const &Hashes, unsigned long long Size,string Dsc,string ShortDesc, const string &DestDir, const string &DestFilename, bool IsIndexFile) : - Item(Owner), ExpectedHash(Hash), IsIndexFile(IsIndexFile) + Item(Owner, Hashes), IsIndexFile(IsIndexFile) { Retries = _config->FindI("Acquire::Retries",0); @@ -2218,15 +2823,16 @@ pkgAcqFile::pkgAcqFile(pkgAcquire *Owner,string URI,string Hash, // AcqFile::Done - Item downloaded OK /*{{{*/ // --------------------------------------------------------------------- /* */ -void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash, +void pkgAcqFile::Done(string Message,unsigned long long Size,HashStringList const &CalcHashes, pkgAcquire::MethodConfig *Cnf) { - Item::Done(Message,Size,CalcHash,Cnf); + Item::Done(Message,Size,CalcHashes,Cnf); // Check the hash - if(!ExpectedHash.empty() && ExpectedHash.toStr() != CalcHash) + if(ExpectedHashes.usable() && ExpectedHashes != CalcHashes) { RenameOnError(HashSumMismatch); + printHashSumComparision(DestFile, ExpectedHashes, CalcHashes); return; } @@ -2267,7 +2873,12 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash, // Symlink the file if (symlink(FileName.c_str(),DestFile.c_str()) != 0) { - ErrorText = "Link to " + DestFile + " failure "; + _error->PushToStack(); + _error->Errno("pkgAcqFile::Done", "Symlinking file %s failed", DestFile.c_str()); + std::stringstream msg; + _error->DumpErrors(msg); + _error->RevertToStack(); + ErrorText = msg.str(); Status = StatError; Complete = false; } @@ -2279,25 +2890,29 @@ void pkgAcqFile::Done(string Message,unsigned long long Size,string CalcHash, /* Here we try other sources */ void pkgAcqFile::Failed(string Message,pkgAcquire::MethodConfig *Cnf) { - ErrorText = LookupTag(Message,"Message"); - + Item::Failed(Message,Cnf); + // This is the retry counter if (Retries != 0 && Cnf->LocalOnly == false && StringToBool(LookupTag(Message,"Transient-Failure"),false) == true) { - Retries--; + --Retries; QueueURI(Desc); + Status = StatIdle; return; } - - Item::Failed(Message,Cnf); + } /*}}}*/ // AcqIndex::Custom600Headers - Insert custom request headers /*{{{*/ // --------------------------------------------------------------------- /* The only header we use is the last-modified header. 
*/ +#if APT_PKG_ABI >= 413 +string pkgAcqFile::Custom600Headers() const +#else string pkgAcqFile::Custom600Headers() +#endif { if (IsIndexFile) return "\nIndex-File: true"; diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index f48d2a0d7..07c86f31b 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -46,6 +46,8 @@ class indexRecords; class pkgRecords; class pkgSourceList; +class IndexTarget; +class pkgAcqMetaBase; /** \brief Represents the process by which a pkgAcquire object should {{{ * retrieve a file or a collection of files. @@ -61,6 +63,10 @@ class pkgSourceList; */ class pkgAcquire::Item : public WeakPointable { + friend class pkgAcqMetaBase; + + void *d; + protected: /** \brief The acquire object with which this item is associated. */ @@ -68,15 +74,19 @@ class pkgAcquire::Item : public WeakPointable /** \brief Insert this item into its owner's queue. * + * The method is designed to check if the request would end + * in an IMSHit and if it determines that it would, it isn't + * queueing the Item and instead sets it to completion instantly. + * * \param Item Metadata about this item (its URI and * description). + * \return true if the item was inserted, false if IMSHit was detected */ - inline void QueueURI(ItemDesc &Item) - {Owner->Enqueue(Item);}; + virtual bool QueueURI(ItemDesc &Item); /** \brief Remove this item from its owner's queue. */ - inline void Dequeue() {Owner->Dequeue(this);}; - + void Dequeue(); + /** \brief Rename a file without modifying its timestamp. * * Many item methods call this as their final action. @@ -86,7 +96,10 @@ class pkgAcquire::Item : public WeakPointable * \param To The new name of \a From. If \a To exists it will be * overwritten. */ - void Rename(std::string From,std::string To); + bool Rename(std::string From,std::string To); + + /** \brief Get the full pathname of the final file for the current URI */ + virtual std::string GetFinalFilename() const; public: @@ -115,7 +128,7 @@ class pkgAcquire::Item : public WeakPointable /** \brief The item was could not be downloaded because of * a transient network error (e.g. network down) */ - StatTransientNetworkError + StatTransientNetworkError, } Status; /** \brief Contains a textual description of the error encountered @@ -132,7 +145,12 @@ class pkgAcquire::Item : public WeakPointable /** \brief If not \b NULL, contains the name of a subprocess that * is operating on this object (for instance, "gzip" or "gpgv"). */ - const char *Mode; + APT_DEPRECATED const char *Mode; + + /** \brief contains the name of the subprocess that is operating on this object + * (for instance, "gzip", "rred" or "gpgv"). This is obsoleting #Mode from above + * as it can manage the lifetime of included string properly. */ + std::string ActiveSubprocess; /** \brief A client-supplied unique identifier. * @@ -166,12 +184,28 @@ class pkgAcquire::Item : public WeakPointable * \sa pkgAcquire */ unsigned int QueueCounter; + + /** \brief TransactionManager */ + pkgAcqMetaBase *TransactionManager; + + /** \brief The number of additional fetch items that are expected + * once this item is done. + * + * Some items like pkgAcqMeta{Index,Sig} will queue additional + * items. This variable can be set by the methods if it knows + * in advance how many items to expect to get a more accurate + * progress. + */ + unsigned int ExpectedAdditionalItems; /** \brief The name of the file into which the retrieved object * will be written. 
*/ std::string DestFile; + /** \brief storge name until a transaction is finished */ + std::string PartialFile; + /** \brief Invoked by the acquire worker when the object couldn't * be fetched. * @@ -201,12 +235,12 @@ class pkgAcquire::Item : public WeakPointable * \param Message Data from the acquire method. Use LookupTag() * to parse it. * \param Size The size of the object that was fetched. - * \param Hash The HashSum of the object that was fetched. + * \param Hashes The HashSums of the object that was fetched. * \param Cnf The method via which the object was fetched. * * \sa pkgAcqMethod */ - virtual void Done(std::string Message,unsigned long long Size,std::string Hash, + virtual void Done(std::string Message, unsigned long long Size, HashStringList const &Hashes, pkgAcquire::MethodConfig *Cnf); /** \brief Invoked when the worker starts to fetch this object. @@ -228,7 +262,11 @@ class pkgAcquire::Item : public WeakPointable * line, so they should (if nonempty) have a leading newline and * no trailing newline. */ +#if APT_PKG_ABI >= 413 + virtual std::string Custom600Headers() const {return std::string();}; +#else virtual std::string Custom600Headers() {return std::string();}; +#endif /** \brief A "descriptive" URI-like string. * @@ -244,20 +282,27 @@ class pkgAcquire::Item : public WeakPointable /** \brief Invoked by the worker when the download is completely done. */ virtual void Finished() {}; - /** \brief HashSum + /** \brief HashSums * - * \return the HashSum of this object, if applicable; otherwise, an - * empty string. + * \return the HashSums of this object, if applicable; otherwise, an + * empty list. */ - virtual std::string HashSum() {return std::string();}; + HashStringList HashSums() const {return ExpectedHashes;}; + std::string HashSum() const {HashStringList const hashes = HashSums(); HashString const * const hs = hashes.find(NULL); return hs != NULL ? hs->toStr() : ""; }; /** \return the acquire process with which this item is associated. */ + pkgAcquire *GetOwner() const {return Owner;}; +#if APT_PKG_ABI < 413 pkgAcquire *GetOwner() {return Owner;}; +#endif /** \return \b true if this object is being fetched from a trusted source. */ +#if APT_PKG_ABI >= 413 + virtual bool IsTrusted() const {return false;}; +#else virtual bool IsTrusted() {return false;}; +#endif - // report mirror problems /** \brief Report mirror problem * * This allows reporting mirror failures back to a centralized @@ -267,6 +312,11 @@ class pkgAcquire::Item : public WeakPointable */ void ReportMirrorFailure(std::string FailCode); + /** \brief Set the name of the current active subprocess + * + * See also #ActiveSubprocess + */ + void SetActiveSubprocess(const std::string &subprocess); /** \brief Initialize an item. * @@ -274,12 +324,12 @@ class pkgAcquire::Item : public WeakPointable * process, but does not place it into any fetch queues (you must * manually invoke QueueURI() to do so). * - * Initializes all fields of the item other than Owner to 0, - * false, or the empty string. - * * \param Owner The new owner of this item. + * \param ExpectedHashes of the file represented by this item */ - Item(pkgAcquire *Owner); + Item(pkgAcquire *Owner, + HashStringList const &ExpectedHashes=HashStringList(), + pkgAcqMetaBase *TransactionManager=NULL); /** \brief Remove this item from its owner's queue by invoking * pkgAcquire::Remove. 
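The single-string HashSum() kept above is now only a thin wrapper over the new list; a minimal sketch of the two accessors side by side (assuming HashStringList::find(NULL) returns the preferred digest, as the inline suggests; the helper function itself is made up):

    static void ShowHashes(pkgAcquire::Item &I)
    {
       HashStringList const all = I.HashSums(); // every digest known for this item
       std::string const one = I.HashSum();     // "<type>:<hex>" of the preferred one, "" if none
       // the deprecated `Mode' pointer has a managed replacement as well:
       I.SetActiveSubprocess("gpgv");           // instead of poking Mode directly
    }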
@@ -291,7 +341,11 @@ class pkgAcquire::Item : public WeakPointable enum RenameOnErrorState { HashSumMismatch, SizeMismatch, - InvalidFormat + InvalidFormat, + SignatureError, + NotClearsigned, + MaximumSizeExceeded, + PDiffError, }; /** \brief Rename failed file and set error @@ -299,62 +353,321 @@ class pkgAcquire::Item : public WeakPointable * \param state respresenting the error we encountered */ bool RenameOnError(RenameOnErrorState const state); + + enum TransactionStates { + TransactionCommit, + TransactionAbort, + }; + virtual bool TransactionState(TransactionStates const state); + + /** \brief The HashSums of the item is supposed to have than done */ + HashStringList ExpectedHashes; + + /** \brief The item that is currently being downloaded. */ + pkgAcquire::ItemDesc Desc; }; /*}}}*/ /** \brief Information about an index patch (aka diff). */ /*{{{*/ -struct DiffInfo { +struct APT_HIDDEN DiffInfo { /** The filename of the diff. */ std::string file; - /** The sha1 hash of the diff. */ - std::string sha1; + /** The hashes of the diff */ + HashStringList result_hashes; - /** The size of the diff. */ - unsigned long size; + /** The hashes of the file after the diff is applied */ + HashStringList patch_hashes; + + /** The size of the file after the diff is applied */ + unsigned long long result_size; + + /** The size of the diff itself */ + unsigned long long patch_size; }; /*}}}*/ -/** \brief An item that is responsible for fetching a SubIndex {{{ - * - * The MetaIndex file includes only records for important indexes - * and records for these SubIndex files so these can carry records - * for addition files like PDiffs and Translations - */ -class pkgAcqSubIndex : public pkgAcquire::Item +class pkgAcqMetaBase : public pkgAcquire::Item /*{{{*/ { + void *d; + protected: - /** \brief If \b true, debugging information will be written to std::clog. */ - bool Debug; + std::vector<Item*> Transaction; - /** \brief The item that is currently being downloaded. */ - pkgAcquire::ItemDesc Desc; + /** \brief A package-system-specific parser for the meta-index file. */ + indexRecords *MetaIndexParser; + indexRecords *LastMetaIndexParser; - /** \brief The Hash that this file should have after download + /** \brief The index files which should be looked up in the meta-index + * and then downloaded. */ - HashString ExpectedHash; + const std::vector<IndexTarget*>* IndexTargets; - public: - // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash, - pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return Desc.URI;}; - virtual std::string Custom600Headers(); - virtual bool ParseIndex(std::string const &IndexFile); + /** \brief If \b true, the index's signature is currently being verified. + */ + bool AuthPass; + + /** \brief The URI of the signature file. Unlike Desc.URI, this is + * never modified; it is used to determine the file that is being + * downloaded. + */ + std::string RealURI; - /** \brief Create a new pkgAcqSubIndex. + /** \brief Starts downloading the individual index files. * - * \param Owner The Acquire object that owns this item. + * \param verify If \b true, only indices whose expected hashsum + * can be determined from the meta-index will be downloaded, and + * the hashsums of indices will be checked (reporting + * #StatAuthError if there is a mismatch). If verify is \b false, + * no hashsum checking will be performed. 
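A rough sketch of the transaction flow these members implement, using only the method names declared above (the real wiring is spread over several classes and simplified here):

    // every item registers itself with its transaction manager via Add()
    TransactionManager->Add(this);
    // a successful item stages its result instead of touching the lists dir directly
    TransactionManager->TransactionStageCopy(this, PartialFile, GetFinalFilename());
    // once all items have finished, the manager either applies or discards the staged work
    if (TransactionManager->TransactionHasError() == false)
       TransactionManager->CommitTransaction();
    else
       TransactionManager->AbortTransaction();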
+ */ + void QueueIndexes(bool verify); + + /** \brief Called when a file is finished being retrieved. * - * \param URI The URI of the list file to download. + * If the file was not downloaded to DestFile, a copy process is + * set up to copy it to DestFile; otherwise, Complete is set to \b + * true and the file is moved to its final location. * - * \param URIDesc A long description of the list file to download. + * \param Message The message block received from the fetch + * subprocess. + */ + bool CheckDownloadDone(pkgAcquire::Item * const I, const std::string &Message, HashStringList const &Hashes) const; + + /** \brief Queue the downloaded Signature for verification */ + void QueueForSignatureVerify(pkgAcquire::Item * const I, std::string const &File, std::string const &Signature); + +#if APT_PKG_ABI >= 413 + virtual std::string Custom600Headers() const; +#else + virtual std::string Custom600Headers(); +#endif + + /** \brief Called when authentication succeeded. * - * \param ShortDesc A short description of the list file to download. + * Sanity-checks the authenticated file, queues up the individual + * index files for download, and saves the signature in the lists + * directory next to the authenticated list file. * - * \param ExpectedHash The list file's MD5 signature. + * \param Message The message block received from the fetch + * subprocess. */ - pkgAcqSubIndex(pkgAcquire *Owner, std::string const &URI,std::string const &URIDesc, - std::string const &ShortDesc, HashString const &ExpectedHash); + bool CheckAuthDone(std::string Message); + + /** Check if the current item should fail at this point */ + bool CheckStopAuthentication(pkgAcquire::Item * const I, const std::string &Message); + + /** \brief Check that the release file is a release file for the + * correct distribution. + * + * \return \b true if no fatal errors were encountered. + */ + bool VerifyVendor(std::string Message); + + virtual bool TransactionState(TransactionStates const state); + + public: + // This refers more to the Transaction-Manager than the actual file + bool IMSHit; + + virtual std::string DescURI() {return RealURI; }; + virtual bool QueueURI(pkgAcquire::ItemDesc &Item); + + // transaction code + void Add(Item *I); + void AbortTransaction(); + bool TransactionHasError() APT_PURE; + void CommitTransaction(); + + /** \brief Stage (queue) a copy action when the transaction is committed + */ + void TransactionStageCopy(Item *I, + const std::string &From, + const std::string &To); + /** \brief Stage (queue) a removal action when the transaction is committed + */ + void TransactionStageRemoval(Item *I, const std::string &FinalFile); + + /** \brief Get the full pathname of the final file for the current URI */ + virtual std::string GetFinalFilename() const; + + pkgAcqMetaBase(pkgAcquire *Owner, + const std::vector<IndexTarget*>* IndexTargets, + indexRecords* MetaIndexParser, + std::string const &RealURI, + HashStringList const &ExpectedHashes=HashStringList(), + pkgAcqMetaBase *TransactionManager=NULL); +}; + /*}}}*/ +/** \brief An item that is responsible for downloading the meta-index {{{ + * file (i.e., Release) itself and verifying its signature. + * + * Once the download and verification are complete, the downloads of + * the individual index files are queued up using pkgAcqDiffIndex. 
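Read together with the pkgAcqMetaBase helpers above, the intended sequence is roughly this (a simplified sketch of the declared hooks):

    // 1. fetch the meta-index and hand it to QueueForSignatureVerify(), which runs
    //    the inline or detached signature through the verifier (typically gpgv)
    // 2. CheckAuthDone()/CheckStopAuthentication() decide whether the result may be used
    // 3. VerifyVendor() confirms the Release file belongs to the expected distribution
    // 4. QueueIndexes(verify) creates the per-index items with the hashsums taken
    //    from the now verified meta-index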
+ * If the meta-index file had a valid signature, the expected hashsums + * of the index files will be the md5sums listed in the meta-index; + * otherwise, the expected hashsums will be "" (causing the + * authentication of the index files to be bypassed). + */ +class APT_HIDDEN pkgAcqMetaIndex : public pkgAcqMetaBase +{ + void *d; + + protected: + std::string URIDesc; + std::string ShortDesc; + + /** \brief The URI of the meta-index file for the detached signature */ + std::string MetaIndexSigURI; + + /** \brief A "URI-style" description of the meta-index file */ + std::string MetaIndexSigURIDesc; + + /** \brief A brief description of the meta-index file */ + std::string MetaIndexSigShortDesc; + + /** \brief delayed constructor */ + void Init(std::string URIDesc, std::string ShortDesc); + + public: + + // Specialized action members + virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); + virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, + pkgAcquire::MethodConfig *Cnf); + virtual void Finished(); + + /** \brief Create a new pkgAcqMetaIndex. */ + pkgAcqMetaIndex(pkgAcquire *Owner, + pkgAcqMetaBase *TransactionManager, + std::string URI,std::string URIDesc, std::string ShortDesc, + std::string MetaIndexSigURI, std::string MetaIndexSigURIDesc, std::string MetaIndexSigShortDesc, + const std::vector<IndexTarget*>* IndexTargets, + indexRecords* MetaIndexParser); + + friend class pkgAcqMetaSig; +}; + /*}}}*/ +/** \brief An acquire item that downloads the detached signature {{{ + * of a meta-index (Release) file, then queues up the release + * file itself. + * + * \todo Why protected members? + * + * \sa pkgAcqMetaIndex + */ +class APT_HIDDEN pkgAcqMetaSig : public pkgAcquire::Item +{ + void *d; + + pkgAcqMetaIndex * const MetaIndex; + + /** \brief The file we use to verify the MetaIndexFile with (not always set!) */ + std::string MetaIndexFileSignature; + + protected: + + /** \brief Long URI description used in the acquire system */ + std::string URIDesc; + + /** \brief URI used to get the file */ + std::string RealURI; + + /** \brief Get the full pathname of the final file for the current URI */ + virtual std::string GetFinalFilename() const; + + public: + virtual std::string DescURI() {return RealURI;}; + + // Specialized action members + virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); + virtual void Done(std::string Message,unsigned long long Size, + HashStringList const &Hashes, + pkgAcquire::MethodConfig *Cnf); + + /** \brief Create a new pkgAcqMetaSig. */ + pkgAcqMetaSig(pkgAcquire *Owner, + pkgAcqMetaBase *TransactionManager, + std::string const &URI,std::string const &URIDesc, + std::string const &ShortDesc, pkgAcqMetaIndex * const MetaIndex); + virtual ~pkgAcqMetaSig(); +}; + /*}}}*/ +/** \brief An item repsonsible for downloading clearsigned metaindexes {{{*/ +class APT_HIDDEN pkgAcqMetaClearSig : public pkgAcqMetaIndex +{ + void *d; + + /** \brief The URI of the meta-index file for the detached signature */ + std::string MetaIndexURI; + + /** \brief A "URI-style" description of the meta-index file */ + std::string MetaIndexURIDesc; + + /** \brief A brief description of the meta-index file */ + std::string MetaIndexShortDesc; + + /** \brief The URI of the detached meta-signature file if the clearsigned one failed. 
*/ + std::string MetaSigURI; + + /** \brief A "URI-style" description of the meta-signature file */ + std::string MetaSigURIDesc; + + /** \brief A brief description of the meta-signature file */ + std::string MetaSigShortDesc; + +public: + virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); +#if APT_PKG_ABI >= 413 + virtual std::string Custom600Headers() const; +#else + virtual std::string Custom600Headers(); +#endif + virtual void Done(std::string Message,unsigned long long Size, + HashStringList const &Hashes, + pkgAcquire::MethodConfig *Cnf); + + /** \brief Create a new pkgAcqMetaClearSig. */ + pkgAcqMetaClearSig(pkgAcquire *Owner, + std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc, + std::string const &MetaIndexURI, std::string const &MetaIndexURIDesc, std::string const &MetaIndexShortDesc, + std::string const &MetaSigURI, std::string const &MetaSigURIDesc, std::string const &MetaSigShortDesc, + const std::vector<IndexTarget*>* IndexTargets, + indexRecords* MetaIndexParser); + virtual ~pkgAcqMetaClearSig(); +}; + /*}}}*/ +/** \brief Common base class for all classes that deal with fetching {{{ + indexes + */ +class pkgAcqBaseIndex : public pkgAcquire::Item +{ + void *d; + + protected: + /** \brief Pointer to the IndexTarget data + */ + const struct IndexTarget * Target; + + /** \brief Pointer to the indexRecords parser */ + indexRecords *MetaIndexParser; + + /** \brief The MetaIndex Key */ + std::string MetaKey; + + /** \brief The URI of the index file to recreate at our end (either + * by downloading it or by applying partial patches). + */ + std::string RealURI; + + bool VerifyHashByMetaKey(HashStringList const &Hashes); + + /** \brief Get the full pathname of the final file for the current URI */ + virtual std::string GetFinalFilename() const; + + pkgAcqBaseIndex(pkgAcquire *Owner, + pkgAcqMetaBase *TransactionManager, + struct IndexTarget const * const Target, + HashStringList const &ExpectedHashes, + indexRecords *MetaIndexParser); }; /*}}}*/ /** \brief An item that is responsible for fetching an index file of {{{ @@ -366,25 +679,14 @@ class pkgAcqSubIndex : public pkgAcquire::Item * * \sa pkgAcqIndexDiffs, pkgAcqIndex */ -class pkgAcqDiffIndex : public pkgAcquire::Item +class APT_HIDDEN pkgAcqDiffIndex : public pkgAcqBaseIndex { + void *d; + protected: /** \brief If \b true, debugging information will be written to std::clog. */ bool Debug; - /** \brief The item that is currently being downloaded. */ - pkgAcquire::ItemDesc Desc; - - /** \brief The URI of the index file to recreate at our end (either - * by downloading it or by applying partial patches). - */ - std::string RealURI; - - /** \brief The Hash that the real index file should have after - * all patches have been applied. - */ - HashString ExpectedHash; - /** \brief The index file which will be patched to generate the new * file. 
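Tying the clearsigned case together, a frontend could queue an InRelease file roughly like this (Fetcher, IndexTargets and MetaIndexParser are assumed to exist already; URIs and descriptions are only illustrative):

    new pkgAcqMetaClearSig(&Fetcher,
          "http://deb.example.org/debian/dists/unstable/InRelease",
          "example.org unstable InRelease", "InRelease",
          "http://deb.example.org/debian/dists/unstable/Release",
          "example.org unstable Release", "Release",
          "http://deb.example.org/debian/dists/unstable/Release.gpg",
          "example.org unstable Release.gpg", "Release.gpg",
          IndexTargets, MetaIndexParser);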
*/ @@ -395,13 +697,23 @@ class pkgAcqDiffIndex : public pkgAcquire::Item */ std::string Description; + /** \brief Get the full pathname of the final file for the current URI */ + virtual std::string GetFinalFilename() const; + + virtual bool QueueURI(pkgAcquire::ItemDesc &Item); + + virtual bool TransactionState(TransactionStates const state); public: // Specialized action members virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash, + virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, pkgAcquire::MethodConfig *Cnf); virtual std::string DescURI() {return RealURI + "Index";}; +#if APT_PKG_ABI >= 413 + virtual std::string Custom600Headers() const; +#else virtual std::string Custom600Headers(); +#endif /** \brief Parse the Index file for a set of Packages diffs. * @@ -414,7 +726,6 @@ class pkgAcqDiffIndex : public pkgAcquire::Item * false otherwise. */ bool ParseDiffIndex(std::string IndexDiffFile); - /** \brief Create a new pkgAcqDiffIndex. * @@ -426,10 +737,15 @@ class pkgAcqDiffIndex : public pkgAcquire::Item * * \param ShortDesc A short description of the list file to download. * - * \param ExpectedHash The list file's MD5 signature. + * \param ExpectedHashes The list file's hashsums which are expected. */ - pkgAcqDiffIndex(pkgAcquire *Owner,std::string URI,std::string URIDesc, - std::string ShortDesc, HashString ExpectedHash); + pkgAcqDiffIndex(pkgAcquire *Owner, + pkgAcqMetaBase *TransactionManager, + struct IndexTarget const * const Target, + HashStringList const &ExpectedHashes, + indexRecords *MetaIndexParser); + private: + APT_HIDDEN void QueueOnIMSHit() const; }; /*}}}*/ /** \brief An item that is responsible for fetching client-merge patches {{{ @@ -443,8 +759,10 @@ class pkgAcqDiffIndex : public pkgAcquire::Item * * \sa pkgAcqDiffIndex, pkgAcqIndex */ -class pkgAcqIndexMergeDiffs : public pkgAcquire::Item +class APT_HIDDEN pkgAcqIndexMergeDiffs : public pkgAcqBaseIndex { + void *d; + protected: /** \brief If \b true, debugging output will be written to @@ -452,21 +770,6 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item */ bool Debug; - /** \brief description of the item that is currently being - * downloaded. - */ - pkgAcquire::ItemDesc Desc; - - /** \brief URI of the package index file that is being - * reconstructed. - */ - std::string RealURI; - - /** \brief HashSum of the package index file that is being - * reconstructed. - */ - HashString ExpectedHash; - /** \brief description of the file being downloaded. */ std::string Description; @@ -499,9 +802,8 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item * outright; its arguments are ignored. */ virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - - virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash, - pkgAcquire::MethodConfig *Cnf); + virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, + pkgAcquire::MethodConfig *Cnf); virtual std::string DescURI() {return RealURI + "Index";}; /** \brief Create an index merge-diff item. @@ -515,7 +817,7 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item * * \param ShortDesc A brief description of this item. 
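For reference, each patch the diff classes walk through is described by a DiffInfo entry as declared earlier; filled in by ParseDiffIndex() it looks roughly like this (all values made up):

    DiffInfo d;
    d.file = "2015-08-18-0831.28";   // patch name as listed in the pdiff Index file
    d.patch_size  = 1432;            // size of the .diff itself
    d.result_size = 9182716;         // size the patched Packages file must end up with
    // d.patch_hashes / d.result_hashes hold the matching HashStringLists for both checks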
* - * \param ExpectedHash The expected md5sum of the completely + * \param ExpectedHashes The expected md5sum of the completely * reconstructed package index file; the index file will be tested * against this value when it is entirely reconstructed. * @@ -525,9 +827,13 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item * \param allPatches contains all related items so that each item can * check if it was the last one to complete the download step */ - pkgAcqIndexMergeDiffs(pkgAcquire *Owner,std::string const &URI,std::string const &URIDesc, - std::string const &ShortDesc, HashString const &ExpectedHash, - DiffInfo const &patch, std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches); + pkgAcqIndexMergeDiffs(pkgAcquire *Owner, + pkgAcqMetaBase *TransactionManager, + struct IndexTarget const * const Target, + HashStringList const &ExpectedHash, + indexRecords *MetaIndexParser, + DiffInfo const &patch, + std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches); }; /*}}}*/ /** \brief An item that is responsible for fetching server-merge patches {{{ @@ -541,8 +847,10 @@ class pkgAcqIndexMergeDiffs : public pkgAcquire::Item * * \sa pkgAcqDiffIndex, pkgAcqIndex */ -class pkgAcqIndexDiffs : public pkgAcquire::Item +class APT_HIDDEN pkgAcqIndexDiffs : public pkgAcqBaseIndex { + void *d; + private: /** \brief Queue up the next diff download. @@ -554,20 +862,20 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item * \return \b true if an applicable diff was found, \b false * otherwise. */ - bool QueueNextDiff(); + APT_HIDDEN bool QueueNextDiff(); /** \brief Handle tasks that must be performed after the item * finishes downloading. * - * Dequeues the item and checks the resulting file's md5sum - * against ExpectedHash after the last patch was applied. + * Dequeues the item and checks the resulting file's hashsums + * against ExpectedHashes after the last patch was applied. * There is no need to check the md5/sha1 after a "normal" * patch because QueueNextDiff() will check the sha1 later. * * \param allDone If \b true, the file was entirely reconstructed, * and its md5sum is verified. */ - void Finish(bool allDone=false); + APT_HIDDEN void Finish(bool allDone=false); protected: @@ -576,21 +884,6 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item */ bool Debug; - /** \brief A description of the item that is currently being - * downloaded. - */ - pkgAcquire::ItemDesc Desc; - - /** \brief The URI of the package index file that is being - * reconstructed. - */ - std::string RealURI; - - /** \brief The HashSum of the package index file that is being - * reconstructed. - */ - HashString ExpectedHash; - /** A description of the file being downloaded. */ std::string Description; @@ -604,9 +897,6 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item */ std::vector<DiffInfo> available_patches; - /** Stop applying patches when reaching that sha1 */ - std::string ServerSha1; - /** The current status of this patch. */ enum DiffState { @@ -632,9 +922,9 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item */ virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash, + virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, pkgAcquire::MethodConfig *Cnf); - virtual std::string DescURI() {return RealURI + "Index";}; + virtual std::string DescURI() {return RealURI + "IndexDiffs";}; /** \brief Create an index diff item. 
* @@ -650,19 +940,19 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item * * \param ShortDesc A brief description of this item. * - * \param ExpectedHash The expected md5sum of the completely + * \param ExpectedHashes The expected hashsums of the completely * reconstructed package index file; the index file will be tested * against this value when it is entirely reconstructed. * - * \param ServerSha1 is the sha1sum of the current file on the server - * * \param diffs The remaining diffs from the index of diffs. They * should be ordered so that each diff appears before any diff * that depends on it. */ - pkgAcqIndexDiffs(pkgAcquire *Owner,std::string URI,std::string URIDesc, - std::string ShortDesc, HashString ExpectedHash, - std::string ServerSha1, + pkgAcqIndexDiffs(pkgAcquire *Owner, + pkgAcqMetaBase *TransactionManager, + struct IndexTarget const * const Target, + HashStringList const &ExpectedHash, + indexRecords *MetaIndexParser, std::vector<DiffInfo> diffs=std::vector<DiffInfo>()); }; /*}}}*/ @@ -673,55 +963,77 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item * * \todo Why does pkgAcqIndex have protected members? */ -class pkgAcqIndex : public pkgAcquire::Item +class APT_HIDDEN pkgAcqIndex : public pkgAcqBaseIndex { - protected: + void *d; - /** \brief If \b true, the index file has been decompressed. */ - bool Decompression; + protected: - /** \brief If \b true, the partially downloaded file will be - * removed when the download completes. + /** \brief The stages the method goes through + * + * The method first downloads the indexfile, then its decompressed (or + * copied) and verified */ - bool Erase; + enum AllStages { + STAGE_DOWNLOAD, + STAGE_DECOMPRESS_AND_VERIFY, + }; + AllStages Stage; - /** \brief Verify for correctness by checking if a "Package" - * tag is found in the index. This can be set to - * false for optional index targets - * - */ - // FIXME: instead of a bool it should use a verify string that will - // then be used in the pkgAcqIndex::Done method to ensure that - // the downloaded file contains the expected tag - bool Verify; + /** \brief Handle what needs to be done when the download is done */ + void StageDownloadDone(std::string Message, + HashStringList const &Hashes, + pkgAcquire::MethodConfig *Cfg); - /** \brief The download request that is currently being - * processed. + /** \brief Handle what needs to be done when the decompression/copy is + * done */ - pkgAcquire::ItemDesc Desc; + void StageDecompressDone(std::string Message, + HashStringList const &Hashes, + pkgAcquire::MethodConfig *Cfg); - /** \brief The object that is actually being fetched (minus any - * compression-related extensions). + /** \brief If \b set, this partially downloaded file will be + * removed when the download completes. */ - std::string RealURI; - - /** \brief The expected hashsum of the decompressed index file. */ - HashString ExpectedHash; + std::string EraseFileName; /** \brief The compression-related file extensions that are being * added to the downloaded file one by one if first fails (e.g., "gz bz2"). 
*/ - std::string CompressionExtension; + std::string CompressionExtensions; + + /** \brief The actual compression extension currently used */ + std::string CurrentCompressionExtension; + + /** \brief Do the changes needed to fetch via AptByHash (if needed) */ + void InitByHashIfNeeded(const std::string MetaKey); + + /** \brief Auto select the right compression to use */ + void AutoSelectCompression(); + + /** \brief Schedule file for verification after a IMS hit */ + void ReverifyAfterIMS(); + + /** \brief Validate the downloaded index file */ + bool ValidateFile(const std::string &FileName); + + /** \brief Get the full pathname of the final file for the current URI */ + virtual std::string GetFinalFilename() const; + + virtual bool TransactionState(TransactionStates const state); public: - // Specialized action members virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash, + virtual void Done(std::string Message,unsigned long long Size, + HashStringList const &Hashes, pkgAcquire::MethodConfig *Cnf); +#if APT_PKG_ABI >= 413 + virtual std::string Custom600Headers() const; +#else virtual std::string Custom600Headers(); +#endif virtual std::string DescURI() {return Desc.URI;}; - virtual std::string HashSum() {return ExpectedHash.toStr(); }; /** \brief Create a pkgAcqIndex. * @@ -734,7 +1046,7 @@ class pkgAcqIndex : public pkgAcquire::Item * * \param ShortDesc A brief description of this index file. * - * \param ExpectedHash The expected hashsum of this index file. + * \param ExpectedHashes The expected hashsum of this index file. * * \param compressExt The compression-related extension with which * this index file should be downloaded, or "" to autodetect @@ -743,47 +1055,21 @@ class pkgAcqIndex : public pkgAcquire::Item * fallback is ".gz" or none. */ pkgAcqIndex(pkgAcquire *Owner,std::string URI,std::string URIDesc, - std::string ShortDesc, HashString ExpectedHash, - std::string compressExt=""); - pkgAcqIndex(pkgAcquire *Owner, struct IndexTarget const * const Target, - HashString const &ExpectedHash, indexRecords const *MetaIndexParser); - void Init(std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc); -}; - /*}}}*/ -/** \brief An acquire item that is responsible for fetching a {{{ - * translated index file. - * - * The only difference from pkgAcqIndex is that transient failures - * are suppressed: no error occurs if the translated index file is - * missing. - */ -class pkgAcqIndexTrans : public pkgAcqIndex -{ - public: - - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual std::string Custom600Headers(); - - /** \brief Create a pkgAcqIndexTrans. - * - * \param Owner The pkgAcquire object with which this item is - * associated. - * - * \param URI The URI of the index file that is to be downloaded. - * - * \param URIDesc A "URI-style" description of this index file. - * - * \param ShortDesc A brief description of this index file. 
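Spelled out, the two stages and the extension fallback amount to roughly the following (simplified; the extension list is only an example):

    // STAGE_DOWNLOAD: fetch e.g. Packages.gz, remember the compressed file in
    //   EraseFileName and requeue it for decompression
    // STAGE_DECOMPRESS_AND_VERIFY: run ValidateFile() and the hashsum check on the
    //   uncompressed result, then stage it into the transaction
    //
    // CompressionExtensions might hold "gz bz2 uncompressed"; if fetching the current
    // CurrentCompressionExtension fails, the next entry in the list is tried.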
- */ - pkgAcqIndexTrans(pkgAcquire *Owner,std::string URI,std::string URIDesc, - std::string ShortDesc); - pkgAcqIndexTrans(pkgAcquire *Owner, struct IndexTarget const * const Target, - HashString const &ExpectedHash, indexRecords const *MetaIndexParser); + std::string ShortDesc, HashStringList const &ExpectedHashes); + pkgAcqIndex(pkgAcquire *Owner, pkgAcqMetaBase *TransactionManager, + IndexTarget const * const Target, + HashStringList const &ExpectedHash, + indexRecords *MetaIndexParser); + + void Init(std::string const &URI, std::string const &URIDesc, + std::string const &ShortDesc); }; /*}}}*/ /** \brief Information about an index file. */ /*{{{*/ -class IndexTarget +class APT_HIDDEN IndexTarget { + void *d; + public: /** \brief A URI from which the index file can be downloaded. */ std::string URI; @@ -802,230 +1088,18 @@ class IndexTarget virtual bool IsOptional() const { return false; } - virtual bool IsSubIndex() const { - return false; - } }; /*}}}*/ /** \brief Information about an optional index file. */ /*{{{*/ -class OptionalIndexTarget : public IndexTarget +class APT_HIDDEN OptionalIndexTarget : public IndexTarget { + void *d; + virtual bool IsOptional() const { return true; } }; /*}}}*/ -/** \brief Information about an subindex index file. */ /*{{{*/ -class SubIndexTarget : public IndexTarget -{ - virtual bool IsSubIndex() const { - return true; - } -}; - /*}}}*/ -/** \brief Information about an subindex index file. */ /*{{{*/ -class OptionalSubIndexTarget : public OptionalIndexTarget -{ - virtual bool IsSubIndex() const { - return true; - } -}; - /*}}}*/ - -/** \brief An acquire item that downloads the detached signature {{{ - * of a meta-index (Release) file, then queues up the release - * file itself. - * - * \todo Why protected members? - * - * \sa pkgAcqMetaIndex - */ -class pkgAcqMetaSig : public pkgAcquire::Item -{ - protected: - /** \brief The last good signature file */ - std::string LastGoodSig; - - /** \brief The fetch request that is currently being processed. */ - pkgAcquire::ItemDesc Desc; - - /** \brief The URI of the signature file. Unlike Desc.URI, this is - * never modified; it is used to determine the file that is being - * downloaded. - */ - std::string RealURI; - - /** \brief The URI of the meta-index file to be fetched after the signature. */ - std::string MetaIndexURI; - - /** \brief A "URI-style" description of the meta-index file to be - * fetched after the signature. - */ - std::string MetaIndexURIDesc; - - /** \brief A brief description of the meta-index file to be fetched - * after the signature. - */ - std::string MetaIndexShortDesc; - - /** \brief A package-system-specific parser for the meta-index file. */ - indexRecords* MetaIndexParser; - - /** \brief The index files which should be looked up in the meta-index - * and then downloaded. - * - * \todo Why a list of pointers instead of a list of structs? - */ - const std::vector<struct IndexTarget*>* IndexTargets; - - public: - - // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash, - pkgAcquire::MethodConfig *Cnf); - virtual std::string Custom600Headers(); - virtual std::string DescURI() {return RealURI; }; - - /** \brief Create a new pkgAcqMetaSig. 
*/ - pkgAcqMetaSig(pkgAcquire *Owner,std::string URI,std::string URIDesc, std::string ShortDesc, - std::string MetaIndexURI, std::string MetaIndexURIDesc, std::string MetaIndexShortDesc, - const std::vector<struct IndexTarget*>* IndexTargets, - indexRecords* MetaIndexParser); - virtual ~pkgAcqMetaSig(); -}; - /*}}}*/ -/** \brief An item that is responsible for downloading the meta-index {{{ - * file (i.e., Release) itself and verifying its signature. - * - * Once the download and verification are complete, the downloads of - * the individual index files are queued up using pkgAcqDiffIndex. - * If the meta-index file had a valid signature, the expected hashsums - * of the index files will be the md5sums listed in the meta-index; - * otherwise, the expected hashsums will be "" (causing the - * authentication of the index files to be bypassed). - */ -class pkgAcqMetaIndex : public pkgAcquire::Item -{ - protected: - /** \brief The fetch command that is currently being processed. */ - pkgAcquire::ItemDesc Desc; - - /** \brief The URI that is actually being downloaded; never - * modified by pkgAcqMetaIndex. - */ - std::string RealURI; - - /** \brief The file in which the signature for this index was stored. - * - * If empty, the signature and the md5sums of the individual - * indices will not be checked. - */ - std::string SigFile; - - /** \brief The index files to download. */ - const std::vector<struct IndexTarget*>* IndexTargets; - - /** \brief The parser for the meta-index file. */ - indexRecords* MetaIndexParser; - - /** \brief If \b true, the index's signature is currently being verified. - */ - bool AuthPass; - // required to deal gracefully with problems caused by incorrect ims hits - bool IMSHit; - - /** \brief Check that the release file is a release file for the - * correct distribution. - * - * \return \b true if no fatal errors were encountered. - */ - bool VerifyVendor(std::string Message); - - /** \brief Called when a file is finished being retrieved. - * - * If the file was not downloaded to DestFile, a copy process is - * set up to copy it to DestFile; otherwise, Complete is set to \b - * true and the file is moved to its final location. - * - * \param Message The message block received from the fetch - * subprocess. - */ - void RetrievalDone(std::string Message); - - /** \brief Called when authentication succeeded. - * - * Sanity-checks the authenticated file, queues up the individual - * index files for download, and saves the signature in the lists - * directory next to the authenticated list file. - * - * \param Message The message block received from the fetch - * subprocess. - */ - void AuthDone(std::string Message); - - /** \brief Starts downloading the individual index files. - * - * \param verify If \b true, only indices whose expected hashsum - * can be determined from the meta-index will be downloaded, and - * the hashsums of indices will be checked (reporting - * #StatAuthError if there is a mismatch). If verify is \b false, - * no hashsum checking will be performed. - */ - void QueueIndexes(bool verify); - - public: - - // Specialized action members - virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size, std::string Hash, - pkgAcquire::MethodConfig *Cnf); - virtual std::string Custom600Headers(); - virtual std::string DescURI() {return RealURI; }; - - /** \brief Create a new pkgAcqMetaIndex. 
*/ - pkgAcqMetaIndex(pkgAcquire *Owner, - std::string URI,std::string URIDesc, std::string ShortDesc, - std::string SigFile, - const std::vector<struct IndexTarget*>* IndexTargets, - indexRecords* MetaIndexParser); -}; - /*}}}*/ -/** \brief An item repsonsible for downloading clearsigned metaindexes {{{*/ -class pkgAcqMetaClearSig : public pkgAcqMetaIndex -{ - /** \brief The URI of the meta-index file for the detached signature */ - std::string MetaIndexURI; - - /** \brief A "URI-style" description of the meta-index file */ - std::string MetaIndexURIDesc; - - /** \brief A brief description of the meta-index file */ - std::string MetaIndexShortDesc; - - /** \brief The URI of the detached meta-signature file if the clearsigned one failed. */ - std::string MetaSigURI; - - /** \brief A "URI-style" description of the meta-signature file */ - std::string MetaSigURIDesc; - - /** \brief A brief description of the meta-signature file */ - std::string MetaSigShortDesc; - -public: - void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual std::string Custom600Headers(); - - /** \brief Create a new pkgAcqMetaClearSig. */ - pkgAcqMetaClearSig(pkgAcquire *Owner, - std::string const &URI, std::string const &URIDesc, std::string const &ShortDesc, - std::string const &MetaIndexURI, std::string const &MetaIndexURIDesc, std::string const &MetaIndexShortDesc, - std::string const &MetaSigURI, std::string const &MetaSigURIDesc, std::string const &MetaSigShortDesc, - const std::vector<struct IndexTarget*>* IndexTargets, - indexRecords* MetaIndexParser); - virtual ~pkgAcqMetaClearSig(); -}; - /*}}}*/ /** \brief An item that is responsible for fetching a package file. {{{ * * If the package file already exists in the cache, nothing will be @@ -1033,13 +1107,12 @@ public: */ class pkgAcqArchive : public pkgAcquire::Item { + void *d; + protected: /** \brief The package version being fetched. */ pkgCache::VerIterator Version; - /** \brief The fetch command that is currently being processed. */ - pkgAcquire::ItemDesc Desc; - /** \brief The list of sources from which to pick archives to * download this package from. */ @@ -1050,9 +1123,6 @@ class pkgAcqArchive : public pkgAcquire::Item */ pkgRecords *Recs; - /** \brief The hashsum of this package. */ - HashString ExpectedHash; - /** \brief A location in which the actual filename of the package * should be stored. */ @@ -1075,18 +1145,24 @@ class pkgAcqArchive : public pkgAcquire::Item /** \brief Queue up the next available file for this version. */ bool QueueNext(); - + + /** \brief Get the full pathname of the final file for the current URI */ + virtual std::string GetFinalFilename() const; + public: - + virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size,std::string Hash, + virtual void Done(std::string Message,unsigned long long Size, HashStringList const &Hashes, pkgAcquire::MethodConfig *Cnf); virtual std::string DescURI() {return Desc.URI;}; virtual std::string ShortDesc() {return Desc.ShortDesc;}; virtual void Finished(); - virtual std::string HashSum() {return ExpectedHash.toStr(); }; +#if APT_PKG_ABI >= 413 + virtual bool IsTrusted() const; +#else virtual bool IsTrusted(); - +#endif + /** \brief Create a new pkgAcqArchive. * * \param Owner The pkgAcquire object with which this item is @@ -1118,11 +1194,7 @@ class pkgAcqArchive : public pkgAcquire::Item */ class pkgAcqFile : public pkgAcquire::Item { - /** \brief The currently active download process. 
*/ - pkgAcquire::ItemDesc Desc; - - /** \brief The hashsum of the file to download, if it is known. */ - HashString ExpectedHash; + void *d; /** \brief How many times to retry the download, set from * Acquire::Retries. @@ -1136,11 +1208,14 @@ class pkgAcqFile : public pkgAcquire::Item // Specialized action members virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf); - virtual void Done(std::string Message,unsigned long long Size,std::string CalcHash, + virtual void Done(std::string Message,unsigned long long Size, HashStringList const &CalcHashes, pkgAcquire::MethodConfig *Cnf); virtual std::string DescURI() {return Desc.URI;}; - virtual std::string HashSum() {return ExpectedHash.toStr(); }; +#if APT_PKG_ABI >= 413 + virtual std::string Custom600Headers() const; +#else virtual std::string Custom600Headers(); +#endif /** \brief Create a new pkgAcqFile object. * @@ -1149,8 +1224,8 @@ class pkgAcqFile : public pkgAcquire::Item * * \param URI The URI to download. * - * \param Hash The hashsum of the file to download, if it is known; - * otherwise "". + * \param Hashes The hashsums of the file to download, if they are known; + * otherwise empty list. * * \param Size The size of the file to download, if it is known; * otherwise 0. @@ -1173,7 +1248,7 @@ class pkgAcqFile : public pkgAcquire::Item * is the absolute name to which the file should be downloaded. */ - pkgAcqFile(pkgAcquire *Owner, std::string URI, std::string Hash, unsigned long long Size, + pkgAcqFile(pkgAcquire *Owner, std::string URI, HashStringList const &Hashes, unsigned long long Size, std::string Desc, std::string ShortDesc, const std::string &DestDir="", const std::string &DestFilename="", bool IsIndexFile=false); diff --git a/apt-pkg/acquire-method.cc b/apt-pkg/acquire-method.cc index 746c553f1..c29ef469e 100644 --- a/apt-pkg/acquire-method.cc +++ b/apt-pkg/acquire-method.cc @@ -102,7 +102,10 @@ void pkgAcqMethod::Fail(string Err,bool Transient) if (Queue != 0) { std::cout << "400 URI Failure\nURI: " << Queue->Uri << "\n" - << "Message: " << Err << " " << IP << "\n"; + << "Message: " << Err; + if (IP.empty() == false && _config->FindB("Acquire::Failure::ShowIP", true) == true) + std::cout << " " << IP; + std::cout << "\n"; Dequeue(); } else @@ -119,6 +122,18 @@ void pkgAcqMethod::Fail(string Err,bool Transient) std::cout << "\n" << std::flush; } /*}}}*/ +// AcqMethod::DropPrivsOrDie - Drop privileges or die /*{{{*/ +// --------------------------------------------------------------------- +/* */ +void pkgAcqMethod::DropPrivsOrDie() +{ + if (!DropPrivileges()) { + Fail(false); + exit(112); /* call the european emergency number */ + } +} + + /*}}}*/ // AcqMethod::URIStart - Indicate a download is starting /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -147,6 +162,16 @@ void pkgAcqMethod::URIStart(FetchResult &Res) // AcqMethod::URIDone - A URI is finished /*{{{*/ // --------------------------------------------------------------------- /* */ +static void printHashStringList(HashStringList const * const list) +{ + for (HashStringList::const_iterator hash = list->begin(); hash != list->end(); ++hash) + { + // very old compatibility name for MD5Sum + if (hash->HashType() == "MD5Sum") + std::cout << "MD5-Hash: " << hash->HashValue() << "\n"; + std::cout << hash->HashType() << "-Hash: " << hash->HashValue() << "\n"; + } +} void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt) { if (Queue == 0) @@ -164,15 +189,8 @@ void pkgAcqMethod::URIDone(FetchResult &Res, 
FetchResult *Alt) if (Res.LastModified != 0) std::cout << "Last-Modified: " << TimeRFC1123(Res.LastModified) << "\n"; - if (Res.MD5Sum.empty() == false) - std::cout << "MD5-Hash: " << Res.MD5Sum << "\n" - << "MD5Sum-Hash: " << Res.MD5Sum << "\n"; - if (Res.SHA1Sum.empty() == false) - std::cout << "SHA1-Hash: " << Res.SHA1Sum << "\n"; - if (Res.SHA256Sum.empty() == false) - std::cout << "SHA256-Hash: " << Res.SHA256Sum << "\n"; - if (Res.SHA512Sum.empty() == false) - std::cout << "SHA512-Hash: " << Res.SHA512Sum << "\n"; + printHashStringList(&Res.Hashes); + if (UsedMirror.empty() == false) std::cout << "UsedMirror: " << UsedMirror << "\n"; if (Res.GPGVOutput.empty() == false) @@ -200,15 +218,8 @@ void pkgAcqMethod::URIDone(FetchResult &Res, FetchResult *Alt) if (Alt->LastModified != 0) std::cout << "Alt-Last-Modified: " << TimeRFC1123(Alt->LastModified) << "\n"; - if (Alt->MD5Sum.empty() == false) - std::cout << "Alt-MD5-Hash: " << Alt->MD5Sum << "\n"; - if (Alt->SHA1Sum.empty() == false) - std::cout << "Alt-SHA1-Hash: " << Alt->SHA1Sum << "\n"; - if (Alt->SHA256Sum.empty() == false) - std::cout << "Alt-SHA256-Hash: " << Alt->SHA256Sum << "\n"; - if (Alt->SHA512Sum.empty() == false) - std::cout << "Alt-SHA512-Hash: " << Alt->SHA512Sum << "\n"; - + printHashStringList(&Alt->Hashes); + if (Alt->IMSHit == true) std::cout << "Alt-IMS-Hit: true\n"; } @@ -355,6 +366,17 @@ int pkgAcqMethod::Run(bool Single) Tmp->LastModified = 0; Tmp->IndexFile = StringToBool(LookupTag(Message,"Index-File"),false); Tmp->FailIgnore = StringToBool(LookupTag(Message,"Fail-Ignore"),false); + Tmp->ExpectedHashes = HashStringList(); + for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t) + { + std::string tag = "Expected-"; + tag.append(*t); + std::string const hash = LookupTag(Message, tag.c_str()); + if (hash.empty() == false) + Tmp->ExpectedHashes.push_back(HashString(*t, hash)); + } + char *End; + Tmp->MaximumSize = strtoll(LookupTag(Message, "Maximum-Size", "0").c_str(), &End, 10); Tmp->Next = 0; // Append it to the list @@ -442,12 +464,9 @@ pkgAcqMethod::FetchResult::FetchResult() : LastModified(0), // --------------------------------------------------------------------- /* This hides the number of hashes we are supporting from the caller. It just deals with the hash class. 
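On the method side the usual pattern for reporting results becomes something like this sketch (it assumes the long-standing Hashes::AddFD() helper, an open descriptor Fd and an otherwise filled FetchResult Res):

    Hashes Hash;
    Hash.AddFD(Fd, Res.Size);   // digest the fetched file with every supported algorithm
    Res.TakeHashes(Hash);       // stores Hash.GetHashStringList() in Res.Hashes
    URIDone(Res);               // printHashStringList() then emits one "<Type>-Hash:" line per digest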
*/ -void pkgAcqMethod::FetchResult::TakeHashes(Hashes &Hash) +void pkgAcqMethod::FetchResult::TakeHashes(class Hashes &Hash) { - MD5Sum = Hash.MD5.Result(); - SHA1Sum = Hash.SHA1.Result(); - SHA256Sum = Hash.SHA256.Result(); - SHA512Sum = Hash.SHA512.Result(); + Hashes = Hash.GetHashStringList(); } /*}}}*/ void pkgAcqMethod::Dequeue() { /*{{{*/ @@ -458,3 +477,5 @@ void pkgAcqMethod::Dequeue() { /*{{{*/ delete Tmp; } /*}}}*/ + +pkgAcqMethod::~pkgAcqMethod() {} diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h index 221ccf273..399454892 100644 --- a/apt-pkg/acquire-method.h +++ b/apt-pkg/acquire-method.h @@ -20,6 +20,7 @@ #ifndef PKGLIB_ACQUIRE_METHOD_H #define PKGLIB_ACQUIRE_METHOD_H +#include <apt-pkg/hashes.h> #include <apt-pkg/macros.h> #include <stdarg.h> @@ -33,7 +34,6 @@ #include <apt-pkg/strutl.h> #endif -class Hashes; class pkgAcqMethod { protected: @@ -44,17 +44,20 @@ class pkgAcqMethod std::string Uri; std::string DestFile; + int DestFileFd; time_t LastModified; bool IndexFile; bool FailIgnore; + HashStringList ExpectedHashes; + // a maximum size we will download, this can be the exact filesize + // for when we know it or a arbitrary limit when we don't know the + // filesize (like a InRelease file) + unsigned long long MaximumSize; }; struct FetchResult { - std::string MD5Sum; - std::string SHA1Sum; - std::string SHA256Sum; - std::string SHA512Sum; + HashStringList Hashes; std::vector<std::string> GPGVOutput; time_t LastModified; bool IMSHit; @@ -62,7 +65,7 @@ class pkgAcqMethod unsigned long long Size; unsigned long long ResumePoint; - void TakeHashes(Hashes &Hash); + void TakeHashes(class Hashes &Hash); FetchResult(); }; @@ -106,8 +109,8 @@ class pkgAcqMethod inline void SetIP(std::string aIP) {IP = aIP;}; pkgAcqMethod(const char *Ver,unsigned long Flags = 0); - virtual ~pkgAcqMethod() {}; - + virtual ~pkgAcqMethod(); + void DropPrivsOrDie(); private: APT_HIDDEN void Dequeue(); }; diff --git a/apt-pkg/acquire-worker.cc b/apt-pkg/acquire-worker.cc index 047a655ce..9254e20a3 100644 --- a/apt-pkg/acquire-worker.cc +++ b/apt-pkg/acquire-worker.cc @@ -34,6 +34,9 @@ #include <signal.h> #include <stdio.h> #include <errno.h> +#include <sys/types.h> +#include <pwd.h> +#include <grp.h> #include <apti18n.h> /*}}}*/ @@ -306,7 +309,10 @@ bool pkgAcquire::Worker::RunMessages() pkgAcquire::Item *Owner = Itm->Owner; pkgAcquire::ItemDesc Desc = *Itm; - + + if (RealFileExists(Owner->DestFile)) + ChangeOwnerAndPermissionOfFile("201::URIDone", Owner->DestFile.c_str(), "root", "root", 0644); + // Display update before completion if (Log != 0 && Log->MorePulses == true) Log->Pulse(Owner->GetOwner()); @@ -326,25 +332,30 @@ bool pkgAcquire::Worker::RunMessages() Owner->DestFile.c_str(), LookupTag(Message,"Size","0").c_str(),TotalSize); // see if there is a hash to verify - string RecivedHash; - HashString expectedHash(Owner->HashSum()); - if(!expectedHash.empty()) + HashStringList ReceivedHashes; + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) { - string hashTag = expectedHash.HashType()+"-Hash"; - string hashSum = LookupTag(Message, hashTag.c_str()); - if(!hashSum.empty()) - RecivedHash = expectedHash.HashType() + ":" + hashSum; - if(_config->FindB("Debug::pkgAcquire::Auth", false) == true) - { - clog << "201 URI Done: " << Owner->DescURI() << endl - << "RecivedHash: " << RecivedHash << endl - << "ExpectedHash: " << expectedHash.toStr() - << endl << endl; - } + std::string const tagname = std::string(*type) + "-Hash"; + std::string const hashsum 
= LookupTag(Message, tagname.c_str()); + if (hashsum.empty() == false) + ReceivedHashes.push_back(HashString(*type, hashsum)); + } + + if(_config->FindB("Debug::pkgAcquire::Auth", false) == true) + { + std::clog << "201 URI Done: " << Owner->DescURI() << endl + << "ReceivedHash:" << endl; + for (HashStringList::const_iterator hs = ReceivedHashes.begin(); hs != ReceivedHashes.end(); ++hs) + std::clog << "\t- " << hs->toStr() << std::endl; + std::clog << "ExpectedHash:" << endl; + HashStringList expectedHashes = Owner->HashSums(); + for (HashStringList::const_iterator hs = expectedHashes.begin(); hs != expectedHashes.end(); ++hs) + std::clog << "\t- " << hs->toStr() << std::endl; + std::clog << endl; } - Owner->Done(Message, ServerSize, RecivedHash.c_str(), Config); + Owner->Done(Message, ServerSize, ReceivedHashes, Config); ItemDone(); - + // Log that we are done if (Log != 0) { @@ -366,16 +377,21 @@ bool pkgAcquire::Worker::RunMessages() { if (Itm == 0) { - _error->Error("Method gave invalid 400 URI Failure message"); + std::string const msg = LookupTag(Message,"Message"); + _error->Error("Method gave invalid 400 URI Failure message: %s", msg.c_str()); break; } // Display update before completion if (Log != 0 && Log->MorePulses == true) Log->Pulse(Itm->Owner->GetOwner()); - + pkgAcquire::Item *Owner = Itm->Owner; pkgAcquire::ItemDesc Desc = *Itm; + + if (RealFileExists(Owner->DestFile)) + ChangeOwnerAndPermissionOfFile("400::URIFailure", Owner->DestFile.c_str(), "root", "root", 0644); + OwnerQ->ItemDone(Itm); // set some status @@ -525,9 +541,25 @@ bool pkgAcquire::Worker::QueueItem(pkgAcquire::Queue::QItem *Item) Message.reserve(300); Message += "URI: " + Item->URI; Message += "\nFilename: " + Item->Owner->DestFile; + HashStringList const hsl = Item->Owner->HashSums(); + for (HashStringList::const_iterator hs = hsl.begin(); hs != hsl.end(); ++hs) + Message += "\nExpected-" + hs->HashType() + ": " + hs->HashValue(); + if(Item->Owner->FileSize > 0) + { + string MaximumSize; + strprintf(MaximumSize, "%llu", Item->Owner->FileSize); + Message += "\nMaximum-Size: " + MaximumSize; + } Message += Item->Owner->Custom600Headers(); Message += "\n\n"; - + + if (RealFileExists(Item->Owner->DestFile)) + { + std::string SandboxUser = _config->Find("APT::Sandbox::User"); + ChangeOwnerAndPermissionOfFile("Item::QueueURI", Item->Owner->DestFile.c_str(), + SandboxUser.c_str(), "root", 0600); + } + if (Debug == true) clog << " -> " << Access << ':' << QuoteString(Message,"\n") << endl; OutQueue += Message; diff --git a/apt-pkg/acquire-worker.h b/apt-pkg/acquire-worker.h index 67aee4b59..db8889c8e 100644 --- a/apt-pkg/acquire-worker.h +++ b/apt-pkg/acquire-worker.h @@ -101,6 +101,11 @@ class pkgAcquire::Worker : public WeakPointable */ int OutFd; + /** \brief The socket to send SCM_RIGHTS message through + */ + int PrivSepSocketFd; + int PrivSepSocketFdChild; + /** \brief Set to \b true if the worker is in a state in which it * might generate data or command responses. 
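Putting the QueueItem() additions together, a 600 URI Acquire request handed to a method now carries the expectations explicitly; roughly (all values illustrative):

    URI: http://deb.example.org/debian/dists/unstable/main/binary-amd64/Packages.gz
    Filename: /var/lib/apt/lists/partial/deb.example.org_..._Packages.gz
    Expected-SHA256: <digest taken from the Release file>
    Expected-SHA1: <digest taken from the Release file>
    Maximum-Size: 1234567

The method later answers with a 201 URI Done block carrying one "<Type>-Hash:" line per digest it computed, which RunMessages() above collects into ReceivedHashes and hands to Item::Done().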
* diff --git a/apt-pkg/acquire.cc b/apt-pkg/acquire.cc index a187a00ae..0c815c005 100644 --- a/apt-pkg/acquire.cc +++ b/apt-pkg/acquire.cc @@ -27,15 +27,20 @@ #include <vector> #include <iostream> #include <sstream> +#include <iomanip> + #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> - +#include <pwd.h> +#include <grp.h> #include <dirent.h> #include <sys/time.h> #include <sys/select.h> #include <errno.h> +#include <sys/stat.h> +#include <sys/types.h> #include <apti18n.h> /*}}}*/ @@ -49,52 +54,110 @@ pkgAcquire::pkgAcquire() : LockFD(-1), Queues(0), Workers(0), Configs(0), Log(NU Debug(_config->FindB("Debug::pkgAcquire",false)), Running(false) { - string const Mode = _config->Find("Acquire::Queue-Mode","host"); - if (strcasecmp(Mode.c_str(),"host") == 0) - QueueMode = QueueHost; - if (strcasecmp(Mode.c_str(),"access") == 0) - QueueMode = QueueAccess; + Initialize(); } -pkgAcquire::pkgAcquire(pkgAcquireStatus *Progress) : LockFD(-1), Queues(0), Workers(0), - Configs(0), Log(Progress), ToFetch(0), +pkgAcquire::pkgAcquire(pkgAcquireStatus *Progress) : LockFD(-1), Queues(0), Workers(0), + Configs(0), Log(NULL), ToFetch(0), Debug(_config->FindB("Debug::pkgAcquire",false)), Running(false) { + Initialize(); + SetLog(Progress); +} +void pkgAcquire::Initialize() +{ string const Mode = _config->Find("Acquire::Queue-Mode","host"); if (strcasecmp(Mode.c_str(),"host") == 0) QueueMode = QueueHost; if (strcasecmp(Mode.c_str(),"access") == 0) QueueMode = QueueAccess; - Setup(Progress, ""); + + // chown the auth.conf file as it will be accessed by our methods + std::string const SandboxUser = _config->Find("APT::Sandbox::User"); + if (getuid() == 0 && SandboxUser.empty() == false) // if we aren't root, we can't chown, so don't try it + { + struct passwd const * const pw = getpwnam(SandboxUser.c_str()); + struct group const * const gr = getgrnam("root"); + if (pw != NULL && gr != NULL) + { + std::string const AuthConf = _config->FindFile("Dir::Etc::netrc"); + if(AuthConf.empty() == false && RealFileExists(AuthConf) && + chown(AuthConf.c_str(), pw->pw_uid, gr->gr_gid) != 0) + _error->WarningE("SetupAPTPartialDirectory", "chown to %s:root of file %s failed", SandboxUser.c_str(), AuthConf.c_str()); + } + } } /*}}}*/ -// Acquire::Setup - Delayed Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* Do everything needed to be a complete Acquire object and report the - success (or failure) back so the user knows that something is wrong… */ +// Acquire::GetLock - lock directory and prepare for action /*{{{*/ +static bool SetupAPTPartialDirectory(std::string const &grand, std::string const &parent) +{ + std::string const partial = parent + "partial"; + mode_t const mode = umask(S_IWGRP | S_IWOTH); + bool const creation_fail = (CreateAPTDirectoryIfNeeded(grand, partial) == false && + CreateAPTDirectoryIfNeeded(parent, partial) == false); + umask(mode); + if (creation_fail == true) + return false; + + std::string const SandboxUser = _config->Find("APT::Sandbox::User"); + if (getuid() == 0 && SandboxUser.empty() == false) // if we aren't root, we can't chown, so don't try it + { + struct passwd const * const pw = getpwnam(SandboxUser.c_str()); + struct group const * const gr = getgrnam("root"); + if (pw != NULL && gr != NULL) + { + // chown the partial dir + if(chown(partial.c_str(), pw->pw_uid, gr->gr_gid) != 0) + _error->WarningE("SetupAPTPartialDirectory", "chown to %s:root of directory %s failed", SandboxUser.c_str(), partial.c_str()); + } 
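In practice the chown handling above, together with the ownership changes in the worker, gives roughly this picture (assuming a configured APT::Sandbox::User, commonly "_apt" in later releases; the exact modes are in the surrounding hunks):

    // lists/partial and archives/partial end up owned by <sandbox user>:root and are
    //   not world accessible, so only the unprivileged methods write there
    // a file about to be fetched is chown'd to the sandbox user in Worker::QueueItem()
    // once the method reports 201 URI Done or 400 URI Failure, the worker gives the
    //   file back to root:root with conservative permissions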
+ } + if (chmod(partial.c_str(), 0700) != 0) + _error->WarningE("SetupAPTPartialDirectory", "chmod 0700 of directory %s failed", partial.c_str()); + + return true; +} bool pkgAcquire::Setup(pkgAcquireStatus *Progress, string const &Lock) { Log = Progress; + if (Lock.empty()) + { + string const listDir = _config->FindDir("Dir::State::lists"); + if (SetupAPTPartialDirectory(_config->FindDir("Dir::State"), listDir) == false) + return _error->Errno("Acquire", _("List directory %spartial is missing."), listDir.c_str()); + string const archivesDir = _config->FindDir("Dir::Cache::Archives"); + if (SetupAPTPartialDirectory(_config->FindDir("Dir::Cache"), archivesDir) == false) + return _error->Errno("Acquire", _("Archives directory %spartial is missing."), archivesDir.c_str()); + return true; + } + return GetLock(Lock); +} +bool pkgAcquire::GetLock(std::string const &Lock) +{ + if (Lock.empty() == true) + return false; // check for existence and possibly create auxiliary directories string const listDir = _config->FindDir("Dir::State::lists"); - string const partialListDir = listDir + "partial/"; string const archivesDir = _config->FindDir("Dir::Cache::Archives"); - string const partialArchivesDir = archivesDir + "partial/"; - if (CreateAPTDirectoryIfNeeded(_config->FindDir("Dir::State"), partialListDir) == false && - CreateAPTDirectoryIfNeeded(listDir, partialListDir) == false) - return _error->Errno("Acquire", _("List directory %spartial is missing."), listDir.c_str()); - - if (CreateAPTDirectoryIfNeeded(_config->FindDir("Dir::Cache"), partialArchivesDir) == false && - CreateAPTDirectoryIfNeeded(archivesDir, partialArchivesDir) == false) - return _error->Errno("Acquire", _("Archives directory %spartial is missing."), archivesDir.c_str()); + if (Lock == listDir) + { + if (SetupAPTPartialDirectory(_config->FindDir("Dir::State"), listDir) == false) + return _error->Errno("Acquire", _("List directory %spartial is missing."), listDir.c_str()); + } + if (Lock == archivesDir) + { + if (SetupAPTPartialDirectory(_config->FindDir("Dir::Cache"), archivesDir) == false) + return _error->Errno("Acquire", _("Archives directory %spartial is missing."), archivesDir.c_str()); + } - if (Lock.empty() == true || _config->FindB("Debug::NoLocking", false) == true) + if (_config->FindB("Debug::NoLocking", false) == true) return true; // Lock the directory this acquire object will work in - LockFD = GetLock(flCombine(Lock, "lock")); + if (LockFD != -1) + close(LockFD); + LockFD = ::GetLock(flCombine(Lock, "lock")); if (LockFD == -1) return _error->Error(_("Unable to lock directory %s"), Lock.c_str()); @@ -486,6 +549,9 @@ bool pkgAcquire::Clean(string Dir) if (DirectoryExists(Dir) == false) return true; + if(Dir == "/") + return _error->Error(_("Clean of %s is not supported"), Dir.c_str()); + DIR *D = opendir(Dir.c_str()); if (D == 0) return _error->Errno("opendir",_("Unable to read %s"),Dir.c_str()); @@ -577,27 +643,18 @@ pkgAcquire::UriIterator pkgAcquire::UriEnd() // Acquire::MethodConfig::MethodConfig - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -pkgAcquire::MethodConfig::MethodConfig() +pkgAcquire::MethodConfig::MethodConfig() : d(NULL), Next(0), SingleInstance(false), + Pipeline(false), SendConfig(false), LocalOnly(false), NeedsCleanup(false), + Removable(false) { - SingleInstance = false; - Pipeline = false; - SendConfig = false; - LocalOnly = false; - Removable = false; - Next = 0; } /*}}}*/ // Queue::Queue - Constructor /*{{{*/ // 
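The hunks above split the old all-in-one Setup() into SetLog() plus an explicit GetLock(), which now also creates and sandboxes the partial directories. A minimal usage sketch for a front-end, assuming the usual pkgInitConfig/pkgInitSystem boilerplate; the surrounding main() is illustrative and not part of this change:

   #include <apt-pkg/acquire.h>
   #include <apt-pkg/configuration.h>
   #include <apt-pkg/error.h>
   #include <apt-pkg/init.h>
   #include <apt-pkg/pkgsystem.h>

   int main()
   {
      pkgInitConfig(*_config);
      pkgInitSystem(*_config, _system);

      pkgAcquire Fetcher;   // the constructors no longer create directories
      // Locks Dir::State::lists and creates <lists>/partial, chown'ed to
      // APT::Sandbox::User and chmod'ed 0700 by SetupAPTPartialDirectory().
      if (Fetcher.GetLock(_config->FindDir("Dir::State::lists")) == false)
      {
         _error->DumpErrors();
         return 1;
      }
      // ... queue items and call Fetcher.Run() as before ...
      return 0;
   }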
--------------------------------------------------------------------- /* */ -pkgAcquire::Queue::Queue(string Name,pkgAcquire *Owner) : Name(Name), - Owner(Owner) +pkgAcquire::Queue::Queue(string Name,pkgAcquire *Owner) : d(NULL), Next(0), + Name(Name), Items(0), Workers(0), Owner(Owner), PipeDepth(0), MaxPipeDepth(1) { - Items = 0; - Next = 0; - Workers = 0; - MaxPipeDepth = 1; - PipeDepth = 0; } /*}}}*/ // Queue::~Queue - Destructor /*{{{*/ @@ -801,7 +858,7 @@ void pkgAcquire::Queue::Bump() // AcquireStatus::pkgAcquireStatus - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -pkgAcquireStatus::pkgAcquireStatus() : d(NULL), Update(true), MorePulses(false) +pkgAcquireStatus::pkgAcquireStatus() : d(NULL), Percent(0), Update(true), MorePulses(false) { Start(); } @@ -821,7 +878,9 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner) // Compute the total number of bytes to fetch unsigned int Unknown = 0; unsigned int Count = 0; - for (pkgAcquire::ItemCIterator I = Owner->ItemsBegin(); I != Owner->ItemsEnd(); + bool UnfetchedReleaseFiles = false; + for (pkgAcquire::ItemCIterator I = Owner->ItemsBegin(); + I != Owner->ItemsEnd(); ++I, ++Count) { TotalItems++; @@ -832,6 +891,13 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner) if ((*I)->Local == true) continue; + // see if the method tells us to expect more + TotalItems += (*I)->ExpectedAdditionalItems; + + // check if there are unfetched Release files + if ((*I)->Complete == false && (*I)->ExpectedAdditionalItems > 0) + UnfetchedReleaseFiles = true; + TotalBytes += (*I)->FileSize; if ((*I)->Complete == true) CurrentBytes += (*I)->FileSize; @@ -843,6 +909,7 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner) unsigned long long ResumeSize = 0; for (pkgAcquire::Worker *I = Owner->WorkersBegin(); I != 0; I = Owner->WorkerStep(I)) + { if (I->CurrentItem != 0 && I->CurrentItem->Owner->Complete == false) { CurrentBytes += I->CurrentSize; @@ -853,6 +920,7 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner) I->CurrentItem->Owner->Complete == false) TotalBytes += I->CurrentSize; } + } // Normalize the figures and account for unknown size downloads if (TotalBytes <= 0) @@ -863,6 +931,12 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner) // Wha?! Is not supposed to happen. 
if (CurrentBytes > TotalBytes) CurrentBytes = TotalBytes; + + // debug + if (_config->FindB("Debug::acquire::progress", false) == true) + std::clog << " Bytes: " + << SizeToStr(CurrentBytes) << " / " << SizeToStr(TotalBytes) + << std::endl; // Compute the CPS struct timeval NewTime; @@ -883,6 +957,14 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner) Time = NewTime; } + // calculate the percentage, if we have too little data assume 1% + if (TotalBytes > 0 && UnfetchedReleaseFiles) + Percent = 0; + else + // use both files and bytes because bytes can be unreliable + Percent = (0.8 * (CurrentBytes/float(TotalBytes)*100.0) + + 0.2 * (CurrentItems/float(TotalItems)*100.0)); + int fd = _config->FindI("APT::Status-Fd",-1); if(fd > 0) { @@ -900,13 +982,11 @@ bool pkgAcquireStatus::Pulse(pkgAcquire *Owner) else snprintf(msg,sizeof(msg), _("Retrieving file %li of %li"), i, TotalItems); - - // build the status str status << "dlstatus:" << i - << ":" << (CurrentBytes/float(TotalBytes)*100.0) - << ":" << msg - << endl; + << ":" << std::setprecision(3) << Percent + << ":" << msg + << endl; std::string const dlstatus = status.str(); FileFd::Write(fd, dlstatus.c_str(), dlstatus.size()); @@ -961,3 +1041,7 @@ void pkgAcquireStatus::Fetched(unsigned long long Size,unsigned long long Resume FetchedBytes += Size - Resume; } /*}}}*/ + +APT_CONST pkgAcquire::UriIterator::~UriIterator() {} +APT_CONST pkgAcquire::MethodConfig::~MethodConfig() {} +APT_CONST pkgAcquireStatus::~pkgAcquireStatus() {} diff --git a/apt-pkg/acquire.h b/apt-pkg/acquire.h index ef16d8556..fc90624e1 100644 --- a/apt-pkg/acquire.h +++ b/apt-pkg/acquire.h @@ -111,6 +111,7 @@ class pkgAcquire struct MethodConfig; struct ItemDesc; friend class Item; + friend class pkgAcqMetaBase; friend class Queue; typedef std::vector<Item *>::iterator ItemIterator; @@ -351,14 +352,24 @@ class pkgAcquire * long as the pkgAcquire object does. * \param Lock defines a lock file that should be acquired to ensure * only one Acquire class is in action at the time or an empty string - * if no lock file should be used. + * if no lock file should be used. If set also all needed directories + * will be created. */ - bool Setup(pkgAcquireStatus *Progress = NULL, std::string const &Lock = ""); + APT_DEPRECATED bool Setup(pkgAcquireStatus *Progress = NULL, std::string const &Lock = ""); void SetLog(pkgAcquireStatus *Progress) { Log = Progress; } + /** \brief acquire lock and perform directory setup + * + * \param Lock defines a lock file that should be acquired to ensure + * only one Acquire class is in action at the time or an empty string + * if no lock file should be used. If set also all needed directories + * will be created and setup. + */ + bool GetLock(std::string const &Lock); + /** \brief Construct a new pkgAcquire. */ - pkgAcquire(pkgAcquireStatus *Log) APT_DEPRECATED; + pkgAcquire(pkgAcquireStatus *Log); pkgAcquire(); /** \brief Destroy this pkgAcquire object. @@ -368,6 +379,8 @@ class pkgAcquire */ virtual ~pkgAcquire(); + private: + APT_HIDDEN void Initialize(); }; /** \brief Represents a single download source from which an item @@ -585,7 +598,7 @@ class pkgAcquire::UriIterator * * \param Q The queue over which this UriIterator should iterate. 
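The Pulse() hunk above introduces a persistent Percent estimate that blends byte progress with item progress instead of relying on bytes alone. A standalone restatement of the weighting, with illustrative names that are not part of libapt-pkg:

   // 0 while Release files are still expected (sizes are not trustworthy yet),
   // otherwise a 0-100 estimate: 80% by bytes fetched, 20% by items finished.
   static double EstimatePercent(unsigned long long CurrentBytes, unsigned long long TotalBytes,
                                 unsigned long CurrentItems, unsigned long TotalItems,
                                 bool UnfetchedReleaseFiles)
   {
      if (TotalBytes == 0 || TotalItems == 0 || UnfetchedReleaseFiles)
         return 0;
      return 0.8 * (CurrentBytes / double(TotalBytes) * 100.0)
           + 0.2 * (CurrentItems / double(TotalItems) * 100.0);
   }

The same value is what now ends up, with three digits of precision, in the dlstatus lines written to APT::Status-Fd.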
*/ - UriIterator(pkgAcquire::Queue *Q) : CurQ(Q), CurItem(0) + UriIterator(pkgAcquire::Queue *Q) : d(NULL), CurQ(Q), CurItem(0) { while (CurItem == 0 && CurQ != 0) { @@ -593,7 +606,7 @@ class pkgAcquire::UriIterator CurQ = CurQ->Next; } } - virtual ~UriIterator() {}; + virtual ~UriIterator(); }; /*}}}*/ /** \brief Information about the properties of a single acquire method. {{{*/ @@ -651,8 +664,7 @@ struct pkgAcquire::MethodConfig */ MethodConfig(); - /* \brief Destructor, empty currently */ - virtual ~MethodConfig() {}; + virtual ~MethodConfig(); }; /*}}}*/ /** \brief A monitor object for downloads controlled by the pkgAcquire class. {{{ @@ -714,6 +726,10 @@ class pkgAcquireStatus /** \brief The number of items that have been successfully downloaded. */ unsigned long CurrentItems; + /** \brief The estimated percentage of the download (0-100) + */ + double Percent; + public: /** \brief If \b true, the download scheduler should call Pulse() @@ -794,7 +810,7 @@ class pkgAcquireStatus /** \brief Initialize all counters to 0 and the time to the current time. */ pkgAcquireStatus(); - virtual ~pkgAcquireStatus() {}; + virtual ~pkgAcquireStatus(); }; /*}}}*/ /** @} */ diff --git a/apt-pkg/algorithms.cc b/apt-pkg/algorithms.cc index 608ec7fce..adbec82f7 100644 --- a/apt-pkg/algorithms.cc +++ b/apt-pkg/algorithms.cc @@ -468,7 +468,7 @@ void pkgProblemResolver::MakeScores() if (D->Version != 0) { pkgCache::VerIterator const IV = Cache[T].InstVerIter(Cache); - if (IV.end() == true || D.IsSatisfied(IV) != D.IsNegative()) + if (IV.end() == true || D.IsSatisfied(IV) == false) continue; } Scores[T->ID] += DepMap[D->Type]; @@ -640,13 +640,17 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg) // ProblemResolver::Resolve - calls a resolver to fix the situation /*{{{*/ // --------------------------------------------------------------------- /* */ +#if APT_PKG_ABI < 413 bool pkgProblemResolver::Resolve(bool BrokenFix) { + return Resolve(BrokenFix, NULL); +} +#endif +bool pkgProblemResolver::Resolve(bool BrokenFix, OpProgress * const Progress) +{ std::string const solver = _config->Find("APT::Solver", "internal"); - if (solver != "internal") { - OpTextProgress Prog(*_config); - return EDSP::ResolveExternal(solver.c_str(), Cache, false, false, false, &Prog); - } + if (solver != "internal") + return EDSP::ResolveExternal(solver.c_str(), Cache, false, false, false, Progress); return ResolveInternal(BrokenFix); } /*}}}*/ @@ -1140,13 +1144,17 @@ bool pkgProblemResolver::InstOrNewPolicyBroken(pkgCache::PkgIterator I) /* This is the work horse of the soft upgrade routine. It is very gental in that it does not install or remove any packages. It is assumed that the system was non-broken previously. 
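pkgProblemResolver::Resolve() gains an OpProgress parameter (kept ABI compatible through the old single-argument forwarder), so an external EDSP solver reports through the caller's progress object instead of a locally constructed OpTextProgress. A hedged usage sketch; the FixBroken wrapper and the implicit pkgCacheFile-to-pkgDepCache conversion are the usual libapt-pkg idioms, not part of this diff:

   #include <apt-pkg/algorithms.h>
   #include <apt-pkg/cachefile.h>
   #include <apt-pkg/configuration.h>
   #include <apt-pkg/progress.h>

   static bool FixBroken(pkgCacheFile &Cache)
   {
      OpTextProgress Progress(*_config);
      pkgProblemResolver Fix(Cache);
      // The second argument is new; with APT_PKG_ABI >= 413 it is a default
      // argument, on older ABIs it is the separate two-argument overload.
      return Fix.Resolve(true, &Progress);
   }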
*/ +#if APT_PKG_ABI < 413 bool pkgProblemResolver::ResolveByKeep() { + return ResolveByKeep(NULL); +} +#endif +bool pkgProblemResolver::ResolveByKeep(OpProgress * const Progress) +{ std::string const solver = _config->Find("APT::Solver", "internal"); - if (solver != "internal") { - OpTextProgress Prog(*_config); - return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, &Prog); - } + if (solver != "internal") + return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, Progress); return ResolveByKeepInternal(); } /*}}}*/ diff --git a/apt-pkg/algorithms.h b/apt-pkg/algorithms.h index f35bd9a13..2ac28c0d7 100644 --- a/apt-pkg/algorithms.h +++ b/apt-pkg/algorithms.h @@ -82,9 +82,9 @@ class pkgSimulate : public pkgPackageManager /*{{{*/ virtual bool Remove(PkgIterator Pkg,bool Purge); private: - void ShortBreaks(); - void Describe(PkgIterator iPkg,std::ostream &out,bool Current,bool Candidate); - + APT_HIDDEN void ShortBreaks(); + APT_HIDDEN void Describe(PkgIterator iPkg,std::ostream &out,bool Current,bool Candidate); + public: pkgSimulate(pkgDepCache *Cache); @@ -114,7 +114,7 @@ class pkgProblemResolver /*{{{*/ // Sort stuff static pkgProblemResolver *This; - static int ScoreSort(const void *a,const void *b) APT_PURE; + APT_HIDDEN static int ScoreSort(const void *a,const void *b) APT_PURE; struct PackageKill { @@ -122,12 +122,12 @@ class pkgProblemResolver /*{{{*/ DepIterator Dep; }; - void MakeScores(); - bool DoUpgrade(pkgCache::PkgIterator Pkg); + APT_HIDDEN void MakeScores(); + APT_HIDDEN bool DoUpgrade(pkgCache::PkgIterator Pkg); + + APT_HIDDEN bool ResolveInternal(bool const BrokenFix = false); + APT_HIDDEN bool ResolveByKeepInternal(); - bool ResolveInternal(bool const BrokenFix = false); - bool ResolveByKeepInternal(); - protected: bool InstOrNewPolicyBroken(pkgCache::PkgIterator Pkg); @@ -136,12 +136,22 @@ class pkgProblemResolver /*{{{*/ inline void Protect(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] |= Protected; Cache.MarkProtected(Pkg);}; inline void Remove(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] |= ToRemove;}; inline void Clear(pkgCache::PkgIterator Pkg) {Flags[Pkg->ID] &= ~(Protected | ToRemove);}; - - // Try to intelligently resolve problems by installing and removing packages + + // Try to intelligently resolve problems by installing and removing packages +#if APT_PKG_ABI >= 413 + bool Resolve(bool BrokenFix = false, OpProgress * const Progress = NULL); +#else bool Resolve(bool BrokenFix = false); - + bool Resolve(bool BrokenFix, OpProgress * const Progress); +#endif + // Try to resolve problems only by using keep +#if APT_PKG_ABI >= 413 + bool ResolveByKeep(OpProgress * const Progress = NULL); +#else bool ResolveByKeep(); + bool ResolveByKeep(OpProgress * const Progress); +#endif APT_DEPRECATED void InstallProtect(); diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc index 9982759c6..01b85a74e 100644 --- a/apt-pkg/aptconfiguration.cc +++ b/apt-pkg/aptconfiguration.cc @@ -32,6 +32,35 @@ #include <apti18n.h> /*}}}*/ namespace APT { +// setDefaultConfigurationForCompressors /*{{{*/ +static void setDefaultConfigurationForCompressors() { + // Set default application paths to check for optional compression types + _config->CndSet("Dir::Bin::bzip2", "/bin/bzip2"); + _config->CndSet("Dir::Bin::xz", "/usr/bin/xz"); + if (FileExists(_config->FindFile("Dir::Bin::xz")) == true) { + _config->Set("Dir::Bin::lzma", _config->FindFile("Dir::Bin::xz")); + _config->Set("APT::Compressor::lzma::Binary", "xz"); + if 
(_config->Exists("APT::Compressor::lzma::CompressArg") == false) { + _config->Set("APT::Compressor::lzma::CompressArg::", "--format=lzma"); + _config->Set("APT::Compressor::lzma::CompressArg::", "-9"); + } + if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) { + _config->Set("APT::Compressor::lzma::UncompressArg::", "--format=lzma"); + _config->Set("APT::Compressor::lzma::UncompressArg::", "-d"); + } + } else { + _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma"); + if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) { + _config->Set("APT::Compressor::lzma::CompressArg::", "--suffix="); + _config->Set("APT::Compressor::lzma::CompressArg::", "-9"); + } + if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) { + _config->Set("APT::Compressor::lzma::UncompressArg::", "--suffix="); + _config->Set("APT::Compressor::lzma::UncompressArg::", "-d"); + } + } +} + /*}}}*/ // getCompressionTypes - Return Vector of usable compressiontypes /*{{{*/ // --------------------------------------------------------------------- /* return a vector of compression types in the preferred order. */ @@ -402,35 +431,6 @@ bool Configuration::checkArchitecture(std::string const &Arch) { return (std::find(archs.begin(), archs.end(), Arch) != archs.end()); } /*}}}*/ -// setDefaultConfigurationForCompressors /*{{{*/ -void Configuration::setDefaultConfigurationForCompressors() { - // Set default application paths to check for optional compression types - _config->CndSet("Dir::Bin::bzip2", "/bin/bzip2"); - _config->CndSet("Dir::Bin::xz", "/usr/bin/xz"); - if (FileExists(_config->FindFile("Dir::Bin::xz")) == true) { - _config->Set("Dir::Bin::lzma", _config->FindFile("Dir::Bin::xz")); - _config->Set("APT::Compressor::lzma::Binary", "xz"); - if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) { - _config->Set("APT::Compressor::lzma::CompressArg::", "--format=lzma"); - _config->Set("APT::Compressor::lzma::CompressArg::", "-9"); - } - if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) { - _config->Set("APT::Compressor::lzma::UncompressArg::", "--format=lzma"); - _config->Set("APT::Compressor::lzma::UncompressArg::", "-d"); - } - } else { - _config->CndSet("Dir::Bin::lzma", "/usr/bin/lzma"); - if (_config->Exists("APT::Compressor::lzma::CompressArg") == false) { - _config->Set("APT::Compressor::lzma::CompressArg::", "--suffix="); - _config->Set("APT::Compressor::lzma::CompressArg::", "-9"); - } - if (_config->Exists("APT::Compressor::lzma::UncompressArg") == false) { - _config->Set("APT::Compressor::lzma::UncompressArg::", "--suffix="); - _config->Set("APT::Compressor::lzma::UncompressArg::", "-d"); - } - } -} - /*}}}*/ // getCompressors - Return Vector of usealbe compressors /*{{{*/ // --------------------------------------------------------------------- /* return a vector of compressors used by apt-ftparchive in the @@ -540,7 +540,7 @@ std::string const Configuration::getBuildProfilesString() { return ""; std::vector<std::string>::const_iterator p = profiles.begin(); std::string list = *p; - for (; p != profiles.end(); ++p) + for (++p; p != profiles.end(); ++p) list.append(",").append(*p); return list; } diff --git a/apt-pkg/aptconfiguration.h b/apt-pkg/aptconfiguration.h index dfed194ae..c7b8d2d73 100644 --- a/apt-pkg/aptconfiguration.h +++ b/apt-pkg/aptconfiguration.h @@ -123,9 +123,6 @@ public: /*{{{*/ /** \return Return a comma-separated list of enabled build profile specifications */ std::string static const getBuildProfilesString(); 
/*}}}*/ - private: /*{{{*/ - void static setDefaultConfigurationForCompressors(); - /*}}}*/ }; /*}}}*/ } diff --git a/apt-pkg/cachefile.cc b/apt-pkg/cachefile.cc index 0fd40106f..ea3d45480 100644 --- a/apt-pkg/cachefile.cc +++ b/apt-pkg/cachefile.cc @@ -143,6 +143,9 @@ bool pkgCacheFile::BuildDepCache(OpProgress *Progress) if (DCache != NULL) return true; + if (BuildPolicy(Progress) == false) + return false; + DCache = new pkgDepCache(Cache,Policy); if (_error->PendingError() == true) return false; diff --git a/apt-pkg/cachefilter.cc b/apt-pkg/cachefilter.cc index e388f2450..4362f43e3 100644 --- a/apt-pkg/cachefilter.cc +++ b/apt-pkg/cachefilter.cc @@ -6,6 +6,7 @@ // Include Files /*{{{*/ #include <config.h> +#include <apt-pkg/cachefile.h> #include <apt-pkg/cachefilter.h> #include <apt-pkg/error.h> #include <apt-pkg/pkgcache.h> @@ -22,7 +23,11 @@ /*}}}*/ namespace APT { namespace CacheFilter { -PackageNameMatchesRegEx::PackageNameMatchesRegEx(std::string const &Pattern) : d(NULL) {/*{{{*/ +APT_CONST Matcher::~Matcher() {} +APT_CONST PackageMatcher::~PackageMatcher() {} + +// Name matches RegEx /*{{{*/ +PackageNameMatchesRegEx::PackageNameMatchesRegEx(std::string const &Pattern) { pattern = new regex_t; int const Res = regcomp(pattern, Pattern.c_str(), REG_EXTENDED | REG_ICASE | REG_NOSUB); if (Res == 0) @@ -34,41 +39,36 @@ PackageNameMatchesRegEx::PackageNameMatchesRegEx(std::string const &Pattern) : d regerror(Res, pattern, Error, sizeof(Error)); _error->Error(_("Regex compilation error - %s"), Error); } - /*}}}*/ -bool PackageNameMatchesRegEx::operator() (pkgCache::PkgIterator const &Pkg) {/*{{{*/ +bool PackageNameMatchesRegEx::operator() (pkgCache::PkgIterator const &Pkg) { if (unlikely(pattern == NULL)) return false; else return regexec(pattern, Pkg.Name(), 0, 0, 0) == 0; } - /*}}}*/ -bool PackageNameMatchesRegEx::operator() (pkgCache::GrpIterator const &Grp) {/*{{{*/ +bool PackageNameMatchesRegEx::operator() (pkgCache::GrpIterator const &Grp) { if (unlikely(pattern == NULL)) return false; else return regexec(pattern, Grp.Name(), 0, 0, 0) == 0; } - /*}}}*/ -PackageNameMatchesRegEx::~PackageNameMatchesRegEx() { /*{{{*/ +PackageNameMatchesRegEx::~PackageNameMatchesRegEx() { if (pattern == NULL) return; regfree(pattern); delete pattern; } /*}}}*/ - -// Fnmatch support /*{{{*/ -//---------------------------------------------------------------------- -bool PackageNameMatchesFnmatch::operator() (pkgCache::PkgIterator const &Pkg) {/*{{{*/ +// Name matches Fnmatch /*{{{*/ +PackageNameMatchesFnmatch::PackageNameMatchesFnmatch(std::string const &Pattern) : + Pattern(Pattern) {} +bool PackageNameMatchesFnmatch::operator() (pkgCache::PkgIterator const &Pkg) { return fnmatch(Pattern.c_str(), Pkg.Name(), FNM_CASEFOLD) == 0; } - /*}}}*/ -bool PackageNameMatchesFnmatch::operator() (pkgCache::GrpIterator const &Grp) {/*{{{*/ +bool PackageNameMatchesFnmatch::operator() (pkgCache::GrpIterator const &Grp) { return fnmatch(Pattern.c_str(), Grp.Name(), FNM_CASEFOLD) == 0; } /*}}}*/ - -// CompleteArch to <kernel>-<cpu> tuple /*{{{*/ +// Architecture matches <kernel>-<cpu> specification /*{{{*/ //---------------------------------------------------------------------- /* The complete architecture, consisting of <kernel>-<cpu>. 
*/ static std::string CompleteArch(std::string const &arch) { @@ -82,12 +82,10 @@ static std::string CompleteArch(std::string const &arch) { else if (arch == "any") return "*-*"; else return "linux-" + arch; } - /*}}}*/ -PackageArchitectureMatchesSpecification::PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern) :/*{{{*/ - literal(pattern), complete(CompleteArch(pattern)), isPattern(isPattern), d(NULL) { +PackageArchitectureMatchesSpecification::PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern) : + literal(pattern), complete(CompleteArch(pattern)), isPattern(isPattern) { } - /*}}}*/ -bool PackageArchitectureMatchesSpecification::operator() (char const * const &arch) {/*{{{*/ +bool PackageArchitectureMatchesSpecification::operator() (char const * const &arch) { if (strcmp(literal.c_str(), arch) == 0 || strcmp(complete.c_str(), arch) == 0) return true; @@ -96,16 +94,112 @@ bool PackageArchitectureMatchesSpecification::operator() (char const * const &ar return fnmatch(complete.c_str(), pkgarch.c_str(), 0) == 0; return fnmatch(pkgarch.c_str(), complete.c_str(), 0) == 0; } - /*}}}*/ -bool PackageArchitectureMatchesSpecification::operator() (pkgCache::PkgIterator const &Pkg) {/*{{{*/ +bool PackageArchitectureMatchesSpecification::operator() (pkgCache::PkgIterator const &Pkg) { return (*this)(Pkg.Arch()); } +PackageArchitectureMatchesSpecification::~PackageArchitectureMatchesSpecification() { +} /*}}}*/ -bool PackageArchitectureMatchesSpecification::operator() (pkgCache::VerIterator const &Ver) {/*{{{*/ - return (*this)(Ver.ParentPkg()); +// Package is new install /*{{{*/ +PackageIsNewInstall::PackageIsNewInstall(pkgCacheFile * const Cache) : Cache(Cache) {} +APT_PURE bool PackageIsNewInstall::operator() (pkgCache::PkgIterator const &Pkg) { + return (*Cache)[Pkg].NewInstall(); } +PackageIsNewInstall::~PackageIsNewInstall() {} /*}}}*/ -PackageArchitectureMatchesSpecification::~PackageArchitectureMatchesSpecification() { /*{{{*/ +// Generica like True, False, NOT, AND, OR /*{{{*/ +APT_CONST bool TrueMatcher::operator() (pkgCache::PkgIterator const &) { return true; } +APT_CONST bool TrueMatcher::operator() (pkgCache::GrpIterator const &) { return true; } +APT_CONST bool TrueMatcher::operator() (pkgCache::VerIterator const &) { return true; } + +APT_CONST bool FalseMatcher::operator() (pkgCache::PkgIterator const &) { return false; } +APT_CONST bool FalseMatcher::operator() (pkgCache::GrpIterator const &) { return false; } +APT_CONST bool FalseMatcher::operator() (pkgCache::VerIterator const &) { return false; } + +NOTMatcher::NOTMatcher(Matcher * const matcher) : matcher(matcher) {} +bool NOTMatcher::operator() (pkgCache::PkgIterator const &Pkg) { return ! (*matcher)(Pkg); } +bool NOTMatcher::operator() (pkgCache::GrpIterator const &Grp) { return ! (*matcher)(Grp); } +bool NOTMatcher::operator() (pkgCache::VerIterator const &Ver) { return ! 
(*matcher)(Ver); } +NOTMatcher::~NOTMatcher() { delete matcher; } + +ANDMatcher::ANDMatcher() {} +ANDMatcher::ANDMatcher(Matcher * const matcher1) { + AND(matcher1); +} +ANDMatcher::ANDMatcher(Matcher * const matcher1, Matcher * const matcher2) { + AND(matcher1).AND(matcher2); +} +ANDMatcher::ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3) { + AND(matcher1).AND(matcher2).AND(matcher3); +} +ANDMatcher::ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4) { + AND(matcher1).AND(matcher2).AND(matcher3).AND(matcher4); +} +ANDMatcher::ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4, Matcher * const matcher5) { + AND(matcher1).AND(matcher2).AND(matcher3).AND(matcher4).AND(matcher5); +} +ANDMatcher& ANDMatcher::AND(Matcher * const matcher) { matchers.push_back(matcher); return *this; } +bool ANDMatcher::operator() (pkgCache::PkgIterator const &Pkg) { + for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M) + if ((**M)(Pkg) == false) + return false; + return true; +} +bool ANDMatcher::operator() (pkgCache::GrpIterator const &Grp) { + for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M) + if ((**M)(Grp) == false) + return false; + return true; +} +bool ANDMatcher::operator() (pkgCache::VerIterator const &Ver) { + for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M) + if ((**M)(Ver) == false) + return false; + return true; +} +ANDMatcher::~ANDMatcher() { + for (std::vector<Matcher *>::iterator M = matchers.begin(); M != matchers.end(); ++M) + delete *M; +} + +ORMatcher::ORMatcher() {} +ORMatcher::ORMatcher(Matcher * const matcher1) { + OR(matcher1); +} +ORMatcher::ORMatcher(Matcher * const matcher1, Matcher * const matcher2) { + OR(matcher1).OR(matcher2); +} +ORMatcher::ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3) { + OR(matcher1).OR(matcher2).OR(matcher3); +} +ORMatcher::ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4) { + OR(matcher1).OR(matcher2).OR(matcher3).OR(matcher4); +} +ORMatcher::ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4, Matcher * const matcher5) { + OR(matcher1).OR(matcher2).OR(matcher3).OR(matcher4).OR(matcher5); +} +ORMatcher& ORMatcher::OR(Matcher * const matcher) { matchers.push_back(matcher); return *this; } +bool ORMatcher::operator() (pkgCache::PkgIterator const &Pkg) { + for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M) + if ((**M)(Pkg) == true) + return true; + return false; +} +bool ORMatcher::operator() (pkgCache::GrpIterator const &Grp) { + for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M) + if ((**M)(Grp) == true) + return true; + return false; +} +bool ORMatcher::operator() (pkgCache::VerIterator const &Ver) { + for (std::vector<Matcher *>::const_iterator M = matchers.begin(); M != matchers.end(); ++M) + if ((**M)(Ver) == true) + return true; + return false; +} +ORMatcher::~ORMatcher() { + for (std::vector<Matcher *>::iterator M = matchers.begin(); M != matchers.end(); ++M) + delete *M; } /*}}}*/ diff --git a/apt-pkg/cachefilter.h b/apt-pkg/cachefilter.h index 49d2855f5..b4697b773 100644 --- a/apt-pkg/cachefilter.h +++ 
b/apt-pkg/cachefilter.h @@ -10,85 +10,90 @@ #include <apt-pkg/cacheiterators.h> #include <string> +#include <vector> #include <regex.h> + +class pkgCacheFile; /*}}}*/ namespace APT { namespace CacheFilter { -#define PACKAGE_MATCHER_ABI_COMPAT 1 -#ifdef PACKAGE_MATCHER_ABI_COMPAT - -// PackageNameMatchesRegEx /*{{{*/ -class PackageNameMatchesRegEx { - /** \brief dpointer placeholder (for later in case we need it) */ - void *d; - regex_t* pattern; +class Matcher { public: - PackageNameMatchesRegEx(std::string const &Pattern); - bool operator() (pkgCache::PkgIterator const &Pkg); - bool operator() (pkgCache::GrpIterator const &Grp); - ~PackageNameMatchesRegEx(); + virtual bool operator() (pkgCache::PkgIterator const &/*Pkg*/) = 0; + virtual bool operator() (pkgCache::GrpIterator const &/*Grp*/) = 0; + virtual bool operator() (pkgCache::VerIterator const &/*Ver*/) = 0; + virtual ~Matcher(); }; - /*}}}*/ -// PackageNameMatchesFnmatch /*{{{*/ - class PackageNameMatchesFnmatch { - /** \brief dpointer placeholder (for later in case we need it) */ - void *d; - const std::string Pattern; + +class PackageMatcher : public Matcher { public: - PackageNameMatchesFnmatch(std::string const &Pattern) - : Pattern(Pattern) {}; - bool operator() (pkgCache::PkgIterator const &Pkg); - bool operator() (pkgCache::GrpIterator const &Grp); - ~PackageNameMatchesFnmatch() {}; + virtual bool operator() (pkgCache::PkgIterator const &Pkg) = 0; + virtual bool operator() (pkgCache::VerIterator const &Ver) { return (*this)(Ver.ParentPkg()); } + virtual bool operator() (pkgCache::GrpIterator const &/*Grp*/) { return false; } + virtual ~PackageMatcher(); }; - /*}}}*/ -// PackageArchitectureMatchesSpecification /*{{{*/ -/** \class PackageArchitectureMatchesSpecification - \brief matching against architecture specification strings - - The strings are of the format \<kernel\>-\<cpu\> where either component, - or the whole string, can be the wildcard "any" as defined in - debian-policy §11.1 "Architecture specification strings". 
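PackageArchitectureMatchesSpecification is folded into the new PackageMatcher hierarchy while keeping its behaviour: it expands both sides to <kernel>-<cpu> and fnmatch()es them, so wildcard specifications from debian-policy §11.1 work as expected. A hedged sketch; the ArchMatches wrapper and the example results are illustrative, not taken from this diff:

   #include <apt-pkg/cachefilter.h>
   #include <string>

   // spec is an architecture specification such as "linux-any" or "any-amd64";
   // arch is a concrete architecture such as "amd64".
   static bool ArchMatches(std::string const &spec, char const *arch)
   {
      APT::CacheFilter::PackageArchitectureMatchesSpecification matcher(spec, true);
      return matcher(arch);
   }

   // Expected: ArchMatches("linux-any", "amd64") and
   //           ArchMatches("any-amd64", "amd64") both evaluate to true.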
- Examples: i386, mipsel, linux-any, any-amd64, any */ -class PackageArchitectureMatchesSpecification { - std::string literal; - std::string complete; - bool isPattern; - /** \brief dpointer placeholder (for later in case we need it) */ - void *d; +// Generica like True, False, NOT, AND, OR /*{{{*/ +class TrueMatcher : public Matcher { public: - /** \brief matching against architecture specification strings - * - * @param pattern is the architecture specification string - * @param isPattern defines if the given \b pattern is a - * architecture specification pattern to match others against - * or if it is the fixed string and matched against patterns - */ - PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern = true); - bool operator() (char const * const &arch); - bool operator() (pkgCache::PkgIterator const &Pkg); - bool operator() (pkgCache::VerIterator const &Ver); - ~PackageArchitectureMatchesSpecification(); + virtual bool operator() (pkgCache::PkgIterator const &Pkg); + virtual bool operator() (pkgCache::GrpIterator const &Grp); + virtual bool operator() (pkgCache::VerIterator const &Ver); }; -#else +class FalseMatcher : public Matcher { +public: + virtual bool operator() (pkgCache::PkgIterator const &Pkg); + virtual bool operator() (pkgCache::GrpIterator const &Grp); + virtual bool operator() (pkgCache::VerIterator const &Ver); +}; -class PackageMatcher { - public: - virtual bool operator() (pkgCache::PkgIterator const &Pkg) { return false; }; - virtual bool operator() (pkgCache::GrpIterator const &Grp) { return false; }; - virtual bool operator() (pkgCache::VerIterator const &Ver) { return false; }; - - virtual ~PackageMatcher() {}; +class NOTMatcher : public Matcher { + Matcher * const matcher; +public: + NOTMatcher(Matcher * const matcher); + virtual bool operator() (pkgCache::PkgIterator const &Pkg); + virtual bool operator() (pkgCache::GrpIterator const &Grp); + virtual bool operator() (pkgCache::VerIterator const &Ver); + virtual ~NOTMatcher(); }; -// PackageNameMatchesRegEx /*{{{*/ -class PackageNameMatchesRegEx : public PackageMatcher { - /** \brief dpointer placeholder (for later in case we need it) */ - void *d; +class ANDMatcher : public Matcher { + std::vector<Matcher *> matchers; +public: + // 5 ought to be enough for everybody… c++11 variadic templates would be nice + ANDMatcher(); + ANDMatcher(Matcher * const matcher1); + ANDMatcher(Matcher * const matcher1, Matcher * const matcher2); + ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3); + ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4); + ANDMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4, Matcher * const matcher5); + ANDMatcher& AND(Matcher * const matcher); + virtual bool operator() (pkgCache::PkgIterator const &Pkg); + virtual bool operator() (pkgCache::GrpIterator const &Grp); + virtual bool operator() (pkgCache::VerIterator const &Ver); + virtual ~ANDMatcher(); +}; +class ORMatcher : public Matcher { + std::vector<Matcher *> matchers; +public: + // 5 ought to be enough for everybody… c++11 variadic templates would be nice + ORMatcher(); + ORMatcher(Matcher * const matcher1); + ORMatcher(Matcher * const matcher1, Matcher * const matcher2); + ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3); + ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const 
matcher3, Matcher * const matcher4); + ORMatcher(Matcher * const matcher1, Matcher * const matcher2, Matcher * const matcher3, Matcher * const matcher4, Matcher * const matcher5); + ORMatcher& OR(Matcher * const matcher); + virtual bool operator() (pkgCache::PkgIterator const &Pkg); + virtual bool operator() (pkgCache::GrpIterator const &Grp); + virtual bool operator() (pkgCache::VerIterator const &Ver); + virtual ~ORMatcher(); +}; + /*}}}*/ +class PackageNameMatchesRegEx : public PackageMatcher { /*{{{*/ regex_t* pattern; public: PackageNameMatchesRegEx(std::string const &Pattern); @@ -97,20 +102,16 @@ public: virtual ~PackageNameMatchesRegEx(); }; /*}}}*/ -// PackageNameMatchesFnmatch /*{{{*/ - class PackageNameMatchesFnmatch : public PackageMatcher{ - /** \brief dpointer placeholder (for later in case we need it) */ - void *d; - const std::string Pattern; +class PackageNameMatchesFnmatch : public PackageMatcher { /*{{{*/ + const std::string Pattern; public: - PackageNameMatchesFnmatch(std::string const &Pattern) - : Pattern(Pattern) {}; - virtual bool operator() (pkgCache::PkgIterator const &Pkg); + PackageNameMatchesFnmatch(std::string const &Pattern); + virtual bool operator() (pkgCache::PkgIterator const &Pkg); virtual bool operator() (pkgCache::GrpIterator const &Grp); virtual ~PackageNameMatchesFnmatch() {}; }; /*}}}*/ -// PackageArchitectureMatchesSpecification /*{{{*/ +class PackageArchitectureMatchesSpecification : public PackageMatcher { /*{{{*/ /** \class PackageArchitectureMatchesSpecification \brief matching against architecture specification strings @@ -119,12 +120,9 @@ public: debian-policy §11.1 "Architecture specification strings". Examples: i386, mipsel, linux-any, any-amd64, any */ -class PackageArchitectureMatchesSpecification : public PackageMatcher { std::string literal; std::string complete; bool isPattern; - /** \brief dpointer placeholder (for later in case we need it) */ - void *d; public: /** \brief matching against architecture specification strings * @@ -136,11 +134,18 @@ public: PackageArchitectureMatchesSpecification(std::string const &pattern, bool const isPattern = true); bool operator() (char const * const &arch); virtual bool operator() (pkgCache::PkgIterator const &Pkg); - virtual bool operator() (pkgCache::VerIterator const &Ver); virtual ~PackageArchitectureMatchesSpecification(); }; -#endif /*}}}*/ +class PackageIsNewInstall : public PackageMatcher { /*{{{*/ + pkgCacheFile * const Cache; +public: + PackageIsNewInstall(pkgCacheFile * const Cache); + virtual bool operator() (pkgCache::PkgIterator const &Pkg); + virtual ~PackageIsNewInstall(); +}; + /*}}}*/ + } } #endif diff --git a/apt-pkg/cacheiterators.h b/apt-pkg/cacheiterators.h index 2fdf8404d..fe798799c 100644 --- a/apt-pkg/cacheiterators.h +++ b/apt-pkg/cacheiterators.h @@ -159,8 +159,14 @@ class pkgCache::PkgIterator: public Iterator<Package, PkgIterator> { enum OkState {NeedsNothing,NeedsUnpack,NeedsConfigure}; // Accessors - inline const char *Name() const {return S->Name == 0?0:Owner->StrP + S->Name;} - inline const char *Section() const {return S->Section == 0?0:Owner->StrP + S->Section;} + inline const char *Name() const { return Group().Name(); } + // Versions have sections - and packages can have different versions with different sections + // so this interface is broken by design. Run as fast as you can to Version.Section(). 
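cachefilter.h now also exposes a small combinator set (TrueMatcher, FalseMatcher, NOTMatcher, ANDMatcher, ORMatcher) over the common Matcher base; the combinators take ownership of the heap-allocated matchers passed to them and delete them in their destructors. A hedged composition sketch, with an illustrative wrapper name:

   #include <apt-pkg/cachefilter.h>

   // "name matches the glob AND the architecture satisfies any-i386";
   // both children must be new-allocated because ~ANDMatcher deletes them.
   static bool IsWantedPackage(pkgCache::PkgIterator const &Pkg)
   {
      APT::CacheFilter::ANDMatcher filter(
         new APT::CacheFilter::PackageNameMatchesFnmatch("libapt-*"),
         new APT::CacheFilter::PackageArchitectureMatchesSpecification("any-i386"));
      return filter(Pkg);
   }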
+ APT_DEPRECATED inline const char *Section() const { + APT_IGNORE_DEPRECATED_PUSH + return S->Section == 0?0:Owner->StrP + S->Section; + APT_IGNORE_DEPRECATED_POP + } inline bool Purge() const {return S->CurrentState == pkgCache::State::Purge || (S->CurrentVer == 0 && S->CurrentState == pkgCache::State::NotInstalled);} inline const char *Arch() const {return S->Arch == 0?0:Owner->StrP + S->Arch;} @@ -211,6 +217,14 @@ class pkgCache::VerIterator : public Iterator<Version, VerIterator> { // Accessors inline const char *VerStr() const {return S->VerStr == 0?0:Owner->StrP + S->VerStr;} inline const char *Section() const {return S->Section == 0?0:Owner->StrP + S->Section;} +#if APT_PKG_ABI >= 413 + /** \brief source package name this version comes from + Always contains the name, even if it is the same as the binary name */ + inline const char *SourcePkgName() const {return Owner->StrP + S->SourcePkgName;} + /** \brief source version this version comes from + Always contains the version string, even if it is the same as the binary version */ + inline const char *SourceVerStr() const {return Owner->StrP + S->SourceVerStr;} +#endif inline const char *Arch() const { if ((S->MultiArch & pkgCache::Version::All) == pkgCache::Version::All) return "all"; @@ -332,7 +346,7 @@ class pkgCache::PrvIterator : public Iterator<Provides, PrvIterator> { inline void operator ++() {operator ++(0);} // Accessors - inline const char *Name() const {return Owner->StrP + Owner->PkgP[S->ParentPkg].Name;} + inline const char *Name() const {return ParentPkg().Name();} inline const char *ProvideVersion() const {return S->ProvideVersion == 0?0:Owner->StrP + S->ProvideVersion;} inline PkgIterator ParentPkg() const {return PkgIterator(*Owner,Owner->PkgP + S->ParentPkg);} inline VerIterator OwnerVer() const {return VerIterator(*Owner,Owner->VerP + S->Version);} diff --git a/apt-pkg/cacheset.cc b/apt-pkg/cacheset.cc index 2ed6a96da..0ad99713a 100644 --- a/apt-pkg/cacheset.cc +++ b/apt-pkg/cacheset.cc @@ -24,6 +24,7 @@ #include <apt-pkg/depcache.h> #include <apt-pkg/macros.h> #include <apt-pkg/pkgcache.h> +#include <apt-pkg/fileutl.h> #include <stddef.h> #include <stdio.h> @@ -36,8 +37,23 @@ #include <apti18n.h> /*}}}*/ namespace APT { -// FromTask - Return all packages in the cache from a specific task /*{{{*/ -bool PackageContainerInterface::FromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) { + +// PackageFrom - selecting the appropriate method for package selection /*{{{*/ +bool CacheSetHelper::PackageFrom(enum PkgSelector const select, PackageContainerInterface * const pci, + pkgCacheFile &Cache, std::string const &pattern) { + switch (select) { + case UNKNOWN: return false; + case REGEX: return PackageFromRegEx(pci, Cache, pattern); + case TASK: return PackageFromTask(pci, Cache, pattern); + case FNMATCH: return PackageFromFnmatch(pci, Cache, pattern); + case PACKAGENAME: return PackageFromPackageName(pci, Cache, pattern); + case STRING: return PackageFromString(pci, Cache, pattern); + } + return false; +} + /*}}}*/ +// PackageFromTask - Return all packages in the cache from a specific task /*{{{*/ +bool CacheSetHelper::PackageFromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern) { size_t const archfound = pattern.find_last_of(':'); std::string arch = "native"; if (archfound != std::string::npos) { @@ -54,7 +70,7 @@ bool PackageContainerInterface::FromTask(PackageContainerInterface * const pci, bool const wasEmpty = 
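The cacheiterators.h hunk deprecates the per-package Section() accessor and, for APT_PKG_ABI >= 413, adds SourcePkgName()/SourceVerStr() on VerIterator, documented to always carry a value even when it equals the binary name or version. A hedged sketch printing the source information for every version of a package; ShowSources is an illustrative helper, not library API:

   #include <apt-pkg/pkgcache.h>
   #include <iostream>

   static void ShowSources(pkgCache::PkgIterator const &Pkg)
   {
      for (pkgCache::VerIterator Ver = Pkg.VersionList(); Ver.end() == false; ++Ver)
         std::cout << Pkg.FullName(true) << " " << Ver.VerStr()
                   << " built from " << Ver.SourcePkgName()
                   << " " << Ver.SourceVerStr() << std::endl;
   }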
pci->empty(); if (wasEmpty == true) - pci->setConstructor(TASK); + pci->setConstructor(CacheSetHelper::TASK); // get the records pkgRecords Recs(Cache); @@ -90,32 +106,32 @@ bool PackageContainerInterface::FromTask(PackageContainerInterface * const pci, continue; pci->insert(Pkg); - helper.showTaskSelection(Pkg, pattern); + showPackageSelection(Pkg, CacheSetHelper::TASK, pattern); found = true; } regfree(&Pattern); if (found == false) { - helper.canNotFindTask(pci, Cache, pattern); - pci->setConstructor(UNKNOWN); + canNotFindPackage(CacheSetHelper::TASK, pci, Cache, pattern); + pci->setConstructor(CacheSetHelper::UNKNOWN); return false; } - if (wasEmpty == false && pci->getConstructor() != UNKNOWN) - pci->setConstructor(UNKNOWN); + if (wasEmpty == false && pci->getConstructor() != CacheSetHelper::UNKNOWN) + pci->setConstructor(CacheSetHelper::UNKNOWN); return true; } /*}}}*/ -// FromRegEx - Return all packages in the cache matching a pattern /*{{{*/ -bool PackageContainerInterface::FromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) { +// PackageFromRegEx - Return all packages in the cache matching a pattern /*{{{*/ +bool CacheSetHelper::PackageFromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern) { static const char * const isregex = ".?+*|[^$"; if (pattern.find_first_of(isregex) == std::string::npos) return false; bool const wasEmpty = pci->empty(); if (wasEmpty == true) - pci->setConstructor(REGEX); + pci->setConstructor(CacheSetHelper::REGEX); size_t archfound = pattern.find_last_of(':'); std::string arch = "native"; @@ -149,28 +165,25 @@ bool PackageContainerInterface::FromRegEx(PackageContainerInterface * const pci, } pci->insert(Pkg); - helper.showRegExSelection(Pkg, pattern); + showPackageSelection(Pkg, CacheSetHelper::REGEX, pattern); found = true; } if (found == false) { - helper.canNotFindRegEx(pci, Cache, pattern); - pci->setConstructor(UNKNOWN); + canNotFindPackage(CacheSetHelper::REGEX, pci, Cache, pattern); + pci->setConstructor(CacheSetHelper::UNKNOWN); return false; } - if (wasEmpty == false && pci->getConstructor() != UNKNOWN) - pci->setConstructor(UNKNOWN); + if (wasEmpty == false && pci->getConstructor() != CacheSetHelper::UNKNOWN) + pci->setConstructor(CacheSetHelper::UNKNOWN); return true; } /*}}}*/ -// FromFnmatch - Returns the package defined by this fnmatch /*{{{*/ -bool -PackageContainerInterface::FromFnmatch(PackageContainerInterface * const pci, - pkgCacheFile &Cache, - std::string pattern, - CacheSetHelper &helper) +// PackageFromFnmatch - Returns the package defined by this fnmatch /*{{{*/ +bool CacheSetHelper::PackageFromFnmatch(PackageContainerInterface * const pci, + pkgCacheFile &Cache, std::string pattern) { static const char * const isfnmatch = ".?*[]!"; if (pattern.find_first_of(isfnmatch) == std::string::npos) @@ -178,7 +191,7 @@ PackageContainerInterface::FromFnmatch(PackageContainerInterface * const pci, bool const wasEmpty = pci->empty(); if (wasEmpty == true) - pci->setConstructor(FNMATCH); + pci->setConstructor(CacheSetHelper::FNMATCH); size_t archfound = pattern.find_last_of(':'); std::string arch = "native"; @@ -212,33 +225,25 @@ PackageContainerInterface::FromFnmatch(PackageContainerInterface * const pci, } pci->insert(Pkg); -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) - helper.showFnmatchSelection(Pkg, pattern); -#else - helper.showRegExSelection(Pkg, pattern); -#endif + showPackageSelection(Pkg, CacheSetHelper::FNMATCH, pattern); found = 
true; } if (found == false) { -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) - helper.canNotFindFnmatch(pci, Cache, pattern); -#else - helper.canNotFindRegEx(pci, Cache, pattern); -#endif - pci->setConstructor(UNKNOWN); + canNotFindPackage(CacheSetHelper::FNMATCH, pci, Cache, pattern); + pci->setConstructor(CacheSetHelper::UNKNOWN); return false; } - if (wasEmpty == false && pci->getConstructor() != UNKNOWN) - pci->setConstructor(UNKNOWN); + if (wasEmpty == false && pci->getConstructor() != CacheSetHelper::UNKNOWN) + pci->setConstructor(CacheSetHelper::UNKNOWN); return true; } /*}}}*/ -// FromName - Returns the package defined by this string /*{{{*/ -pkgCache::PkgIterator PackageContainerInterface::FromName(pkgCacheFile &Cache, - std::string const &str, CacheSetHelper &helper) { +// PackageFromName - Returns the package defined by this string /*{{{*/ +pkgCache::PkgIterator CacheSetHelper::PackageFromName(pkgCacheFile &Cache, + std::string const &str) { std::string pkg = str; size_t archfound = pkg.find_last_of(':'); std::string arch; @@ -259,13 +264,13 @@ pkgCache::PkgIterator PackageContainerInterface::FromName(pkgCacheFile &Cache, Pkg = Cache.GetPkgCache()->FindPkg(pkg, arch); if (Pkg.end() == true) - return helper.canNotFindPkgName(Cache, str); + return canNotFindPkgName(Cache, str); return Pkg; } /*}}}*/ -// FromGroup - Returns the package defined by this string /*{{{*/ -bool PackageContainerInterface::FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache, - std::string pkg, CacheSetHelper &helper) { +// PackageFromPackageName - Returns the package defined by this string /*{{{*/ +bool CacheSetHelper::PackageFromPackageName(PackageContainerInterface * const pci, pkgCacheFile &Cache, + std::string pkg) { if (unlikely(Cache.GetPkgCache() == 0)) return false; @@ -305,7 +310,7 @@ bool PackageContainerInterface::FromGroup(PackageContainerInterface * const pci, } } - pkgCache::PkgIterator Pkg = helper.canNotFindPkgName(Cache, pkg); + pkgCache::PkgIterator Pkg = canNotFindPkgName(Cache, pkg); if (Pkg.end() == true) return false; @@ -313,19 +318,18 @@ bool PackageContainerInterface::FromGroup(PackageContainerInterface * const pci, return true; } /*}}}*/ -// FromString - Return all packages matching a specific string /*{{{*/ -bool PackageContainerInterface::FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str, CacheSetHelper &helper) { +// PackageFromString - Return all packages matching a specific string /*{{{*/ +bool CacheSetHelper::PackageFromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str) { bool found = true; _error->PushToStack(); - if (FromGroup(pci, Cache, str, helper) == false && - FromTask(pci, Cache, str, helper) == false && -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) - FromFnmatch(pci, Cache, str, helper) == false) -#endif - FromRegEx(pci, Cache, str, helper) == false) + if (PackageFrom(CacheSetHelper::PACKAGENAME, pci, Cache, str) == false && + PackageFrom(CacheSetHelper::TASK, pci, Cache, str) == false && + // FIXME: hm, hm, regexp/fnmatch incompatible? 
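Package selection moves from static helpers on PackageContainerInterface to CacheSetHelper::PackageFrom(), dispatching on the new PkgSelector enum (TASK, REGEX, FNMATCH, PACKAGENAME, STRING). A hedged usage sketch; CollectByRegex is an illustrative wrapper and error handling is elided:

   #include <apt-pkg/cachefile.h>
   #include <apt-pkg/cacheset.h>

   static bool CollectByRegex(pkgCacheFile &Cache, std::string const &pattern,
                              APT::PackageSet &out)
   {
      APT::CacheSetHelper helper;
      // STRING tries package name, task, fnmatch and regex in turn;
      // REGEX restricts the lookup to regular-expression matching.
      return helper.PackageFrom(APT::CacheSetHelper::REGEX, &out, Cache, pattern);
   }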
+ PackageFrom(CacheSetHelper::FNMATCH, pci, Cache, str) == false && + PackageFrom(CacheSetHelper::REGEX, pci, Cache, str) == false) { - helper.canNotFindPackage(pci, Cache, str); + canNotFindPackage(CacheSetHelper::PACKAGENAME, pci, Cache, str); found = false; } @@ -336,51 +340,50 @@ bool PackageContainerInterface::FromString(PackageContainerInterface * const pci return found; } /*}}}*/ -// FromCommandLine - Return all packages specified on commandline /*{{{*/ -bool PackageContainerInterface::FromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper) { +// PackageFromCommandLine - Return all packages specified on commandline /*{{{*/ +bool CacheSetHelper::PackageFromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline) { bool found = false; for (const char **I = cmdline; *I != 0; ++I) - found |= PackageContainerInterface::FromString(pci, Cache, *I, helper); + found |= PackageFrom(CacheSetHelper::PACKAGENAME, pci, Cache, *I); return found; } /*}}}*/ // FromModifierCommandLine - helper doing the work for PKG:GroupedFromCommandLine /*{{{*/ -bool PackageContainerInterface::FromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci, +bool CacheSetHelper::PackageFromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci, pkgCacheFile &Cache, const char * cmdline, - std::list<Modifier> const &mods, CacheSetHelper &helper) { + std::list<PkgModifier> const &mods) { std::string str = cmdline; unsigned short fallback = modID; bool modifierPresent = false; - for (std::list<Modifier>::const_iterator mod = mods.begin(); + for (std::list<PkgModifier>::const_iterator mod = mods.begin(); mod != mods.end(); ++mod) { size_t const alength = strlen(mod->Alias); switch(mod->Pos) { - case Modifier::POSTFIX: + case PkgModifier::POSTFIX: if (str.compare(str.length() - alength, alength, mod->Alias, 0, alength) != 0) continue; str.erase(str.length() - alength); modID = mod->ID; break; - case Modifier::PREFIX: + case PkgModifier::PREFIX: continue; - case Modifier::NONE: + case PkgModifier::NONE: continue; } modifierPresent = true; break; } if (modifierPresent == true) { - bool const errors = helper.showErrors(false); - pkgCache::PkgIterator Pkg = FromName(Cache, cmdline, helper); - helper.showErrors(errors); - if (Pkg.end() == false) { - pci->insert(Pkg); + bool const errors = showErrors(false); + bool const found = PackageFrom(PACKAGENAME, pci, Cache, cmdline); + showErrors(errors); + if (found == true) { modID = fallback; return true; } } - return FromString(pci, Cache, str, helper); + return PackageFrom(CacheSetHelper::PACKAGENAME, pci, Cache, str); } /*}}}*/ // FromModifierCommandLine - helper doing the work for VER:GroupedFromCommandLine /*{{{*/ @@ -389,7 +392,7 @@ bool VersionContainerInterface::FromModifierCommandLine(unsigned short &modID, pkgCacheFile &Cache, const char * cmdline, std::list<Modifier> const &mods, CacheSetHelper &helper) { - Version select = NEWEST; + CacheSetHelper::VerSelector select = CacheSetHelper::NEWEST; std::string str = cmdline; if (unlikely(str.empty() == true)) return false; @@ -432,7 +435,8 @@ bool VersionContainerInterface::FromModifierCommandLine(unsigned short &modID, // FromCommandLine - Return all versions specified on commandline /*{{{*/ bool VersionContainerInterface::FromCommandLine(VersionContainerInterface * const vci, pkgCacheFile &Cache, const char **cmdline, - Version const &fallback, CacheSetHelper &helper) { + 
CacheSetHelper::VerSelector const fallback, + CacheSetHelper &helper) { bool found = false; for (const char **I = cmdline; *I != 0; ++I) found |= VersionContainerInterface::FromString(vci, Cache, *I, fallback, helper); @@ -442,8 +446,17 @@ bool VersionContainerInterface::FromCommandLine(VersionContainerInterface * cons // FromString - Returns all versions spedcified by a string /*{{{*/ bool VersionContainerInterface::FromString(VersionContainerInterface * const vci, pkgCacheFile &Cache, std::string pkg, - Version const &fallback, CacheSetHelper &helper, + CacheSetHelper::VerSelector const fallback, + CacheSetHelper &helper, bool const onlyFromName) { + PackageSet pkgset; + if(FileExists(pkg)) { + helper.PackageFrom(CacheSetHelper::STRING, &pkgset, Cache, pkg); + if(pkgset.empty() == true) + return false; + return VersionContainerInterface::FromPackage(vci, Cache, pkgset.begin(), fallback, helper); + } + std::string ver; bool verIsRel = false; size_t const vertag = pkg.find_last_of("/="); @@ -452,15 +465,14 @@ bool VersionContainerInterface::FromString(VersionContainerInterface * const vci verIsRel = (pkg[vertag] == '/'); pkg.erase(vertag); } - PackageSet pkgset; if (onlyFromName == false) - PackageContainerInterface::FromString(&pkgset, Cache, pkg, helper); + helper.PackageFrom(CacheSetHelper::STRING, &pkgset, Cache, pkg); else { - pkgset.insert(PackageContainerInterface::FromName(Cache, pkg, helper)); + helper.PackageFrom(CacheSetHelper::PACKAGENAME, &pkgset, Cache, pkg); } bool errors = true; - if (pkgset.getConstructor() != PackageSet::UNKNOWN) + if (pkgset.getConstructor() != CacheSetHelper::UNKNOWN) errors = helper.showErrors(false); bool found = false; @@ -479,7 +491,7 @@ bool VersionContainerInterface::FromString(VersionContainerInterface * const vci if (P->VersionList != 0) V = P.VersionList(); else - V = helper.canNotFindNewestVer(Cache, P); + V = helper.canNotGetVersion(CacheSetHelper::NEWEST, Cache, P); } else { pkgVersionMatch Match(ver, (verIsRel == true ? 
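On the version side the old local Version enum is replaced by CacheSetHelper::VerSelector, and FromString() now first checks with FileExists() whether the argument names an existing file, resolving it through the STRING selector before falling back to the usual pkg, pkg/release and pkg=version parsing. A hedged sketch collecting candidate versions for a command-line style argument; CandidatesFor is illustrative and the final onlyFromName argument is passed explicitly:

   #include <apt-pkg/cachefile.h>
   #include <apt-pkg/cacheset.h>

   static bool CandidatesFor(pkgCacheFile &Cache, std::string const &arg,
                             APT::VersionSet &out)
   {
      APT::CacheSetHelper helper;
      // CANDIDATE, INSTALLED, NEWEST, ... are CacheSetHelper::VerSelector values;
      // RELEASE and VERSIONNUMBER are only used when reporting selections.
      return APT::VersionContainerInterface::FromString(&out, Cache, arg,
                                                         APT::CacheSetHelper::CANDIDATE,
                                                         helper, false);
   }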
pkgVersionMatch::Release : pkgVersionMatch::Version)); @@ -496,11 +508,14 @@ bool VersionContainerInterface::FromString(VersionContainerInterface * const vci } if (V.end() == true) continue; - helper.showSelectedVersion(P, V, ver, verIsRel); + if (verIsRel == true) + helper.showVersionSelection(P, V, CacheSetHelper::RELEASE, ver); + else + helper.showVersionSelection(P, V, CacheSetHelper::VERSIONNUMBER, ver); vci->insert(V); found = true; } - if (pkgset.getConstructor() != PackageSet::UNKNOWN) + if (pkgset.getConstructor() != CacheSetHelper::UNKNOWN) helper.showErrors(errors); return found; } @@ -509,30 +524,30 @@ bool VersionContainerInterface::FromString(VersionContainerInterface * const vci bool VersionContainerInterface::FromPackage(VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &P, - Version const &fallback, + CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper) { pkgCache::VerIterator V; bool showErrors; bool found = false; switch(fallback) { - case ALL: + case CacheSetHelper::ALL: if (P->VersionList != 0) for (V = P.VersionList(); V.end() != true; ++V) found |= vci->insert(V); else - helper.canNotFindAllVer(vci, Cache, P); + helper.canNotFindVersion(CacheSetHelper::ALL, vci, Cache, P); break; - case CANDANDINST: + case CacheSetHelper::CANDANDINST: found |= vci->insert(getInstalledVer(Cache, P, helper)); found |= vci->insert(getCandidateVer(Cache, P, helper)); break; - case CANDIDATE: + case CacheSetHelper::CANDIDATE: found |= vci->insert(getCandidateVer(Cache, P, helper)); break; - case INSTALLED: + case CacheSetHelper::INSTALLED: found |= vci->insert(getInstalledVer(Cache, P, helper)); break; - case CANDINST: + case CacheSetHelper::CANDINST: showErrors = helper.showErrors(false); V = getCandidateVer(Cache, P, helper); if (V.end() == true) @@ -541,9 +556,9 @@ bool VersionContainerInterface::FromPackage(VersionContainerInterface * const vc if (V.end() == false) found |= vci->insert(V); else - helper.canNotFindInstCandVer(vci, Cache, P); + helper.canNotFindVersion(CacheSetHelper::CANDINST, vci, Cache, P); break; - case INSTCAND: + case CacheSetHelper::INSTCAND: showErrors = helper.showErrors(false); V = getInstalledVer(Cache, P, helper); if (V.end() == true) @@ -552,14 +567,18 @@ bool VersionContainerInterface::FromPackage(VersionContainerInterface * const vc if (V.end() == false) found |= vci->insert(V); else - helper.canNotFindInstCandVer(vci, Cache, P); + helper.canNotFindVersion(CacheSetHelper::INSTCAND, vci, Cache, P); break; - case NEWEST: + case CacheSetHelper::NEWEST: if (P->VersionList != 0) found |= vci->insert(P.VersionList()); else - helper.canNotFindNewestVer(Cache, P); + helper.canNotFindVersion(CacheSetHelper::NEWEST, vci, Cache, P); break; + case CacheSetHelper::RELEASE: + case CacheSetHelper::VERSIONNUMBER: + // both make no sense here, so always false + return false; } return found; } @@ -576,7 +595,7 @@ pkgCache::VerIterator VersionContainerInterface::getCandidateVer(pkgCacheFile &C Cand = Cache[Pkg].CandidateVerIter(Cache); } if (Cand.end() == true) - return helper.canNotFindCandidateVer(Cache, Pkg); + return helper.canNotGetVersion(CacheSetHelper::CANDIDATE, Cache, Pkg); return Cand; } /*}}}*/ @@ -584,19 +603,26 @@ pkgCache::VerIterator VersionContainerInterface::getCandidateVer(pkgCacheFile &C pkgCache::VerIterator VersionContainerInterface::getInstalledVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg, CacheSetHelper &helper) { if (Pkg->CurrentVer == 0) - return helper.canNotFindInstalledVer(Cache, 
Pkg); + return helper.canNotGetVersion(CacheSetHelper::INSTALLED, Cache, Pkg); return Pkg.CurrentVer(); } /*}}}*/ -// canNotFindPkgName - handle the case no package has this name /*{{{*/ -pkgCache::PkgIterator CacheSetHelper::canNotFindPkgName(pkgCacheFile &Cache, - std::string const &str) { - if (ShowError == true) - _error->Insert(ErrorType, _("Unable to locate package %s"), str.c_str()); - return pkgCache::PkgIterator(Cache, 0); +// canNotFindPackage - with the given selector and pattern /*{{{*/ +void CacheSetHelper::canNotFindPackage(enum PkgSelector const select, + PackageContainerInterface * const pci, pkgCacheFile &Cache, + std::string const &pattern) { + switch (select) { +APT_IGNORE_DEPRECATED_PUSH + case REGEX: canNotFindRegEx(pci, Cache, pattern); break; + case TASK: canNotFindTask(pci, Cache, pattern); break; + case FNMATCH: canNotFindFnmatch(pci, Cache, pattern); break; + case PACKAGENAME: canNotFindPackage(pci, Cache, pattern); break; + case STRING: canNotFindPackage(pci, Cache, pattern); break; + case UNKNOWN: break; +APT_IGNORE_DEPRECATED_POP + } } - /*}}}*/ // canNotFindTask - handle the case no package is found for a task /*{{{*/ void CacheSetHelper::canNotFindTask(PackageContainerInterface * const /*pci*/, pkgCacheFile &/*Cache*/, std::string pattern) { if (ShowError == true) @@ -608,17 +634,45 @@ void CacheSetHelper::canNotFindRegEx(PackageContainerInterface * const /*pci*/, if (ShowError == true) _error->Insert(ErrorType, _("Couldn't find any package by regex '%s'"), pattern.c_str()); } -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) + /*}}}*/ // canNotFindFnmatch - handle the case no package is found by a fnmatch /*{{{*/ -void CacheSetHelper::canNotFindFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern) { + void CacheSetHelper::canNotFindFnmatch(PackageContainerInterface * const /*pci*/, pkgCacheFile &/*Cache*/, std::string pattern) { if (ShowError == true) _error->Insert(ErrorType, _("Couldn't find any package by glob '%s'"), pattern.c_str()); } -#endif /*}}}*/ + /*}}}*/ // canNotFindPackage - handle the case no package is found from a string/*{{{*/ APT_CONST void CacheSetHelper::canNotFindPackage(PackageContainerInterface * const /*pci*/, pkgCacheFile &/*Cache*/, std::string const &/*str*/) { } /*}}}*/ + /*}}}*/ +// canNotFindPkgName - handle the case no package has this name /*{{{*/ +pkgCache::PkgIterator CacheSetHelper::canNotFindPkgName(pkgCacheFile &Cache, + std::string const &str) { + if (ShowError == true) + _error->Insert(ErrorType, _("Unable to locate package %s"), str.c_str()); + return pkgCache::PkgIterator(Cache, 0); +} + /*}}}*/ +// canNotFindVersion - for package by selector /*{{{*/ +void CacheSetHelper::canNotFindVersion(enum VerSelector const select, VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) +{ + switch (select) { +APT_IGNORE_DEPRECATED_PUSH + case ALL: canNotFindAllVer(vci, Cache, Pkg); break; + case INSTCAND: canNotFindInstCandVer(vci, Cache, Pkg); break; + case CANDINST: canNotFindCandInstVer(vci, Cache, Pkg); break; + case NEWEST: canNotFindNewestVer(Cache, Pkg); break; + case CANDIDATE: canNotFindCandidateVer(Cache, Pkg); break; + case INSTALLED: canNotFindInstalledVer(Cache, Pkg); break; +APT_IGNORE_DEPRECATED_POP + case CANDANDINST: canNotGetCandInstVer(Cache, Pkg); break; + case RELEASE: + case VERSIONNUMBER: + // invalid in this branch + break; + } +} // canNotFindAllVer /*{{{*/ void CacheSetHelper::canNotFindAllVer(VersionContainerInterface * const 
/*vci*/, pkgCacheFile &/*Cache*/, pkgCache::PkgIterator const &Pkg) { @@ -627,19 +681,37 @@ void CacheSetHelper::canNotFindAllVer(VersionContainerInterface * const /*vci*/, } /*}}}*/ // canNotFindInstCandVer /*{{{*/ -void CacheSetHelper::canNotFindInstCandVer(VersionContainerInterface * const /*vci*/, pkgCacheFile &/*Cache*/, +void CacheSetHelper::canNotFindInstCandVer(VersionContainerInterface * const /*vci*/, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) { - if (ShowError == true) - _error->Insert(ErrorType, _("Can't select installed nor candidate version from package '%s' as it has neither of them"), Pkg.FullName(true).c_str()); + canNotGetInstCandVer(Cache, Pkg); } /*}}}*/ // canNotFindInstCandVer /*{{{*/ -void CacheSetHelper::canNotFindCandInstVer(VersionContainerInterface * const /*vci*/, pkgCacheFile &/*Cache*/, +void CacheSetHelper::canNotFindCandInstVer(VersionContainerInterface * const /*vci*/, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) { - if (ShowError == true) - _error->Insert(ErrorType, _("Can't select installed nor candidate version from package '%s' as it has neither of them"), Pkg.FullName(true).c_str()); + canNotGetCandInstVer(Cache, Pkg); } /*}}}*/ + /*}}}*/ +// canNotGetVersion - for package by selector /*{{{*/ +pkgCache::VerIterator CacheSetHelper::canNotGetVersion(enum VerSelector const select, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) { + switch (select) { +APT_IGNORE_DEPRECATED_PUSH + case NEWEST: return canNotFindNewestVer(Cache, Pkg); + case CANDIDATE: return canNotFindCandidateVer(Cache, Pkg); + case INSTALLED: return canNotFindInstalledVer(Cache, Pkg); +APT_IGNORE_DEPRECATED_POP + case CANDINST: return canNotGetCandInstVer(Cache, Pkg); + case INSTCAND: return canNotGetInstCandVer(Cache, Pkg); + case ALL: + case CANDANDINST: + case RELEASE: + case VERSIONNUMBER: + // invalid in this branch + return pkgCache::VerIterator(Cache, 0); + } + return pkgCache::VerIterator(Cache, 0); +} // canNotFindNewestVer /*{{{*/ pkgCache::VerIterator CacheSetHelper::canNotFindNewestVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg) { @@ -664,6 +736,37 @@ pkgCache::VerIterator CacheSetHelper::canNotFindInstalledVer(pkgCacheFile &Cache return pkgCache::VerIterator(Cache, 0); } /*}}}*/ +// canNotFindInstCandVer /*{{{*/ +pkgCache::VerIterator CacheSetHelper::canNotGetInstCandVer(pkgCacheFile &Cache, + pkgCache::PkgIterator const &Pkg) { + if (ShowError == true) + _error->Insert(ErrorType, _("Can't select installed nor candidate version from package '%s' as it has neither of them"), Pkg.FullName(true).c_str()); + return pkgCache::VerIterator(Cache, 0); +} + /*}}}*/ +// canNotFindInstCandVer /*{{{*/ +pkgCache::VerIterator CacheSetHelper::canNotGetCandInstVer(pkgCacheFile &Cache, + pkgCache::PkgIterator const &Pkg) { + if (ShowError == true) + _error->Insert(ErrorType, _("Can't select installed nor candidate version from package '%s' as it has neither of them"), Pkg.FullName(true).c_str()); + return pkgCache::VerIterator(Cache, 0); +} + /*}}}*/ + /*}}}*/ +// showPackageSelection - by selector and given pattern /*{{{*/ +void CacheSetHelper::showPackageSelection(pkgCache::PkgIterator const &pkg, enum PkgSelector const select, + std::string const &pattern) { + switch (select) { +APT_IGNORE_DEPRECATED_PUSH + case REGEX: showRegExSelection(pkg, pattern); break; + case TASK: showTaskSelection(pkg, pattern); break; + case FNMATCH: showFnmatchSelection(pkg, pattern); break; +APT_IGNORE_DEPRECATED_POP + case PACKAGENAME: /* no suprises here */ break; + case 
STRING: /* handled by the special cases */ break; + case UNKNOWN: break; + } +} // showTaskSelection /*{{{*/ APT_CONST void CacheSetHelper::showTaskSelection(pkgCache::PkgIterator const &/*pkg*/, std::string const &/*pattern*/) { @@ -674,14 +777,35 @@ APT_CONST void CacheSetHelper::showRegExSelection(pkgCache::PkgIterator const &/ std::string const &/*pattern*/) { } /*}}}*/ -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) // showFnmatchSelection /*{{{*/ -APT_CONST void CacheSetHelper::showFnmatchSelection(pkgCache::PkgIterator const &pkg, - std::string const &pattern) { +APT_CONST void CacheSetHelper::showFnmatchSelection(pkgCache::PkgIterator const &/*pkg*/, + std::string const &/*pattern*/) { } /*}}}*/ -#endif -// showSelectedVersion /*{{{*/ + /*}}}*/ +// showVersionSelection /*{{{*/ +void CacheSetHelper::showVersionSelection(pkgCache::PkgIterator const &Pkg, + pkgCache::VerIterator const &Ver, enum VerSelector const select, std::string const &pattern) { + switch (select) { +APT_IGNORE_DEPRECATED_PUSH + case RELEASE: + showSelectedVersion(Pkg, Ver, pattern, true); + break; + case VERSIONNUMBER: + showSelectedVersion(Pkg, Ver, pattern, false); + break; +APT_IGNORE_DEPRECATED_POP + case NEWEST: + case CANDIDATE: + case INSTALLED: + case CANDINST: + case INSTCAND: + case ALL: + case CANDANDINST: + // not really suprises, but in fact: just not implemented + break; + } +} APT_CONST void CacheSetHelper::showSelectedVersion(pkgCache::PkgIterator const &/*Pkg*/, pkgCache::VerIterator const /*Ver*/, std::string const &/*ver*/, diff --git a/apt-pkg/cacheset.h b/apt-pkg/cacheset.h index 16a3daa42..97aee8c2d 100644 --- a/apt-pkg/cacheset.h +++ b/apt-pkg/cacheset.h @@ -13,14 +13,17 @@ #include <map> #include <set> #include <list> +#include <vector> #include <string> #include <iterator> +#include <algorithm> #include <stddef.h> #include <apt-pkg/error.h> #include <apt-pkg/pkgcache.h> #include <apt-pkg/cacheiterators.h> +#include <apt-pkg/macros.h> #ifndef APT_8_CLEANER_HEADERS #include <apt-pkg/cachefile.h> @@ -51,36 +54,127 @@ public: /*{{{*/ ShowError(ShowError), ErrorType(ErrorType) {} virtual ~CacheSetHelper() {} - virtual void showTaskSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern); - virtual void showRegExSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern); -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) - virtual void showFnmatchSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern); -#endif - virtual void showSelectedVersion(pkgCache::PkgIterator const &Pkg, pkgCache::VerIterator const Ver, - std::string const &ver, bool const verIsRel); + enum PkgSelector { UNKNOWN, REGEX, TASK, FNMATCH, PACKAGENAME, STRING }; - virtual void canNotFindTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); - virtual void canNotFindRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) - virtual void canNotFindFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); -#endif - virtual void canNotFindPackage(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str); + virtual bool PackageFrom(enum PkgSelector const select, PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern); + + virtual bool PackageFromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline); + + struct PkgModifier { + enum Position { NONE, 
PREFIX, POSTFIX }; + unsigned short ID; + const char * const Alias; + Position Pos; + PkgModifier (unsigned short const &id, const char * const alias, Position const &pos) : ID(id), Alias(alias), Pos(pos) {} + }; + virtual bool PackageFromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci, + pkgCacheFile &Cache, const char * cmdline, + std::list<PkgModifier> const &mods); + + // use PackageFrom(PACKAGENAME, …) instead + APT_DEPRECATED pkgCache::PkgIterator PackageFromName(pkgCacheFile &Cache, std::string const &pattern); + + /** \brief be notified about the package being selected via pattern + * + * Main use is probably to show a message to the user what happened + * + * \param pkg is the package which was selected + * \param select is the selection method which choose the package + * \param pattern is the string used by the selection method to pick the package + */ + virtual void showPackageSelection(pkgCache::PkgIterator const &pkg, PkgSelector const select, std::string const &pattern); + // use the method above instead, react only on the type you need and let the base handle the rest if need be + // this allows use to add new selection methods without breaking the ABI constantly with new virtual methods + APT_DEPRECATED virtual void showTaskSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern); + APT_DEPRECATED virtual void showRegExSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern); + APT_DEPRECATED virtual void showFnmatchSelection(pkgCache::PkgIterator const &pkg, std::string const &pattern); + + /** \brief be notified if a package can't be found via pattern + * + * Can be used to show a message as well as to try something else to make it match + * + * \param select is the method tried for selection + * \param pci is the container the package should be inserted in + * \param Cache is the package universe available + * \param pattern is the string not matching anything + */ + virtual void canNotFindPackage(enum PkgSelector const select, PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern); + // same as above for showPackageSelection + APT_DEPRECATED virtual void canNotFindTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); + APT_DEPRECATED virtual void canNotFindRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); + APT_DEPRECATED virtual void canNotFindFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); + APT_DEPRECATED virtual void canNotFindPackage(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &str); + + /** \brief specifies which version(s) we want to refer to */ + enum VerSelector { + /** by release string */ + RELEASE, + /** by version number string */ + VERSIONNUMBER, + /** All versions */ + ALL, + /** Candidate and installed version */ + CANDANDINST, + /** Candidate version */ + CANDIDATE, + /** Installed version */ + INSTALLED, + /** Candidate or if non installed version */ + CANDINST, + /** Installed or if non candidate version */ + INSTCAND, + /** Newest version */ + NEWEST + }; + + /** \brief be notified about the version being selected via pattern + * + * Main use is probably to show a message to the user what happened + * Note that at the moment this method is only called for RELEASE + * and VERSION selections, not for the others. 
+ * + * \param Pkg is the package which was selected for + * \param Ver is the version selected + * \param select is the selection method which choose the version + * \param pattern is the string used by the selection method to pick the version + */ + virtual void showVersionSelection(pkgCache::PkgIterator const &Pkg, pkgCache::VerIterator const &Ver, + enum VerSelector const select, std::string const &pattern); + // renamed to have a similar interface to showPackageSelection + APT_DEPRECATED virtual void showSelectedVersion(pkgCache::PkgIterator const &Pkg, pkgCache::VerIterator const Ver, + std::string const &ver, bool const verIsRel); - virtual void canNotFindAllVer(VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg); - virtual void canNotFindInstCandVer(VersionContainerInterface * const vci, pkgCacheFile &Cache, + /** \brief be notified if a version can't be found for a package + * + * Main use is probably to show a message to the user what happened + * + * \param select is the method tried for selection + * \param vci is the container the version should be inserted in + * \param Cache is the package universe available + * \param Pkg is the package we wanted a version from + */ + virtual void canNotFindVersion(enum VerSelector const select, VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg); + // same as above for showPackageSelection + APT_DEPRECATED virtual void canNotFindAllVer(VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg); + APT_DEPRECATED virtual void canNotFindInstCandVer(VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg); - virtual void canNotFindCandInstVer(VersionContainerInterface * const vci, + APT_DEPRECATED virtual void canNotFindCandInstVer(VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg); - virtual pkgCache::PkgIterator canNotFindPkgName(pkgCacheFile &Cache, std::string const &str); - virtual pkgCache::VerIterator canNotFindNewestVer(pkgCacheFile &Cache, + // the difference between canNotFind and canNotGet is that the later is more low-level + // and called from other places: In this case looking into the code is the only real answer… + virtual pkgCache::VerIterator canNotGetVersion(enum VerSelector const select, pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg); + // same as above for showPackageSelection + APT_DEPRECATED virtual pkgCache::VerIterator canNotFindNewestVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg); - virtual pkgCache::VerIterator canNotFindCandidateVer(pkgCacheFile &Cache, + APT_DEPRECATED virtual pkgCache::VerIterator canNotFindCandidateVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg); - virtual pkgCache::VerIterator canNotFindInstalledVer(pkgCacheFile &Cache, + APT_DEPRECATED virtual pkgCache::VerIterator canNotFindInstalledVer(pkgCacheFile &Cache, pkgCache::PkgIterator const &Pkg); + virtual pkgCache::PkgIterator canNotFindPkgName(pkgCacheFile &Cache, std::string const &str); + bool showErrors() const { return ShowError; } bool showErrors(bool const newValue) { if (ShowError == newValue) return ShowError; else return ((ShowError = newValue) == false); } GlobalError::MsgType errorType() const { return ErrorType; } @@ -98,7 +192,19 @@ public: /*{{{*/ protected: bool ShowError; GlobalError::MsgType ErrorType; + + pkgCache::VerIterator canNotGetInstCandVer(pkgCacheFile &Cache, + pkgCache::PkgIterator const 
&Pkg); + pkgCache::VerIterator canNotGetCandInstVer(pkgCacheFile &Cache, + pkgCache::PkgIterator const &Pkg); + + bool PackageFromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); + bool PackageFromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); + bool PackageFromFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); + bool PackageFromPackageName(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern); + bool PackageFromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern); }; /*}}}*/ + class PackageContainerInterface { /*{{{*/ /** \class PackageContainerInterface @@ -118,7 +224,11 @@ public: inline const char *Name() const {return getPkg().Name(); } inline std::string FullName(bool const Pretty) const { return getPkg().FullName(Pretty); } inline std::string FullName() const { return getPkg().FullName(); } - inline const char *Section() const {return getPkg().Section(); } + APT_DEPRECATED inline const char *Section() const { + APT_IGNORE_DEPRECATED_PUSH + return getPkg().Section(); + APT_IGNORE_DEPRECATED_POP + } inline bool Purge() const {return getPkg().Purge(); } inline const char *Arch() const {return getPkg().Arch(); } inline pkgCache::GrpIterator Group() const { return getPkg().Group(); } @@ -142,29 +252,46 @@ public: virtual bool empty() const = 0; virtual void clear() = 0; - enum Constructor { UNKNOWN, REGEX, TASK, FNMATCH }; - virtual void setConstructor(Constructor const &con) = 0; - virtual Constructor getConstructor() const = 0; - - static bool FromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper); - static bool FromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper); - static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper); - static bool FromFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper); - static bool FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper); - static bool FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper); - static bool FromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper); - - struct Modifier { - enum Position { NONE, PREFIX, POSTFIX }; - unsigned short ID; - const char * const Alias; - Position Pos; - Modifier (unsigned short const &id, const char * const alias, Position const &pos) : ID(id), Alias(alias), Pos(pos) {} - }; - - static bool FromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci, - pkgCacheFile &Cache, const char * cmdline, - std::list<Modifier> const &mods, CacheSetHelper &helper); + // FIXME: This is a bloody hack removed soon. Use CacheSetHelper::PkgSelector ! 
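The selector-based callbacks above replace the old one-virtual-per-case interface: a frontend now derives from CacheSetHelper, overrides showPackageSelection()/canNotFindPackage() and switches on the PkgSelector it cares about, so new selection methods can be added without constantly growing the set of virtuals. A minimal sketch of such a subclass, assuming only the apt-pkg declarations visible in this diff; the class name VerboseHelper and the main() driver are invented for illustration and are not part of this commit:

#include <apt-pkg/cachefile.h>
#include <apt-pkg/cacheset.h>
#include <iostream>
#include <string>

class VerboseHelper : public APT::CacheSetHelper
{
public:
   // report how a pattern-based selector picked a package
   virtual void showPackageSelection(pkgCache::PkgIterator const &pkg,
         PkgSelector const select, std::string const &pattern)
   {
      if (select == REGEX || select == FNMATCH)
         std::cout << "picked " << pkg.FullName(true) << " via '" << pattern << "'" << std::endl;
   }
   // react to a pattern matching nothing, then fall back to the stock per-selector errors
   virtual void canNotFindPackage(enum PkgSelector const select,
         APT::PackageContainerInterface * const pci, pkgCacheFile &Cache,
         std::string const &pattern)
   {
      std::cerr << "nothing matches '" << pattern << "'" << std::endl;
      APT::CacheSetHelper::canNotFindPackage(select, pci, Cache, pattern);
   }
};

int main(int argc, char **argv)
{
   if (argc < 2)
      return 2;
   pkgCacheFile Cache;
   VerboseHelper helper;
   // internally routed through helper.PackageFrom(CacheSetHelper::REGEX, ...)
   APT::PackageSet const pkgs = APT::PackageSet::FromRegEx(Cache, argv[1], helper);
   return pkgs.empty() ? 1 : 0;
}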
+ enum APT_DEPRECATED Constructor { UNKNOWN = CacheSetHelper::UNKNOWN, + REGEX = CacheSetHelper::REGEX, + TASK = CacheSetHelper::TASK, + FNMATCH = CacheSetHelper::FNMATCH }; +APT_IGNORE_DEPRECATED_PUSH + void setConstructor(Constructor const by) { ConstructedBy = (CacheSetHelper::PkgSelector)by; } +APT_IGNORE_DEPRECATED_POP + + void setConstructor(CacheSetHelper::PkgSelector const by) { ConstructedBy = by; } + CacheSetHelper::PkgSelector getConstructor() const { return ConstructedBy; } + PackageContainerInterface() : ConstructedBy(CacheSetHelper::UNKNOWN) {} + PackageContainerInterface(CacheSetHelper::PkgSelector const by) : ConstructedBy(by) {} + + APT_DEPRECATED static bool FromTask(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) { + return helper.PackageFrom(CacheSetHelper::TASK, pci, Cache, pattern); } + APT_DEPRECATED static bool FromRegEx(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) { + return helper.PackageFrom(CacheSetHelper::REGEX, pci, Cache, pattern); } + APT_DEPRECATED static bool FromFnmatch(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) { + return helper.PackageFrom(CacheSetHelper::FNMATCH, pci, Cache, pattern); } + APT_DEPRECATED static bool FromGroup(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) { + return helper.PackageFrom(CacheSetHelper::PACKAGENAME, pci, Cache, pattern); } + APT_DEPRECATED static bool FromString(PackageContainerInterface * const pci, pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) { + return helper.PackageFrom(CacheSetHelper::STRING, pci, Cache, pattern); } + APT_DEPRECATED static bool FromCommandLine(PackageContainerInterface * const pci, pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper) { + return helper.PackageFromCommandLine(pci, Cache, cmdline); } + + APT_DEPRECATED typedef CacheSetHelper::PkgModifier Modifier; + +APT_IGNORE_DEPRECATED_PUSH + APT_DEPRECATED static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) { + return helper.PackageFromName(Cache, pattern); } + APT_DEPRECATED static bool FromModifierCommandLine(unsigned short &modID, PackageContainerInterface * const pci, + pkgCacheFile &Cache, const char * cmdline, + std::list<Modifier> const &mods, CacheSetHelper &helper) { + return helper.PackageFromModifierCommandLine(modID, pci, Cache, cmdline, mods); } +APT_IGNORE_DEPRECATED_POP + +private: + CacheSetHelper::PkgSelector ConstructedBy; }; /*}}}*/ template<class Container> class PackageContainer : public PackageContainerInterface {/*{{{*/ @@ -228,11 +355,23 @@ public: /*{{{*/ iterator end() { return iterator(_cont.end()); } const_iterator find(pkgCache::PkgIterator const &P) const { return const_iterator(_cont.find(P)); } - void setConstructor(Constructor const &by) { ConstructedBy = by; } - Constructor getConstructor() const { return ConstructedBy; } + PackageContainer() : PackageContainerInterface() {} + PackageContainer(CacheSetHelper::PkgSelector const &by) : PackageContainerInterface(by) {} +APT_IGNORE_DEPRECATED_PUSH + APT_DEPRECATED PackageContainer(Constructor const &by) : PackageContainerInterface((CacheSetHelper::PkgSelector)by) {} +APT_IGNORE_DEPRECATED_POP + + /** \brief sort all included versions with given comparer + + Some containers are sorted by default, some are not and can't be, + but a few like 
std::vector can be sorted if need be, so this can be + specialized in later on. The default is that this will fail though. + Specifically, already sorted containers like std::set will return + false as well as there is no easy way to check that the given comparer + would sort in the same way the set is currently sorted - PackageContainer() : ConstructedBy(UNKNOWN) {} - PackageContainer(Constructor const &by) : ConstructedBy(by) {} + \return \b true if the set was sorted, \b false if not. */ + template<class Compare> bool sort(Compare /*Comp*/) { return false; } /** \brief returns all packages in the cache who belong to the given task @@ -243,8 +382,8 @@ public: /*{{{*/ \param pattern name of the task \param helper responsible for error and message handling */ static PackageContainer FromTask(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) { - PackageContainer cont(TASK); - PackageContainerInterface::FromTask(&cont, Cache, pattern, helper); + PackageContainer cont(CacheSetHelper::TASK); + helper.PackageFrom(CacheSetHelper::TASK, &cont, Cache, pattern); return cont; } static PackageContainer FromTask(pkgCacheFile &Cache, std::string const &pattern) { @@ -260,9 +399,9 @@ public: /*{{{*/ \param Cache the packages are in \param pattern regular expression for package names \param helper responsible for error and message handling */ - static PackageContainer FromRegEx(pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) { - PackageContainer cont(REGEX); - PackageContainerInterface::FromRegEx(&cont, Cache, pattern, helper); + static PackageContainer FromRegEx(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) { + PackageContainer cont(CacheSetHelper::REGEX); + helper.PackageFrom(CacheSetHelper::REGEX, &cont, Cache, pattern); return cont; } @@ -271,9 +410,9 @@ public: /*{{{*/ return FromRegEx(Cache, pattern, helper); } - static PackageContainer FromFnmatch(pkgCacheFile &Cache, std::string pattern, CacheSetHelper &helper) { - PackageContainer cont(FNMATCH); - PackageContainerInterface::FromFnmatch(&cont, Cache, pattern, helper); + static PackageContainer FromFnmatch(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) { + PackageContainer cont(CacheSetHelper::FNMATCH); + helper.PackageFrom(CacheSetHelper::FNMATCH, &cont, Cache, pattern); return cont; } static PackageContainer FromFnMatch(pkgCacheFile &Cache, std::string const &pattern) { @@ -281,18 +420,20 @@ public: /*{{{*/ return FromFnmatch(Cache, pattern, helper); } +APT_IGNORE_DEPRECATED_PUSH /** \brief returns a package specified by a string \param Cache the package is in \param pattern String the package name should be extracted from \param helper responsible for error and message handling */ - static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) { - return PackageContainerInterface::FromName(Cache, pattern, helper); + APT_DEPRECATED static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) { + return helper.PackageFromName(Cache, pattern); } - static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern) { + APT_DEPRECATED static pkgCache::PkgIterator FromName(pkgCacheFile &Cache, std::string const &pattern) { CacheSetHelper helper; - return PackageContainerInterface::FromName(Cache, pattern, helper); + return FromName(Cache, pattern, helper); } +APT_IGNORE_DEPRECATED_POP /** \brief returns all packages specified by a string @@ 
-301,7 +442,7 @@ public: /*{{{*/ \param helper responsible for error and message handling */ static PackageContainer FromString(pkgCacheFile &Cache, std::string const &pattern, CacheSetHelper &helper) { PackageContainer cont; - PackageContainerInterface::FromString(&cont, Cache, pattern, helper); + helper.PackageFrom(CacheSetHelper::PACKAGENAME, &cont, Cache, pattern); return cont; } static PackageContainer FromString(pkgCacheFile &Cache, std::string const &pattern) { @@ -318,7 +459,7 @@ public: /*{{{*/ \param helper responsible for error and message handling */ static PackageContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline, CacheSetHelper &helper) { PackageContainer cont; - PackageContainerInterface::FromCommandLine(&cont, Cache, cmdline, helper); + helper.PackageFromCommandLine(&cont, Cache, cmdline); return cont; } static PackageContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline) { @@ -340,14 +481,14 @@ public: /*{{{*/ static std::map<unsigned short, PackageContainer> GroupedFromCommandLine( pkgCacheFile &Cache, const char **cmdline, - std::list<Modifier> const &mods, + std::list<CacheSetHelper::PkgModifier> const &mods, unsigned short const &fallback, CacheSetHelper &helper) { std::map<unsigned short, PackageContainer> pkgsets; for (const char **I = cmdline; *I != 0; ++I) { unsigned short modID = fallback; PackageContainer pkgset; - PackageContainerInterface::FromModifierCommandLine(modID, &pkgset, Cache, *I, mods, helper); + helper.PackageFromModifierCommandLine(modID, &pkgset, Cache, *I, mods); pkgsets[modID].insert(pkgset); } return pkgsets; @@ -355,22 +496,23 @@ public: /*{{{*/ static std::map<unsigned short, PackageContainer> GroupedFromCommandLine( pkgCacheFile &Cache, const char **cmdline, - std::list<Modifier> const &mods, + std::list<CacheSetHelper::PkgModifier> const &mods, unsigned short const &fallback) { CacheSetHelper helper; return GroupedFromCommandLine(Cache, cmdline, mods, fallback, helper); } /*}}}*/ -private: /*{{{*/ - Constructor ConstructedBy; - /*}}}*/ }; /*}}}*/ - +// specialisations for push_back containers: std::list & std::vector /*{{{*/ template<> template<class Cont> void PackageContainer<std::list<pkgCache::PkgIterator> >::insert(PackageContainer<Cont> const &pkgcont) { for (typename PackageContainer<Cont>::const_iterator p = pkgcont.begin(); p != pkgcont.end(); ++p) _cont.push_back(*p); } +template<> template<class Cont> void PackageContainer<std::vector<pkgCache::PkgIterator> >::insert(PackageContainer<Cont> const &pkgcont) { + for (typename PackageContainer<Cont>::const_iterator p = pkgcont.begin(); p != pkgcont.end(); ++p) + _cont.push_back(*p); +} // these two are 'inline' as otherwise the linker has problems with seeing these untemplated // specializations again and again - but we need to see them, so that library users can use them template<> inline bool PackageContainer<std::list<pkgCache::PkgIterator> >::insert(pkgCache::PkgIterator const &P) { @@ -379,12 +521,65 @@ template<> inline bool PackageContainer<std::list<pkgCache::PkgIterator> >::inse _cont.push_back(P); return true; } +template<> inline bool PackageContainer<std::vector<pkgCache::PkgIterator> >::insert(pkgCache::PkgIterator const &P) { + if (P.end() == true) + return false; + _cont.push_back(P); + return true; +} template<> inline void PackageContainer<std::list<pkgCache::PkgIterator> >::insert(const_iterator begin, const_iterator end) { for (const_iterator p = begin; p != end; ++p) _cont.push_back(*p); } +template<> inline void 
PackageContainer<std::vector<pkgCache::PkgIterator> >::insert(const_iterator begin, const_iterator end) { + for (const_iterator p = begin; p != end; ++p) + _cont.push_back(*p); +} + /*}}}*/ + +template<> template<class Compare> inline bool PackageContainer<std::vector<pkgCache::PkgIterator> >::sort(Compare Comp) { + std::sort(_cont.begin(), _cont.end(), Comp); + return true; +} + +// class PackageUniverse - pkgCache as PackageContainerInterface /*{{{*/ +/** \class PackageUniverse + + Wraps around our usual pkgCache, so that it can be stuffed into methods + expecting a PackageContainer. + + The wrapping is read-only in practice modeled by making erase and co + private methods. */ +class APT_HIDDEN PackageUniverse : public PackageContainerInterface { + pkgCache * const _cont; +public: + typedef pkgCache::PkgIterator iterator; + typedef pkgCache::PkgIterator const_iterator; + + APT_PUBLIC bool empty() const { return false; } + APT_PUBLIC size_t size() const { return _cont->Head().PackageCount; } + + APT_PUBLIC const_iterator begin() const { return _cont->PkgBegin(); } + APT_PUBLIC const_iterator end() const { return _cont->PkgEnd(); } + APT_PUBLIC iterator begin() { return _cont->PkgBegin(); } + APT_PUBLIC iterator end() { return _cont->PkgEnd(); } + + APT_PUBLIC PackageUniverse(pkgCache * const Owner) : _cont(Owner) { } + +private: + bool insert(pkgCache::PkgIterator const &) { return true; } + template<class Cont> void insert(PackageContainer<Cont> const &) { } + void insert(const_iterator, const_iterator) { } + + void clear() { } + iterator& erase(iterator &iter) { return iter; } + size_t erase(const pkgCache::PkgIterator) { return 0; } + void erase(iterator, iterator) { } +}; + /*}}}*/ typedef PackageContainer<std::set<pkgCache::PkgIterator> > PackageSet; typedef PackageContainer<std::list<pkgCache::PkgIterator> > PackageList; +typedef PackageContainer<std::vector<pkgCache::PkgIterator> > PackageVector; class VersionContainerInterface { /*{{{*/ /** \class APT::VersionContainerInterface @@ -426,45 +621,63 @@ public: virtual void clear() = 0; /** \brief specifies which version(s) will be returned if non is given */ - enum Version { - /** All versions */ - ALL, - /** Candidate and installed version */ - CANDANDINST, - /** Candidate version */ - CANDIDATE, - /** Installed version */ - INSTALLED, - /** Candidate or if non installed version */ - CANDINST, - /** Installed or if non candidate version */ - INSTCAND, - /** Newest version */ - NEWEST + enum APT_DEPRECATED Version { + ALL = CacheSetHelper::ALL, + CANDANDINST = CacheSetHelper::CANDANDINST, + CANDIDATE = CacheSetHelper::CANDIDATE, + INSTALLED = CacheSetHelper::INSTALLED, + CANDINST = CacheSetHelper::CANDINST, + INSTCAND = CacheSetHelper::INSTCAND, + NEWEST = CacheSetHelper::NEWEST }; struct Modifier { - enum Position { NONE, PREFIX, POSTFIX }; - unsigned short ID; + unsigned short const ID; const char * const Alias; - Position Pos; - Version SelectVersion; + enum Position { NONE, PREFIX, POSTFIX } const Pos; + enum CacheSetHelper::VerSelector const SelectVersion; Modifier (unsigned short const &id, const char * const alias, Position const &pos, - Version const &select) : ID(id), Alias(alias), Pos(pos), + enum CacheSetHelper::VerSelector const select) : ID(id), Alias(alias), Pos(pos), SelectVersion(select) {} +APT_IGNORE_DEPRECATED_PUSH + APT_DEPRECATED Modifier(unsigned short const &id, const char * const alias, Position const &pos, + Version const &select) : ID(id), Alias(alias), Pos(pos), + 
SelectVersion((CacheSetHelper::VerSelector)select) {} +APT_IGNORE_DEPRECATED_POP }; static bool FromCommandLine(VersionContainerInterface * const vci, pkgCacheFile &Cache, - const char **cmdline, Version const &fallback, + const char **cmdline, CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper); +APT_IGNORE_DEPRECATED_PUSH + APT_DEPRECATED static bool FromCommandLine(VersionContainerInterface * const vci, pkgCacheFile &Cache, + const char **cmdline, Version const &fallback, + CacheSetHelper &helper) { + return FromCommandLine(vci, Cache, cmdline, (CacheSetHelper::VerSelector)fallback, helper); + } +APT_IGNORE_DEPRECATED_POP static bool FromString(VersionContainerInterface * const vci, pkgCacheFile &Cache, - std::string pkg, Version const &fallback, CacheSetHelper &helper, + std::string pkg, CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper, bool const onlyFromName = false); +APT_IGNORE_DEPRECATED_PUSH + APT_DEPRECATED static bool FromString(VersionContainerInterface * const vci, pkgCacheFile &Cache, + std::string pkg, Version const &fallback, CacheSetHelper &helper, + bool const onlyFromName = false) { + return FromString(vci, Cache, pkg, (CacheSetHelper::VerSelector)fallback, helper, onlyFromName); + } +APT_IGNORE_DEPRECATED_POP static bool FromPackage(VersionContainerInterface * const vci, pkgCacheFile &Cache, - pkgCache::PkgIterator const &P, Version const &fallback, + pkgCache::PkgIterator const &P, CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper); +APT_IGNORE_DEPRECATED_PUSH + APT_DEPRECATED static bool FromPackage(VersionContainerInterface * const vci, pkgCacheFile &Cache, + pkgCache::PkgIterator const &P, Version const &fallback, + CacheSetHelper &helper) { + return FromPackage(vci, Cache, P, (CacheSetHelper::VerSelector)fallback, helper); + } +APT_IGNORE_DEPRECATED_POP static bool FromModifierCommandLine(unsigned short &modID, VersionContainerInterface * const vci, @@ -476,8 +689,17 @@ public: static bool FromDependency(VersionContainerInterface * const vci, pkgCacheFile &Cache, pkgCache::DepIterator const &D, - Version const &selector, + CacheSetHelper::VerSelector const selector, CacheSetHelper &helper); +APT_IGNORE_DEPRECATED_PUSH + APT_DEPRECATED static bool FromDependency(VersionContainerInterface * const vci, + pkgCacheFile &Cache, + pkgCache::DepIterator const &D, + Version const &selector, + CacheSetHelper &helper) { + return FromDependency(vci, Cache, D, (CacheSetHelper::VerSelector)selector, helper); + } +APT_IGNORE_DEPRECATED_POP protected: /*{{{*/ @@ -559,6 +781,18 @@ public: /*{{{*/ iterator end() { return iterator(_cont.end()); } const_iterator find(pkgCache::VerIterator const &V) const { return const_iterator(_cont.find(V)); } + /** \brief sort all included versions with given comparer + + Some containers are sorted by default, some are not and can't be, + but a few like std::vector can be sorted if need be, so this can be + specialized in later on. The default is that this will fail though. + Specifically, already sorted containers like std::set will return + false as well as there is no easy way to check that the given comparer + would sort in the same way the set is currently sorted + + \return \b true if the set was sorted, \b false if not. 
*/ + template<class Compare> bool sort(Compare /*Comp*/) { return false; } + /** \brief returns all versions specified on the commandline Get all versions from the commandline, uses given default version if @@ -568,35 +802,59 @@ public: /*{{{*/ \param fallback version specification \param helper responsible for error and message handling */ static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline, - Version const &fallback, CacheSetHelper &helper) { + CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper) { VersionContainer vercon; VersionContainerInterface::FromCommandLine(&vercon, Cache, cmdline, fallback, helper); return vercon; } static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline, - Version const &fallback) { + CacheSetHelper::VerSelector const fallback) { CacheSetHelper helper; return FromCommandLine(Cache, cmdline, fallback, helper); } static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline) { - return FromCommandLine(Cache, cmdline, CANDINST); + return FromCommandLine(Cache, cmdline, CacheSetHelper::CANDINST); } - static VersionContainer FromString(pkgCacheFile &Cache, std::string const &pkg, - Version const &fallback, CacheSetHelper &helper, - bool const onlyFromName = false) { + CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper, + bool const /*onlyFromName = false*/) { VersionContainer vercon; VersionContainerInterface::FromString(&vercon, Cache, pkg, fallback, helper); return vercon; } static VersionContainer FromString(pkgCacheFile &Cache, std::string pkg, - Version const &fallback) { + CacheSetHelper::VerSelector const fallback) { CacheSetHelper helper; return FromString(Cache, pkg, fallback, helper); } static VersionContainer FromString(pkgCacheFile &Cache, std::string pkg) { - return FromString(Cache, pkg, CANDINST); + return FromString(Cache, pkg, CacheSetHelper::CANDINST); + } +APT_IGNORE_DEPRECATED_PUSH + static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline, + Version const &fallback, CacheSetHelper &helper) { + VersionContainer vercon; + VersionContainerInterface::FromCommandLine(&vercon, Cache, cmdline, (CacheSetHelper::VerSelector)fallback, helper); + return vercon; + } + static VersionContainer FromCommandLine(pkgCacheFile &Cache, const char **cmdline, + Version const &fallback) { + CacheSetHelper helper; + return FromCommandLine(Cache, cmdline, (CacheSetHelper::VerSelector)fallback, helper); + } + static VersionContainer FromString(pkgCacheFile &Cache, std::string const &pkg, + Version const &fallback, CacheSetHelper &helper, + bool const /*onlyFromName = false*/) { + VersionContainer vercon; + VersionContainerInterface::FromString(&vercon, Cache, pkg, (CacheSetHelper::VerSelector)fallback, helper); + return vercon; } + static VersionContainer FromString(pkgCacheFile &Cache, std::string pkg, + Version const &fallback) { + CacheSetHelper helper; + return FromString(Cache, pkg, (CacheSetHelper::VerSelector)fallback, helper); + } +APT_IGNORE_DEPRECATED_POP /** \brief returns all versions specified for the package @@ -605,18 +863,31 @@ public: /*{{{*/ \param fallback the version(s) you want to get \param helper the helper used for display and error handling */ static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P, - Version const &fallback, CacheSetHelper &helper) { + CacheSetHelper::VerSelector const fallback, CacheSetHelper &helper) { VersionContainer vercon; 
VersionContainerInterface::FromPackage(&vercon, Cache, P, fallback, helper); return vercon; } static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P, - Version const &fallback) { + CacheSetHelper::VerSelector const fallback) { CacheSetHelper helper; return FromPackage(Cache, P, fallback, helper); } +APT_IGNORE_DEPRECATED_PUSH + static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P, + Version const &fallback, CacheSetHelper &helper) { + VersionContainer vercon; + VersionContainerInterface::FromPackage(&vercon, Cache, P, (CacheSetHelper::VerSelector)fallback, helper); + return vercon; + } + static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P, + Version const &fallback) { + CacheSetHelper helper; + return FromPackage(Cache, P, (CacheSetHelper::VerSelector)fallback, helper); + } +APT_IGNORE_DEPRECATED_POP static VersionContainer FromPackage(pkgCacheFile &Cache, pkgCache::PkgIterator const &P) { - return FromPackage(Cache, P, CANDIDATE); + return FromPackage(Cache, P, CacheSetHelper::CANDIDATE); } static std::map<unsigned short, VersionContainer> GroupedFromCommandLine( @@ -645,26 +916,43 @@ public: /*{{{*/ } static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D, - Version const &selector, CacheSetHelper &helper) { + CacheSetHelper::VerSelector const selector, CacheSetHelper &helper) { VersionContainer vercon; VersionContainerInterface::FromDependency(&vercon, Cache, D, selector, helper); return vercon; } static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D, - Version const &selector) { + CacheSetHelper::VerSelector const selector) { CacheSetHelper helper; return FromPackage(Cache, D, selector, helper); } +APT_IGNORE_DEPRECATED_PUSH + static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D, + Version const &selector, CacheSetHelper &helper) { + VersionContainer vercon; + VersionContainerInterface::FromDependency(&vercon, Cache, D, (CacheSetHelper::VerSelector)selector, helper); + return vercon; + } + static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D, + Version const &selector) { + CacheSetHelper helper; + return FromPackage(Cache, D, (CacheSetHelper::VerSelector)selector, helper); + } +APT_IGNORE_DEPRECATED_POP static VersionContainer FromDependency(pkgCacheFile &Cache, pkgCache::DepIterator const &D) { - return FromPackage(Cache, D, CANDIDATE); + return FromPackage(Cache, D, CacheSetHelper::CANDIDATE); } /*}}}*/ }; /*}}}*/ - +// specialisations for push_back containers: std::list & std::vector /*{{{*/ template<> template<class Cont> void VersionContainer<std::list<pkgCache::VerIterator> >::insert(VersionContainer<Cont> const &vercont) { for (typename VersionContainer<Cont>::const_iterator v = vercont.begin(); v != vercont.end(); ++v) _cont.push_back(*v); } +template<> template<class Cont> void VersionContainer<std::vector<pkgCache::VerIterator> >::insert(VersionContainer<Cont> const &vercont) { + for (typename VersionContainer<Cont>::const_iterator v = vercont.begin(); v != vercont.end(); ++v) + _cont.push_back(*v); +} // these two are 'inline' as otherwise the linker has problems with seeing these untemplated // specializations again and again - but we need to see them, so that library users can use them template<> inline bool VersionContainer<std::list<pkgCache::VerIterator> >::insert(pkgCache::VerIterator const &V) { @@ -673,11 +961,29 @@ 
template<> inline bool VersionContainer<std::list<pkgCache::VerIterator> >::inse _cont.push_back(V); return true; } +template<> inline bool VersionContainer<std::vector<pkgCache::VerIterator> >::insert(pkgCache::VerIterator const &V) { + if (V.end() == true) + return false; + _cont.push_back(V); + return true; +} template<> inline void VersionContainer<std::list<pkgCache::VerIterator> >::insert(const_iterator begin, const_iterator end) { for (const_iterator v = begin; v != end; ++v) _cont.push_back(*v); } +template<> inline void VersionContainer<std::vector<pkgCache::VerIterator> >::insert(const_iterator begin, const_iterator end) { + for (const_iterator v = begin; v != end; ++v) + _cont.push_back(*v); +} + /*}}}*/ + +template<> template<class Compare> inline bool VersionContainer<std::vector<pkgCache::VerIterator> >::sort(Compare Comp) { + std::sort(_cont.begin(), _cont.end(), Comp); + return true; +} + typedef VersionContainer<std::set<pkgCache::VerIterator> > VersionSet; typedef VersionContainer<std::list<pkgCache::VerIterator> > VersionList; +typedef VersionContainer<std::vector<pkgCache::VerIterator> > VersionVector; } #endif diff --git a/apt-pkg/cdrom.cc b/apt-pkg/cdrom.cc index a5ad6a9ff..8cec4b78e 100644 --- a/apt-pkg/cdrom.cc +++ b/apt-pkg/cdrom.cc @@ -466,7 +466,7 @@ bool pkgCdrom::WriteSourceList(string Name,vector<string> &List,bool Source) // Open the stream for reading ifstream F((FileExists(File)?File.c_str():"/dev/null"), ios::in ); - if (!F != 0) + if (F.fail() == true) return _error->Errno("ifstream::ifstream","Opening %s",File.c_str()); string NewFile = File + ".new"; @@ -477,7 +477,7 @@ bool pkgCdrom::WriteSourceList(string Name,vector<string> &List,bool Source) "Failed to open %s.new",File.c_str()); // Create a short uri without the path - string ShortURI = "cdrom:[" + Name + "]/"; + string ShortURI = "cdrom:[" + Name + "]/"; string ShortURI2 = "cdrom:" + Name + "/"; // For Compatibility string Type; @@ -485,12 +485,12 @@ bool pkgCdrom::WriteSourceList(string Name,vector<string> &List,bool Source) Type = "deb-src"; else Type = "deb"; - + char Buffer[300]; int CurLine = 0; bool First = true; while (F.eof() == false) - { + { F.getline(Buffer,sizeof(Buffer)); CurLine++; if (F.fail() && !F.eof()) @@ -754,7 +754,7 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/ FileExists(InfoDir + "/info") == true) { ifstream F((InfoDir + "/info").c_str()); - if (!F == 0) + if (F.good() == true) getline(F,Name); if (Name.empty() == false) @@ -822,8 +822,11 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/ // check for existence and possibly create state directory for copying string const listDir = _config->FindDir("Dir::State::lists"); string const partialListDir = listDir + "partial/"; - if (CreateAPTDirectoryIfNeeded(_config->FindDir("Dir::State"), partialListDir) == false && - CreateAPTDirectoryIfNeeded(listDir, partialListDir) == false) + mode_t const mode = umask(S_IWGRP | S_IWOTH); + bool const creation_fail = (CreateAPTDirectoryIfNeeded(_config->FindDir("Dir::State"), partialListDir) == false && + CreateAPTDirectoryIfNeeded(listDir, partialListDir) == false); + umask(mode); + if (creation_fail == true) { UnmountCDROM(CDROM, NULL); return _error->Errno("cdrom", _("List directory %spartial is missing."), listDir.c_str()); @@ -913,15 +916,18 @@ bool pkgCdrom::Add(pkgCdromStatus *log) /*{{{*/ return true; } /*}}}*/ -pkgUdevCdromDevices::pkgUdevCdromDevices() /*{{{*/ - : libudev_handle(NULL) +pkgUdevCdromDevices::pkgUdevCdromDevices() /*{{{*/ +: libudev_handle(NULL), udev_new(NULL), 
udev_enumerate_add_match_property(NULL), + udev_enumerate_scan_devices(NULL), udev_enumerate_get_list_entry(NULL), + udev_device_new_from_syspath(NULL), udev_enumerate_get_udev(NULL), + udev_list_entry_get_name(NULL), udev_device_get_devnode(NULL), + udev_enumerate_new(NULL), udev_list_entry_get_next(NULL), + udev_device_get_property_value(NULL), udev_enumerate_add_match_sysattr(NULL) { - } /*}}}*/ -bool -pkgUdevCdromDevices::Dlopen() /*{{{*/ +bool pkgUdevCdromDevices::Dlopen() /*{{{*/ { // alread open if(libudev_handle != NULL) @@ -950,18 +956,14 @@ pkgUdevCdromDevices::Dlopen() /*{{{*/ return true; } /*}}}*/ - /*{{{*/ -// convenience interface, this will just call ScanForRemovable -vector<CdromDevice> -pkgUdevCdromDevices::Scan() +// convenience interface, this will just call ScanForRemovable /*{{{*/ +vector<CdromDevice> pkgUdevCdromDevices::Scan() { bool CdromOnly = _config->FindB("APT::cdrom::CdromOnly", true); return ScanForRemovable(CdromOnly); } /*}}}*/ - /*{{{*/ -vector<CdromDevice> -pkgUdevCdromDevices::ScanForRemovable(bool CdromOnly) +vector<CdromDevice> pkgUdevCdromDevices::ScanForRemovable(bool CdromOnly)/*{{{*/ { vector<CdromDevice> cdrom_devices; struct udev_enumerate *enumerate; diff --git a/apt-pkg/clean.cc b/apt-pkg/clean.cc index 0ee3b765d..6edce5b6d 100644 --- a/apt-pkg/clean.cc +++ b/apt-pkg/clean.cc @@ -34,7 +34,10 @@ bool pkgArchiveCleaner::Go(std::string Dir,pkgCache &Cache) { bool CleanInstalled = _config->FindB("APT::Clean-Installed",true); - + + if(Dir == "/") + return _error->Error(_("Clean of %s is not supported"), Dir.c_str()); + DIR *D = opendir(Dir.c_str()); if (D == 0) return _error->Errno("opendir",_("Unable to read %s"),Dir.c_str()); @@ -128,3 +131,5 @@ bool pkgArchiveCleaner::Go(std::string Dir,pkgCache &Cache) return true; } /*}}}*/ + +APT_CONST pkgArchiveCleaner::~pkgArchiveCleaner() {} diff --git a/apt-pkg/clean.h b/apt-pkg/clean.h index 930d54a7f..466cb67a9 100644 --- a/apt-pkg/clean.h +++ b/apt-pkg/clean.h @@ -24,13 +24,13 @@ class pkgArchiveCleaner void *d; protected: - + virtual void Erase(const char * /*File*/,std::string /*Pkg*/,std::string /*Ver*/,struct stat & /*St*/) {}; - public: - + public: + bool Go(std::string Dir,pkgCache &Cache); - virtual ~pkgArchiveCleaner() {}; + virtual ~pkgArchiveCleaner(); }; #endif diff --git a/apt-pkg/contrib/cdromutl.cc b/apt-pkg/contrib/cdromutl.cc index 936e377fb..6eb917457 100644 --- a/apt-pkg/contrib/cdromutl.cc +++ b/apt-pkg/contrib/cdromutl.cc @@ -207,7 +207,6 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version) /* Run over the directory, we assume that the reader order will never change as the media is read-only. In theory if the kernel did some sort of wacked caching this might not be true.. */ - char S[300]; for (struct dirent *Dir = readdir(D); Dir != 0; Dir = readdir(D)) { // Skip some files.. 
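For context on the pkgArchiveCleaner changes above (destructor moved out of line, Go() now refusing to clean "/"): the class is designed to be subclassed, with Go() scanning an archive directory and invoking the protected Erase() hook for each package file it judges obsolete, while the actual removal is left to the subclass. A rough usage sketch based only on the clean.h interface shown here; the names LogCleaner and CleanArchives are invented for illustration:

#include <apt-pkg/clean.h>
#include <apt-pkg/configuration.h>
#include <apt-pkg/pkgcache.h>
#include <iostream>
#include <string>
#include <sys/stat.h>
#include <unistd.h>

class LogCleaner : public pkgArchiveCleaner
{
   protected:
   // Go() calls this for each .deb it considers stale; deleting it is our job
   virtual void Erase(const char *File, std::string Pkg, std::string Ver, struct stat &St)
   {
      std::cout << "Del " << Pkg << " " << Ver << " [" << St.st_size << "B]" << std::endl;
      unlink(File);
   }
};

bool CleanArchives(pkgCache &Cache)
{
   LogCleaner cleaner;
   std::string const archives = _config->FindDir("Dir::Cache::archives");
   // with this change a misconfigured archive directory of "/" is rejected outright
   return cleaner.Go(archives, Cache) &&
          cleaner.Go(archives + "partial/", Cache);
}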
@@ -215,30 +214,32 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version) strcmp(Dir->d_name,"..") == 0) continue; + std::string S; if (Version <= 1) { - sprintf(S,"%lu",(unsigned long)Dir->d_ino); + strprintf(S, "%lu", (unsigned long)Dir->d_ino); } else { struct stat Buf; if (stat(Dir->d_name,&Buf) != 0) continue; - sprintf(S,"%lu",(unsigned long)Buf.st_mtime); + strprintf(S, "%lu", (unsigned long)Buf.st_mtime); } - - Hash.Add(S); + + Hash.Add(S.c_str()); Hash.Add(Dir->d_name); }; - + if (chdir(StartDir.c_str()) != 0) { _error->Errno("chdir",_("Unable to change to %s"),StartDir.c_str()); closedir(D); return false; } closedir(D); - + // Some stats from the fsys + std::string S; if (_config->FindB("Debug::identcdrom",false) == false) { struct statvfs Buf; @@ -248,19 +249,19 @@ bool IdentCdrom(string CD,string &Res,unsigned int Version) // We use a kilobyte block size to advoid overflow if (writable_media) { - sprintf(S,"%lu",(long)(Buf.f_blocks*(Buf.f_bsize/1024))); + strprintf(S, "%lu", (unsigned long)(Buf.f_blocks*(Buf.f_bsize/1024))); } else { - sprintf(S,"%lu %lu",(long)(Buf.f_blocks*(Buf.f_bsize/1024)), - (long)(Buf.f_bfree*(Buf.f_bsize/1024))); + strprintf(S, "%lu %lu", (unsigned long)(Buf.f_blocks*(Buf.f_bsize/1024)), + (unsigned long)(Buf.f_bfree*(Buf.f_bsize/1024))); } - Hash.Add(S); - sprintf(S,"-%u",Version); + Hash.Add(S.c_str()); + strprintf(S, "-%u", Version); } else - sprintf(S,"-%u.debug",Version); - + strprintf(S, "-%u.debug", Version); + Res = Hash.Result().Value() + S; - return true; + return true; } /*}}}*/ // FindMountPointForDevice - Find mountpoint for the given device /*{{{*/ diff --git a/apt-pkg/contrib/cmndline.cc b/apt-pkg/contrib/cmndline.cc index 3799c822d..ff8b09ebc 100644 --- a/apt-pkg/contrib/cmndline.cc +++ b/apt-pkg/contrib/cmndline.cc @@ -34,6 +34,9 @@ CommandLine::CommandLine(Args *AList,Configuration *Conf) : ArgList(AList), Conf(Conf), FileList(0) { } +CommandLine::CommandLine() : ArgList(NULL), Conf(NULL), FileList(0) +{ +} /*}}}*/ // CommandLine::~CommandLine - Destructor /*{{{*/ // --------------------------------------------------------------------- @@ -47,23 +50,26 @@ CommandLine::~CommandLine() char const * CommandLine::GetCommand(Dispatch const * const Map, unsigned int const argc, char const * const * const argv) { - // if there is a -- on the line there must be the word we search for around it - // as -- marks the end of the options, just not sure if the command can be - // considered an option or not, so accept both + // if there is a -- on the line there must be the word we search for either + // before it (as -- marks the end of the options) or right after it (as we can't + // decide if the command is actually an option, given that in theory, you could + // have parameters named like commands) for (size_t i = 1; i < argc; ++i) { if (strcmp(argv[i], "--") != 0) continue; - ++i; - if (i < argc) + // check if command is before -- + for (size_t k = 1; k < i; ++k) for (size_t j = 0; Map[j].Match != NULL; ++j) - if (strcmp(argv[i], Map[j].Match) == 0) + if (strcmp(argv[k], Map[j].Match) == 0) return Map[j].Match; - i -= 2; - if (i != 0) + // see if the next token after -- is the command + ++i; + if (i < argc) for (size_t j = 0; Map[j].Match != NULL; ++j) if (strcmp(argv[i], Map[j].Match) == 0) return Map[j].Match; + // we found a --, but not a command return NULL; } // no --, so search for the first word matching a command diff --git a/apt-pkg/contrib/cmndline.h b/apt-pkg/contrib/cmndline.h index 143df58b2..58cbaa8c3 100644 --- 
a/apt-pkg/contrib/cmndline.h +++ b/apt-pkg/contrib/cmndline.h @@ -91,6 +91,7 @@ class CommandLine static CommandLine::Args MakeArgs(char ShortOpt, char const *LongOpt, char const *ConfName, unsigned long Flags) APT_CONST; + CommandLine(); CommandLine(Args *AList,Configuration *Conf); ~CommandLine(); }; diff --git a/apt-pkg/contrib/configuration.cc b/apt-pkg/contrib/configuration.cc index 00f6ad0f9..42e35d32a 100644 --- a/apt-pkg/contrib/configuration.cc +++ b/apt-pkg/contrib/configuration.cc @@ -253,8 +253,11 @@ string Configuration::FindDir(const char *Name,const char *Default) const // Configuration::FindVector - Find a vector of values /*{{{*/ // --------------------------------------------------------------------- /* Returns a vector of config values under the given item */ -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR < 13) -vector<string> Configuration::FindVector(const char *Name) const { return FindVector(Name, ""); } +#if APT_PKG_ABI < 413 +vector<string> Configuration::FindVector(const char *Name) const +{ + return FindVector(Name, ""); +} #endif vector<string> Configuration::FindVector(const char *Name, std::string const &Default) const { @@ -626,19 +629,19 @@ string Configuration::Item::FullTag(const Item *Stop) const tag/value. AsSectional enables Sectional parsing.*/ bool ReadConfigFile(Configuration &Conf,const string &FName,bool const &AsSectional, unsigned const &Depth) -{ +{ // Open the stream for reading - ifstream F(FName.c_str(),ios::in); - if (!F != 0) + ifstream F(FName.c_str(),ios::in); + if (F.fail() == true) return _error->Errno("ifstream::ifstream",_("Opening configuration file %s"),FName.c_str()); string LineBuffer; string Stack[100]; unsigned int StackPos = 0; - + // Parser state string ParentTag; - + int CurLine = 0; bool InComment = false; while (F.eof() == false) diff --git a/apt-pkg/contrib/configuration.h b/apt-pkg/contrib/configuration.h index c256139f4..8d7d51037 100644 --- a/apt-pkg/contrib/configuration.h +++ b/apt-pkg/contrib/configuration.h @@ -34,6 +34,8 @@ #include <vector> #include <iostream> +#include <apt-pkg/macros.h> + #ifndef APT_8_CLEANER_HEADERS using std::string; #endif @@ -59,7 +61,7 @@ class Configuration Item *Root; bool ToFree; - + Item *Lookup(Item *Head,const char *S,unsigned long const &Len,bool const &Create); Item *Lookup(const char *Name,const bool &Create); inline const Item *Lookup(const char *Name) const @@ -82,14 +84,16 @@ class Configuration * * \param Name of the parent node * \param Default list of values separated by commas */ +#if APT_PKG_ABI >= 413 + std::vector<std::string> FindVector(const char *Name, std::string const &Default = "") const; + std::vector<std::string> FindVector(std::string const &Name, std::string const &Default = "") const { return FindVector(Name.c_str(), Default); }; +#else std::vector<std::string> FindVector(const char *Name, std::string const &Default) const; std::vector<std::string> FindVector(std::string const &Name, std::string const &Default) const { return FindVector(Name.c_str(), Default); }; -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) - std::vector<std::string> FindVector(const char *Name) const { return FindVector(Name, ""); }; -#else std::vector<std::string> FindVector(const char *Name) const; -#endif std::vector<std::string> FindVector(std::string const &Name) const { return FindVector(Name.c_str(), ""); }; +#endif + int FindI(const char *Name,int const &Default = 0) const; int FindI(std::string const &Name,int const &Default = 0) const {return FindI(Name.c_str(),Default);}; bool 
FindB(const char *Name,bool const &Default = false) const; @@ -129,7 +133,7 @@ class Configuration class MatchAgainstConfig { std::vector<regex_t *> patterns; - void clearPatterns(); + APT_HIDDEN void clearPatterns(); public: MatchAgainstConfig(char const * Config); diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc index b77c7ff7f..8ec868ec0 100644 --- a/apt-pkg/contrib/fileutl.cc +++ b/apt-pkg/contrib/fileutl.cc @@ -47,6 +47,8 @@ #include <signal.h> #include <errno.h> #include <glob.h> +#include <pwd.h> +#include <grp.h> #include <set> #include <algorithm> @@ -63,6 +65,10 @@ #include <endian.h> #include <stdint.h> +#if __gnu_linux__ +#include <sys/prctl.h> +#endif + #include <apti18n.h> /*}}}*/ @@ -656,6 +662,22 @@ string flCombine(string Dir,string File) return Dir + '/' + File; } /*}}}*/ +// flAbsPath - Return the absolute path of the filename /*{{{*/ +// --------------------------------------------------------------------- +/* */ +string flAbsPath(string File) +{ + char *p = realpath(File.c_str(), NULL); + if (p == NULL) + { + _error->Errno("realpath", "flAbsPath failed"); + return ""; + } + std::string AbsPath(p); + free(p); + return AbsPath; +} + /*}}}*/ // SetCloseExec - Set the close on exec flag /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -778,8 +800,9 @@ pid_t ExecFork(std::set<int> KeepFDs) signal(SIGCONT,SIG_DFL); signal(SIGTSTP,SIG_DFL); + long ScOpenMax = sysconf(_SC_OPEN_MAX); // Close all of our FDs - just in case - for (int K = 3; K != sysconf(_SC_OPEN_MAX); K++) + for (int K = 3; K != ScOpenMax; K++) { if(KeepFDs.find(K) == KeepFDs.end()) fcntl(K,F_SETFD,FD_CLOEXEC); @@ -835,6 +858,42 @@ bool ExecWait(pid_t Pid,const char *Name,bool Reap) return true; } /*}}}*/ +// StartsWithGPGClearTextSignature - Check if a file is Pgp/GPG clearsigned /*{{{*/ +bool StartsWithGPGClearTextSignature(string const &FileName) +{ + static const char* SIGMSG = "-----BEGIN PGP SIGNED MESSAGE-----\n"; + char buffer[strlen(SIGMSG)+1]; + FILE* gpg = fopen(FileName.c_str(), "r"); + if (gpg == NULL) + return false; + + char const * const test = fgets(buffer, sizeof(buffer), gpg); + fclose(gpg); + if (test == NULL || strcmp(buffer, SIGMSG) != 0) + return false; + + return true; +} + /*}}}*/ +// ChangeOwnerAndPermissionOfFile - set file attributes to requested values /*{{{*/ +bool ChangeOwnerAndPermissionOfFile(char const * const requester, char const * const file, char const * const user, char const * const group, mode_t const mode) +{ + if (strcmp(file, "/dev/null") == 0) + return true; + bool Res = true; + if (getuid() == 0 && strlen(user) != 0 && strlen(group) != 0) // if we aren't root, we can't chown, so don't try it + { + // ensure the file is owned by root and has good permissions + struct passwd const * const pw = getpwnam(user); + struct group const * const gr = getgrnam(group); + if (pw != NULL && gr != NULL && chown(file, pw->pw_uid, gr->gr_gid) != 0) + Res &= _error->WarningE(requester, "chown to %s:%s of file %s failed", user, group, file); + } + if (chmod(file, mode) != 0) + Res &= _error->WarningE(requester, "chmod 0%o of file %s failed", mode, file); + return Res; +} + /*}}}*/ class FileFdPrivate { /*{{{*/ public: @@ -853,7 +912,7 @@ class FileFdPrivate { /*{{{*/ bool eof; bool compressing; - LZMAFILE() : file(NULL), eof(false), compressing(false) {} + LZMAFILE() : file(NULL), eof(false), compressing(false) { buffer[0] = '\0'; } ~LZMAFILE() { if (compressing == true) { @@ -1241,7 +1300,8 @@ bool 
FileFd::OpenInternDescriptor(unsigned int const Mode, APT::Configuration::C if (d->lzma == NULL) d->lzma = new FileFdPrivate::LZMAFILE; d->lzma->file = (FILE*) compress_struct; - d->lzma->stream = LZMA_STREAM_INIT; + lzma_stream tmp_stream = LZMA_STREAM_INIT; + d->lzma->stream = tmp_stream; if ((Mode & ReadWrite) == ReadWrite) return FileFdError("ReadWrite mode is not supported for file %s", FileName.c_str()); @@ -1481,7 +1541,7 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual) int err; char const * const errmsg = BZ2_bzerror(d->bz2, &err); if (err != BZ_IO_ERROR) - return FileFdError("BZ2_bzread: %s (%d: %s)", _("Read error"), err, errmsg); + return FileFdError("BZ2_bzread: %s %s (%d: %s)", FileName.c_str(), _("Read error"), err, errmsg); } #endif #ifdef HAVE_LZMA @@ -1797,7 +1857,8 @@ static bool StatFileFd(char const * const msg, int const iFd, std::string const // higher-level code will generate more meaningful messages, // even translated this would be meaningless for users return _error->Errno("fstat", "Unable to determine %s for fd %i", msg, iFd); - ispipe = S_ISFIFO(Buf.st_mode); + if (FileName.empty() == false) + ispipe = S_ISFIFO(Buf.st_mode); } // for compressor pipes st_size is undefined and at 'best' zero @@ -1911,7 +1972,6 @@ bool FileFd::Close() { if ((Flags & Compressed) != Compressed && iFd > 0 && close(iFd) != 0) Res &= _error->Errno("close",_("Problem closing the file %s"), FileName.c_str()); - if (d != NULL) { Res &= d->CloseDown(FileName); @@ -1991,10 +2051,7 @@ APT_DEPRECATED gzFile FileFd::gzFd() { #endif } - -// Glob - wrapper around "glob()" /*{{{*/ -// --------------------------------------------------------------------- -/* */ +// Glob - wrapper around "glob()" /*{{{*/ std::vector<std::string> Glob(std::string const &pattern, int flags) { std::vector<std::string> result; @@ -2020,8 +2077,7 @@ std::vector<std::string> Glob(std::string const &pattern, int flags) return result; } /*}}}*/ - -std::string GetTempDir() +std::string GetTempDir() /*{{{*/ { const char *tmpdir = getenv("TMPDIR"); @@ -2030,21 +2086,202 @@ std::string GetTempDir() tmpdir = P_tmpdir; #endif - // check that tmpdir is set and exists struct stat st; - if (!tmpdir || strlen(tmpdir) == 0 || stat(tmpdir, &st) != 0) + if (!tmpdir || strlen(tmpdir) == 0 || // tmpdir is set + stat(tmpdir, &st) != 0 || (st.st_mode & S_IFDIR) == 0 || // exists and is directory + access(tmpdir, R_OK | W_OK | X_OK) != 0 // current user has rwx access to directory + ) tmpdir = "/tmp"; return string(tmpdir); } + /*}}}*/ +FileFd* GetTempFile(std::string const &Prefix, bool ImmediateUnlink) /*{{{*/ +{ + char fn[512]; + FileFd *Fd = new FileFd(); + + std::string tempdir = GetTempDir(); + snprintf(fn, sizeof(fn), "%s/%s.XXXXXX", + tempdir.c_str(), Prefix.c_str()); + int fd = mkstemp(fn); + if(ImmediateUnlink) + unlink(fn); + if (fd < 0) + { + _error->Errno("GetTempFile",_("Unable to mkstemp %s"), fn); + return NULL; + } + if (!Fd->OpenDescriptor(fd, FileFd::WriteOnly, FileFd::None, true)) + { + _error->Errno("GetTempFile",_("Unable to write to %s"),fn); + return NULL; + } -bool Rename(std::string From, std::string To) + return Fd; +} + /*}}}*/ +bool Rename(std::string From, std::string To) /*{{{*/ { if (rename(From.c_str(),To.c_str()) != 0) { _error->Error(_("rename failed, %s (%s -> %s)."),strerror(errno), From.c_str(),To.c_str()); return false; - } + } + return true; +} + /*}}}*/ +bool Popen(const char* Args[], FileFd &Fd, pid_t &Child, FileFd::OpenMode Mode)/*{{{*/ +{ + int fd; + if (Mode != 
FileFd::ReadOnly && Mode != FileFd::WriteOnly) + return _error->Error("Popen supports ReadOnly (x)or WriteOnly mode only"); + + int Pipe[2] = {-1, -1}; + if(pipe(Pipe) != 0) + return _error->Errno("pipe", _("Failed to create subprocess IPC")); + + std::set<int> keep_fds; + keep_fds.insert(Pipe[0]); + keep_fds.insert(Pipe[1]); + Child = ExecFork(keep_fds); + if(Child < 0) + return _error->Errno("fork", "Failed to fork"); + if(Child == 0) + { + if(Mode == FileFd::ReadOnly) + { + close(Pipe[0]); + fd = Pipe[1]; + } + else if(Mode == FileFd::WriteOnly) + { + close(Pipe[1]); + fd = Pipe[0]; + } + + if(Mode == FileFd::ReadOnly) + { + dup2(fd, 1); + dup2(fd, 2); + } else if(Mode == FileFd::WriteOnly) + dup2(fd, 0); + + execv(Args[0], (char**)Args); + _exit(100); + } + if(Mode == FileFd::ReadOnly) + { + close(Pipe[1]); + fd = Pipe[0]; + } else if(Mode == FileFd::WriteOnly) + { + close(Pipe[0]); + fd = Pipe[1]; + } + Fd.OpenDescriptor(fd, Mode, FileFd::None, true); + return true; } + /*}}}*/ +bool DropPrivileges() /*{{{*/ +{ + if(_config->FindB("Debug::NoDropPrivs", false) == true) + return true; + +#if __gnu_linux__ +#if defined(PR_SET_NO_NEW_PRIVS) && ( PR_SET_NO_NEW_PRIVS != 38 ) +#error "PR_SET_NO_NEW_PRIVS is defined, but with a different value than expected!" +#endif + // see prctl(2), needs linux3.5 at runtime - magic constant to avoid it at buildtime + int ret = prctl(38, 1, 0, 0, 0); + // ignore EINVAL - kernel is too old to understand the option + if(ret < 0 && errno != EINVAL) + _error->Warning("PR_SET_NO_NEW_PRIVS failed with %i", ret); +#endif + + // empty setting disables privilege dropping - this also ensures + // backward compatibility, see bug #764506 + const std::string toUser = _config->Find("APT::Sandbox::User"); + if (toUser.empty()) + return true; + + // uid will be 0 in the end, but gid might be different anyway + uid_t const old_uid = getuid(); + gid_t const old_gid = getgid(); + + if (old_uid != 0) + return true; + + struct passwd *pw = getpwnam(toUser.c_str()); + if (pw == NULL) + return _error->Error("No user %s, can not drop rights", toUser.c_str()); + + // Do not change the order here, it might break things + if (setgroups(1, &pw->pw_gid)) + return _error->Errno("setgroups", "Failed to setgroups"); + + if (setegid(pw->pw_gid) != 0) + return _error->Errno("setegid", "Failed to setegid"); + + if (setgid(pw->pw_gid) != 0) + return _error->Errno("setgid", "Failed to setgid"); + + if (setuid(pw->pw_uid) != 0) + return _error->Errno("setuid", "Failed to setuid"); + + // the seteuid() is probably uneeded (at least thats what the linux + // man-page says about setuid(2)) but we cargo culted it anyway + if (seteuid(pw->pw_uid) != 0) + return _error->Errno("seteuid", "Failed to seteuid"); + + // Verify that the user has only a single group, and the correct one + gid_t groups[1]; + if (getgroups(1, groups) != 1) + return _error->Errno("getgroups", "Could not get new groups"); + if (groups[0] != pw->pw_gid) + return _error->Error("Could not switch group"); + + // Verify that gid, egid, uid, and euid changed + if (getgid() != pw->pw_gid) + return _error->Error("Could not switch group"); + if (getegid() != pw->pw_gid) + return _error->Error("Could not switch effective group"); + if (getuid() != pw->pw_uid) + return _error->Error("Could not switch user"); + if (geteuid() != pw->pw_uid) + return _error->Error("Could not switch effective user"); + +#ifdef HAVE_GETRESUID + // verify that the saved set-user-id was changed as well + uid_t ruid = 0; + uid_t euid = 0; + uid_t suid = 0; + if 
(getresuid(&ruid, &euid, &suid)) + return _error->Errno("getresuid", "Could not get saved set-user-ID"); + if (suid != pw->pw_uid) + return _error->Error("Could not switch saved set-user-ID"); +#endif + +#ifdef HAVE_GETRESGID + // verify that the saved set-group-id was changed as well + gid_t rgid = 0; + gid_t egid = 0; + gid_t sgid = 0; + if (getresgid(&rgid, &egid, &sgid)) + return _error->Errno("getresuid", "Could not get saved set-group-ID"); + if (sgid != pw->pw_gid) + return _error->Error("Could not switch saved set-group-ID"); +#endif + + // Check that uid and gid changes do not work anymore + if (pw->pw_gid != old_gid && (setgid(old_gid) != -1 || setegid(old_gid) != -1)) + return _error->Error("Could restore a gid to root, privilege dropping did not work"); + + if (pw->pw_uid != old_uid && (setuid(old_uid) != -1 || seteuid(old_uid) != -1)) + return _error->Error("Could restore a uid to root, privilege dropping did not work"); + + return true; +} + /*}}}*/ diff --git a/apt-pkg/contrib/fileutl.h b/apt-pkg/contrib/fileutl.h index cc1a98eae..97cb05c56 100644 --- a/apt-pkg/contrib/fileutl.h +++ b/apt-pkg/contrib/fileutl.h @@ -85,7 +85,9 @@ class FileFd bool Skip(unsigned long long To); bool Truncate(unsigned long long To); unsigned long long Tell(); + // the size of the file content (compressed files will be uncompressed first) unsigned long long Size(); + // the size of the file itself unsigned long long FileSize(); time_t ModificationTime(); @@ -168,6 +170,8 @@ time_t GetModificationTime(std::string const &Path); bool Rename(std::string From, std::string To); std::string GetTempDir(); +FileFd* GetTempFile(std::string const &Prefix = "", + bool ImmediateUnlink = true); /** \brief Ensure the existence of the given Path * @@ -191,6 +195,34 @@ pid_t ExecFork(std::set<int> keep_fds); void MergeKeepFdsFromConfiguration(std::set<int> &keep_fds); bool ExecWait(pid_t Pid,const char *Name,bool Reap = false); +// check if the given file starts with a PGP cleartext signature +bool StartsWithGPGClearTextSignature(std::string const &FileName); + +/** change file attributes to requested known good values + * + * The method skips the user:group setting if not root. + * + * @param requester is printed as functionname in error cases + * @param file is the file to be modified + * @param user is the (new) owner of the file, e.g. _apt + * @param group is the (new) group owning the file, e.g. root + * @param mode is the access mode of the file, e.g. 0644 + */ +bool ChangeOwnerAndPermissionOfFile(char const * const requester, char const * const file, char const * const user, char const * const group, mode_t const mode); + +/** + * \brief Drop privileges + * + * Drop the privileges to the user _apt (or the one specified in + * APT::Sandbox::User). This does not set the supplementary group + * ids up correctly, it only uses the default group. Also prevent + * the process from gaining any new privileges afterwards, at least + * on Linux. 
+ * + * \return true on success, false on failure with _error set + */ +bool DropPrivileges(); + // File string manipulators std::string flNotDir(std::string File); std::string flNotFile(std::string File); @@ -198,7 +230,23 @@ std::string flNoLink(std::string File); std::string flExtension(std::string File); std::string flCombine(std::string Dir,std::string File); +/** \brief Takes a file path and returns the absolute path + */ +std::string flAbsPath(std::string File); + // simple c++ glob std::vector<std::string> Glob(std::string const &pattern, int flags=0); +/** \brief Popen() implementation that execv() instead of using a shell + * + * \param Args the execv style command to run + * \param FileFd is a referenz to the FileFd to use for input or output + * \param Child a reference to the integer that stores the child pid + * Note that you must call ExecWait() or similar to cleanup + * \param Mode is either FileFd::ReadOnly or FileFd::WriteOnly + * \return true on success, false on failure with _error set + */ +bool Popen(const char* Args[], FileFd &Fd, pid_t &Child, FileFd::OpenMode Mode); + + #endif diff --git a/apt-pkg/contrib/gpgv.cc b/apt-pkg/contrib/gpgv.cc index f24dd9640..9d798cca9 100644 --- a/apt-pkg/contrib/gpgv.cc +++ b/apt-pkg/contrib/gpgv.cc @@ -32,50 +32,30 @@ static char * GenerateTemporaryFileTemplate(const char *basename) /*{{{*/ /*}}}*/ // ExecGPGV - returns the command needed for verify /*{{{*/ // --------------------------------------------------------------------- -/* Generating the commandline for calling gpgv is somehow complicated as +/* Generating the commandline for calling gpg is somehow complicated as we need to add multiple keyrings and user supplied options. - Also, as gpgv has no options to enforce a certain reduced style of + Also, as gpg has no options to enforce a certain reduced style of clear-signed files (=the complete content of the file is signed and the content isn't encoded) we do a divide and conquer approach here - and split up the clear-signed file in message and signature for gpgv + and split up the clear-signed file in message and signature for gpg. + And as a cherry on the cake, we use our apt-key wrapper to do part + of the lifting in regards to merging keyrings. Fun for the whole family. 
*/ void ExecGPGV(std::string const &File, std::string const &FileGPG, int const &statusfd, int fd[2]) { #define EINTERNAL 111 - std::string const gpgvpath = _config->Find("Dir::Bin::gpg", "/usr/bin/gpgv"); - // FIXME: remove support for deprecated APT::GPGV setting - std::string const trustedFile = _config->Find("APT::GPGV::TrustedKeyring", _config->FindFile("Dir::Etc::Trusted")); - std::string const trustedPath = _config->FindDir("Dir::Etc::TrustedParts"); + std::string const aptkey = _config->FindFile("Dir::Bin::apt-key", "/usr/bin/apt-key"); bool const Debug = _config->FindB("Debug::Acquire::gpgv", false); - if (Debug == true) - { - std::clog << "gpgv path: " << gpgvpath << std::endl; - std::clog << "Keyring file: " << trustedFile << std::endl; - std::clog << "Keyring path: " << trustedPath << std::endl; - } - - std::vector<std::string> keyrings; - if (DirectoryExists(trustedPath)) - keyrings = GetListOfFilesInDir(trustedPath, "gpg", false, true); - if (RealFileExists(trustedFile) == true) - keyrings.push_back(trustedFile); - std::vector<const char *> Args; - Args.reserve(30); - - if (keyrings.empty() == true) - { - // TRANSLATOR: %s is the trusted keyring parts directory - ioprintf(std::cerr, _("No keyring installed in %s."), - _config->FindDir("Dir::Etc::TrustedParts").c_str()); - exit(EINTERNAL); - } + Args.reserve(10); - Args.push_back(gpgvpath.c_str()); - Args.push_back("--ignore-time-conflict"); + Args.push_back(aptkey.c_str()); + Args.push_back("--quiet"); + Args.push_back("--readonly"); + Args.push_back("verify"); char statusfdstr[10]; if (statusfd != -1) @@ -85,13 +65,6 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG, Args.push_back(statusfdstr); } - for (std::vector<std::string>::const_iterator K = keyrings.begin(); - K != keyrings.end(); ++K) - { - Args.push_back("--keyring"); - Args.push_back(K->c_str()); - } - Configuration::Item const *Opts; Opts = _config->Tree("Acquire::gpgv::Options"); if (Opts != 0) @@ -160,7 +133,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG, if (Debug == true) { - std::clog << "Preparing to exec: " << gpgvpath; + std::clog << "Preparing to exec: "; for (std::vector<const char *>::const_iterator a = Args.begin(); *a != NULL; ++a) std::clog << " " << *a; std::clog << std::endl; @@ -168,7 +141,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG, if (statusfd != -1) { - int const nullfd = open("/dev/null", O_RDONLY); + int const nullfd = open("/dev/null", O_WRONLY); close(fd[0]); // Redirect output to /dev/null; we read from the status fd if (statusfd != STDOUT_FILENO) @@ -185,7 +158,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG, if (releaseSignature == DETACHED) { - execvp(gpgvpath.c_str(), (char **) &Args[0]); + execvp(Args[0], (char **) &Args[0]); ioprintf(std::cerr, "Couldn't execute %s to check %s", Args[0], File.c_str()); exit(EINTERNAL); } @@ -205,7 +178,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG, { if (statusfd != -1) dup2(fd[1], statusfd); - execvp(gpgvpath.c_str(), (char **) &Args[0]); + execvp(Args[0], (char **) &Args[0]); ioprintf(std::cerr, "Couldn't execute %s to check %s", Args[0], File.c_str()); UNLINK_EXIT(EINTERNAL); } @@ -216,7 +189,7 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG, { if (errno == EINTR) continue; - ioprintf(std::cerr, _("Waited for %s but it wasn't there"), "gpgv"); + ioprintf(std::cerr, _("Waited for %s but it wasn't there"), "apt-key"); UNLINK_EXIT(EINTERNAL); } #undef 
UNLINK_EXIT @@ -229,14 +202,14 @@ void ExecGPGV(std::string const &File, std::string const &FileGPG, // check if it exit'ed normally … if (WIFEXITED(Status) == false) { - ioprintf(std::cerr, _("Sub-process %s exited unexpectedly"), "gpgv"); + ioprintf(std::cerr, _("Sub-process %s exited unexpectedly"), "apt-key"); exit(EINTERNAL); } // … and with a good exit code if (WEXITSTATUS(Status) != 0) { - ioprintf(std::cerr, _("Sub-process %s returned an error code (%u)"), "gpgv", WEXITSTATUS(Status)); + ioprintf(std::cerr, _("Sub-process %s returned an error code (%u)"), "apt-key", WEXITSTATUS(Status)); exit(WEXITSTATUS(Status)); } diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc index 1fce0d75f..0fa443b4a 100644 --- a/apt-pkg/contrib/hashes.cc +++ b/apt-pkg/contrib/hashes.cc @@ -27,9 +27,9 @@ #include <iostream> /*}}}*/ -const char* HashString::_SupportedHashes[] = +const char * HashString::_SupportedHashes[] = { - "SHA512", "SHA256", "SHA1", "MD5Sum", NULL + "SHA512", "SHA256", "SHA1", "MD5Sum", "Checksum-FileSize", NULL }; HashString::HashString() @@ -42,11 +42,16 @@ HashString::HashString(std::string Type, std::string Hash) : Type(Type), Hash(Ha HashString::HashString(std::string StringedHash) /*{{{*/ { - // legacy: md5sum without "MD5Sum:" prefix - if (StringedHash.find(":") == std::string::npos && StringedHash.size() == 32) + if (StringedHash.find(":") == std::string::npos) { - Type = "MD5Sum"; - Hash = StringedHash; + // legacy: md5sum without "MD5Sum:" prefix + if (StringedHash.size() == 32) + { + Type = "MD5Sum"; + Hash = StringedHash; + } + if(_config->FindB("Debug::Hashes",false) == true) + std::clog << "HashString(string): invalid StringedHash " << StringedHash << std::endl; return; } std::string::size_type pos = StringedHash.find(":"); @@ -82,58 +87,207 @@ std::string HashString::GetHashForFile(std::string filename) const /*{{{*/ std::string fileHash; FileFd Fd(filename, FileFd::ReadOnly); - if(Type == "MD5Sum") + if(strcasecmp(Type.c_str(), "MD5Sum") == 0) { MD5Summation MD5; MD5.AddFD(Fd); fileHash = (std::string)MD5.Result(); } - else if (Type == "SHA1") + else if (strcasecmp(Type.c_str(), "SHA1") == 0) { SHA1Summation SHA1; SHA1.AddFD(Fd); fileHash = (std::string)SHA1.Result(); } - else if (Type == "SHA256") + else if (strcasecmp(Type.c_str(), "SHA256") == 0) { SHA256Summation SHA256; SHA256.AddFD(Fd); fileHash = (std::string)SHA256.Result(); } - else if (Type == "SHA512") + else if (strcasecmp(Type.c_str(), "SHA512") == 0) { SHA512Summation SHA512; SHA512.AddFD(Fd); fileHash = (std::string)SHA512.Result(); } + else if (strcasecmp(Type.c_str(), "Checksum-FileSize") == 0) + strprintf(fileHash, "%llu", Fd.FileSize()); Fd.Close(); return fileHash; } /*}}}*/ -const char** HashString::SupportedHashes() +const char** HashString::SupportedHashes() /*{{{*/ { return _SupportedHashes; } - -APT_PURE bool HashString::empty() const + /*}}}*/ +APT_PURE bool HashString::empty() const /*{{{*/ { return (Type.empty() || Hash.empty()); } + /*}}}*/ +std::string HashString::toStr() const /*{{{*/ +{ + return Type + ":" + Hash; +} + /*}}}*/ +APT_PURE bool HashString::operator==(HashString const &other) const /*{{{*/ +{ + return (strcasecmp(Type.c_str(), other.Type.c_str()) == 0 && Hash == other.Hash); +} +APT_PURE bool HashString::operator!=(HashString const &other) const +{ + return !(*this == other); +} + /*}}}*/ + +bool HashStringList::usable() const /*{{{*/ +{ + if (empty() == true) + return false; + std::string const forcedType = _config->Find("Acquire::ForceHash", ""); + if 
(forcedType.empty() == true) + { + // FileSize alone isn't usable + for (std::vector<HashString>::const_iterator hs = list.begin(); hs != list.end(); ++hs) + if (hs->HashType() != "Checksum-FileSize") + return true; + return false; + } + return find(forcedType) != NULL; +} + /*}}}*/ +HashString const * HashStringList::find(char const * const type) const /*{{{*/ +{ + if (type == NULL || type[0] == '\0') + { + std::string const forcedType = _config->Find("Acquire::ForceHash", ""); + if (forcedType.empty() == false) + return find(forcedType.c_str()); + for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t) + for (std::vector<HashString>::const_iterator hs = list.begin(); hs != list.end(); ++hs) + if (strcasecmp(hs->HashType().c_str(), *t) == 0) + return &*hs; + return NULL; + } + for (std::vector<HashString>::const_iterator hs = list.begin(); hs != list.end(); ++hs) + if (strcasecmp(hs->HashType().c_str(), type) == 0) + return &*hs; + return NULL; +} + /*}}}*/ +bool HashStringList::supported(char const * const type) /*{{{*/ +{ + for (char const * const * t = HashString::SupportedHashes(); *t != NULL; ++t) + if (strcasecmp(*t, type) == 0) + return true; + return false; +} + /*}}}*/ +bool HashStringList::push_back(const HashString &hashString) /*{{{*/ +{ + if (hashString.HashType().empty() == true || + hashString.HashValue().empty() == true || + supported(hashString.HashType().c_str()) == false) + return false; + + // ensure that each type is added only once + HashString const * const hs = find(hashString.HashType().c_str()); + if (hs != NULL) + return *hs == hashString; + + list.push_back(hashString); + return true; +} + /*}}}*/ +bool HashStringList::VerifyFile(std::string filename) const /*{{{*/ +{ + if (usable() == false) + return false; -std::string HashString::toStr() const + Hashes hashes(*this); + FileFd file(filename, FileFd::ReadOnly); + HashString const * const hsf = find("Checksum-FileSize"); + if (hsf != NULL) + { + std::string fileSize; + strprintf(fileSize, "%llu", file.FileSize()); + if (hsf->HashValue() != fileSize) + return false; + } + hashes.AddFD(file); + HashStringList const hsl = hashes.GetHashStringList(); + return hsl == *this; +} + /*}}}*/ +bool HashStringList::operator==(HashStringList const &other) const /*{{{*/ { - return Type + std::string(":") + Hash; + std::string const forcedType = _config->Find("Acquire::ForceHash", ""); + if (forcedType.empty() == false) + { + HashString const * const hs = find(forcedType); + HashString const * const ohs = other.find(forcedType); + if (hs == NULL || ohs == NULL) + return false; + return *hs == *ohs; + } + short matches = 0; + for (const_iterator hs = begin(); hs != end(); ++hs) + { + HashString const * const ohs = other.find(hs->HashType()); + if (ohs == NULL) + continue; + if (*hs != *ohs) + return false; + ++matches; + } + if (matches == 0) + return false; + return true; } +bool HashStringList::operator!=(HashStringList const &other) const +{ + return !(*this == other); +} + /*}}}*/ + +// PrivateHashes /*{{{*/ +class PrivateHashes { +public: + unsigned long long FileSize; + unsigned int CalcHashes; -// Hashes::AddFD - Add the contents of the FD /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5, - bool const addSHA1, bool const addSHA256, bool const addSHA512) + PrivateHashes(unsigned int const CalcHashes) : FileSize(0), CalcHashes(CalcHashes) {} +}; + /*}}}*/ +// Hashes::Add* - Add the 
contents of data or FD /*{{{*/ +bool Hashes::Add(const unsigned char * const Data, unsigned long long const Size) +{ + bool Res = true; +APT_IGNORE_DEPRECATED_PUSH + if ((d->CalcHashes & MD5SUM) == MD5SUM) + Res &= MD5.Add(Data, Size); + if ((d->CalcHashes & SHA1SUM) == SHA1SUM) + Res &= SHA1.Add(Data, Size); + if ((d->CalcHashes & SHA256SUM) == SHA256SUM) + Res &= SHA256.Add(Data, Size); + if ((d->CalcHashes & SHA512SUM) == SHA512SUM) + Res &= SHA512.Add(Data, Size); +APT_IGNORE_DEPRECATED_POP + d->FileSize += Size; + return Res; +} +bool Hashes::Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes) +{ + d->CalcHashes = Hashes; + return Add(Data, Size); +} +bool Hashes::AddFD(int const Fd,unsigned long long Size) { unsigned char Buf[64*64]; - bool const ToEOF = (Size == 0); + bool const ToEOF = (Size == UntilEOF); while (Size != 0 || ToEOF) { unsigned long long n = sizeof(Buf); @@ -144,19 +298,17 @@ bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5, if (ToEOF && Res == 0) // EOF break; Size -= Res; - if (addMD5 == true) - MD5.Add(Buf,Res); - if (addSHA1 == true) - SHA1.Add(Buf,Res); - if (addSHA256 == true) - SHA256.Add(Buf,Res); - if (addSHA512 == true) - SHA512.Add(Buf,Res); + if (Add(Buf, Res) == false) + return false; } return true; } -bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, bool const addMD5, - bool const addSHA1, bool const addSHA256, bool const addSHA512) +bool Hashes::AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes) +{ + d->CalcHashes = Hashes; + return AddFD(Fd, Size); +} +bool Hashes::AddFD(FileFd &Fd,unsigned long long Size) { unsigned char Buf[64*64]; bool const ToEOF = (Size == 0); @@ -175,15 +327,49 @@ bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, bool const addMD5, else if (a == 0) // EOF break; Size -= a; - if (addMD5 == true) - MD5.Add(Buf, a); - if (addSHA1 == true) - SHA1.Add(Buf, a); - if (addSHA256 == true) - SHA256.Add(Buf, a); - if (addSHA512 == true) - SHA512.Add(Buf, a); + if (Add(Buf, a) == false) + return false; } return true; } +bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes) +{ + d->CalcHashes = Hashes; + return AddFD(Fd, Size); +} /*}}}*/ +HashStringList Hashes::GetHashStringList() +{ + HashStringList hashes; +APT_IGNORE_DEPRECATED_PUSH + if ((d->CalcHashes & MD5SUM) == MD5SUM) + hashes.push_back(HashString("MD5Sum", MD5.Result().Value())); + if ((d->CalcHashes & SHA1SUM) == SHA1SUM) + hashes.push_back(HashString("SHA1", SHA1.Result().Value())); + if ((d->CalcHashes & SHA256SUM) == SHA256SUM) + hashes.push_back(HashString("SHA256", SHA256.Result().Value())); + if ((d->CalcHashes & SHA512SUM) == SHA512SUM) + hashes.push_back(HashString("SHA512", SHA512.Result().Value())); +APT_IGNORE_DEPRECATED_POP + std::string SizeStr; + strprintf(SizeStr, "%llu", d->FileSize); + hashes.push_back(HashString("Checksum-FileSize", SizeStr)); + return hashes; +} +APT_IGNORE_DEPRECATED_PUSH +Hashes::Hashes() { d = new PrivateHashes(~0); } +Hashes::Hashes(unsigned int const Hashes) { d = new PrivateHashes(Hashes); } +Hashes::Hashes(HashStringList const &Hashes) { + unsigned int calcHashes = Hashes.usable() ? 
0 : ~0; + if (Hashes.find("MD5Sum") != NULL) + calcHashes |= MD5SUM; + if (Hashes.find("SHA1") != NULL) + calcHashes |= SHA1SUM; + if (Hashes.find("SHA256") != NULL) + calcHashes |= SHA256SUM; + if (Hashes.find("SHA512") != NULL) + calcHashes |= SHA512SUM; + d = new PrivateHashes(calcHashes); +} +Hashes::~Hashes() { delete d; } +APT_IGNORE_DEPRECATED_POP diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h index 5cd1af03b..ac13c8ace 100644 --- a/apt-pkg/contrib/hashes.h +++ b/apt-pkg/contrib/hashes.h @@ -17,6 +17,7 @@ #include <apt-pkg/md5.h> #include <apt-pkg/sha1.h> #include <apt-pkg/sha2.h> +#include <apt-pkg/macros.h> #include <cstring> #include <string> @@ -41,7 +42,7 @@ class HashString protected: std::string Type; std::string Hash; - static const char* _SupportedHashes[10]; + static const char * _SupportedHashes[10]; // internal helper std::string GetHashForFile(std::string filename) const; @@ -52,7 +53,10 @@ class HashString HashString(); // get hash type used - std::string HashType() { return Type; }; + std::string HashType() const { return Type; }; + std::string HashValue() const { return Hash; }; + APT_DEPRECATED std::string HashType() { return Type; }; + APT_DEPRECATED std::string HashValue() { return Hash; }; // verify the given filename against the currently loaded hash bool VerifyFile(std::string filename) const; @@ -64,35 +68,168 @@ class HashString // helper std::string toStr() const; // convert to str as "type:hash" bool empty() const; + bool operator==(HashString const &other) const; + bool operator!=(HashString const &other) const; // return the list of hashes we support static APT_CONST const char** SupportedHashes(); }; -class Hashes +class HashStringList { public: + /** find best hash if no specific one is requested + * + * @param type of the checksum to return, can be \b NULL + * @return If type is \b NULL (or the empty string) it will + * return the 'best' hash; otherwise the hash which was + * specifically requested. If no hash is found \b NULL will be returned. + */ + HashString const * find(char const * const type) const; + HashString const * find(std::string const &type) const { return find(type.c_str()); } + /** check if the given hash type is supported + * + * @param type to check + * @return true if supported, otherwise false + */ + static APT_PURE bool supported(char const * const type); + /** add the given #HashString to the list + * + * @param hashString to add + * @return true if the hash is added because it is supported and + * not already a different hash of the same type included, otherwise false + */ + bool push_back(const HashString &hashString); + /** @return size of the list of HashStrings */ + size_t size() const { return list.size(); } + + /** take the 'best' hash and verify file with it + * + * @param filename to verify + * @return true if the file matches the hashsum, otherwise false + */ + bool VerifyFile(std::string filename) const; - MD5Summation MD5; - SHA1Summation SHA1; - SHA256Summation SHA256; - SHA512Summation SHA512; - - inline bool Add(const unsigned char *Data,unsigned long long Size) + /** is the list empty ? + * + * @return \b true if the list is empty, otherwise \b false + */ + bool empty() const { return list.empty(); } + + /** has the list at least one good entry + * + * similar to #empty, but handles forced hashes. 
+ * + * @return if no hash is forced, same result as #empty, + * if one is forced \b true if this has is available, \b false otherwise + */ + bool usable() const; + + typedef std::vector<HashString>::const_iterator const_iterator; + + /** iterator to the first element */ + const_iterator begin() const { return list.begin(); } + + /** iterator to the end element */ + const_iterator end() const { return list.end(); } + + /** start fresh with a clear list */ + void clear() { list.clear(); } + + /** compare two HashStringList for similarity. + * + * Two lists are similar if at least one hashtype is in both lists + * and the hashsum matches. All hashes are checked by default, + * if one doesn't match false is returned regardless of how many + * matched before. If a hash is forced, only this hash is compared, + * all others are ignored. + */ + bool operator==(HashStringList const &other) const; + bool operator!=(HashStringList const &other) const; + + HashStringList() {} + + // simplifying API-compatibility constructors + HashStringList(std::string const &hash) { + if (hash.empty() == false) + list.push_back(HashString(hash)); + } + HashStringList(char const * const hash) { + if (hash != NULL && hash[0] != '\0') + list.push_back(HashString(hash)); + } + + private: + std::vector<HashString> list; +}; + +class PrivateHashes; +class Hashes +{ + PrivateHashes *d; + + public: + /* those will disappear in the future as it is hard to add new ones this way. + * Use Add* to build the results and get them via GetHashStringList() instead */ + APT_DEPRECATED MD5Summation MD5; + APT_DEPRECATED SHA1Summation SHA1; + APT_DEPRECATED SHA256Summation SHA256; + APT_DEPRECATED SHA512Summation SHA512; + + static const int UntilEOF = 0; + + bool Add(const unsigned char * const Data, unsigned long long const Size); + APT_DEPRECATED bool Add(const unsigned char * const Data, unsigned long long const Size, unsigned int const Hashes); + inline bool Add(const char * const Data) + {return Add((unsigned char const * const)Data,strlen(Data));}; + inline bool Add(const unsigned char * const Beg,const unsigned char * const End) + {return Add(Beg,End-Beg);}; + + enum SupportedHashes { MD5SUM = (1 << 0), SHA1SUM = (1 << 1), SHA256SUM = (1 << 2), + SHA512SUM = (1 << 3) }; + bool AddFD(int const Fd,unsigned long long Size = 0); + APT_DEPRECATED bool AddFD(int const Fd,unsigned long long Size, unsigned int const Hashes); + bool AddFD(FileFd &Fd,unsigned long long Size = 0); + APT_DEPRECATED bool AddFD(FileFd &Fd,unsigned long long Size, unsigned int const Hashes); + + HashStringList GetHashStringList(); + +APT_IGNORE_DEPRECATED_PUSH + /** create a Hashes object to calculate all supported hashes + * + * If ALL is too much, you can limit which Hashes are calculated + * with the following other constructors which mention explicitly + * which hashes to generate. 
*/ + Hashes(); + /** @param Hashes bitflag composed of #SupportedHashes */ + Hashes(unsigned int const Hashes); + /** @param Hashes is a list of hashes */ + Hashes(HashStringList const &Hashes); + virtual ~Hashes(); +APT_IGNORE_DEPRECATED_POP + + private: + APT_HIDDEN APT_CONST inline unsigned int boolsToFlag(bool const addMD5, bool const addSHA1, bool const addSHA256, bool const addSHA512) { - return MD5.Add(Data,Size) && SHA1.Add(Data,Size) && SHA256.Add(Data,Size) && SHA512.Add(Data,Size); + unsigned int Hashes = ~0; + if (addMD5 == false) Hashes &= ~MD5SUM; + if (addSHA1 == false) Hashes &= ~SHA1SUM; + if (addSHA256 == false) Hashes &= ~SHA256SUM; + if (addSHA512 == false) Hashes &= ~SHA512SUM; + return Hashes; + } + + public: +APT_IGNORE_DEPRECATED_PUSH + APT_DEPRECATED bool AddFD(int const Fd, unsigned long long Size, bool const addMD5, + bool const addSHA1, bool const addSHA256, bool const addSHA512) { + return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512)); + }; + APT_DEPRECATED bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5, + bool const addSHA1, bool const addSHA256, bool const addSHA512) { + return AddFD(Fd, Size, boolsToFlag(addMD5, addSHA1, addSHA256, addSHA512)); }; - inline bool Add(const char *Data) {return Add((unsigned char const *)Data,strlen(Data));}; - inline bool AddFD(int const Fd,unsigned long long Size = 0) - { return AddFD(Fd, Size, true, true, true, true); }; - bool AddFD(int const Fd, unsigned long long Size, bool const addMD5, - bool const addSHA1, bool const addSHA256, bool const addSHA512); - inline bool AddFD(FileFd &Fd,unsigned long long Size = 0) - { return AddFD(Fd, Size, true, true, true, true); }; - bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5, - bool const addSHA1, bool const addSHA256, bool const addSHA512); - inline bool Add(const unsigned char *Beg,const unsigned char *End) - {return Add(Beg,End-Beg);}; +APT_IGNORE_DEPRECATED_POP }; #endif diff --git a/apt-pkg/contrib/macros.h b/apt-pkg/contrib/macros.h index 2d6448e5e..2f9c6c269 100644 --- a/apt-pkg/contrib/macros.h +++ b/apt-pkg/contrib/macros.h @@ -89,7 +89,7 @@ #define APT_MUSTCHECK __attribute__((warn_unused_result)) #else #define APT_NONNULL(...) - #define APT_REQRET + #define APT_MUSTCHECK #endif #if APT_GCC_VERSION >= 0x0400 @@ -132,13 +132,30 @@ #endif #endif +#if __GNUC__ >= 4 + #define APT_IGNORE_DEPRECATED_PUSH \ + _Pragma("GCC diagnostic push") \ + _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") + #define APT_IGNORE_DEPRECATED_POP \ + _Pragma("GCC diagnostic pop") + #define APT_IGNORE_DEPRECATED(XXX) \ + APT_IGNORE_DEPRECATED_PUSH \ + XXX \ + APT_IGNORE_DEPRECATED_POP +#else + #define APT_IGNORE_DEPRECATED_PUSH + #define APT_IGNORE_DEPRECATED_POP + #define APT_IGNORE_DEPRECATED(XXX) XXX +#endif + // These lines are extracted by the makefiles and the buildsystem // Increasing MAJOR or MINOR results in the need of recompiling all // reverse-dependencies of libapt-pkg against the new SONAME. // Non-ABI-Breaks should only increase RELEASE number. 
// See also buildlib/libversion.mak #define APT_PKG_MAJOR 4 -#define APT_PKG_MINOR 12 +#define APT_PKG_MINOR 15 #define APT_PKG_RELEASE 0 +#define APT_PKG_ABI ((APT_PKG_MAJOR * 100) + APT_PKG_MINOR) #endif diff --git a/apt-pkg/contrib/netrc.cc b/apt-pkg/contrib/netrc.cc index feaed67c8..1e3778f45 100644 --- a/apt-pkg/contrib/netrc.cc +++ b/apt-pkg/contrib/netrc.cc @@ -152,18 +152,6 @@ static int parsenetrc_string (char *host, std::string &login, std::string &passw return retcode; } -// for some unknown reason this method is exported so keep a compatible interface for now … -int parsenetrc (char *host, char *login, char *password, char *netrcfile = NULL) -{ - std::string login_string, password_string; - int const ret = parsenetrc_string(host, login_string, password_string, netrcfile); - if (ret < 0) - return ret; - strncpy(login, login_string.c_str(), LOGINSIZE - 1); - strncpy(password, password_string.c_str(), PASSWORDSIZE - 1); - return ret; -} - void maybe_add_auth (URI &Uri, string NetRCFile) { diff --git a/apt-pkg/contrib/netrc.h b/apt-pkg/contrib/netrc.h index dbeb45386..b5b56f5d4 100644 --- a/apt-pkg/contrib/netrc.h +++ b/apt-pkg/contrib/netrc.h @@ -27,9 +27,5 @@ class URI; -// FIXME: kill this export on the next ABI break - strongly doubt its in use anyway -// outside of the apt itself, its really a internal interface -APT_DEPRECATED int parsenetrc (char *host, char *login, char *password, char *filename); - void maybe_add_auth (URI &Uri, std::string NetRCFile); #endif diff --git a/apt-pkg/contrib/proxy.cc b/apt-pkg/contrib/proxy.cc new file mode 100644 index 000000000..0c753131d --- /dev/null +++ b/apt-pkg/contrib/proxy.cc @@ -0,0 +1,86 @@ +// -*- mode: cpp; mode: fold -*- +// Description /*{{{*/ +/* ###################################################################### + + Proxy - Proxy releated functions + + ##################################################################### */ + /*}}}*/ +// Include Files /*{{{*/ +#include<apt-pkg/configuration.h> +#include<apt-pkg/error.h> +#include<apt-pkg/fileutl.h> +#include<apt-pkg/strutl.h> + +#include<iostream> +#include <unistd.h> + +#include "proxy.h" + + +// AutoDetectProxy - auto detect proxy /*{{{*/ +// --------------------------------------------------------------------- +/* */ +bool AutoDetectProxy(URI &URL) +{ + // we support both http/https debug options + bool Debug = _config->FindB("Debug::Acquire::"+URL.Access,false); + + // the user already explicitly set a proxy for this host + if(_config->Find("Acquire::"+URL.Access+"::proxy::"+URL.Host, "") != "") + return true; + + // option is "Acquire::http::Proxy-Auto-Detect" but we allow the old + // name without the dash ("-") + std::string AutoDetectProxyCmd = _config->Find("Acquire::"+URL.Access+"::Proxy-Auto-Detect", + _config->Find("Acquire::"+URL.Access+"::ProxyAutoDetect")); + + if (AutoDetectProxyCmd.empty()) + return true; + + if (Debug) + std::clog << "Using auto proxy detect command: " << AutoDetectProxyCmd << std::endl; + + int Pipes[2] = {-1,-1}; + if (pipe(Pipes) != 0) + return _error->Errno("pipe", "Failed to create Pipe"); + + pid_t Process = ExecFork(); + if (Process == 0) + { + close(Pipes[0]); + dup2(Pipes[1],STDOUT_FILENO); + SetCloseExec(STDOUT_FILENO,false); + + std::string foo = URL; + const char *Args[4]; + Args[0] = AutoDetectProxyCmd.c_str(); + Args[1] = foo.c_str(); + Args[2] = 0; + execv(Args[0],(char **)Args); + std::cerr << "Failed to exec method " << Args[0] << std::endl; + _exit(100); + } + char buf[512]; + int InFd = Pipes[0]; + close(Pipes[1]); + int 
res = read(InFd, buf, sizeof(buf)-1); + ExecWait(Process, "ProxyAutoDetect", true); + + if (res < 0) + return _error->Errno("read", "Failed to read"); + if (res == 0) + return _error->Warning("ProxyAutoDetect returned no data"); + + // add trailing \0 + buf[res] = 0; + + if (Debug) + std::clog << "auto detect command returned: '" << buf << "'" << std::endl; + + if (strstr(buf, URL.Access.c_str()) == buf) + _config->Set("Acquire::"+URL.Access+"::proxy::"+URL.Host, _strstrip(buf)); + + return true; +} + /*}}}*/ diff --git a/apt-pkg/contrib/proxy.h b/apt-pkg/contrib/proxy.h new file mode 100644 index 000000000..2cbcd07b4 --- /dev/null +++ b/apt-pkg/contrib/proxy.h @@ -0,0 +1,16 @@ +// -*- mode: cpp; mode: fold -*- +// Description /*{{{*/ +/* ###################################################################### + + Proxy - Proxy operations + + ##################################################################### */ + /*}}}*/ +#ifndef PKGLIB_PROXY_H +#define PKGLIB_PROXY_H + +class URI; +bool AutoDetectProxy(URI &URL); + + +#endif diff --git a/apt-pkg/contrib/sha2_internal.cc b/apt-pkg/contrib/sha2_internal.cc index 131ff5beb..f70b7b17d 100644 --- a/apt-pkg/contrib/sha2_internal.cc +++ b/apt-pkg/contrib/sha2_internal.cc @@ -129,6 +129,14 @@ typedef u_int64_t sha2_word64; /* Exactly 8 bytes */ /*** ENDIAN REVERSAL MACROS *******************************************/ #if BYTE_ORDER == LITTLE_ENDIAN +#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3)) +#define REVERSE32(w,x) { \ + (x) = __builtin_bswap32(w); \ +} +#define REVERSE64(w,x) { \ + (x) = __builtin_bswap64(w); \ +} +#else #define REVERSE32(w,x) { \ sha2_word32 tmp = (w); \ tmp = (tmp >> 16) | (tmp << 16); \ @@ -142,6 +150,7 @@ typedef u_int64_t sha2_word64; /* Exactly 8 bytes */ (x) = ((tmp & 0xffff0000ffff0000ULL) >> 16) | \ ((tmp & 0x0000ffff0000ffffULL) << 16); \ } +#endif #endif /* BYTE_ORDER == LITTLE_ENDIAN */ /* diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc index 2100ee47b..0db4c57b2 100644 --- a/apt-pkg/contrib/strutl.cc +++ b/apt-pkg/contrib/strutl.cc @@ -45,14 +45,26 @@ using namespace std; // --------------------------------------------------------------------- namespace APT { namespace String { -std::string Strip(const std::string &s) +std::string Strip(const std::string &str) { - size_t start = s.find_first_not_of(" \t\n"); - // only whitespace - if (start == string::npos) + // ensure we have at least one character + if (str.empty() == true) + return str; + + char const * const s = str.c_str(); + size_t start = 0; + for (; isspace(s[start]) != 0; ++start) + ; // find the first not-space + + // string contains only whitespaces + if (s[start] == '\0') return ""; - size_t end = s.find_last_not_of(" \t\n"); - return s.substr(start, end-start+1); + + size_t end = str.length() - 1; + for (; isspace(s[end]) != 0; --end) + ; // find the last not-space + + return str.substr(start, end - start + 1); } bool Endswith(const std::string &s, const std::string &end) @@ -62,6 +74,13 @@ bool Endswith(const std::string &s, const std::string &end) return (s.substr(s.size() - end.size(), s.size()) == end); } +bool Startswith(const std::string &s, const std::string &start) +{ + if (start.size() > s.size()) + return false; + return (s.substr(0, start.size()) == start); +} + } } /*}}}*/ @@ -305,21 +324,19 @@ bool ParseCWord(const char *&String,string &Res) /* */ string QuoteString(const string &Str, const char *Bad) { - string Res; + std::stringstream Res; for (string::const_iterator I = 
Str.begin(); I != Str.end(); ++I) { - if (strchr(Bad,*I) != 0 || isprint(*I) == 0 || + if (strchr(Bad,*I) != 0 || isprint(*I) == 0 || *I == 0x25 || // percent '%' char *I <= 0x20 || *I >= 0x7F) // control chars { - char Buf[10]; - sprintf(Buf,"%%%02x",(int)*I); - Res += Buf; + ioprintf(Res,"%%%02x",(int)*I); } else - Res += *I; + Res << *I; } - return Res; + return Res.str(); } /*}}}*/ // DeQuoteString - Convert a string from quoted from /*{{{*/ @@ -360,13 +377,12 @@ string DeQuoteString(string::const_iterator const &begin, YottaBytes (E24) */ string SizeToStr(double Size) { - char S[300]; double ASize; if (Size >= 0) ASize = Size; else ASize = -1*Size; - + /* bytes, KiloBytes, MegaBytes, GigaBytes, TeraBytes, PetaBytes, ExaBytes, ZettaBytes, YottaBytes */ char Ext[] = {'\0','k','M','G','T','P','E','Z','Y'}; @@ -375,20 +391,21 @@ string SizeToStr(double Size) { if (ASize < 100 && I != 0) { - sprintf(S,"%'.1f %c",ASize,Ext[I]); - break; + std::string S; + strprintf(S, "%'.1f %c", ASize, Ext[I]); + return S; } - + if (ASize < 10000) { - sprintf(S,"%'.0f %c",ASize,Ext[I]); - break; + std::string S; + strprintf(S, "%'.0f %c", ASize, Ext[I]); + return S; } ASize /= 1000.0; I++; } - - return S; + return ""; } /*}}}*/ // TimeToStr - Convert the time into a string /*{{{*/ @@ -396,36 +413,27 @@ string SizeToStr(double Size) /* Converts a number of seconds to a hms format */ string TimeToStr(unsigned long Sec) { - char S[300]; - - while (1) + std::string S; + if (Sec > 60*60*24) { - if (Sec > 60*60*24) - { - //d means days, h means hours, min means minutes, s means seconds - sprintf(S,_("%lid %lih %limin %lis"),Sec/60/60/24,(Sec/60/60) % 24,(Sec/60) % 60,Sec % 60); - break; - } - - if (Sec > 60*60) - { - //h means hours, min means minutes, s means seconds - sprintf(S,_("%lih %limin %lis"),Sec/60/60,(Sec/60) % 60,Sec % 60); - break; - } - - if (Sec > 60) - { - //min means minutes, s means seconds - sprintf(S,_("%limin %lis"),Sec/60,Sec % 60); - break; - } - - //s means seconds - sprintf(S,_("%lis"),Sec); - break; + //TRANSLATOR: d means days, h means hours, min means minutes, s means seconds + strprintf(S,_("%lid %lih %limin %lis"),Sec/60/60/24,(Sec/60/60) % 24,(Sec/60) % 60,Sec % 60); + } + else if (Sec > 60*60) + { + //TRANSLATOR: h means hours, min means minutes, s means seconds + strprintf(S,_("%lih %limin %lis"),Sec/60/60,(Sec/60) % 60,Sec % 60); + } + else if (Sec > 60) + { + //TRANSLATOR: min means minutes, s means seconds + strprintf(S,_("%limin %lis"),Sec/60,Sec % 60); + } + else + { + //TRANSLATOR: s means seconds + strprintf(S,_("%lis"),Sec); } - return S; } /*}}}*/ @@ -434,23 +442,30 @@ string TimeToStr(unsigned long Sec) /* This replaces all occurrences of Subst with Contents in Str. 
*/ string SubstVar(const string &Str,const string &Subst,const string &Contents) { + if (Subst.empty() == true) + return Str; + string::size_type Pos = 0; string::size_type OldPos = 0; string Temp; - - while (OldPos < Str.length() && + + while (OldPos < Str.length() && (Pos = Str.find(Subst,OldPos)) != string::npos) { - Temp += string(Str,OldPos,Pos) + Contents; - OldPos = Pos + Subst.length(); + if (OldPos != Pos) + Temp.append(Str, OldPos, Pos - OldPos); + if (Contents.empty() == false) + Temp.append(Contents); + OldPos = Pos + Subst.length(); } - + if (OldPos == 0) return Str; - + + if (OldPos >= Str.length()) + return Temp; return Temp + string(Str,OldPos); } - string SubstVar(string Str,const struct SubstVar *Vars) { for (; Vars->Subst != 0; Vars++) @@ -697,9 +712,12 @@ string LookupTag(const string &Message,const char *Tag,const char *Default) then returns the result. Several varients on true/false are checked. */ int StringToBool(const string &Text,int Default) { - char *End; - int Res = strtol(Text.c_str(),&End,0); - if (End != Text.c_str() && Res >= 0 && Res <= 1) + char *ParseEnd; + int Res = strtol(Text.c_str(),&ParseEnd,0); + // ensure that the entire string was converted by strtol to avoid + // failures on "apt-cache show -a 0ad" where the "0" is converted + const char *TextEnd = Text.c_str()+Text.size(); + if (ParseEnd == TextEnd && Res >= 0 && Res <= 1) return Res; // Check for positives @@ -750,86 +768,94 @@ string TimeRFC1123(time_t Date) In particular: this reads blocks from the input until it believes that it's run out of input text. Each block is terminated by a - double newline ('\n' followed by '\n'). As noted below, there is a - bug in this code: it assumes that all the blocks have been read if - it doesn't see additional text in the buffer after the last one is - parsed, which will cause it to lose blocks if the last block - coincides with the end of the buffer. + double newline ('\n' followed by '\n'). */ bool ReadMessages(int Fd, vector<string> &List) { char Buffer[64000]; - char *End = Buffer; // Represents any left-over from the previous iteration of the // parse loop. (i.e., if a message is split across the end // of the buffer, it goes here) string PartialMessage; - - while (1) - { - int Res = read(Fd,End,sizeof(Buffer) - (End-Buffer)); + + do { + int const Res = read(Fd, Buffer, sizeof(Buffer)); if (Res < 0 && errno == EINTR) continue; - - // Process is dead, this is kind of bad.. 
+ + // process we read from has died if (Res == 0) return false; - + // No data - if (Res < 0 && errno == EAGAIN) + if (Res < 0 && (errno == EAGAIN || errno == EWOULDBLOCK)) return true; if (Res < 0) return false; - - End += Res; - - // Look for the end of the message - for (char *I = Buffer; I + 1 < End; I++) + + // extract the message(s) from the buffer + char const *Start = Buffer; + char const * const End = Buffer + Res; + + char const * NL = (char const *) memchr(Start, '\n', End - Start); + if (NL == NULL) { - if (I[1] != '\n' || - (I[0] != '\n' && strncmp(I, "\r\n\r\n", 4) != 0)) - continue; - - // Pull the message out - string Message(Buffer,I-Buffer); - PartialMessage += Message; - - // Fix up the buffer - for (; I < End && (*I == '\n' || *I == '\r'); ++I); - End -= I-Buffer; - memmove(Buffer,I,End-Buffer); - I = Buffer; - - List.push_back(PartialMessage); - PartialMessage.clear(); + // end of buffer: store what we have so far and read new data in + PartialMessage.append(Start, End - Start); + Start = End; } - if (End != Buffer) - { - // If there's text left in the buffer, store it - // in PartialMessage and throw the rest of the buffer - // away. This allows us to handle messages that - // are longer than the static buffer size. - PartialMessage += string(Buffer, End); - End = Buffer; - } else - { - // BUG ALERT: if a message block happens to end at a - // multiple of 64000 characters, this will cause it to - // terminate early, leading to a badly formed block and - // probably crashing the method. However, this is the only - // way we have to find the end of the message block. I have - // an idea of how to fix this, but it will require changes - // to the protocol (essentially to mark the beginning and - // end of the block). - // - // -- dburrows 2008-04-02 - return true; - } + ++NL; + + if (PartialMessage.empty() == false && Start < End) + { + // if we start with a new line, see if the partial message we have ended with one + // so that we properly detect records ending between two read() runs + // cases are: \n|\n , \r\n|\r\n and \r\n\r|\n + // the case \r|\n\r\n is handled by the usual double-newline handling + if ((NL - Start) == 1 || ((NL - Start) == 2 && *Start == '\r')) + { + if (APT::String::Endswith(PartialMessage, "\n") || APT::String::Endswith(PartialMessage, "\r\n\r")) + { + PartialMessage.erase(PartialMessage.find_last_not_of("\r\n") + 1); + List.push_back(PartialMessage); + PartialMessage.clear(); + while (NL < End && (*NL == '\n' || *NL == '\r')) ++NL; + Start = NL; + } + } + } + + while (Start < End) { + char const * NL2 = (char const *) memchr(NL, '\n', End - NL); + if (NL2 == NULL) + { + // end of buffer: store what we have so far and read new data in + PartialMessage.append(Start, End - Start); + break; + } + ++NL2; + + // did we find a double newline? 
+ if ((NL2 - NL) == 1 || ((NL2 - NL) == 2 && *NL == '\r')) + { + PartialMessage.append(Start, NL2 - Start); + PartialMessage.erase(PartialMessage.find_last_not_of("\r\n") + 1); + List.push_back(PartialMessage); + PartialMessage.clear(); + while (NL2 < End && (*NL2 == '\n' || *NL2 == '\r')) ++NL2; + Start = NL2; + } + NL = NL2; + } + + // we have read at least one complete message and nothing left + if (PartialMessage.empty() == true) + return true; if (WaitFd(Fd) == false) return false; - } + } while (true); } /*}}}*/ // MonthConv - Converts a month string into a number /*{{{*/ @@ -1039,7 +1065,7 @@ bool StrToNum(const char *Str,unsigned long long &Res,unsigned Len,unsigned Base // --------------------------------------------------------------------- /* This is used in decoding the 256bit encoded fixed length fields in tar files */ -bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len) +bool Base256ToNum(const char *Str,unsigned long long &Res,unsigned int Len) { if ((Str[0] & 0x80) == 0) return false; @@ -1052,6 +1078,23 @@ bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len) } } /*}}}*/ +// Base256ToNum - Convert a fixed length binary to a number /*{{{*/ +// --------------------------------------------------------------------- +/* This is used in decoding the 256bit encoded fixed length fields in + tar files */ +bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len) +{ + unsigned long long Num; + bool rc; + + rc = Base256ToNum(Str, Num, Len); + Res = Num; + if (Res != Num) + return false; + + return rc; +} + /*}}}*/ // HexDigit - Convert a hex character into an integer /*{{{*/ // --------------------------------------------------------------------- /* Helper for Hex2Num */ @@ -1265,10 +1308,12 @@ void ioprintf(ostream &out,const char *format,...) va_list args; ssize_t size = 400; while (true) { + bool ret = false; va_start(args,format); - if (iovprintf(out, format, args, size) == true) - return; + ret = iovprintf(out, format, args, size); va_end(args); + if (ret == true) + return; } } void strprintf(string &out,const char *format,...) @@ -1277,10 +1322,12 @@ void strprintf(string &out,const char *format,...) ssize_t size = 400; std::ostringstream outstr; while (true) { + bool ret = false; va_start(args,format); - if (iovprintf(outstr, format, args, size) == true) - break; + ret = iovprintf(outstr, format, args, size); va_end(args); + if (ret == true) + break; } out = outstr.str(); } @@ -1365,7 +1412,7 @@ size_t strv_length(const char **str_array) ; return i; } - + /*}}}*/ // DeEscapeString - unescape (\0XX and \xXX) from a string /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -1547,51 +1594,46 @@ void URI::CopyFrom(const string &U) /* */ URI::operator string() { - string Res; - + std::stringstream Res; + if (Access.empty() == false) - Res = Access + ':'; - + Res << Access << ':'; + if (Host.empty() == false) - { + { if (Access.empty() == false) - Res += "//"; - + Res << "//"; + if (User.empty() == false) { // FIXME: Technically userinfo is permitted even less // characters than these, but this is not conveniently // expressed with a blacklist. 
- Res += QuoteString(User, ":/?#[]@"); + Res << QuoteString(User, ":/?#[]@"); if (Password.empty() == false) - Res += ":" + QuoteString(Password, ":/?#[]@"); - Res += "@"; + Res << ":" << QuoteString(Password, ":/?#[]@"); + Res << "@"; } - + // Add RFC 2732 escaping characters - if (Access.empty() == false && - (Host.find('/') != string::npos || Host.find(':') != string::npos)) - Res += '[' + Host + ']'; + if (Access.empty() == false && Host.find_first_of("/:") != string::npos) + Res << '[' << Host << ']'; else - Res += Host; - + Res << Host; + if (Port != 0) - { - char S[30]; - sprintf(S,":%u",Port); - Res += S; - } + Res << ':' << Port; } - + if (Path.empty() == false) { if (Path[0] != '/') - Res += "/" + Path; + Res << "/" << Path; else - Res += Path; + Res << Path; } - - return Res; + + return Res.str(); } /*}}}*/ // URI::SiteOnly - Return the schema and site for the URI /*{{{*/ diff --git a/apt-pkg/contrib/strutl.h b/apt-pkg/contrib/strutl.h index 185cdc3fc..d64270aaf 100644 --- a/apt-pkg/contrib/strutl.h +++ b/apt-pkg/contrib/strutl.h @@ -40,6 +40,7 @@ namespace APT { namespace String { std::string Strip(const std::string &s); bool Endswith(const std::string &s, const std::string &ending); + bool Startswith(const std::string &s, const std::string &starting); } } @@ -72,6 +73,7 @@ bool ReadMessages(int Fd, std::vector<std::string> &List); bool StrToNum(const char *Str,unsigned long &Res,unsigned Len,unsigned Base = 0); bool StrToNum(const char *Str,unsigned long long &Res,unsigned Len,unsigned Base = 0); bool Base256ToNum(const char *Str,unsigned long &Res,unsigned int Len); +bool Base256ToNum(const char *Str,unsigned long long &Res,unsigned int Len); bool Hex2Num(const std::string &Str,unsigned char *Num,unsigned int Length); // input changing string split @@ -79,7 +81,7 @@ bool TokSplitString(char Tok,char *Input,char **List, unsigned long ListMax); // split a given string by a char -std::vector<std::string> VectorizeString(std::string const &haystack, char const &split) APT_CONST; +std::vector<std::string> VectorizeString(std::string const &haystack, char const &split) APT_PURE; /* \brief Return a vector of strings from string "input" where "sep" * is used as the delimiter string. @@ -151,9 +153,9 @@ inline const char *DeNull(const char *s) {return (s == 0?"(null)":s);} class URI { void CopyFrom(const std::string &From); - + public: - + std::string Access; std::string User; std::string Password; diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc index eee758b7a..d672b4fd8 100644 --- a/apt-pkg/deb/debindexfile.cc +++ b/apt-pkg/deb/debindexfile.cc @@ -30,9 +30,11 @@ #include <apt-pkg/pkgcachegen.h> #include <apt-pkg/pkgrecords.h> #include <apt-pkg/srcrecords.h> +#include <apt-pkg/sptr.h> #include <stdio.h> #include <iostream> +#include <sstream> #include <string> #include <sys/stat.h> /*}}}*/ @@ -80,14 +82,18 @@ pkgSrcRecords::Parser *debSourcesIndex::CreateSrcParser() const { string SourcesURI = _config->FindDir("Dir::State::lists") + URItoFileName(IndexURI("Sources")); - string SourcesURIgzip = SourcesURI + ".gz"; - if (!FileExists(SourcesURI) && !FileExists(SourcesURIgzip)) - return NULL; - else if (!FileExists(SourcesURI) && FileExists(SourcesURIgzip)) - SourcesURI = SourcesURIgzip; - - return new debSrcRecordParser(SourcesURI,this); + std::vector<std::string> types = APT::Configuration::getCompressionTypes(); + for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t) + { + string p; + p = SourcesURI + '.' 
+ *t; + if (FileExists(p)) + return new debSrcRecordParser(p, this); + } + if (FileExists(SourcesURI)) + return new debSrcRecordParser(SourcesURI, this); + return NULL; } /*}}}*/ // SourcesIndex::Describe - Give a descriptive path to the index /*{{{*/ @@ -126,14 +132,18 @@ string debSourcesIndex::Info(const char *Type) const // SourcesIndex::Index* - Return the URI to the index files /*{{{*/ // --------------------------------------------------------------------- /* */ -inline string debSourcesIndex::IndexFile(const char *Type) const +string debSourcesIndex::IndexFile(const char *Type) const { string s = URItoFileName(IndexURI(Type)); - string sgzip = s + ".gz"; - if (!FileExists(s) && FileExists(sgzip)) - return sgzip; - else - return s; + + std::vector<std::string> types = APT::Configuration::getCompressionTypes(); + for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t) + { + string p = s + '.' + *t; + if (FileExists(p)) + return p; + } + return s; } string debSourcesIndex::IndexURI(const char *Type) const @@ -256,14 +266,18 @@ string debPackagesIndex::Info(const char *Type) const // PackagesIndex::Index* - Return the URI to the index files /*{{{*/ // --------------------------------------------------------------------- /* */ -inline string debPackagesIndex::IndexFile(const char *Type) const +string debPackagesIndex::IndexFile(const char *Type) const { string s =_config->FindDir("Dir::State::lists") + URItoFileName(IndexURI(Type)); - string sgzip = s + ".gz"; - if (!FileExists(s) && FileExists(sgzip)) - return sgzip; - else - return s; + + std::vector<std::string> types = APT::Configuration::getCompressionTypes(); + for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t) + { + string p = s + '.' + *t; + if (FileExists(p)) + return p; + } + return s; } string debPackagesIndex::IndexURI(const char *Type) const { @@ -399,8 +413,8 @@ pkgCache::PkgFileIterator debPackagesIndex::FindInCache(pkgCache &Cache) const // TranslationsIndex::debTranslationsIndex - Contructor /*{{{*/ // --------------------------------------------------------------------- /* */ -debTranslationsIndex::debTranslationsIndex(string URI,string Dist,string Section, - char const * const Translation) : +debTranslationsIndex::debTranslationsIndex(std::string const &URI, std::string const &Dist, + std::string const &Section, std::string const &Translation) : pkgIndexFile(true), URI(URI), Dist(Dist), Section(Section), Language(Translation) {} @@ -408,14 +422,18 @@ debTranslationsIndex::debTranslationsIndex(string URI,string Dist,string Section // TranslationIndex::Trans* - Return the URI to the translation files /*{{{*/ // --------------------------------------------------------------------- /* */ -inline string debTranslationsIndex::IndexFile(const char *Type) const +string debTranslationsIndex::IndexFile(const char *Type) const { string s =_config->FindDir("Dir::State::lists") + URItoFileName(IndexURI(Type)); - string sgzip = s + ".gz"; - if (!FileExists(s) && FileExists(sgzip)) - return sgzip; - else - return s; + + std::vector<std::string> types = APT::Configuration::getCompressionTypes(); + for (std::vector<std::string>::const_iterator t = types.begin(); t != types.end(); ++t) + { + string p = s + '.' 
+ *t; + if (FileExists(p)) + return p; + } + return s; } string debTranslationsIndex::IndexURI(const char *Type) const { @@ -435,31 +453,18 @@ string debTranslationsIndex::IndexURI(const char *Type) const return Res; } /*}}}*/ -// TranslationsIndex::GetIndexes - Fetch the index files /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool debTranslationsIndex::GetIndexes(pkgAcquire *Owner) const -{ - string const TranslationFile = string("Translation-").append(Language); - new pkgAcqIndexTrans(Owner, IndexURI(Language), - Info(TranslationFile.c_str()), - TranslationFile); - - return true; -} - /*}}}*/ // TranslationsIndex::Describe - Give a descriptive path to the index /*{{{*/ // --------------------------------------------------------------------- /* This should help the user find the index in the sources.list and in the filesystem for problem solving */ string debTranslationsIndex::Describe(bool Short) const -{ - char S[300]; +{ + std::string S; if (Short == true) - snprintf(S,sizeof(S),"%s",Info(TranslationFile().c_str()).c_str()); + strprintf(S,"%s",Info(TranslationFile().c_str()).c_str()); else - snprintf(S,sizeof(S),"%s (%s)",Info(TranslationFile().c_str()).c_str(), - IndexFile(Language).c_str()); + strprintf(S,"%s (%s)",Info(TranslationFile().c_str()).c_str(), + IndexFile(Language.c_str()).c_str()); return S; } /*}}}*/ @@ -483,7 +488,7 @@ string debTranslationsIndex::Info(const char *Type) const /*}}}*/ bool debTranslationsIndex::HasPackages() const /*{{{*/ { - return FileExists(IndexFile(Language)); + return FileExists(IndexFile(Language.c_str())); } /*}}}*/ // TranslationsIndex::Exists - Check if the index is available /*{{{*/ @@ -491,7 +496,7 @@ bool debTranslationsIndex::HasPackages() const /*{{{*/ /* */ bool debTranslationsIndex::Exists() const { - return FileExists(IndexFile(Language)); + return FileExists(IndexFile(Language.c_str())); } /*}}}*/ // TranslationsIndex::Size - Return the size of the index /*{{{*/ @@ -504,7 +509,7 @@ unsigned long debTranslationsIndex::Size() const /* we need to ignore errors here; if the lists are absent, just return 0 */ _error->PushToStack(); - FileFd f(IndexFile(Language), FileFd::ReadOnly, FileFd::Extension); + FileFd f(IndexFile(Language.c_str()), FileFd::ReadOnly, FileFd::Extension); if (!f.Failed()) size = f.Size(); @@ -521,11 +526,11 @@ unsigned long debTranslationsIndex::Size() const bool debTranslationsIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const { // Check the translation file, if in use - string TranslationFile = IndexFile(Language); + string TranslationFile = IndexFile(Language.c_str()); if (FileExists(TranslationFile)) { FileFd Trans(TranslationFile,FileFd::ReadOnly, FileFd::Extension); - debListParser TransParser(&Trans); + debTranslationsParser TransParser(&Trans); if (_error->PendingError() == true) return false; @@ -551,7 +556,7 @@ bool debTranslationsIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const /* */ pkgCache::PkgFileIterator debTranslationsIndex::FindInCache(pkgCache &Cache) const { - string FileName = IndexFile(Language); + string FileName = IndexFile(Language.c_str()); pkgCache::PkgFileIterator File = Cache.FileBegin(); for (; File.end() == false; ++File) @@ -618,7 +623,7 @@ bool debStatusIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const pkgCache::PkgFileIterator CFile = Gen.GetCurFile(); CFile->Size = Pkg.FileSize(); CFile->mtime = Pkg.ModificationTime(); - map_ptrloc const storage = Gen.WriteUniqString("now"); + map_stringitem_t const storage = 
Gen.StoreString(pkgCacheGenerator::MIXED, "now"); CFile->Archive = storage; if (Gen.MergeList(Parser) == false) @@ -667,14 +672,146 @@ APT_CONST bool debStatusIndex::Exists() const } /*}}}*/ +// debDebPkgFile - Single .deb file /*{{{*/ +debDebPkgFileIndex::debDebPkgFileIndex(std::string DebFile) + : pkgIndexFile(true), DebFile(DebFile) +{ + DebFileFullPath = flAbsPath(DebFile); +} + +std::string debDebPkgFileIndex::ArchiveURI(std::string /*File*/) const +{ + return "file:" + DebFileFullPath; +} + +bool debDebPkgFileIndex::Exists() const +{ + return FileExists(DebFile); +} +bool debDebPkgFileIndex::GetContent(std::ostream &content, std::string const &debfile) +{ + // get the control data out of the deb file via dpkg-deb -I + std::string dpkg = _config->Find("Dir::Bin::dpkg","dpkg-deb"); + std::vector<const char *> Args; + Args.push_back(dpkg.c_str()); + Args.push_back("-I"); + Args.push_back(debfile.c_str()); + Args.push_back("control"); + Args.push_back(NULL); + FileFd PipeFd; + pid_t Child; + if(Popen((const char**)&Args[0], PipeFd, Child, FileFd::ReadOnly) == false) + return _error->Error("Popen failed"); + + char buffer[1024]; + do { + unsigned long long actual = 0; + if (PipeFd.Read(buffer, sizeof(buffer)-1, &actual) == false) + return _error->Errno("read", "Failed to read dpkg pipe"); + if (actual == 0) + break; + buffer[actual] = '\0'; + content << buffer; + } while(true); + ExecWait(Child, "Popen"); + + content << "Filename: " << debfile << "\n"; + struct stat Buf; + if (stat(debfile.c_str(), &Buf) != 0) + return false; + content << "Size: " << Buf.st_size << "\n"; + + return true; +} +bool debDebPkgFileIndex::Merge(pkgCacheGenerator& Gen, OpProgress* Prog) const +{ + if(Prog) + Prog->SubProgress(0, "Reading deb file"); + + // write the control data to a tempfile + SPtr<FileFd> DebControl = GetTempFile("deb-file-" + flNotDir(DebFile)); + if(DebControl == NULL) + return false; + std::ostringstream content; + if (GetContent(content, DebFile) == false) + return false; + std::string const contentstr = content.str(); + DebControl->Write(contentstr.c_str(), contentstr.length()); + // rewind for the listparser + DebControl->Seek(0); + + // and give it to the list parser + debDebFileParser Parser(DebControl, DebFile); + if(Gen.SelectFile(DebFile, "local", *this) == false) + return _error->Error("Problem with SelectFile %s", DebFile.c_str()); + + pkgCache::PkgFileIterator File = Gen.GetCurFile(); + File->Size = DebControl->Size(); + File->mtime = DebControl->ModificationTime(); + + if (Gen.MergeList(Parser) == false) + return _error->Error("Problem with MergeLister for %s", DebFile.c_str()); + + return true; +} +pkgCache::PkgFileIterator debDebPkgFileIndex::FindInCache(pkgCache &Cache) const +{ + pkgCache::PkgFileIterator File = Cache.FileBegin(); + for (; File.end() == false; ++File) + { + if (File.FileName() == NULL || DebFile != File.FileName()) + continue; + + return File; + } + + return File; +} +unsigned long debDebPkgFileIndex::Size() const +{ + struct stat buf; + if(stat(DebFile.c_str(), &buf) != 0) + return 0; + return buf.st_size; +} + /*}}}*/ + +// debDscFileIndex stuff +debDscFileIndex::debDscFileIndex(std::string &DscFile) + : pkgIndexFile(true), DscFile(DscFile) +{ +} + +bool debDscFileIndex::Exists() const +{ + return FileExists(DscFile); +} + +unsigned long debDscFileIndex::Size() const +{ + struct stat buf; + if(stat(DscFile.c_str(), &buf) == 0) + return buf.st_size; + return 0; +} + +// DscFileIndex::CreateSrcParser - Get a parser for the .dsc file /*{{{*/ 
+pkgSrcRecords::Parser *debDscFileIndex::CreateSrcParser() const +{ + if (!FileExists(DscFile)) + return NULL; + + return new debDscRecordParser(DscFile,this); +} + /*}}}*/ // Index File types for Debian /*{{{*/ -class debIFTypeSrc : public pkgIndexFile::Type +class APT_HIDDEN debIFTypeSrc : public pkgIndexFile::Type { public: debIFTypeSrc() {Label = "Debian Source Index";}; }; -class debIFTypePkg : public pkgIndexFile::Type +class APT_HIDDEN debIFTypePkg : public pkgIndexFile::Type { public: @@ -684,12 +821,12 @@ class debIFTypePkg : public pkgIndexFile::Type }; debIFTypePkg() {Label = "Debian Package Index";}; }; -class debIFTypeTrans : public debIFTypePkg +class APT_HIDDEN debIFTypeTrans : public debIFTypePkg { public: debIFTypeTrans() {Label = "Debian Translation Index";}; }; -class debIFTypeStatus : public pkgIndexFile::Type +class APT_HIDDEN debIFTypeStatus : public pkgIndexFile::Type { public: @@ -699,10 +836,42 @@ class debIFTypeStatus : public pkgIndexFile::Type }; debIFTypeStatus() {Label = "Debian dpkg status file";}; }; -static debIFTypeSrc _apt_Src; -static debIFTypePkg _apt_Pkg; -static debIFTypeTrans _apt_Trans; -static debIFTypeStatus _apt_Status; +class APT_HIDDEN debIFTypeDebPkgFile : public pkgIndexFile::Type +{ + public: + virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator File) const + { + return new debDebFileRecordParser(File.FileName()); + }; + debIFTypeDebPkgFile() {Label = "deb Package file";}; +}; +class APT_HIDDEN debIFTypeDscFile : public pkgIndexFile::Type +{ + public: + virtual pkgSrcRecords::Parser *CreateSrcPkgParser(std::string DscFile) const + { + return new debDscRecordParser(DscFile, NULL); + }; + debIFTypeDscFile() {Label = "dsc File Source Index";}; +}; +class APT_HIDDEN debIFTypeDebianSourceDir : public pkgIndexFile::Type +{ + public: + virtual pkgSrcRecords::Parser *CreateSrcPkgParser(std::string SourceDir) const + { + return new debDscRecordParser(SourceDir + string("/debian/control"), NULL); + }; + debIFTypeDebianSourceDir() {Label = "debian/control File Source Index";}; +}; + +APT_HIDDEN debIFTypeSrc _apt_Src; +APT_HIDDEN debIFTypePkg _apt_Pkg; +APT_HIDDEN debIFTypeTrans _apt_Trans; +APT_HIDDEN debIFTypeStatus _apt_Status; +APT_HIDDEN debIFTypeDebPkgFile _apt_DebPkgFile; +// file based pseudo indexes +APT_HIDDEN debIFTypeDscFile _apt_DscFile; +APT_HIDDEN debIFTypeDebianSourceDir _apt_DebianSourceDir; const pkgIndexFile::Type *debSourcesIndex::GetType() const { @@ -720,5 +889,23 @@ const pkgIndexFile::Type *debStatusIndex::GetType() const { return &_apt_Status; } - +const pkgIndexFile::Type *debDebPkgFileIndex::GetType() const +{ + return &_apt_DebPkgFile; +} +const pkgIndexFile::Type *debDscFileIndex::GetType() const +{ + return &_apt_DscFile; +} +const pkgIndexFile::Type *debDebianSourceDirIndex::GetType() const +{ + return &_apt_DebianSourceDir; +} /*}}}*/ + +debStatusIndex::~debStatusIndex() {} +debPackagesIndex::~debPackagesIndex() {} +debTranslationsIndex::~debTranslationsIndex() {} +debSourcesIndex::~debSourcesIndex() {} + +debDebPkgFileIndex::~debDebPkgFileIndex() {} diff --git a/apt-pkg/deb/debindexfile.h b/apt-pkg/deb/debindexfile.h index 017c69a0a..1e5882071 100644 --- a/apt-pkg/deb/debindexfile.h +++ b/apt-pkg/deb/debindexfile.h @@ -28,7 +28,7 @@ class pkgAcquire; class pkgCacheGenerator; -class debStatusIndex : public pkgIndexFile +class APT_HIDDEN debStatusIndex : public pkgIndexFile { /** \brief dpointer placeholder (for later in case we need it) */ void *d; @@ -52,10 +52,10 @@ class debStatusIndex : public 
pkgIndexFile virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const; debStatusIndex(std::string File); - virtual ~debStatusIndex() {}; + virtual ~debStatusIndex(); }; -class debPackagesIndex : public pkgIndexFile +class APT_HIDDEN debPackagesIndex : public pkgIndexFile { /** \brief dpointer placeholder (for later in case we need it) */ void *d; @@ -65,10 +65,10 @@ class debPackagesIndex : public pkgIndexFile std::string Section; std::string Architecture; - std::string Info(const char *Type) const; - std::string IndexFile(const char *Type) const; - std::string IndexURI(const char *Type) const; - + APT_HIDDEN std::string Info(const char *Type) const; + APT_HIDDEN std::string IndexFile(const char *Type) const; + APT_HIDDEN std::string IndexURI(const char *Type) const; + public: virtual const Type *GetType() const APT_CONST; @@ -89,33 +89,32 @@ class debPackagesIndex : public pkgIndexFile debPackagesIndex(std::string const &URI, std::string const &Dist, std::string const &Section, bool const &Trusted, std::string const &Arch = "native"); - virtual ~debPackagesIndex() {}; + virtual ~debPackagesIndex(); }; -class debTranslationsIndex : public pkgIndexFile +class APT_HIDDEN debTranslationsIndex : public pkgIndexFile { /** \brief dpointer placeholder (for later in case we need it) */ void *d; - std::string URI; - std::string Dist; - std::string Section; - const char * const Language; - - std::string Info(const char *Type) const; - std::string IndexFile(const char *Type) const; - std::string IndexURI(const char *Type) const; + std::string const URI; + std::string const Dist; + std::string const Section; + std::string const Language; - inline std::string TranslationFile() const {return std::string("Translation-").append(Language);}; + APT_HIDDEN std::string Info(const char *Type) const; + APT_HIDDEN std::string IndexFile(const char *Type) const; + APT_HIDDEN std::string IndexURI(const char *Type) const; + + APT_HIDDEN std::string TranslationFile() const {return std::string("Translation-").append(Language);}; public: - + virtual const Type *GetType() const APT_CONST; // Interface for acquire - virtual std::string Describe(bool Short) const; - virtual bool GetIndexes(pkgAcquire *Owner) const; - + virtual std::string Describe(bool Short) const; + // Interface for the Cache Generator virtual bool Exists() const; virtual bool HasPackages() const; @@ -123,11 +122,11 @@ class debTranslationsIndex : public pkgIndexFile virtual bool Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const; virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const; - debTranslationsIndex(std::string URI,std::string Dist,std::string Section, char const * const Language); - virtual ~debTranslationsIndex() {}; + debTranslationsIndex(std::string const &URI,std::string const &Dist,std::string const &Section, std::string const &Language); + virtual ~debTranslationsIndex(); }; -class debSourcesIndex : public pkgIndexFile +class APT_HIDDEN debSourcesIndex : public pkgIndexFile { /** \brief dpointer placeholder (for later in case we need it) */ void *d; @@ -136,10 +135,10 @@ class debSourcesIndex : public pkgIndexFile std::string Dist; std::string Section; - std::string Info(const char *Type) const; - std::string IndexFile(const char *Type) const; - std::string IndexURI(const char *Type) const; - + APT_HIDDEN std::string Info(const char *Type) const; + APT_HIDDEN std::string IndexFile(const char *Type) const; + APT_HIDDEN std::string IndexURI(const char *Type) const; + public: virtual const Type *GetType() const 
APT_CONST; @@ -161,7 +160,69 @@ class debSourcesIndex : public pkgIndexFile virtual unsigned long Size() const; debSourcesIndex(std::string URI,std::string Dist,std::string Section,bool Trusted); - virtual ~debSourcesIndex() {}; + virtual ~debSourcesIndex(); +}; + +class APT_HIDDEN debDebPkgFileIndex : public pkgIndexFile +{ + private: + void *d; + std::string DebFile; + std::string DebFileFullPath; + + public: + virtual const Type *GetType() const APT_CONST; + + virtual std::string Describe(bool /*Short*/) const { + return DebFile; + } + + /** get the control (file) content of the deb file + * + * @param[out] content of the control file + * @param debfile is the filename of the .deb-file + * @return \b true if successful, otherwise \b false. + */ + static bool GetContent(std::ostream &content, std::string const &debfile); + + // Interface for the Cache Generator + virtual bool Exists() const; + virtual bool HasPackages() const { + return true; + }; + virtual unsigned long Size() const; + virtual bool Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const; + virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const; + + // Interface for acquire + virtual std::string ArchiveURI(std::string /*File*/) const; + + debDebPkgFileIndex(std::string DebFile); + virtual ~debDebPkgFileIndex(); +}; + +class APT_HIDDEN debDscFileIndex : public pkgIndexFile +{ + private: + std::string DscFile; + public: + virtual const Type *GetType() const APT_CONST; + virtual pkgSrcRecords::Parser *CreateSrcParser() const; + virtual bool Exists() const; + virtual bool HasPackages() const {return false;}; + virtual unsigned long Size() const; + virtual std::string Describe(bool /*Short*/) const { + return DscFile; + }; + + debDscFileIndex(std::string &DscFile); + virtual ~debDscFileIndex() {}; +}; + +class APT_HIDDEN debDebianSourceDirIndex : public debDscFileIndex +{ + public: + virtual const Type *GetType() const APT_CONST; }; #endif diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc index a1bcfb710..b80b57bc4 100644 --- a/apt-pkg/deb/deblistparser.cc +++ b/apt-pkg/deb/deblistparser.cc @@ -58,18 +58,6 @@ debListParser::debListParser(FileFd *File, string const &Arch) : Tags(File), MultiArchEnabled = Architectures.size() > 1; } /*}}}*/ -// ListParser::UniqFindTagWrite - Find the tag and write a unq string /*{{{*/ -// --------------------------------------------------------------------- -/* */ -unsigned long debListParser::UniqFindTagWrite(const char *Tag) -{ - const char *Start; - const char *Stop; - if (Section.Find(Tag,Start,Stop) == false) - return 0; - return WriteUniqString(Start,Stop - Start); -} - /*}}}*/ // ListParser::Package - Return the package name /*{{{*/ // --------------------------------------------------------------------- /* This is to return the name of the package this section describes */ @@ -108,7 +96,7 @@ unsigned char debListParser::ParseMultiArch(bool const showErrors) /*{{{*/ { unsigned char MA; string const MultiArch = Section.FindS("Multi-Arch"); - if (MultiArch.empty() == true) + if (MultiArch.empty() == true || MultiArch == "no") MA = pkgCache::Version::None; else if (MultiArch == "same") { if (ArchitectureAll() == true) @@ -144,8 +132,69 @@ unsigned char debListParser::ParseMultiArch(bool const showErrors) /*{{{*/ /* */ bool debListParser::NewVersion(pkgCache::VerIterator &Ver) { + const char *Start; + const char *Stop; + // Parse the section - Ver->Section = UniqFindTagWrite("Section"); + if (Section.Find("Section",Start,Stop) == true) + { + map_stringitem_t 
const idx = StoreString(pkgCacheGenerator::SECTION, Start, Stop - Start); + Ver->Section = idx; + } +#if APT_PKG_ABI >= 413 + // Parse the source package name + pkgCache::GrpIterator const G = Ver.ParentPkg().Group(); + Ver->SourcePkgName = G->Name; + Ver->SourceVerStr = Ver->VerStr; + if (Section.Find("Source",Start,Stop) == true) + { + const char * const Space = (const char * const) memchr(Start, ' ', Stop - Start); + pkgCache::VerIterator V; + + if (Space != NULL) + { + Stop = Space; + const char * const Open = (const char * const) memchr(Space, '(', Stop - Space); + if (likely(Open != NULL)) + { + const char * const Close = (const char * const) memchr(Open, ')', Stop - Open); + if (likely(Close != NULL)) + { + std::string const version(Open + 1, (Close - Open) - 1); + if (version != Ver.VerStr()) + { + map_stringitem_t const idx = StoreString(pkgCacheGenerator::VERSIONNUMBER, version); + Ver->SourceVerStr = idx; + } + } + } + } + + std::string const pkgname(Start, Stop - Start); + if (pkgname != G.Name()) + { + for (pkgCache::PkgIterator P = G.PackageList(); P.end() == false; P = G.NextPkg(P)) + { + for (V = P.VersionList(); V.end() == false; ++V) + { + if (pkgname == V.SourcePkgName()) + { + Ver->SourcePkgName = V->SourcePkgName; + break; + } + } + if (V.end() == false) + break; + } + if (V.end() == true) + { + map_stringitem_t const idx = StoreString(pkgCacheGenerator::PKGNAME, pkgname); + Ver->SourcePkgName = idx; + } + } + } +#endif + Ver->MultiArch = ParseMultiArch(true); // Archive Size Ver->Size = Section.FindULL("Size"); @@ -154,10 +203,8 @@ bool debListParser::NewVersion(pkgCache::VerIterator &Ver) Ver->InstalledSize *= 1024; // Priority - const char *Start; - const char *Stop; if (Section.Find("Priority",Start,Stop) == true) - { + { if (GrabWord(string(Start,Stop-Start),PrioList,Ver->Priority) == false) Ver->Priority = pkgCache::State::Extra; } @@ -194,35 +241,31 @@ bool debListParser::NewVersion(pkgCache::VerIterator &Ver) /* This is to return the string describing the package in debian form. If this returns the blank string then the entry is assumed to only describe package properties */ -string debListParser::Description() +string debListParser::Description(std::string const &lang) { - string const lang = DescriptionLanguage(); if (lang.empty()) return Section.FindS("Description"); else return Section.FindS(string("Description-").append(lang).c_str()); } - /*}}}*/ -// ListParser::DescriptionLanguage - Return the description lang string /*{{{*/ -// --------------------------------------------------------------------- -/* This is to return the string describing the language of - description. If this returns the blank string then the entry is - assumed to describe original description. 
*/ -string debListParser::DescriptionLanguage() + /*}}}*/ +// ListParser::AvailableDescriptionLanguages /*{{{*/ +std::vector<std::string> debListParser::AvailableDescriptionLanguages() { - if (Section.FindS("Description").empty() == false) - return ""; - - std::vector<string> const lang = APT::Configuration::getLanguages(true); - for (std::vector<string>::const_iterator l = lang.begin(); - l != lang.end(); ++l) - if (Section.FindS(string("Description-").append(*l).c_str()).empty() == false) - return *l; - - return ""; + std::vector<std::string> const understood = APT::Configuration::getLanguages(); + std::vector<std::string> avail; + if (Section.Exists("Description") == true) + avail.push_back(""); + for (std::vector<std::string>::const_iterator lang = understood.begin(); lang != understood.end(); ++lang) + { + std::string const tagname = "Description-" + *lang; + if (Section.Exists(tagname.c_str()) == true) + avail.push_back(*lang); + } + return avail; } - /*}}}*/ -// ListParser::Description - Return the description_md5 MD5SumValue /*{{{*/ + /*}}}*/ +// ListParser::Description_md5 - Return the description_md5 MD5SumValue /*{{{*/ // --------------------------------------------------------------------- /* This is to return the md5 string to allow the check if it is the right description. If no Description-md5 is found in the section it will be @@ -233,7 +276,7 @@ MD5SumValue debListParser::Description_md5() string const value = Section.FindS("Description-md5"); if (value.empty() == true) { - std::string const desc = Description() + "\n"; + std::string const desc = Description("") + "\n"; if (desc == "\n") return MD5SumValue(); @@ -259,9 +302,6 @@ MD5SumValue debListParser::Description_md5() bool debListParser::UsePackage(pkgCache::PkgIterator &Pkg, pkgCache::VerIterator &Ver) { - if (Pkg->Section == 0) - Pkg->Section = UniqFindTagWrite("Section"); - string const static myArch = _config->Find("APT::Architecture"); // Possible values are: "all", "native", "installed" and "none" // The "installed" mode is handled by ParseStatus(), See #544481 and friends. @@ -334,13 +374,9 @@ unsigned short debListParser::VersionHash() /* Status lines are of the form, Status: want flag status want = unknown, install, hold, deinstall, purge - flag = ok, reinstreq, hold, hold-reinstreq - status = not-installed, unpacked, half-configured, - half-installed, config-files, post-inst-failed, - removal-failed, installed - - Some of the above are obsolete (I think?) flag = hold-* and - status = post-inst-failed, removal-failed at least. 
+ flag = ok, reinstreq + status = not-installed, config-files, half-installed, unpacked, + half-configured, triggers-awaited, triggers-pending, installed */ bool debListParser::ParseStatus(pkgCache::PkgIterator &Pkg, pkgCache::VerIterator &Ver) @@ -397,15 +433,13 @@ bool debListParser::ParseStatus(pkgCache::PkgIterator &Pkg, // Process the flag field WordList StatusList[] = {{"not-installed",pkgCache::State::NotInstalled}, + {"config-files",pkgCache::State::ConfigFiles}, + {"half-installed",pkgCache::State::HalfInstalled}, {"unpacked",pkgCache::State::UnPacked}, {"half-configured",pkgCache::State::HalfConfigured}, - {"installed",pkgCache::State::Installed}, - {"half-installed",pkgCache::State::HalfInstalled}, - {"config-files",pkgCache::State::ConfigFiles}, {"triggers-awaited",pkgCache::State::TriggersAwaited}, {"triggers-pending",pkgCache::State::TriggersPending}, - {"post-inst-failed",pkgCache::State::HalfConfigured}, - {"removal-failed",pkgCache::State::HalfInstalled}, + {"installed",pkgCache::State::Installed}, {NULL, 0}}; if (GrabWord(string(Start,I-Start),StatusList,Pkg->CurrentState) == false) return _error->Error("Malformed 3rd word in the Status line"); @@ -631,72 +665,94 @@ const char *debListParser::ParseDepends(const char *Start,const char *Stop, if (ParseRestrictionsList == true) { - // Parse a restrictions list - if (I != Stop && *I == '<') + // Parse a restrictions formula which is in disjunctive normal form: + // (foo AND bar) OR (blub AND bla) + + std::vector<string> const profiles = APT::Configuration::getBuildProfiles(); + + // if the next character is a restriction list, then by default the + // dependency does not apply and the conditions have to be checked + // if the next character is not a restriction list, then by default the + // dependency applies + bool applies1 = (*I != '<'); + while (I != Stop) { + if (*I != '<') + break; + ++I; // malformed if (unlikely(I == Stop)) return 0; - std::vector<string> const profiles = APT::Configuration::getBuildProfiles(); - const char *End = I; - bool Found = false; - bool NegRestriction = false; - while (I != Stop) - { - // look for whitespace or ending '>' - for (;End != Stop && !isspace(*End) && *End != '>'; ++End); - if (unlikely(End == Stop)) - return 0; - - if (*I == '!') + // if of the prior restriction list is already fulfilled, then + // we can just skip to the end of the current list + if (applies1) { + for (;End != Stop && *End != '>'; ++End); + I = ++End; + // skip whitespace + for (;I != Stop && isspace(*I) != 0; I++); + } else { + bool applies2 = true; + // all the conditions inside a restriction list have to be + // met so once we find one that is not met, we can skip to + // the end of this list + while (I != Stop) { - NegRestriction = true; - ++I; - } + // look for whitespace or ending '>' + // End now points to the character after the current term + for (;End != Stop && !isspace(*End) && *End != '>'; ++End); - std::string restriction(I, End); + if (unlikely(End == Stop)) + return 0; - std::string prefix = "profile."; - // only support for "profile" prefix, ignore others - if (restriction.size() > prefix.size() && - restriction.substr(0, prefix.size()) == prefix) - { - // get the name of the profile - restriction = restriction.substr(prefix.size()); + bool NegRestriction = false; + if (*I == '!') + { + NegRestriction = true; + ++I; + } + + std::string restriction(I, End); if (restriction.empty() == false && profiles.empty() == false && - std::find(profiles.begin(), profiles.end(), restriction) != profiles.end()) 
+ std::find(profiles.begin(), profiles.end(), restriction) != profiles.end()) { - Found = true; - if (I[-1] != '!') - NegRestriction = false; - // we found a match, so fast-forward to the end of the wildcards - for (; End != Stop && *End != '>'; ++End); + if (NegRestriction) { + applies2 = false; + // since one of the terms does not apply we don't have to check the others + for (; End != Stop && *End != '>'; ++End); + } + } else { + if (!NegRestriction) { + applies2 = false; + // since one of the terms does not apply we don't have to check the others + for (; End != Stop && *End != '>'; ++End); + } + } + + if (*End++ == '>') { + I = End; + // skip whitespace + for (;I != Stop && isspace(*I) != 0; I++); + break; } - } - if (*End++ == '>') { I = End; - break; + // skip whitespace + for (;I != Stop && isspace(*I) != 0; I++); + } + if (applies2) { + applies1 = true; } - - I = End; - for (;I != Stop && isspace(*I) != 0; I++); } - - if (NegRestriction == true) - Found = !Found; - - if (Found == false) - Package = ""; /* not for this restriction */ } - // Skip whitespace - for (;I != Stop && isspace(*I) != 0; I++); + if (applies1 == false) { + Package = ""; //not for this restriction + } } if (I != Stop && *I == '|') @@ -750,7 +806,7 @@ bool debListParser::ParseDepends(pkgCache::VerIterator &Ver, if (NewDepends(Ver,Package,"none",Version,Op,Type) == false) return false; } - else if (MultiArchEnabled == true && found != string::npos && + else if (found != string::npos && strcmp(Package.c_str() + found, ":any") != 0) { string Arch = Package.substr(found+1, string::npos); @@ -797,10 +853,16 @@ bool debListParser::ParseProvides(pkgCache::VerIterator &Ver) while (1) { Start = ParseDepends(Start,Stop,Package,Version,Op); + const size_t archfound = Package.rfind(':'); if (Start == 0) return _error->Error("Problem parsing Provides line"); - if (Op != pkgCache::Dep::NoOp) { - _error->Warning("Ignoring Provides line with DepCompareOp for package %s", Package.c_str()); + if (Op != pkgCache::Dep::NoOp && Op != pkgCache::Dep::Equals) { + _error->Warning("Ignoring Provides line with non-equal DepCompareOp for package %s", Package.c_str()); + } else if (archfound != string::npos) { + string OtherArch = Package.substr(archfound+1, string::npos); + Package = Package.substr(0, archfound); + if (NewProvides(Ver, Package, OtherArch, Version) == false) + return false; } else if ((Ver->MultiArch & pkgCache::Version::Foreign) == pkgCache::Version::Foreign) { if (NewProvidesAllArch(Ver, Package, Version) == false) return false; @@ -897,7 +959,7 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI, { // apt-secure does no longer download individual (per-section) Release // file. 
to provide Component pinning we use the section name now - map_ptrloc const storage = WriteUniqString(component); + map_stringitem_t const storage = StoreString(pkgCacheGenerator::MIXED, component); FileI->Component = storage; pkgTagFile TagFile(&File, File.Size()); @@ -906,19 +968,19 @@ bool debListParser::LoadReleaseInfo(pkgCache::PkgFileIterator &FileI, return false; std::string data; - #define APT_INRELEASE(TAG, STORE) \ + #define APT_INRELEASE(TYPE, TAG, STORE) \ data = Section.FindS(TAG); \ if (data.empty() == false) \ { \ - map_ptrloc const storage = WriteUniqString(data); \ + map_stringitem_t const storage = StoreString(pkgCacheGenerator::TYPE, data); \ STORE = storage; \ } - APT_INRELEASE("Suite", FileI->Archive) - APT_INRELEASE("Component", FileI->Component) - APT_INRELEASE("Version", FileI->Version) - APT_INRELEASE("Origin", FileI->Origin) - APT_INRELEASE("Codename", FileI->Codename) - APT_INRELEASE("Label", FileI->Label) + APT_INRELEASE(MIXED, "Suite", FileI->Archive) + APT_INRELEASE(MIXED, "Component", FileI->Component) + APT_INRELEASE(VERSIONNUMBER, "Version", FileI->Version) + APT_INRELEASE(MIXED, "Origin", FileI->Origin) + APT_INRELEASE(MIXED, "Codename", FileI->Codename) + APT_INRELEASE(MIXED, "Label", FileI->Label) #undef APT_INRELEASE Section.FindFlag("NotAutomatic", FileI->Flags, pkgCache::Flag::NotAutomatic); Section.FindFlag("ButAutomaticUpgrades", FileI->Flags, pkgCache::Flag::ButAutomaticUpgrades); @@ -938,7 +1000,7 @@ unsigned char debListParser::GetPrio(string Str) return Out; } /*}}}*/ -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) +#if APT_PKG_ABI >= 413 bool debListParser::SameVersion(unsigned short const Hash, /*{{{*/ pkgCache::VerIterator const &Ver) { @@ -959,3 +1021,22 @@ bool debListParser::SameVersion(unsigned short const Hash, /*{{{*/ } /*}}}*/ #endif + + +debDebFileParser::debDebFileParser(FileFd *File, std::string const &DebFile) + : debListParser(File, ""), DebFile(DebFile) +{ +} + +bool debDebFileParser::UsePackage(pkgCache::PkgIterator &Pkg, + pkgCache::VerIterator &Ver) +{ + bool res = debListParser::UsePackage(Pkg, Ver); + // we use the full file path as a provides so that the file is found + // by its name + if(NewProvidesAllArch(Ver, DebFile, Ver.VerStr()) == false) + return false; + return res; +} + +debListParser::~debListParser() {} diff --git a/apt-pkg/deb/deblistparser.h b/apt-pkg/deb/deblistparser.h index baace79fe..6279d8399 100644 --- a/apt-pkg/deb/deblistparser.h +++ b/apt-pkg/deb/deblistparser.h @@ -26,7 +26,7 @@ class FileFd; -class debListParser : public pkgCacheGenerator::ListParser +class APT_HIDDEN debListParser : public pkgCacheGenerator::ListParser { public: @@ -44,22 +44,22 @@ class debListParser : public pkgCacheGenerator::ListParser protected: pkgTagFile Tags; pkgTagSection Section; - unsigned long iOffset; + map_filesize_t iOffset; std::string Arch; std::vector<std::string> Architectures; bool MultiArchEnabled; - unsigned long UniqFindTagWrite(const char *Tag); virtual bool ParseStatus(pkgCache::PkgIterator &Pkg,pkgCache::VerIterator &Ver); bool ParseDepends(pkgCache::VerIterator &Ver,const char *Tag, unsigned int Type); bool ParseProvides(pkgCache::VerIterator &Ver); bool NewProvidesAllArch(pkgCache::VerIterator &Ver, std::string const &Package, std::string const &Version); static bool GrabWord(std::string Word,WordList *List,unsigned char &Out); - + APT_HIDDEN unsigned char ParseMultiArch(bool const showErrors); + public: - static unsigned char GetPrio(std::string Str); + APT_PUBLIC static unsigned char 
GetPrio(std::string Str); // These all operate against the current section virtual std::string Package(); @@ -67,43 +67,62 @@ class debListParser : public pkgCacheGenerator::ListParser virtual bool ArchitectureAll(); virtual std::string Version(); virtual bool NewVersion(pkgCache::VerIterator &Ver); - virtual std::string Description(); - virtual std::string DescriptionLanguage(); + virtual std::string Description(std::string const &lang); + virtual std::vector<std::string> AvailableDescriptionLanguages(); virtual MD5SumValue Description_md5(); virtual unsigned short VersionHash(); -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) +#if APT_PKG_ABI >= 413 virtual bool SameVersion(unsigned short const Hash, pkgCache::VerIterator const &Ver); #endif virtual bool UsePackage(pkgCache::PkgIterator &Pkg, pkgCache::VerIterator &Ver); - virtual unsigned long Offset() {return iOffset;}; - virtual unsigned long Size() {return Section.size();}; + virtual map_filesize_t Offset() {return iOffset;}; + virtual map_filesize_t Size() {return Section.size();}; virtual bool Step(); bool LoadReleaseInfo(pkgCache::PkgFileIterator &FileI,FileFd &File, std::string section); - static const char *ParseDepends(const char *Start,const char *Stop, + APT_PUBLIC static const char *ParseDepends(const char *Start,const char *Stop, std::string &Package,std::string &Ver,unsigned int &Op); - static const char *ParseDepends(const char *Start,const char *Stop, + APT_PUBLIC static const char *ParseDepends(const char *Start,const char *Stop, std::string &Package,std::string &Ver,unsigned int &Op, bool const &ParseArchFlags); - static const char *ParseDepends(const char *Start,const char *Stop, + APT_PUBLIC static const char *ParseDepends(const char *Start,const char *Stop, std::string &Package,std::string &Ver,unsigned int &Op, bool const &ParseArchFlags, bool const &StripMultiArch); - static const char *ParseDepends(const char *Start,const char *Stop, + APT_PUBLIC static const char *ParseDepends(const char *Start,const char *Stop, std::string &Package,std::string &Ver,unsigned int &Op, bool const &ParseArchFlags, bool const &StripMultiArch, bool const &ParseRestrictionsList); - static const char *ConvertRelation(const char *I,unsigned int &Op); + APT_PUBLIC static const char *ConvertRelation(const char *I,unsigned int &Op); debListParser(FileFd *File, std::string const &Arch = ""); - virtual ~debListParser() {}; + virtual ~debListParser(); +}; - private: - APT_HIDDEN unsigned char ParseMultiArch(bool const showErrors); +class APT_HIDDEN debDebFileParser : public debListParser +{ + private: + std::string DebFile; + + public: + debDebFileParser(FileFd *File, std::string const &DebFile); + virtual bool UsePackage(pkgCache::PkgIterator &Pkg, + pkgCache::VerIterator &Ver); +}; + +class APT_HIDDEN debTranslationsParser : public debListParser +{ + public: + // a translation can never be a real package + virtual std::string Architecture() { return ""; } + virtual std::string Version() { return ""; } + + debTranslationsParser(FileFd *File, std::string const &Arch = "") + : debListParser(File, Arch) {}; }; #endif diff --git a/apt-pkg/deb/debmetaindex.cc b/apt-pkg/deb/debmetaindex.cc index 6fd12add8..aa2db8149 100644 --- a/apt-pkg/deb/debmetaindex.cc +++ b/apt-pkg/deb/debmetaindex.cc @@ -78,7 +78,6 @@ string debReleaseIndex::MetaIndexURI(const char *Type) const return Res; } -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) std::string debReleaseIndex::LocalFileName() const { // see if we have a InRelease file @@ -92,7 +91,6 @@ std::string 
debReleaseIndex::LocalFileName() const return ""; } -#endif string debReleaseIndex::IndexURISuffix(const char *Type, string const &Section, string const &Arch) const { @@ -186,8 +184,8 @@ debReleaseIndex::~debReleaseIndex() { delete *S; } -vector <struct IndexTarget *>* debReleaseIndex::ComputeIndexTargets() const { - vector <struct IndexTarget *>* IndexTargets = new vector <IndexTarget *>; +vector <IndexTarget *>* debReleaseIndex::ComputeIndexTargets() const { + vector <IndexTarget *>* IndexTargets = new vector <IndexTarget *>; map<string, vector<debSectionEntry const*> >::const_iterator const src = ArchEntries.find("source"); if (src != ArchEntries.end()) { @@ -253,38 +251,44 @@ bool debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool const &GetAll) const { bool const tryInRelease = _config->FindB("Acquire::TryInRelease", true); + indexRecords * const iR = new indexRecords(Dist); + if (Trusted == ALWAYS_TRUSTED) + iR->SetTrusted(true); + else if (Trusted == NEVER_TRUSTED) + iR->SetTrusted(false); + // special case for --print-uris if (GetAll) { - vector <struct IndexTarget *> *targets = ComputeIndexTargets(); - for (vector <struct IndexTarget*>::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) { + vector <IndexTarget *> *targets = ComputeIndexTargets(); + for (vector <IndexTarget*>::const_iterator Target = targets->begin(); Target != targets->end(); ++Target) { new pkgAcqIndex(Owner, (*Target)->URI, (*Target)->Description, - (*Target)->ShortDesc, HashString()); + (*Target)->ShortDesc, HashStringList()); } delete targets; // this is normally created in pkgAcqMetaSig, but if we run // in --print-uris mode, we add it here if (tryInRelease == false) - new pkgAcqMetaIndex(Owner, MetaIndexURI("Release"), - MetaIndexInfo("Release"), "Release", - MetaIndexURI("Release.gpg"), - ComputeIndexTargets(), - new indexRecords (Dist)); + new pkgAcqMetaIndex(Owner, NULL, + MetaIndexURI("Release"), + MetaIndexInfo("Release"), "Release", + MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg", + ComputeIndexTargets(), + iR); } - if (tryInRelease == true) - new pkgAcqMetaClearSig(Owner, MetaIndexURI("InRelease"), - MetaIndexInfo("InRelease"), "InRelease", + new pkgAcqMetaClearSig(Owner, + MetaIndexURI("InRelease"), MetaIndexInfo("InRelease"), "InRelease", MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release", MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg", ComputeIndexTargets(), - new indexRecords (Dist)); + iR); else - new pkgAcqMetaSig(Owner, MetaIndexURI("Release.gpg"), - MetaIndexInfo("Release.gpg"), "Release.gpg", - MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release", - ComputeIndexTargets(), - new indexRecords (Dist)); + new pkgAcqMetaIndex(Owner, NULL, + MetaIndexURI("Release"), MetaIndexInfo("Release"), "Release", + MetaIndexURI("Release.gpg"), MetaIndexInfo("Release.gpg"), "Release.gpg", + ComputeIndexTargets(), + iR); return true; } @@ -388,7 +392,7 @@ debReleaseIndex::debSectionEntry::debSectionEntry (string const &Section, bool const &IsSrc): Section(Section), IsSrc(IsSrc) {} -class debSLTypeDebian : public pkgSourceList::Type +class APT_HIDDEN debSLTypeDebian : public pkgSourceList::Type { protected: @@ -398,9 +402,12 @@ class debSLTypeDebian : public pkgSourceList::Type { // parse arch=, arch+= and arch-= settings map<string, string>::const_iterator arch = Options.find("arch"); - vector<string> Archs = - (arch != Options.end()) ? 
VectorizeString(arch->second, ',') : - APT::Configuration::getArchitectures(); + vector<string> Archs; + if (arch != Options.end()) + Archs = VectorizeString(arch->second, ','); + else + Archs = APT::Configuration::getArchitectures(); + if ((arch = Options.find("arch+")) != Options.end()) { std::vector<std::string> const plusArch = VectorizeString(arch->second, ','); @@ -471,7 +478,16 @@ class debSLTypeDebian : public pkgSourceList::Type } }; -class debSLTypeDeb : public debSLTypeDebian +debDebFileMetaIndex::debDebFileMetaIndex(std::string const &DebFile) + : metaIndex(DebFile, "local-uri", "deb-dist"), DebFile(DebFile) +{ + DebIndex = new debDebPkgFileIndex(DebFile); + Indexes = new vector<pkgIndexFile *>(); + Indexes->push_back(DebIndex); +} + + +class APT_HIDDEN debSLTypeDeb : public debSLTypeDebian { public: @@ -489,7 +505,7 @@ class debSLTypeDeb : public debSLTypeDebian } }; -class debSLTypeDebSrc : public debSLTypeDebian +class APT_HIDDEN debSLTypeDebSrc : public debSLTypeDebian { public: @@ -507,5 +523,26 @@ class debSLTypeDebSrc : public debSLTypeDebian } }; -debSLTypeDeb _apt_DebType; -debSLTypeDebSrc _apt_DebSrcType; +class APT_HIDDEN debSLTypeDebFile : public pkgSourceList::Type +{ + public: + + bool CreateItem(vector<metaIndex *> &List, string const &URI, + string const &/*Dist*/, string const &/*Section*/, + std::map<string, string> const &/*Options*/) const + { + metaIndex *mi = new debDebFileMetaIndex(URI); + List.push_back(mi); + return true; + } + + debSLTypeDebFile() + { + Name = "deb-file"; + Label = "Debian Deb File"; + } +}; + +APT_HIDDEN debSLTypeDeb _apt_DebType; +APT_HIDDEN debSLTypeDebSrc _apt_DebSrcType; +APT_HIDDEN debSLTypeDebFile _apt_DebFileType; diff --git a/apt-pkg/deb/debmetaindex.h b/apt-pkg/deb/debmetaindex.h index 2286fa8b2..94d005760 100644 --- a/apt-pkg/deb/debmetaindex.h +++ b/apt-pkg/deb/debmetaindex.h @@ -18,8 +18,10 @@ class pkgAcquire; class pkgIndexFile; +class debDebPkgFileIndex; +class IndexTarget; -class debReleaseIndex : public metaIndex { +class APT_HIDDEN debReleaseIndex : public metaIndex { public: class debSectionEntry @@ -34,7 +36,7 @@ class debReleaseIndex : public metaIndex { /** \brief dpointer placeholder (for later in case we need it) */ void *d; std::map<std::string, std::vector<debSectionEntry const*> > ArchEntries; - enum { ALWAYS_TRUSTED, NEVER_TRUSTED, CHECK_TRUST } Trusted; + enum APT_HIDDEN { ALWAYS_TRUSTED, NEVER_TRUSTED, CHECK_TRUST } Trusted; public: @@ -44,16 +46,17 @@ class debReleaseIndex : public metaIndex { virtual std::string ArchiveURI(std::string const &File) const {return URI + File;}; virtual bool GetIndexes(pkgAcquire *Owner, bool const &GetAll=false) const; - std::vector <struct IndexTarget *>* ComputeIndexTargets() const; + std::vector <IndexTarget *>* ComputeIndexTargets() const; std::string Info(const char *Type, std::string const &Section, std::string const &Arch="") const; std::string MetaIndexInfo(const char *Type) const; std::string MetaIndexFile(const char *Types) const; std::string MetaIndexURI(const char *Type) const; -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) - virtual std::string LocalFileName() const; +#if APT_PKG_ABI >= 413 + virtual #endif + std::string LocalFileName() const; std::string IndexURI(const char *Type, std::string const &Section, std::string const &Arch="native") const; std::string IndexURISuffix(const char *Type, std::string const &Section, std::string const &Arch="native") const; @@ -71,4 +74,27 @@ class debReleaseIndex : public metaIndex { void PushSectionEntry(const 
debSectionEntry *Entry); }; +class APT_HIDDEN debDebFileMetaIndex : public metaIndex +{ + private: + std::string DebFile; + debDebPkgFileIndex *DebIndex; + public: + virtual std::string ArchiveURI(std::string const& /*File*/) const { + return DebFile; + } + virtual bool GetIndexes(pkgAcquire* /*Owner*/, const bool& /*GetAll=false*/) const { + return true; + } + virtual std::vector<pkgIndexFile *> *GetIndexFiles() { + return Indexes; + } + virtual bool IsTrusted() const { + return true; + } + debDebFileMetaIndex(std::string const &DebFile); + virtual ~debDebFileMetaIndex() {}; + +}; + #endif diff --git a/apt-pkg/deb/debrecords.cc b/apt-pkg/deb/debrecords.cc index 6063db5a8..335bcfda0 100644 --- a/apt-pkg/deb/debrecords.cc +++ b/apt-pkg/deb/debrecords.cc @@ -11,35 +11,35 @@ #include <config.h> #include <apt-pkg/debrecords.h> +#include <apt-pkg/debindexfile.h> #include <apt-pkg/strutl.h> #include <apt-pkg/aptconfiguration.h> #include <apt-pkg/fileutl.h> #include <apt-pkg/cacheiterators.h> #include <apt-pkg/pkgcache.h> #include <apt-pkg/tagfile.h> +#include <apt-pkg/error.h> #include <string.h> #include <algorithm> +#include <sstream> #include <string> #include <vector> #include <langinfo.h> + +#include <apti18n.h> /*}}}*/ using std::string; // RecordParser::debRecordParser - Constructor /*{{{*/ -// --------------------------------------------------------------------- -/* */ -debRecordParser::debRecordParser(string FileName,pkgCache &Cache) : - File(FileName,FileFd::ReadOnly, FileFd::Extension), - Tags(&File, std::max(Cache.Head().MaxVerFileSize, - Cache.Head().MaxDescFileSize) + 200) +debRecordParser::debRecordParser(string FileName,pkgCache &Cache) : + debRecordParserBase(), File(FileName, FileFd::ReadOnly, FileFd::Extension), + Tags(&File, std::max(Cache.Head().MaxVerFileSize, Cache.Head().MaxDescFileSize) + 200) { } /*}}}*/ // RecordParser::Jump - Jump to a specific record /*{{{*/ -// --------------------------------------------------------------------- -/* */ bool debRecordParser::Jump(pkgCache::VerFileIterator const &Ver) { return Tags.Jump(Section,Ver->Offset); @@ -49,124 +49,109 @@ bool debRecordParser::Jump(pkgCache::DescFileIterator const &Desc) return Tags.Jump(Section,Desc->Offset); } /*}}}*/ -// RecordParser::FileName - Return the archive filename on the site /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::FileName() +debRecordParser::~debRecordParser() {} + +// RecordParserBase::FileName - Return the archive filename on the site /*{{{*/ +string debRecordParserBase::FileName() { return Section.FindS("Filename"); } /*}}}*/ -// RecordParser::Name - Return the package name /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::Name() +// RecordParserBase::Name - Return the package name /*{{{*/ +string debRecordParserBase::Name() { return Section.FindS("Package"); } /*}}}*/ -// RecordParser::Homepage - Return the package homepage /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::Homepage() +// RecordParserBase::Homepage - Return the package homepage /*{{{*/ +string debRecordParserBase::Homepage() { return Section.FindS("Homepage"); } /*}}}*/ -// RecordParser::MD5Hash - Return the archive hash /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::MD5Hash() -{ - return Section.FindS("MD5Sum"); -} - /*}}}*/ -// RecordParser::SHA1Hash - Return 
the archive hash /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::SHA1Hash() -{ - return Section.FindS("SHA1"); -} - /*}}}*/ -// RecordParser::SHA256Hash - Return the archive hash /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::SHA256Hash() -{ - return Section.FindS("SHA256"); -} - /*}}}*/ -// RecordParser::SHA512Hash - Return the archive hash /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::SHA512Hash() +// RecordParserBase::Hashes - return the available archive hashes /*{{{*/ +HashStringList debRecordParserBase::Hashes() const { - return Section.FindS("SHA512"); + HashStringList hashes; + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + std::string const hash = Section.FindS(*type); + if (hash.empty() == false) + hashes.push_back(HashString(*type, hash)); + } + return hashes; } /*}}}*/ -// RecordParser::Maintainer - Return the maintainer email /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::Maintainer() +// RecordParserBase::Maintainer - Return the maintainer email /*{{{*/ +string debRecordParserBase::Maintainer() { return Section.FindS("Maintainer"); } /*}}}*/ -// RecordParser::RecordField - Return the value of an arbitrary field /*{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::RecordField(const char *fieldName) +// RecordParserBase::RecordField - Return the value of an arbitrary field /*{{*/ +string debRecordParserBase::RecordField(const char *fieldName) { return Section.FindS(fieldName); } - - /*}}}*/ -// RecordParser::ShortDesc - Return a 1 line description /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::ShortDesc() + /*}}}*/ +// RecordParserBase::ShortDesc - Return a 1 line description /*{{{*/ +string debRecordParserBase::ShortDesc(std::string const &lang) { - string Res = LongDesc(); - string::size_type Pos = Res.find('\n'); + string const Res = LongDesc(lang); + if (Res.empty() == true) + return ""; + string::size_type const Pos = Res.find('\n'); if (Pos == string::npos) return Res; return string(Res,0,Pos); } /*}}}*/ -// RecordParser::LongDesc - Return a longer description /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::LongDesc() -{ - string orig, dest; +// RecordParserBase::LongDesc - Return a longer description /*{{{*/ +string debRecordParserBase::LongDesc(std::string const &lang) +{ + string orig; + if (lang.empty() == true) + { + std::vector<string> const lang = APT::Configuration::getLanguages(); + for (std::vector<string>::const_iterator l = lang.begin(); + l != lang.end(); ++l) + { + std::string const tagname = "Description-" + *l; + orig = Section.FindS(tagname.c_str()); + if (orig.empty() == false) + break; + else if (*l == "en") + { + orig = Section.FindS("Description"); + if (orig.empty() == false) + break; + } + } + if (orig.empty() == true) + orig = Section.FindS("Description"); + } + else + { + std::string const tagname = "Description-" + lang; + orig = Section.FindS(tagname.c_str()); + if (orig.empty() == true && lang == "en") + orig = Section.FindS("Description"); + } - if (!Section.FindS("Description").empty()) - orig = Section.FindS("Description").c_str(); - else - { - 
std::vector<string> const lang = APT::Configuration::getLanguages(); - for (std::vector<string>::const_iterator l = lang.begin(); - orig.empty() && l != lang.end(); ++l) - orig = Section.FindS(string("Description-").append(*l).c_str()); - } + char const * const codeset = nl_langinfo(CODESET); + if (strcmp(codeset,"UTF-8") != 0) { + string dest; + UTF8ToCodeset(codeset, orig, &dest); + return dest; + } - char const * const codeset = nl_langinfo(CODESET); - if (strcmp(codeset,"UTF-8") != 0) { - UTF8ToCodeset(codeset, orig, &dest); - orig = dest; - } - return orig; } /*}}}*/ -static const char *SourceVerSeparators = " ()"; - -// RecordParser::SourcePkg - Return the source package name if any /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::SourcePkg() +static const char * const SourceVerSeparators = " ()"; +// RecordParserBase::SourcePkg - Return the source package name if any /*{{{*/ +string debRecordParserBase::SourcePkg() { string Res = Section.FindS("Source"); string::size_type Pos = Res.find_first_of(SourceVerSeparators); @@ -175,10 +160,8 @@ string debRecordParser::SourcePkg() return string(Res,0,Pos); } /*}}}*/ -// RecordParser::SourceVer - Return the source version number if present /*{{{*/ -// --------------------------------------------------------------------- -/* */ -string debRecordParser::SourceVer() +// RecordParserBase::SourceVer - Return the source version number if present /*{{{*/ +string debRecordParserBase::SourceVer() { string Pkg = Section.FindS("Source"); string::size_type Pos = Pkg.find_first_of(SourceVerSeparators); @@ -198,11 +181,29 @@ string debRecordParser::SourceVer() return string(Pkg, VerStart, VerEnd - VerStart); } /*}}}*/ -// RecordParser::GetRec - Return the whole record /*{{{*/ -// --------------------------------------------------------------------- -/* */ -void debRecordParser::GetRec(const char *&Start,const char *&Stop) +// RecordParserBase::GetRec - Return the whole record /*{{{*/ +void debRecordParserBase::GetRec(const char *&Start,const char *&Stop) { Section.GetSection(Start,Stop); } /*}}}*/ +debRecordParserBase::~debRecordParserBase() {} + +bool debDebFileRecordParser::LoadContent() +{ + // load content only once + if (controlContent.empty() == false) + return true; + + std::ostringstream content; + if (debDebPkgFileIndex::GetContent(content, debFileName) == false) + return false; + // add two newlines to make sure the scanner finds the section, + // which is usually done by pkgTagFile automatically if needed. 
+ content << "\n\n"; + + controlContent = content.str(); + if (Section.Scan(controlContent.c_str(), controlContent.length()) == false) + return _error->Error(_("Unable to parse package file %s (%d)"), debFileName.c_str(), 3); + return true; +} diff --git a/apt-pkg/deb/debrecords.h b/apt-pkg/deb/debrecords.h index bdac6c90b..38e071940 100644 --- a/apt-pkg/deb/debrecords.h +++ b/apt-pkg/deb/debrecords.h @@ -25,35 +25,23 @@ #include <apt-pkg/indexfile.h> #endif -class debRecordParser : public pkgRecords::Parser +class APT_HIDDEN debRecordParserBase : public pkgRecords::Parser { - /** \brief dpointer placeholder (for later in case we need it) */ - void *d; - - FileFd File; - pkgTagFile Tags; + protected: pkgTagSection Section; - - protected: - - virtual bool Jump(pkgCache::VerFileIterator const &Ver); - virtual bool Jump(pkgCache::DescFileIterator const &Desc); - - public: + public: // These refer to the archive file for the Version virtual std::string FileName(); - virtual std::string MD5Hash(); - virtual std::string SHA1Hash(); - virtual std::string SHA256Hash(); - virtual std::string SHA512Hash(); virtual std::string SourcePkg(); virtual std::string SourceVer(); - + + virtual HashStringList Hashes() const; + // These are some general stats about the package virtual std::string Maintainer(); - virtual std::string ShortDesc(); - virtual std::string LongDesc(); + virtual std::string ShortDesc(std::string const &lang); + virtual std::string LongDesc(std::string const &lang); virtual std::string Name(); virtual std::string Homepage(); @@ -61,9 +49,42 @@ class debRecordParser : public pkgRecords::Parser virtual std::string RecordField(const char *fieldName); virtual void GetRec(const char *&Start,const char *&Stop); - + + debRecordParserBase() : Parser() {} + virtual ~debRecordParserBase(); +}; + +class APT_HIDDEN debRecordParser : public debRecordParserBase +{ + protected: + FileFd File; + pkgTagFile Tags; + + virtual bool Jump(pkgCache::VerFileIterator const &Ver); + virtual bool Jump(pkgCache::DescFileIterator const &Desc); + + public: debRecordParser(std::string FileName,pkgCache &Cache); - virtual ~debRecordParser() {}; + virtual ~debRecordParser(); +}; + +// custom record parser that reads deb files directly +class APT_HIDDEN debDebFileRecordParser : public debRecordParserBase +{ + std::string debFileName; + std::string controlContent; + + APT_HIDDEN bool LoadContent(); + protected: + // single file files, so no jumping whatsoever + bool Jump(pkgCache::VerFileIterator const &) { return LoadContent(); } + bool Jump(pkgCache::DescFileIterator const &) { return LoadContent(); } + + public: + virtual std::string FileName() { return debFileName; } + + debDebFileRecordParser(std::string FileName) + : debRecordParserBase(), debFileName(FileName) {}; }; #endif diff --git a/apt-pkg/deb/debsrcrecords.cc b/apt-pkg/deb/debsrcrecords.cc index b09588dd3..ca6d09896 100644 --- a/apt-pkg/deb/debsrcrecords.cc +++ b/apt-pkg/deb/debsrcrecords.cc @@ -18,6 +18,8 @@ #include <apt-pkg/aptconfiguration.h> #include <apt-pkg/srcrecords.h> #include <apt-pkg/tagfile.h> +#include <apt-pkg/hashes.h> +#include <apt-pkg/gpgv.h> #include <ctype.h> #include <stdlib.h> @@ -55,12 +57,13 @@ const char **debSrcRecordParser::Binaries() char* binStartNext = strchrnul(bin, ','); char* binEnd = binStartNext - 1; for (; isspace(*binEnd) != 0; --binEnd) - binEnd = '\0'; + binEnd = 0; StaticBinList.push_back(bin); if (*binStartNext != ',') break; *binStartNext = '\0'; - for (bin = binStartNext + 1; isspace(*bin) != 0; ++bin); + for 
(bin = binStartNext + 1; isspace(*bin) != 0; ++bin) + ; } while (*bin != '\0'); StaticBinList.push_back(NULL); @@ -118,13 +121,32 @@ bool debSrcRecordParser::BuildDepends(std::vector<pkgSrcRecords::Parser::BuildDe // --------------------------------------------------------------------- /* This parses the list of files and returns it, each file is required to have a complete source package */ -bool debSrcRecordParser::Files(std::vector<pkgSrcRecords::File> &List) +bool debSrcRecordParser::Files(std::vector<pkgSrcRecords::File> &F) { - List.erase(List.begin(),List.end()); - - string Files = Sect.FindS("Files"); - if (Files.empty() == true) + std::vector<pkgSrcRecords::File2> F2; + if (Files2(F2) == false) return false; + for (std::vector<pkgSrcRecords::File2>::const_iterator f2 = F2.begin(); f2 != F2.end(); ++f2) + { + pkgSrcRecords::File2 f; +#if __GNUC__ >= 4 + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#endif + f.MD5Hash = f2->MD5Hash; + f.Size = f2->Size; +#if __GNUC__ >= 4 + #pragma GCC diagnostic pop +#endif + f.Path = f2->Path; + f.Type = f2->Type; + F.push_back(f); + } + return true; +} +bool debSrcRecordParser::Files2(std::vector<pkgSrcRecords::File2> &List) +{ + List.clear(); // Stash the / terminated directory prefix string Base = Sect.FindS("Directory"); @@ -133,51 +155,92 @@ bool debSrcRecordParser::Files(std::vector<pkgSrcRecords::File> &List) std::vector<std::string> const compExts = APT::Configuration::getCompressorExtensions(); - // Iterate over the entire list grabbing each triplet - const char *C = Files.c_str(); - while (*C != 0) - { - pkgSrcRecords::File F; - string Size; - - // Parse each of the elements - if (ParseQuoteWord(C,F.MD5Hash) == false || - ParseQuoteWord(C,Size) == false || - ParseQuoteWord(C,F.Path) == false) - return _error->Error("Error parsing file record"); - - // Parse the size and append the directory - F.Size = atoi(Size.c_str()); - F.Path = Base + F.Path; - - // Try to guess what sort of file it is we are getting. 
- string::size_type Pos = F.Path.length()-1; - while (1) + for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type) + { + // derive field from checksum type + std::string checksumField("Checksums-"); + if (strcmp(*type, "MD5Sum") == 0) + checksumField = "Files"; // historic name for MD5 checksums + else + checksumField.append(*type); + + string const Files = Sect.FindS(checksumField.c_str()); + if (Files.empty() == true) + continue; + + // Iterate over the entire list grabbing each triplet + const char *C = Files.c_str(); + while (*C != 0) { - string::size_type Tmp = F.Path.rfind('.',Pos); - if (Tmp == string::npos) - break; - if (F.Type == "tar") { - // source v3 has extension 'debian.tar.*' instead of 'diff.*' - if (string(F.Path, Tmp+1, Pos-Tmp) == "debian") - F.Type = "diff"; - break; - } - F.Type = string(F.Path,Tmp+1,Pos-Tmp); - - if (std::find(compExts.begin(), compExts.end(), std::string(".").append(F.Type)) != compExts.end() || - F.Type == "tar") + string hash, size, path; + + // Parse each of the elements + if (ParseQuoteWord(C, hash) == false || + ParseQuoteWord(C, size) == false || + ParseQuoteWord(C, path) == false) + return _error->Error("Error parsing file record in %s of source package %s", checksumField.c_str(), Package().c_str()); + + HashString const hashString(*type, hash); + if (Base.empty() == false) + path = Base + path; + + // look if we have a record for this file already + std::vector<pkgSrcRecords::File2>::iterator file = List.begin(); + for (; file != List.end(); ++file) + if (file->Path == path) + break; + + // we have it already, store the new hash and be done + if (file != List.end()) { - Pos = Tmp-1; + if (checksumField == "Files") + APT_IGNORE_DEPRECATED(file->MD5Hash = hash;) + // an error here indicates that we have two different hashes for the same file + if (file->Hashes.push_back(hashString) == false) + return _error->Error("Error parsing checksum in %s of source package %s", checksumField.c_str(), Package().c_str()); continue; } - - break; + + // we haven't seen this file yet + pkgSrcRecords::File2 F; + F.Path = path; + F.FileSize = strtoull(size.c_str(), NULL, 10); + F.Hashes.push_back(hashString); + + APT_IGNORE_DEPRECATED_PUSH + F.Size = F.FileSize; + if (checksumField == "Files") + F.MD5Hash = hash; + APT_IGNORE_DEPRECATED_POP + + // Try to guess what sort of file it is we are getting. 
+ string::size_type Pos = F.Path.length()-1; + while (1) + { + string::size_type Tmp = F.Path.rfind('.',Pos); + if (Tmp == string::npos) + break; + if (F.Type == "tar") { + // source v3 has extension 'debian.tar.*' instead of 'diff.*' + if (string(F.Path, Tmp+1, Pos-Tmp) == "debian") + F.Type = "diff"; + break; + } + F.Type = string(F.Path,Tmp+1,Pos-Tmp); + + if (std::find(compExts.begin(), compExts.end(), std::string(".").append(F.Type)) != compExts.end() || + F.Type == "tar") + { + Pos = Tmp-1; + continue; + } + + break; + } + List.push_back(F); } - - List.push_back(F); } - + return true; } /*}}}*/ @@ -186,6 +249,25 @@ bool debSrcRecordParser::Files(std::vector<pkgSrcRecords::File> &List) /* */ debSrcRecordParser::~debSrcRecordParser() { - delete[] Buffer; + // was allocated via strndup() + free(Buffer); } /*}}}*/ + + +debDscRecordParser::debDscRecordParser(std::string const &DscFile, pkgIndexFile const *Index) + : debSrcRecordParser(DscFile, Index) +{ + // support clear signed files + if (OpenMaybeClearSignedFile(DscFile, Fd) == false) + { + _error->Error("Failed to open %s", DscFile.c_str()); + return; + } + + // re-init to ensure the updated Fd is used + Tags.Init(&Fd); + // read the first (and only) record + Step(); + +} diff --git a/apt-pkg/deb/debsrcrecords.h b/apt-pkg/deb/debsrcrecords.h index b65d1480b..cd246d624 100644 --- a/apt-pkg/deb/debsrcrecords.h +++ b/apt-pkg/deb/debsrcrecords.h @@ -21,11 +21,12 @@ class pkgIndexFile; -class debSrcRecordParser : public pkgSrcRecords::Parser +class APT_HIDDEN debSrcRecordParser : public pkgSrcRecords::Parser { /** \brief dpointer placeholder (for later in case we need it) */ void *d; + protected: FileFd Fd; pkgTagFile Tags; pkgTagSection Sect; @@ -53,6 +54,7 @@ class debSrcRecordParser : public pkgSrcRecords::Parser return std::string(Start,Stop); }; virtual bool Files(std::vector<pkgSrcRecords::File> &F); + bool Files2(std::vector<pkgSrcRecords::File2> &F); debSrcRecordParser(std::string const &File,pkgIndexFile const *Index) : Parser(Index), Fd(File,FileFd::ReadOnly, FileFd::Extension), Tags(&Fd,102400), @@ -60,4 +62,10 @@ class debSrcRecordParser : public pkgSrcRecords::Parser virtual ~debSrcRecordParser(); }; +class APT_HIDDEN debDscRecordParser : public debSrcRecordParser +{ + public: + debDscRecordParser(std::string const &DscFile, pkgIndexFile const *Index); +}; + #endif diff --git a/apt-pkg/deb/debsystem.cc b/apt-pkg/deb/debsystem.cc index 142f3a6e6..9a5da9da1 100644 --- a/apt-pkg/deb/debsystem.cc +++ b/apt-pkg/deb/debsystem.cc @@ -38,7 +38,7 @@ using std::string; debSystem debSys; -class debSystemPrivate { +class APT_HIDDEN debSystemPrivate { public: debSystemPrivate() : LockFD(-1), LockCount(0), StatusFile(0) { diff --git a/apt-pkg/deb/debsystem.h b/apt-pkg/deb/debsystem.h index a945f68fb..226cd60bf 100644 --- a/apt-pkg/deb/debsystem.h +++ b/apt-pkg/deb/debsystem.h @@ -29,7 +29,7 @@ class debSystem : public pkgSystem { // private d-pointer debSystemPrivate *d; - bool CheckUpdates(); + APT_HIDDEN bool CheckUpdates(); public: diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc index e410594df..a7a66c75d 100644 --- a/apt-pkg/deb/dpkgpm.cc +++ b/apt-pkg/deb/dpkgpm.cc @@ -55,12 +55,26 @@ using namespace std; +APT_PURE static unsigned int +EnvironmentSize() +{ + unsigned int size = 0; + char **envp = environ; + + while (*envp != NULL) + size += strlen (*envp++) + 1; + + return size; +} + class pkgDPkgPMPrivate { public: pkgDPkgPMPrivate() : stdin_is_dev_null(false), dpkgbuf_pos(0), - term_out(NULL), history_out(NULL), - 
progress(NULL), master(-1), slave(-1) + term_out(NULL), history_out(NULL), + progress(NULL), tt_is_valid(false), master(-1), + slave(NULL), protect_slave_from_dying(-1), + direct_stdin(false) { dpkgbuf[0] = '\0'; } @@ -77,14 +91,17 @@ public: APT::Progress::PackageManager *progress; // pty stuff - struct termios tt; + struct termios tt; + bool tt_is_valid; int master; - int slave; + char * slave; + int protect_slave_from_dying; // signals sigset_t sigmask; sigset_t original_sigmask; + bool direct_stdin; }; namespace @@ -186,18 +203,10 @@ pkgCache::VerIterator FindNowVersion(const pkgCache::PkgIterator &Pkg) { pkgCache::VerIterator Ver; for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver) - { - pkgCache::VerFileIterator Vf = Ver.FileList(); - pkgCache::PkgFileIterator F = Vf.File(); - for (F = Vf.File(); F.end() == false; ++F) - { - if (F && F.Archive()) - { - if (strcmp(F.Archive(), "now")) - return Ver; - } - } - } + for (pkgCache::VerFileIterator Vf = Ver.FileList(); Vf.end() == false; ++Vf) + for (pkgCache::PkgFileIterator F = Vf.File(); F.end() == false; ++F) + if (F->Archive != 0 && strcmp(F.Archive(), "now") == 0) + return Ver; return Ver; } /*}}}*/ @@ -510,14 +519,14 @@ bool pkgDPkgPM::RunScriptsWithPkgs(const char *Cnf) return result; } /*}}}*/ -// DPkgPM::DoStdin - Read stdin and pass to slave pty /*{{{*/ +// DPkgPM::DoStdin - Read stdin and pass to master pty /*{{{*/ // --------------------------------------------------------------------- /* */ void pkgDPkgPM::DoStdin(int master) { unsigned char input_buf[256] = {0,}; - ssize_t len = read(0, input_buf, sizeof(input_buf)); + ssize_t len = read(STDIN_FILENO, input_buf, sizeof(input_buf)); if (len) FileFd::Write(master, input_buf, len); else @@ -564,8 +573,8 @@ void pkgDPkgPM::ProcessDpkgStatusLine(char *line) 'status: <pkg>: <pkg qstate>' 'status: <pkg>:<arch>: <pkg qstate>' - 'processing: {install,configure,remove,purge,disappear,trigproc}: pkg' - 'processing: {install,configure,remove,purge,disappear,trigproc}: trigger' + 'processing: {install,upgrade,configure,remove,purge,disappear,trigproc}: pkg' + 'processing: {install,upgrade,configure,remove,purge,disappear,trigproc}: trigger' */ // we need to split on ": " (note the appended space) as the ':' is @@ -589,12 +598,15 @@ void pkgDPkgPM::ProcessDpkgStatusLine(char *line) std::string action; // "processing" has the form "processing: action: pkg or trigger" - // with action = ["install", "configure", "remove", "purge", "disappear", - // "trigproc"] + // with action = ["install", "upgrade", "configure", "remove", "purge", + // "disappear", "trigproc"] if (prefix == "processing") { pkgname = APT::String::Strip(list[2]); action = APT::String::Strip(list[1]); + // we don't care for the difference (as dpkg doesn't really either) + if (action == "upgrade") + action = "install"; } // "status" has the form: "status: pkg: state" // with state in ["half-installed", "unpacked", "half-configured", @@ -621,15 +633,15 @@ void pkgDPkgPM::ProcessDpkgStatusLine(char *line) { if(action == "error") { - d->progress->Error(list[1], PackagesDone, PackagesTotal, + d->progress->Error(pkgname, PackagesDone, PackagesTotal, list[3]); pkgFailures++; - WriteApportReport(list[1].c_str(), list[3].c_str()); + WriteApportReport(pkgname.c_str(), list[3].c_str()); return; } else if(action == "conffile-prompt") { - d->progress->ConffilePrompt(list[1], PackagesDone, PackagesTotal, + d->progress->ConffilePrompt(pkgname, PackagesDone, PackagesTotal, list[3]); return; } @@ -638,27 +650,26 @@ void 
pkgDPkgPM::ProcessDpkgStatusLine(char *line) // at this point we know that we should have a valid pkgname, so build all // the info from it - // dpkg does not send always send "pkgname:arch" so we add it here - // if needed + // dpkg does not always send "pkgname:arch" so we add it here if needed if (pkgname.find(":") == std::string::npos) { - // find the package in the group that is in a touched by dpkg - // if there are multiple dpkg will send us a full pkgname:arch + // find the package in the group that is touched by dpkg + // if there are multiple pkgs dpkg would send us a full pkgname:arch pkgCache::GrpIterator Grp = Cache.FindGrp(pkgname); - if (Grp.end() == false) + if (Grp.end() == false) { - pkgCache::PkgIterator P = Grp.PackageList(); - for (; P.end() != true; P = Grp.NextPkg(P)) - { - if(Cache[P].Mode != pkgDepCache::ModeKeep) - { - pkgname = P.FullName(); - break; - } - } + pkgCache::PkgIterator P = Grp.PackageList(); + for (; P.end() != true; P = Grp.NextPkg(P)) + { + if(Cache[P].Keep() == false || Cache[P].ReInstall() == true) + { + pkgname = P.FullName(); + break; + } + } } } - + const char* const pkg = pkgname.c_str(); std::string short_pkgname = StringSplit(pkgname, ":")[0]; std::string arch = ""; @@ -697,28 +708,29 @@ void pkgDPkgPM::ProcessDpkgStatusLine(char *line) if (prefix == "status") { vector<struct DpkgState> const &states = PackageOps[pkg]; - const char *next_action = NULL; if(PackageOpsDone[pkg] < states.size()) - next_action = states[PackageOpsDone[pkg]].state; - // check if the package moved to the next dpkg state - if(next_action && (action == next_action)) { - // only read the translation if there is actually a next - // action - const char *translation = _(states[PackageOpsDone[pkg]].str); - std::string msg; - - // we moved from one dpkg state to a new one, report that - PackageOpsDone[pkg]++; - PackagesDone++; - - strprintf(msg, translation, i18n_pkgname.c_str()); - d->progress->StatusChanged(pkgname, PackagesDone, PackagesTotal, msg); - + char const * const next_action = states[PackageOpsDone[pkg]].state; + if (next_action && Debug == true) + std::clog << "(parsed from dpkg) pkg: " << short_pkgname + << " action: " << action << " (expected: '" << next_action << "' " + << PackageOpsDone[pkg] << " of " << states.size() << ")" << endl; + + // check if the package moved to the next dpkg state + if(next_action && (action == next_action)) + { + // only read the translation if there is actually a next action + char const * const translation = _(states[PackageOpsDone[pkg]].str); + + // we moved from one dpkg state to a new one, report that + ++PackageOpsDone[pkg]; + ++PackagesDone; + + std::string msg; + strprintf(msg, translation, i18n_pkgname.c_str()); + d->progress->StatusChanged(pkgname, PackagesDone, PackagesTotal, msg); + } } - if (Debug == true) - std::clog << "(parsed from dpkg) pkg: " << short_pkgname - << " action: " << action << endl; } } /*}}}*/ @@ -1029,9 +1041,14 @@ void pkgDPkgPM::BuildPackagesProgressMap() PackagesTotal++; } } + /* one extra: We don't want the progress bar to reach 100%, especially not + if we call dpkg --configure --pending and process a bunch of triggers + while showing 100%. 
Also, spindown takes a while, so never reaching 100% + is way more correct than reaching 100% while still doing stuff even if + doing it this way is slightly bending the rules */ + ++PackagesTotal; } /*}}}*/ -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR < 13) bool pkgDPkgPM::Go(int StatusFd) { APT::Progress::PackageManager *progress = NULL; @@ -1040,68 +1057,150 @@ bool pkgDPkgPM::Go(int StatusFd) else progress = new APT::Progress::PackageManagerProgressFd(StatusFd); - return GoNoABIBreak(progress); + return Go(progress); } -#endif void pkgDPkgPM::StartPtyMagic() { if (_config->FindB("Dpkg::Use-Pty", true) == false) { - d->master = d->slave = -1; + d->master = -1; + if (d->slave != NULL) + free(d->slave); + d->slave = NULL; return; } - // setup the pty and stuff - struct winsize win; + if (isatty(STDIN_FILENO) == 0) + d->direct_stdin = true; - // if tcgetattr for both stdin/stdout returns 0 (no error) - // we do the pty magic _error->PushToStack(); - if (tcgetattr(STDIN_FILENO, &d->tt) == 0 && - tcgetattr(STDOUT_FILENO, &d->tt) == 0) + + d->master = posix_openpt(O_RDWR | O_NOCTTY); + if (d->master == -1) + _error->Errno("posix_openpt", _("Can not write log (%s)"), _("Is /dev/pts mounted?")); + else if (unlockpt(d->master) == -1) + _error->Errno("unlockpt", "Unlocking the slave of master fd %d failed!", d->master); + else { - if (ioctl(STDOUT_FILENO, TIOCGWINSZ, (char *)&win) < 0) - { - _error->Errno("ioctl", _("ioctl(TIOCGWINSZ) failed")); - } else if (openpty(&d->master, &d->slave, NULL, &d->tt, &win) < 0) - { - _error->Errno("openpty", _("Can not write log (%s)"), _("Is /dev/pts mounted?")); - d->master = d->slave = -1; - } else { - struct termios rtt; - rtt = d->tt; - cfmakeraw(&rtt); - rtt.c_lflag &= ~ECHO; - rtt.c_lflag |= ISIG; + char const * const slave_name = ptsname(d->master); + if (slave_name == NULL) + _error->Errno("ptsname", "Getting name for slave of master fd %d failed!", d->master); + else + { + d->slave = strdup(slave_name); + if (d->slave == NULL) + _error->Errno("strdup", "Copying name %s for slave of master fd %d failed!", slave_name, d->master); + else if (grantpt(d->master) == -1) + _error->Errno("grantpt", "Granting access to slave %s based on master fd %d failed!", slave_name, d->master); + else if (tcgetattr(STDIN_FILENO, &d->tt) == 0) + { + d->tt_is_valid = true; + struct termios raw_tt; + // copy window size of stdout if its a 'good' terminal + if (tcgetattr(STDOUT_FILENO, &raw_tt) == 0) + { + struct winsize win; + if (ioctl(STDOUT_FILENO, TIOCGWINSZ, &win) < 0) + _error->Errno("ioctl", "Getting TIOCGWINSZ from stdout failed!"); + if (ioctl(d->master, TIOCSWINSZ, &win) < 0) + _error->Errno("ioctl", "Setting TIOCSWINSZ for master fd %d failed!", d->master); + } + if (tcsetattr(d->master, TCSANOW, &d->tt) == -1) + _error->Errno("tcsetattr", "Setting in Start via TCSANOW for master fd %d failed!", d->master); + + raw_tt = d->tt; + cfmakeraw(&raw_tt); + raw_tt.c_lflag &= ~ECHO; + raw_tt.c_lflag |= ISIG; // block SIGTTOU during tcsetattr to prevent a hang if // the process is a member of the background process group // http://www.opengroup.org/onlinepubs/000095399/functions/tcsetattr.html sigemptyset(&d->sigmask); sigaddset(&d->sigmask, SIGTTOU); sigprocmask(SIG_BLOCK,&d->sigmask, &d->original_sigmask); - tcsetattr(0, TCSAFLUSH, &rtt); - sigprocmask(SIG_SETMASK, &d->original_sigmask, 0); - } + if (tcsetattr(STDIN_FILENO, TCSAFLUSH, &raw_tt) == -1) + _error->Errno("tcsetattr", "Setting in Start via TCSAFLUSH for stdin failed!"); + sigprocmask(SIG_SETMASK, 
&d->original_sigmask, NULL); + + } + if (d->slave != NULL) + { + /* on linux, closing (and later reopening) all references to the slave + makes the slave a death end, so we open it here to have one open all + the time. We could use this fd in SetupSlavePtyMagic() for linux, but + on kfreebsd we get an incorrect ("step like") output then while it has + no problem with closing all references… so to avoid platform specific + code here we combine both and be happy once more */ + d->protect_slave_from_dying = open(d->slave, O_RDWR | O_CLOEXEC | O_NOCTTY); + } } - // complain only if stdout is either a terminal (but still failed) or is an invalid - // descriptor otherwise we would complain about redirection to e.g. /dev/null as well. - else if (isatty(STDOUT_FILENO) == 1 || errno == EBADF) - _error->Errno("tcgetattr", _("Can not write log (%s)"), _("Is stdout a terminal?")); - - if (_error->PendingError() == true) - _error->DumpErrors(std::cerr); - _error->RevertToStack(); + } + + if (_error->PendingError() == true) + { + if (d->master != -1) + { + close(d->master); + d->master = -1; + } + if (d->slave != NULL) + { + free(d->slave); + d->slave = NULL; + } + _error->DumpErrors(std::cerr); + } + _error->RevertToStack(); } +void pkgDPkgPM::SetupSlavePtyMagic() +{ + if(d->master == -1 || d->slave == NULL) + return; + + if (close(d->master) == -1) + _error->FatalE("close", "Closing master %d in child failed!", d->master); + d->master = -1; + if (setsid() == -1) + _error->FatalE("setsid", "Starting a new session for child failed!"); + + int const slaveFd = open(d->slave, O_RDWR | O_NOCTTY); + if (slaveFd == -1) + _error->FatalE("open", _("Can not write log (%s)"), _("Is /dev/pts mounted?")); + else if (ioctl(slaveFd, TIOCSCTTY, 0) < 0) + _error->FatalE("ioctl", "Setting TIOCSCTTY for slave fd %d failed!", slaveFd); + else + { + unsigned short i = 0; + if (d->direct_stdin == true) + ++i; + for (; i < 3; ++i) + if (dup2(slaveFd, i) == -1) + _error->FatalE("dup2", "Dupping %d to %d in child failed!", slaveFd, i); + + if (d->tt_is_valid == true && tcsetattr(STDIN_FILENO, TCSANOW, &d->tt) < 0) + _error->FatalE("tcsetattr", "Setting in Setup via TCSANOW for slave fd %d failed!", slaveFd); + } + if (slaveFd != -1) + close(slaveFd); +} void pkgDPkgPM::StopPtyMagic() { - if(d->slave > 0) - close(d->slave); + if (d->slave != NULL) + free(d->slave); + d->slave = NULL; + if (d->protect_slave_from_dying != -1) + { + close(d->protect_slave_from_dying); + d->protect_slave_from_dying = -1; + } if(d->master >= 0) { - tcsetattr(0, TCSAFLUSH, &d->tt); + if (d->tt_is_valid == true && tcsetattr(STDIN_FILENO, TCSAFLUSH, &d->tt) == -1) + _error->FatalE("tcsetattr", "Setting in Stop via TCSAFLUSH for stdin failed!"); close(d->master); + d->master = -1; } } @@ -1114,11 +1213,7 @@ void pkgDPkgPM::StopPtyMagic() * through to human readable (and i10n-able) * names and calculates a percentage for each step. */ -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) bool pkgDPkgPM::Go(APT::Progress::PackageManager *progress) -#else -bool pkgDPkgPM::GoNoABIBreak(APT::Progress::PackageManager *progress) -#endif { pkgPackageManager::SigINTStop = false; d->progress = progress; @@ -1166,8 +1261,15 @@ bool pkgDPkgPM::GoNoABIBreak(APT::Progress::PackageManager *progress) fd_set rfds; struct timespec tv; - unsigned int const MaxArgs = _config->FindI("Dpkg::MaxArgs",8*1024); - unsigned int const MaxArgBytes = _config->FindI("Dpkg::MaxArgBytes",32*1024); + // FIXME: do we really need this limit when we have MaxArgBytes? 
+ unsigned int const MaxArgs = _config->FindI("Dpkg::MaxArgs",32*1024); + + // try to figure out the max environment size + int OSArgMax = sysconf(_SC_ARG_MAX); + if(OSArgMax < 0) + OSArgMax = 32*1024; + OSArgMax -= EnvironmentSize() - 2*1024; + unsigned int const MaxArgBytes = _config->FindI("Dpkg::MaxArgBytes", OSArgMax); bool const NoTriggers = _config->FindB("DPkg::NoTriggers", false); if (RunScripts("DPkg::Pre-Invoke") == false) @@ -1178,9 +1280,8 @@ bool pkgDPkgPM::GoNoABIBreak(APT::Progress::PackageManager *progress) // support subpressing of triggers processing for special // cases like d-i that runs the triggers handling manually - bool const SmartConf = (_config->Find("PackageManager::Configure", "all") != "all"); bool const TriggersPending = _config->FindB("DPkg::TriggersPending", false); - if (_config->FindB("DPkg::ConfigurePending", SmartConf) == true) + if (_config->FindB("DPkg::ConfigurePending", true) == true) List.push_back(Item(Item::ConfigurePending, PkgIterator())); // for the progress @@ -1413,22 +1514,8 @@ bool pkgDPkgPM::GoNoABIBreak(APT::Progress::PackageManager *progress) pid_t Child = ExecFork(KeepFDs); if (Child == 0) { - // This is the child - if(d->slave >= 0 && d->master >= 0) - { - setsid(); - int res = ioctl(d->slave, TIOCSCTTY, 0); - if (res < 0) { - std::cerr << "ioctl(TIOCSCTTY) failed for fd: " - << d->slave << std::endl; - } else { - close(d->master); - dup2(d->slave, 0); - dup2(d->slave, 1); - dup2(d->slave, 2); - close(d->slave); - } - } + // This is the child + SetupSlavePtyMagic(); close(fd[0]); // close the read end of the pipe dpkgChrootDirectory(); @@ -1438,7 +1525,8 @@ bool pkgDPkgPM::GoNoABIBreak(APT::Progress::PackageManager *progress) if (_config->FindB("DPkg::FlushSTDIN",true) == true && isatty(STDIN_FILENO)) { - int Flags,dummy; + int Flags; + int dummy = 0; if ((Flags = fcntl(STDIN_FILENO,F_GETFL,dummy)) < 0) _exit(100); @@ -1502,8 +1590,8 @@ bool pkgDPkgPM::GoNoABIBreak(APT::Progress::PackageManager *progress) // wait for input or output here FD_ZERO(&rfds); - if (d->master >= 0 && !d->stdin_is_dev_null) - FD_SET(0, &rfds); + if (d->master >= 0 && d->direct_stdin == false && d->stdin_is_dev_null == false) + FD_SET(STDIN_FILENO, &rfds); FD_SET(_dpkgin, &rfds); if(d->master >= 0) FD_SET(d->master, &rfds); @@ -1614,7 +1702,7 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg) if (apportPkg.end() == true || apportPkg->CurrentVer == 0) return; - string pkgname, reportfile, srcpkgname, pkgver, arch; + string pkgname, reportfile, pkgver, arch; string::size_type pos; FILE *report; @@ -1667,9 +1755,10 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg) // do not report dpkg I/O errors, this is a format string, so we compare // the prefix and the suffix of the error with the dpkg error message vector<string> io_errors; - io_errors.push_back(string("failed to read on buffer copy for %s")); - io_errors.push_back(string("failed in write on buffer copy for %s")); - io_errors.push_back(string("short read on buffer copy for %s")); + io_errors.push_back(string("failed to read")); + io_errors.push_back(string("failed to write")); + io_errors.push_back(string("failed to seek")); + io_errors.push_back(string("unexpected end of file or stream")); for (vector<string>::iterator I = io_errors.begin(); I != io_errors.end(); ++I) { @@ -1702,11 +1791,6 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg) if (Ver.end() == true) return; pkgver = Ver.VerStr() == NULL ? 
"unknown" : Ver.VerStr(); - pkgRecords Recs(Cache); - pkgRecords::Parser &Parse = Recs.Lookup(Ver.FileList()); - srcpkgname = Parse.SourcePkg(); - if(srcpkgname.empty()) - srcpkgname = pkgname; // if the file exists already, we check: // - if it was reported already (touched by apport). @@ -1757,7 +1841,16 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg) time_t now = time(NULL); fprintf(report, "Date: %s" , ctime(&now)); fprintf(report, "Package: %s %s\n", pkgname.c_str(), pkgver.c_str()); +#if APT_PKG_ABI >= 413 + fprintf(report, "SourcePackage: %s\n", Ver.SourcePkgName()); +#else + pkgRecords Recs(Cache); + pkgRecords::Parser &Parse = Recs.Lookup(Ver.FileList()); + std::string srcpkgname = Parse.SourcePkg(); + if(srcpkgname.empty()) + srcpkgname = pkgname; fprintf(report, "SourcePackage: %s\n", srcpkgname.c_str()); +#endif fprintf(report, "ErrorMessage:\n %s\n", errormsg); // ensure that the log is flushed @@ -1797,8 +1890,15 @@ void pkgDPkgPM::WriteApportReport(const char *pkgpath, const char *errormsg) } } - // log the ordering - const char *ops_str[] = {"Install", "Configure","Remove","Purge"}; + // log the ordering, see dpkgpm.h and the "Ops" enum there + const char *ops_str[] = { + "Install", + "Configure", + "Remove", + "Purge", + "ConfigurePending", + "TriggersPending", + }; fprintf(report, "AptOrdering:\n"); for (vector<Item>::iterator I = List.begin(); I != List.end(); ++I) if ((*I).Pkg != NULL) diff --git a/apt-pkg/deb/dpkgpm.h b/apt-pkg/deb/dpkgpm.h index 859c74b46..2a6e7e004 100644 --- a/apt-pkg/deb/dpkgpm.h +++ b/apt-pkg/deb/dpkgpm.h @@ -52,7 +52,7 @@ class pkgDPkgPM : public pkgPackageManager needs to declare a Replaces on the disappeared package. \param pkgname Name of the package that disappeared */ - void handleDisappearAction(std::string const &pkgname); + APT_HIDDEN void handleDisappearAction(std::string const &pkgname); protected: int pkgFailures; @@ -110,6 +110,7 @@ class pkgDPkgPM : public pkgPackageManager // helper void BuildPackagesProgressMap(); void StartPtyMagic(); + void SetupSlavePtyMagic(); void StopPtyMagic(); // input processing @@ -117,27 +118,14 @@ class pkgDPkgPM : public pkgPackageManager void DoTerminalPty(int master); void DoDpkgStatusFd(int statusfd); void ProcessDpkgStatusLine(char *line); -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR < 13) - void DoDpkgStatusFd(int statusfd, int /*unused*/) { - DoDpkgStatusFd(statusfd); - } - void ProcessDpkgStatusLine(int /*unused*/, char *line) { - ProcessDpkgStatusLine(line); - } -#endif - // The Actuall installation implementation virtual bool Install(PkgIterator Pkg,std::string File); virtual bool Configure(PkgIterator Pkg); virtual bool Remove(PkgIterator Pkg,bool Purge = false); -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) virtual bool Go(APT::Progress::PackageManager *progress); -#else virtual bool Go(int StatusFd=-1); - bool GoNoABIBreak(APT::Progress::PackageManager *progress); -#endif virtual void Reset(); diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc index 19a6e0d7e..b73c336db 100644 --- a/apt-pkg/depcache.cc +++ b/apt-pkg/depcache.cc @@ -33,7 +33,6 @@ #include <vector> #include <algorithm> #include <iostream> -#include <sstream> #include <set> #include <sys/stat.h> @@ -237,9 +236,11 @@ bool pkgDepCache::writeStateFile(OpProgress * /*prog*/, bool InstalledOnly) /*{{ FileFd StateFile; string const state = _config->FindFile("Dir::State::extended_states"); + if (CreateAPTDirectoryIfNeeded(_config->FindDir("Dir::State"), flNotFile(state)) == false) + return false; 
// if it does not exist, create a empty one - if(!RealFileExists(state)) + if(!RealFileExists(state)) { StateFile.Open(state, FileFd::WriteAtomic); StateFile.Close(); @@ -250,17 +251,14 @@ bool pkgDepCache::writeStateFile(OpProgress * /*prog*/, bool InstalledOnly) /*{{ return _error->Error(_("Failed to open StateFile %s"), state.c_str()); - FILE *OutFile; - string const outfile = state + ".tmp"; - if((OutFile = fopen(outfile.c_str(),"w")) == NULL) - return _error->Error(_("Failed to write temporary StateFile %s"), - outfile.c_str()); + FileFd OutFile(state, FileFd::ReadWrite | FileFd::Atomic); + if (OutFile.IsOpen() == false || OutFile.Failed() == true) + return _error->Error(_("Failed to write temporary StateFile %s"), state.c_str()); // first merge with the existing sections pkgTagFile tagfile(&StateFile); pkgTagSection section; std::set<string> pkgs_seen; - const char *nullreorderlist[] = {0}; while(tagfile.Step(section)) { string const pkgname = section.FindS("Package"); string pkgarch = section.FindS("Architecture"); @@ -269,7 +267,7 @@ bool pkgDepCache::writeStateFile(OpProgress * /*prog*/, bool InstalledOnly) /*{{ // Silently ignore unknown packages and packages with no actual // version. pkgCache::PkgIterator pkg = Cache->FindPkg(pkgname, pkgarch); - if(pkg.end() || pkg.VersionList().end()) + if(pkg.end() || pkg.VersionList().end()) continue; StateCache const &P = PkgState[pkg->ID]; bool newAuto = (P.Flags & Flag::Auto); @@ -290,21 +288,17 @@ bool pkgDepCache::writeStateFile(OpProgress * /*prog*/, bool InstalledOnly) /*{{ if(_config->FindB("Debug::pkgAutoRemove",false)) std::clog << "Update existing AutoInstall info: " << pkg.FullName() << std::endl; - TFRewriteData rewrite[3]; - rewrite[0].Tag = "Architecture"; - rewrite[0].Rewrite = pkg.Arch(); - rewrite[0].NewTag = 0; - rewrite[1].Tag = "Auto-Installed"; - rewrite[1].Rewrite = newAuto ? "1" : "0"; - rewrite[1].NewTag = 0; - rewrite[2].Tag = 0; - TFRewrite(OutFile, section, nullreorderlist, rewrite); - fprintf(OutFile,"\n"); + + std::vector<pkgTagSection::Tag> rewrite; + rewrite.push_back(pkgTagSection::Tag::Rewrite("Architecture", pkg.Arch())); + rewrite.push_back(pkgTagSection::Tag::Rewrite("Auto-Installed", newAuto ? 
"1" : "0")); + section.Write(OutFile, NULL, rewrite); + if (OutFile.Write("\n", 1) == false) + return false; pkgs_seen.insert(pkg.FullName()); } - + // then write the ones we have not seen yet - std::ostringstream ostr; for(pkgCache::PkgIterator pkg=Cache->PkgBegin(); !pkg.end(); ++pkg) { StateCache const &P = PkgState[pkg->ID]; if(P.Flags & Flag::Auto) { @@ -323,19 +317,17 @@ bool pkgDepCache::writeStateFile(OpProgress * /*prog*/, bool InstalledOnly) /*{{ continue; if(debug_autoremove) std::clog << "Writing new AutoInstall: " << pkg.FullName() << std::endl; - ostr.str(string("")); - ostr << "Package: " << pkg.Name() - << "\nArchitecture: " << pkgarch - << "\nAuto-Installed: 1\n\n"; - fprintf(OutFile,"%s",ostr.str().c_str()); + std::string stanza = "Package: "; + stanza.append(pkg.Name()) + .append("\nArchitecture: ").append(pkgarch) + .append("\nAuto-Installed: 1\n\n"); + if (OutFile.Write(stanza.c_str(), stanza.length()) == false) + return false; } } - fclose(OutFile); - - // move the outfile over the real file and set permissions - rename(outfile.c_str(), state.c_str()); + if (OutFile.Close() == false) + return false; chmod(state.c_str(), 0644); - return true; } /*}}}*/ @@ -663,10 +655,11 @@ void pkgDepCache::Update(OpProgress *Prog) { iUsrSize = 0; iDownloadSize = 0; - iDelCount = 0; iInstCount = 0; + iDelCount = 0; iKeepCount = 0; iBrokenCount = 0; + iPolicyBrokenCount = 0; iBadCount = 0; // Perform the depends pass @@ -1225,7 +1218,7 @@ bool pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst, continue; } // now check if we should consider it a automatic dependency or not - if(InstPkg->CurrentVer == 0 && Pkg->Section != 0 && ConfigValueInSubTree("APT::Never-MarkAuto-Sections", Pkg.Section())) + if(InstPkg->CurrentVer == 0 && InstVer->Section != 0 && ConfigValueInSubTree("APT::Never-MarkAuto-Sections", InstVer.Section())) { if(DebugAutoInstall == true) std::clog << OutputInDepth(Depth) << "Setting NOT as auto-installed (direct " @@ -1312,14 +1305,18 @@ bool pkgDepCache::IsInstallOkMultiArchSameVersionSynced(PkgIterator const &Pkg, GrpIterator const Grp = Pkg.Group(); for (PkgIterator P = Grp.PackageList(); P.end() == false; P = Grp.NextPkg(P)) { - // not installed or version synced: fine by definition + // not installed or self-check: fine by definition + if (P->CurrentVer == 0 || P == Pkg) + continue; + + // not having a candidate or being in sync // (simple string-compare as stuff like '1' == '0:1-0' can't happen here) - if (P->CurrentVer == 0 || strcmp(Pkg.CandVersion(), P.CandVersion()) == 0) + VerIterator CV = PkgState[P->ID].CandidateVerIter(*this); + if (CV.end() == true || strcmp(Pkg.CandVersion(), CV.VerStr()) == 0) continue; + // packages losing M-A:same can be out-of-sync - VerIterator CV = PkgState[P->ID].CandidateVerIter(*this); - if (unlikely(CV.end() == true) || - (CV->MultiArch & pkgCache::Version::Same) != pkgCache::Version::Same) + if ((CV->MultiArch & pkgCache::Version::Same) != pkgCache::Version::Same) continue; // not downloadable means the package is obsolete, so allow out-of-sync @@ -1329,7 +1326,8 @@ bool pkgDepCache::IsInstallOkMultiArchSameVersionSynced(PkgIterator const &Pkg, PkgState[Pkg->ID].iFlags |= AutoKept; if (unlikely(DebugMarker == true)) std::clog << OutputInDepth(Depth) << "Ignore MarkInstall of " << Pkg - << " as its M-A:same siblings are not version-synced" << std::endl; + << " as it is not in sync with its M-A:same sibling " << P + << " (" << Pkg.CandVersion() << " != " << CV.VerStr() << ")" << std::endl; return false; } @@ -1374,7 
+1372,7 @@ bool pkgDepCache::IsInstallOkDependenciesSatisfiableByCandidates(PkgIterator con // the dependency is critical, but can't be installed, so discard the candidate // as the problemresolver will trip over it otherwise trying to install it (#735967) - if (Pkg->CurrentVer != 0) + if (Pkg->CurrentVer != 0 && (PkgState[Pkg->ID].iFlags & Protected) != Protected) SetCandidateVersion(Pkg.CurrentVer()); return false; } @@ -1678,7 +1676,7 @@ pkgCache::VerIterator pkgDepCache::Policy::GetCandidateVer(PkgIterator const &Pk { /* Not source/not automatic versions cannot be a candidate version unless they are already installed */ - VerIterator Last(*(pkgCache *)this,0); + VerIterator Last; for (VerIterator I = Pkg.VersionList(); I.end() == false; ++I) { @@ -1960,3 +1958,17 @@ bool pkgDepCache::Sweep() /*{{{*/ return true; } /*}}}*/ +// DepCache::MarkAndSweep /*{{{*/ +bool pkgDepCache::MarkAndSweep(InRootSetFunc &rootFunc) +{ + return MarkRequired(rootFunc) && Sweep(); +} +bool pkgDepCache::MarkAndSweep() +{ + std::auto_ptr<InRootSetFunc> f(GetRootSetFunc()); + if(f.get() != NULL) + return MarkAndSweep(*f.get()); + else + return false; +} + /*}}}*/ diff --git a/apt-pkg/depcache.h b/apt-pkg/depcache.h index bec651279..20d263c67 100644 --- a/apt-pkg/depcache.h +++ b/apt-pkg/depcache.h @@ -91,7 +91,7 @@ class pkgDepCache : protected pkgCache::Namespace * \param follow_suggests If \b true, suggestions of the package * will be recursively marked. */ - void MarkPackage(const pkgCache::PkgIterator &pkg, + APT_HIDDEN void MarkPackage(const pkgCache::PkgIterator &pkg, const pkgCache::VerIterator &ver, bool const &follow_recommends, bool const &follow_suggests); @@ -169,7 +169,7 @@ class pkgDepCache : protected pkgCache::Namespace bool released; /** Action groups are noncopyable. */ - ActionGroup(const ActionGroup &other); + APT_HIDDEN ActionGroup(const ActionGroup &other); public: /** \brief Create a new ActionGroup. * @@ -396,19 +396,8 @@ class pkgDepCache : protected pkgCache::Namespace * \param rootFunc A predicate that returns \b true for packages * that should be added to the root set. 
*/ - bool MarkAndSweep(InRootSetFunc &rootFunc) - { - return MarkRequired(rootFunc) && Sweep(); - } - - bool MarkAndSweep() - { - std::auto_ptr<InRootSetFunc> f(GetRootSetFunc()); - if(f.get() != NULL) - return MarkAndSweep(*f.get()); - else - return false; - } + bool MarkAndSweep(InRootSetFunc &rootFunc); + bool MarkAndSweep(); /** \name State Manipulators */ @@ -514,7 +503,7 @@ class pkgDepCache : protected pkgCache::Namespace bool const rPurge, unsigned long const Depth, bool const FromUser); private: - bool IsModeChangeOk(ModeList const mode, PkgIterator const &Pkg, + APT_HIDDEN bool IsModeChangeOk(ModeList const mode, PkgIterator const &Pkg, unsigned long const Depth, bool const FromUser); }; diff --git a/apt-pkg/edsp.cc b/apt-pkg/edsp.cc index ee42267bc..3c6a7e30f 100644 --- a/apt-pkg/edsp.cc +++ b/apt-pkg/edsp.cc @@ -18,6 +18,7 @@ #include <apt-pkg/pkgcache.h> #include <apt-pkg/cacheiterators.h> #include <apt-pkg/strutl.h> +#include <apt-pkg/pkgrecords.h> #include <ctype.h> #include <stddef.h> @@ -25,6 +26,7 @@ #include <time.h> #include <unistd.h> #include <stdio.h> +#include <algorithm> #include <iostream> #include <vector> #include <limits> @@ -49,7 +51,12 @@ bool EDSP::WriteScenario(pkgDepCache &Cache, FILE* output, OpProgress *Progress) if (Progress != NULL) Progress->SubProgress(Cache.Head().VersionCount, _("Send scenario to solver")); unsigned long p = 0; + std::vector<std::string> archs = APT::Configuration::getArchitectures(); for (pkgCache::PkgIterator Pkg = Cache.PkgBegin(); Pkg.end() == false; ++Pkg) + { + std::string const arch = Pkg.Arch(); + if (std::find(archs.begin(), archs.end(), arch) == archs.end()) + continue; for (pkgCache::VerIterator Ver = Pkg.VersionList(); Ver.end() == false; ++Ver, ++p) { WriteScenarioVersion(Cache, output, Pkg, Ver); @@ -58,6 +65,7 @@ bool EDSP::WriteScenario(pkgDepCache &Cache, FILE* output, OpProgress *Progress) if (Progress != NULL && p % 100 == 0) Progress->Progress(p); } + } return true; } /*}}}*/ @@ -88,6 +96,14 @@ void EDSP::WriteScenarioVersion(pkgDepCache &Cache, FILE* output, pkgCache::PkgI pkgCache::VerIterator const &Ver) { fprintf(output, "Package: %s\n", Pkg.Name()); +#if APT_PKG_ABI >= 413 + fprintf(output, "Source: %s\n", Ver.SourcePkgName()); +#else + pkgRecords Recs(Cache); + pkgRecords::Parser &rec = Recs.Lookup(Ver.FileList()); + string srcpkg = rec.SourcePkg().empty() ? 
Pkg.Name() : rec.SourcePkg(); + fprintf(output, "Source: %s\n", srcpkg.c_str()); +#endif fprintf(output, "Architecture: %s\n", Ver.Arch()); fprintf(output, "Version: %s\n", Ver.VerStr()); if (Pkg.CurrentVer() == Ver) @@ -107,10 +123,22 @@ void EDSP::WriteScenarioVersion(pkgDepCache &Cache, FILE* output, pkgCache::PkgI else if ((Ver->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same) fprintf(output, "Multi-Arch: same\n"); signed short Pin = std::numeric_limits<signed short>::min(); - for (pkgCache::VerFileIterator File = Ver.FileList(); File.end() == false; ++File) { - signed short const p = Cache.GetPolicy().GetPriority(File.File()); + std::set<string> Releases; + for (pkgCache::VerFileIterator I = Ver.FileList(); I.end() == false; ++I) { + pkgCache::PkgFileIterator File = I.File(); + signed short const p = Cache.GetPolicy().GetPriority(File); if (Pin < p) Pin = p; + if ((File->Flags & pkgCache::Flag::NotSource) != pkgCache::Flag::NotSource) { + string Release = File.RelStr(); + if (!Release.empty()) + Releases.insert(Release); + } + } + if (!Releases.empty()) { + fprintf(output, "APT-Release:\n"); + for (std::set<string>::iterator R = Releases.begin(); R != Releases.end(); ++R) + fprintf(output, " %s\n", R->c_str()); } fprintf(output, "APT-Pin: %d\n", Pin); if (Cache.GetCandidateVer(Pkg) == Ver) @@ -231,7 +259,16 @@ bool EDSP::WriteRequest(pkgDepCache &Cache, FILE* output, bool const Upgrade, continue; req->append(" ").append(Pkg.FullName()); } - fprintf(output, "Request: EDSP 0.4\n"); + fprintf(output, "Request: EDSP 0.5\n"); + + const char *arch = _config->Find("APT::Architecture").c_str(); + std::vector<string> archs = APT::Configuration::getArchitectures(); + fprintf(output, "Architecture: %s\n", arch); + fprintf(output, "Architectures:"); + for (std::vector<string>::const_iterator a = archs.begin(); a != archs.end(); ++a) + fprintf(output, " %s", a->c_str()); + fprintf(output, "\n"); + if (del.empty() == false) fprintf(output, "Remove: %s\n", del.c_str()+1); if (inst.empty() == false) @@ -411,6 +448,13 @@ bool EDSP::ReadRequest(int const input, std::list<std::string> &install, distUpgrade = EDSP::StringToBool(line.c_str() + 14, false); else if (line.compare(0, 11, "Autoremove:") == 0) autoRemove = EDSP::StringToBool(line.c_str() + 12, false); + else if (line.compare(0, 13, "Architecture:") == 0) + _config->Set("APT::Architecture", line.c_str() + 14); + else if (line.compare(0, 14, "Architectures:") == 0) + { + std::string const archs = line.c_str() + 15; + _config->Set("APT::Architectures", SubstVar(archs, " ", ",")); + } else _error->Warning("Unknown line in EDSP Request stanza: %s", line.c_str()); @@ -508,7 +552,7 @@ bool EDSP::WriteError(char const * const uuid, std::string const &message, FILE* } /*}}}*/ // EDSP::ExecuteSolver - fork requested solver and setup ipc pipes {{{*/ -bool EDSP::ExecuteSolver(const char* const solver, int *solver_in, int *solver_out) { +pid_t EDSP::ExecuteSolver(const char* const solver, int * const solver_in, int * const solver_out, bool) { std::vector<std::string> const solverDirs = _config->FindVector("Dir::Bin::Solvers"); std::string file; for (std::vector<std::string>::const_iterator dir = solverDirs.begin(); @@ -520,10 +564,16 @@ bool EDSP::ExecuteSolver(const char* const solver, int *solver_in, int *solver_o } if (file.empty() == true) - return _error->Error("Can't call external solver '%s' as it is not in a configured directory!", solver); + { + _error->Error("Can't call external solver '%s' as it is not in a configured directory!", 
solver); + return 0; + } int external[4] = {-1, -1, -1, -1}; if (pipe(external) != 0 || pipe(external + 2) != 0) - return _error->Errno("Resolve", "Can't create needed IPC pipes for EDSP"); + { + _error->Errno("Resolve", "Can't create needed IPC pipes for EDSP"); + return 0; + } for (int i = 0; i < 4; ++i) SetCloseExec(external[i], true); @@ -540,11 +590,19 @@ bool EDSP::ExecuteSolver(const char* const solver, int *solver_in, int *solver_o close(external[3]); if (WaitFd(external[1], true, 5) == false) - return _error->Errno("Resolve", "Timed out while Waiting on availability of solver stdin"); + { + _error->Errno("Resolve", "Timed out while Waiting on availability of solver stdin"); + return 0; + } *solver_in = external[1]; *solver_out = external[2]; - return true; + return Solver; +} +bool EDSP::ExecuteSolver(const char* const solver, int *solver_in, int *solver_out) { + if (ExecuteSolver(solver, solver_in, solver_out, true) == 0) + return false; + return true; } /*}}}*/ // EDSP::ResolveExternal - resolve problems by asking external for help {{{*/ @@ -552,7 +610,8 @@ bool EDSP::ResolveExternal(const char* const solver, pkgDepCache &Cache, bool const upgrade, bool const distUpgrade, bool const autoRemove, OpProgress *Progress) { int solver_in, solver_out; - if (EDSP::ExecuteSolver(solver, &solver_in, &solver_out) == false) + pid_t const solver_pid = EDSP::ExecuteSolver(solver, &solver_in, &solver_out, true); + if (solver_pid == 0) return false; FILE* output = fdopen(solver_in, "w"); @@ -572,6 +631,6 @@ bool EDSP::ResolveExternal(const char* const solver, pkgDepCache &Cache, if (EDSP::ReadResponse(solver_out, Cache, Progress) == false) return false; - return true; + return ExecWait(solver_pid, solver); } /*}}}*/ diff --git a/apt-pkg/edsp.h b/apt-pkg/edsp.h index f3092d3c6..9e833556a 100644 --- a/apt-pkg/edsp.h +++ b/apt-pkg/edsp.h @@ -205,10 +205,10 @@ public: * \param[out] solver_in will be the stdin of the solver * \param[out] solver_out will be the stdout of the solver * - * \return true if the solver could be started and the pipes - * are set up correctly, otherwise false and the pipes are invalid + * \return PID of the started solver or 0 if failure occurred */ - bool static ExecuteSolver(const char* const solver, int *solver_in, int *solver_out); + pid_t static ExecuteSolver(const char* const solver, int * const solver_in, int * const solver_out, bool /*overload*/); + APT_DEPRECATED bool static ExecuteSolver(const char* const solver, int *solver_in, int *solver_out); /** \brief call an external resolver to handle the request * diff --git a/apt-pkg/edsp/edspindexfile.cc b/apt-pkg/edsp/edspindexfile.cc index 10313fd61..d00536362 100644 --- a/apt-pkg/edsp/edspindexfile.cc +++ b/apt-pkg/edsp/edspindexfile.cc @@ -56,7 +56,7 @@ bool edspIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const pkgCache::PkgFileIterator CFile = Gen.GetCurFile(); CFile->Size = Pkg.FileSize(); CFile->mtime = Pkg.ModificationTime(); - map_ptrloc const storage = Gen.WriteUniqString("edsp::scenario"); + map_stringitem_t const storage = Gen.StoreString(pkgCacheGenerator::MIXED, "edsp::scenario"); CFile->Archive = storage; if (Gen.MergeList(Parser) == false) @@ -65,7 +65,7 @@ bool edspIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const } /*}}}*/ // Index File types for APT /*{{{*/ -class edspIFType: public pkgIndexFile::Type +class APT_HIDDEN edspIFType: public pkgIndexFile::Type { public: virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator) const @@ -75,7 +75,7 @@ class edspIFType: 
public pkgIndexFile::Type }; edspIFType() {Label = "EDSP scenario file";}; }; -static edspIFType _apt_Universe; +APT_HIDDEN edspIFType _apt_Universe; const pkgIndexFile::Type *edspIndex::GetType() const { diff --git a/apt-pkg/edsp/edspindexfile.h b/apt-pkg/edsp/edspindexfile.h index 609a2cde4..8c18d8cbd 100644 --- a/apt-pkg/edsp/edspindexfile.h +++ b/apt-pkg/edsp/edspindexfile.h @@ -18,7 +18,7 @@ class OpProgress; class pkgCacheGenerator; -class edspIndex : public debStatusIndex +class APT_HIDDEN edspIndex : public debStatusIndex { /** \brief dpointer placeholder (for later in case we need it) */ void *d; diff --git a/apt-pkg/edsp/edsplistparser.h b/apt-pkg/edsp/edsplistparser.h index 959fb587f..86cd77606 100644 --- a/apt-pkg/edsp/edsplistparser.h +++ b/apt-pkg/edsp/edsplistparser.h @@ -25,7 +25,7 @@ class FileFd; -class edspListParser : public debListParser +class APT_HIDDEN edspListParser : public debListParser { public: virtual bool NewVersion(pkgCache::VerIterator &Ver); diff --git a/apt-pkg/edsp/edspsystem.cc b/apt-pkg/edsp/edspsystem.cc index 92edb8d77..063517421 100644 --- a/apt-pkg/edsp/edspsystem.cc +++ b/apt-pkg/edsp/edspsystem.cc @@ -26,8 +26,6 @@ #include <apti18n.h> /*}}}*/ -edspSystem edspSys; - // System::debSystem - Constructor /*{{{*/ edspSystem::edspSystem() { @@ -126,3 +124,5 @@ bool edspSystem::FindIndex(pkgCache::PkgFileIterator File, return false; } /*}}}*/ + +APT_HIDDEN edspSystem edspSys; diff --git a/apt-pkg/edsp/edspsystem.h b/apt-pkg/edsp/edspsystem.h index 65e36d714..06a63f40c 100644 --- a/apt-pkg/edsp/edspsystem.h +++ b/apt-pkg/edsp/edspsystem.h @@ -22,7 +22,7 @@ class pkgIndexFile; class pkgPackageManager; class edspIndex; -class edspSystem : public pkgSystem +class APT_HIDDEN edspSystem : public pkgSystem { /** \brief dpointer placeholder (for later in case we need it) */ void *d; @@ -45,6 +45,4 @@ class edspSystem : public pkgSystem ~edspSystem(); }; -extern edspSystem edspSys; - #endif diff --git a/apt-pkg/indexcopy.cc b/apt-pkg/indexcopy.cc index 854ba1bd7..461aa4217 100644 --- a/apt-pkg/indexcopy.cc +++ b/apt-pkg/indexcopy.cc @@ -108,10 +108,7 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List, } if (_error->PendingError() == true) return false; - FILE *TargetFl = fdopen(dup(Target.Fd()),"w"); - if (TargetFl == 0) - return _error->Errno("fdopen","Failed to reopen fd"); - + // Setup the progress meter if(Progress) Progress->OverallProgress(CurrentSize,TotalSize,FileSize, @@ -132,14 +129,11 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List, string File; unsigned long long Size; if (GetFile(File,Size) == false) - { - fclose(TargetFl); return false; - } - + if (Chop != 0) File = OrigPath + ChopDirs(File,Chop); - + // See if the file exists if (NoStat == false || Hits < 10) { @@ -157,10 +151,10 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List, if (Chop != 0) File = OrigPath + ChopDirs(File,Chop); } - + // Get the size struct stat Buf; - if (stat((CDROM + Prefix + File).c_str(),&Buf) != 0 || + if (stat((CDROM + Prefix + File).c_str(),&Buf) != 0 || Buf.st_size == 0) { bool Mangled = false; @@ -173,7 +167,7 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List, File.replace(Start,End-Start,"binary-all"); Mangled = true; } - + if (Mangled == false || stat((CDROM + Prefix + File).c_str(),&Buf) != 0) { @@ -181,9 +175,9 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List, clog << "Missed(2): " << OrigFile << endl; NotFound++; continue; 
- } - } - + } + } + // Size match if ((unsigned long long)Buf.st_size != Size) { @@ -193,21 +187,17 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List, continue; } } - + Packages++; Hits++; - - if (RewriteEntry(TargetFl,File) == false) - { - fclose(TargetFl); + + if (RewriteEntry(Target, File) == false) return false; - } } - fclose(TargetFl); if (Debug == true) cout << " Processed by using Prefix '" << Prefix << "' and chop " << Chop << endl; - + if (_config->FindB("APT::CDROM::NoAct",false) == false) { // Move out of the partial directory @@ -216,40 +206,40 @@ bool IndexCopy::CopyPackages(string CDROM,string Name,vector<string> &List, FinalF += URItoFileName(S); if (rename(TargetF.c_str(),FinalF.c_str()) != 0) return _error->Errno("rename","Failed to rename"); + ChangeOwnerAndPermissionOfFile("CopyPackages", FinalF.c_str(), "root", "root", 0644); } - + /* Mangle the source to be in the proper notation with - prefix dist [component] */ + prefix dist [component] */ *I = string(*I,Prefix.length()); ConvertToSourceList(CDROM,*I); *I = Prefix + ' ' + *I; - + CurrentSize += FileSize; - } + } if(Progress) Progress->Done(); - + // Some stats if(log) { stringstream msg; if(NotFound == 0 && WrongSize == 0) ioprintf(msg, _("Wrote %i records.\n"), Packages); else if (NotFound != 0 && WrongSize == 0) - ioprintf(msg, _("Wrote %i records with %i missing files.\n"), + ioprintf(msg, _("Wrote %i records with %i missing files.\n"), Packages, NotFound); else if (NotFound == 0 && WrongSize != 0) - ioprintf(msg, _("Wrote %i records with %i mismatched files\n"), + ioprintf(msg, _("Wrote %i records with %i mismatched files\n"), Packages, WrongSize); if (NotFound != 0 && WrongSize != 0) ioprintf(msg, _("Wrote %i records with %i missing files and %i mismatched files\n"), Packages, NotFound, WrongSize); } - + if (Packages == 0) _error->Warning("No valid records were found."); if (NotFound + WrongSize > 10) _error->Warning("A lot of entries were discarded, something may be wrong.\n"); - return true; } @@ -266,10 +256,10 @@ string IndexCopy::ChopDirs(string Path,unsigned int Depth) Depth--; } while (I != string::npos && Depth != 0); - + if (I == string::npos) return string(); - + return string(Path,I+1); } /*}}}*/ @@ -432,17 +422,15 @@ bool PackageCopy::GetFile(string &File,unsigned long long &Size) } /*}}}*/ // PackageCopy::RewriteEntry - Rewrite the entry with a new filename /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool PackageCopy::RewriteEntry(FILE *Target,string File) +bool PackageCopy::RewriteEntry(FileFd &Target,string const &File) { - TFRewriteData Changes[] = {{ "Filename", File.c_str(), NULL }, - { NULL, NULL, NULL }}; - - if (TFRewrite(Target,*Section,TFRewritePackageOrder,Changes) == false) + string const Dir(File,0,File.rfind('/')); + std::vector<pkgTagSection::Tag> Changes; + Changes.push_back(pkgTagSection::Tag::Rewrite("Filename", File)); + + if (Section->Write(Target, TFRewritePackageOrder, Changes) == false) return false; - fputc('\n',Target); - return true; + return Target.Write("\n", 1); } /*}}}*/ // SourceCopy::GetFile - Get the file information from the section /*{{{*/ @@ -477,23 +465,18 @@ bool SourceCopy::GetFile(string &File,unsigned long long &Size) } /*}}}*/ // SourceCopy::RewriteEntry - Rewrite the entry with a new filename /*{{{*/ -// --------------------------------------------------------------------- -/* */ -bool SourceCopy::RewriteEntry(FILE *Target,string File) +bool SourceCopy::RewriteEntry(FileFd &Target, 
std::string const &File) { - string Dir(File,0,File.rfind('/')); - TFRewriteData Changes[] = {{ "Directory", Dir.c_str(), NULL }, - { NULL, NULL, NULL }}; - - if (TFRewrite(Target,*Section,TFRewriteSourceOrder,Changes) == false) + string const Dir(File,0,File.rfind('/')); + std::vector<pkgTagSection::Tag> Changes; + Changes.push_back(pkgTagSection::Tag::Rewrite("Directory", Dir)); + + if (Section->Write(Target, TFRewriteSourceOrder, Changes) == false) return false; - fputc('\n',Target); - return true; + return Target.Write("\n", 1); } /*}}}*/ -// SigVerify::Verify - Verify a files md5sum against its metaindex /*{{{*/ -// --------------------------------------------------------------------- -/* */ +// SigVerify::Verify - Verify a files md5sum against its metaindex /*{{{*/ bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex) { const indexRecords::checkSum *Record = MetaIndex->Lookup(file); @@ -516,7 +499,7 @@ bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex) return false; } - if (!Record->Hash.VerifyFile(prefix+file)) + if (!Record->Hashes.VerifyFile(prefix+file)) { _error->Warning(_("Hash mismatch for: %s"),file.c_str()); return false; @@ -524,8 +507,10 @@ bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex) if(Debug == true) { - cout << "File: " << prefix+file << endl; - cout << "Expected Hash " << Record->Hash.toStr() << endl; + cout << "File: " << prefix+file << endl + << "Expected Hash " << endl; + for (HashStringList::const_iterator hs = Record->Hashes.begin(); hs != Record->Hashes.end(); ++hs) + std::cout << "\t- " << hs->toStr() << std::endl; } return true; @@ -544,8 +529,9 @@ bool SigVerify::CopyMetaIndex(string CDROM, string CDName, /*{{{*/ FileFd Rel; Target.Open(TargetF,FileFd::WriteAtomic); Rel.Open(prefix + file,FileFd::ReadOnly); - if (CopyFile(Rel,Target) == false) + if (CopyFile(Rel,Target) == false || Target.Close() == false) return _error->Error("Copying of '%s' for '%s' from '%s' failed", file.c_str(), CDName.c_str(), prefix.c_str()); + ChangeOwnerAndPermissionOfFile("CopyPackages", TargetF.c_str(), "root", "root", 0644); return true; } @@ -698,7 +684,7 @@ bool TranslationsCopy::CopyTranslations(string CDROM,string Name, /*{{{*/ pkgTagFile Parser(&Pkg); if (_error->PendingError() == true) return false; - + // Open the output file char S[400]; snprintf(S,sizeof(S),"cdrom:[%s]/%s",Name.c_str(), @@ -715,10 +701,7 @@ bool TranslationsCopy::CopyTranslations(string CDROM,string Name, /*{{{*/ } if (_error->PendingError() == true) return false; - FILE *TargetFl = fdopen(dup(Target.Fd()),"w"); - if (TargetFl == 0) - return _error->Errno("fdopen","Failed to reopen fd"); - + // Setup the progress meter if(Progress) Progress->OverallProgress(CurrentSize,TotalSize,FileSize, @@ -736,20 +719,16 @@ bool TranslationsCopy::CopyTranslations(string CDROM,string Name, /*{{{*/ if(Progress) Progress->Progress(Parser.Offset()); - const char *Start; - const char *Stop; - Section.GetSection(Start,Stop); - fwrite(Start,Stop-Start, 1, TargetFl); - fputc('\n',TargetFl); + if (Section.Write(Target) == false || Target.Write("\n", 1) == false) + return false; Packages++; Hits++; } - fclose(TargetFl); if (Debug == true) cout << " Processed by using Prefix '" << Prefix << "' and chop " << endl; - + if (_config->FindB("APT::CDROM::NoAct",false) == false) { // Move out of the partial directory @@ -758,36 +737,37 @@ bool TranslationsCopy::CopyTranslations(string CDROM,string Name, /*{{{*/ FinalF += URItoFileName(S); if 
(rename(TargetF.c_str(),FinalF.c_str()) != 0) return _error->Errno("rename","Failed to rename"); + ChangeOwnerAndPermissionOfFile("CopyTranslations", FinalF.c_str(), "root", "root", 0644); } - - + CurrentSize += FileSize; - } + } if(Progress) Progress->Done(); - + // Some stats if(log) { stringstream msg; if(NotFound == 0 && WrongSize == 0) ioprintf(msg, _("Wrote %i records.\n"), Packages); else if (NotFound != 0 && WrongSize == 0) - ioprintf(msg, _("Wrote %i records with %i missing files.\n"), + ioprintf(msg, _("Wrote %i records with %i missing files.\n"), Packages, NotFound); else if (NotFound == 0 && WrongSize != 0) - ioprintf(msg, _("Wrote %i records with %i mismatched files\n"), + ioprintf(msg, _("Wrote %i records with %i mismatched files\n"), Packages, WrongSize); if (NotFound != 0 && WrongSize != 0) ioprintf(msg, _("Wrote %i records with %i missing files and %i mismatched files\n"), Packages, NotFound, WrongSize); } - + if (Packages == 0) _error->Warning("No valid records were found."); if (NotFound + WrongSize > 10) _error->Warning("A lot of entries were discarded, something may be wrong.\n"); - return true; } /*}}}*/ + +APT_CONST IndexCopy::~IndexCopy() {} diff --git a/apt-pkg/indexcopy.h b/apt-pkg/indexcopy.h index 43cdb3f0a..729b0c8cb 100644 --- a/apt-pkg/indexcopy.h +++ b/apt-pkg/indexcopy.h @@ -28,6 +28,7 @@ using std::vector; class pkgTagSection; class indexRecords; class pkgCdromStatus; +class FileFd; class IndexCopy /*{{{*/ { @@ -45,7 +46,7 @@ class IndexCopy /*{{{*/ void ConvertToSourceList(std::string CD,std::string &Path); bool GrabFirst(std::string Path,std::string &To,unsigned int Depth); virtual bool GetFile(std::string &Filename,unsigned long long &Size) = 0; - virtual bool RewriteEntry(FILE *Target,std::string File) = 0; + virtual bool RewriteEntry(FileFd &Target, std::string const &File) = 0; virtual const char *GetFileName() = 0; virtual const char *Type() = 0; @@ -53,7 +54,7 @@ class IndexCopy /*{{{*/ bool CopyPackages(std::string CDROM,std::string Name,std::vector<std::string> &List, pkgCdromStatus *log); - virtual ~IndexCopy() {}; + virtual ~IndexCopy(); }; /*}}}*/ class PackageCopy : public IndexCopy /*{{{*/ @@ -61,7 +62,7 @@ class PackageCopy : public IndexCopy /*{{{*/ protected: virtual bool GetFile(std::string &Filename,unsigned long long &Size); - virtual bool RewriteEntry(FILE *Target,std::string File); + virtual bool RewriteEntry(FileFd &Target, std::string const &File); virtual const char *GetFileName() {return "Packages";}; virtual const char *Type() {return "Package";}; @@ -72,7 +73,7 @@ class SourceCopy : public IndexCopy /*{{{*/ protected: virtual bool GetFile(std::string &Filename,unsigned long long &Size); - virtual bool RewriteEntry(FILE *Target,std::string File); + virtual bool RewriteEntry(FileFd &Target, std::string const &File); virtual const char *GetFileName() {return "Sources";}; virtual const char *Type() {return "Source";}; @@ -93,8 +94,8 @@ class SigVerify /*{{{*/ /** \brief dpointer placeholder (for later in case we need it) */ void *d; - bool Verify(std::string prefix,std::string file, indexRecords *records); - bool CopyMetaIndex(std::string CDROM, std::string CDName, + APT_HIDDEN bool Verify(std::string prefix,std::string file, indexRecords *records); + APT_HIDDEN bool CopyMetaIndex(std::string CDROM, std::string CDName, std::string prefix, std::string file); public: diff --git a/apt-pkg/indexfile.h b/apt-pkg/indexfile.h index b5c9ac77e..817165f08 100644 --- a/apt-pkg/indexfile.h +++ b/apt-pkg/indexfile.h @@ -59,6 +59,7 @@ class 
pkgIndexFile const char *Label; virtual pkgRecords::Parser *CreatePkgParser(pkgCache::PkgFileIterator /*File*/) const {return 0;}; + virtual pkgSrcRecords::Parser *CreateSrcPkgParser(std::string /*File*/) const {return 0;}; Type(); virtual ~Type() {}; }; diff --git a/apt-pkg/indexrecords.cc b/apt-pkg/indexrecords.cc index 5353d1098..de2617833 100644 --- a/apt-pkg/indexrecords.cc +++ b/apt-pkg/indexrecords.cc @@ -37,6 +37,11 @@ APT_PURE string indexRecords::GetSuite() const return this->Suite; } +APT_PURE bool indexRecords::GetSupportsAcquireByHash() const +{ + return this->SupportsAcquireByHash; +} + APT_PURE bool indexRecords::CheckDist(const string MaybeDist) const { return (this->Dist == MaybeDist @@ -53,7 +58,12 @@ APT_PURE time_t indexRecords::GetValidUntil() const return this->ValidUntil; } -APT_PURE const indexRecords::checkSum *indexRecords::Lookup(const string MetaKey) +APT_PURE time_t indexRecords::GetDate() const +{ + return this->Date; +} + +APT_PURE indexRecords::checkSum *indexRecords::Lookup(const string MetaKey) { std::map<std::string, indexRecords::checkSum* >::const_iterator sum = Entries.find(MetaKey); if (sum == Entries.end()) @@ -86,12 +96,14 @@ bool indexRecords::Load(const string Filename) /*{{{*/ strprintf(ErrorText, _("No sections in Release file %s"), Filename.c_str()); return false; } + // FIXME: find better tag name + SupportsAcquireByHash = Section.FindB("Acquire-By-Hash", false); Suite = Section.FindS("Suite"); Dist = Section.FindS("Codename"); - int i; - for (i=0;HashString::SupportedHashes()[i] != NULL; i++) + bool FoundHashSum = false; + for (int i=0;HashString::SupportedHashes()[i] != NULL; i++) { if (!Section.Find(HashString::SupportedHashes()[i], Start, End)) continue; @@ -103,24 +115,38 @@ bool indexRecords::Load(const string Filename) /*{{{*/ { if (!parseSumData(Start, End, Name, Hash, Size)) return false; - indexRecords::checkSum *Sum = new indexRecords::checkSum; - Sum->MetaKeyFilename = Name; - Sum->Hash = HashString(HashString::SupportedHashes()[i],Hash); - Sum->Size = Size; - Entries[Name] = Sum; + + if (Entries.find(Name) == Entries.end()) + { + indexRecords::checkSum *Sum = new indexRecords::checkSum; + Sum->MetaKeyFilename = Name; + Sum->Size = Size; + std::string SizeStr; + strprintf(SizeStr, "%llu", Size); + Sum->Hashes.push_back(HashString("Checksum-FileSize", SizeStr)); + APT_IGNORE_DEPRECATED(Sum->Hash = HashString(HashString::SupportedHashes()[i],Hash);) + Entries[Name] = Sum; + } + Entries[Name]->Hashes.push_back(HashString(HashString::SupportedHashes()[i],Hash)); + FoundHashSum = true; } - break; } - if(HashString::SupportedHashes()[i] == NULL) + if(FoundHashSum == false) { strprintf(ErrorText, _("No Hash entry in Release file %s"), Filename.c_str()); return false; } - string Label = Section.FindS("Label"); - string StrDate = Section.FindS("Date"); - string StrValidUntil = Section.FindS("Valid-Until"); + string const StrDate = Section.FindS("Date"); + if (RFC1123StrToTime(StrDate.c_str(), Date) == false) + { + strprintf(ErrorText, _("Invalid 'Date' entry in Release file %s"), Filename.c_str()); + return false; + } + + string const Label = Section.FindS("Label"); + string const StrValidUntil = Section.FindS("Valid-Until"); // if we have a Valid-Until header in the Release file, use it as default if (StrValidUntil.empty() == false) @@ -143,20 +169,13 @@ bool indexRecords::Load(const string Filename) /*{{{*/ (MinAge == 0 || ValidUntil == 0)) // No user settings, use the one from the Release file return true; - time_t date; - if 
(RFC1123StrToTime(StrDate.c_str(), date) == false) - { - strprintf(ErrorText, _("Invalid 'Date' entry in Release file %s"), Filename.c_str()); - return false; - } - if (MinAge != 0 && ValidUntil != 0) { - time_t const min_date = date + MinAge; + time_t const min_date = Date + MinAge; if (ValidUntil < min_date) ValidUntil = min_date; } if (MaxAge != 0) { - time_t const max_date = date + MaxAge; + time_t const max_date = Date + MaxAge; if (ValidUntil == 0 || ValidUntil > max_date) ValidUntil = max_date; } @@ -234,11 +253,44 @@ bool indexRecords::parseSumData(const char *&Start, const char *End, /*{{{*/ return true; } /*}}}*/ -indexRecords::indexRecords() + +APT_PURE bool indexRecords::IsAlwaysTrusted() const +{ + if (Trusted == ALWAYS_TRUSTED) + return true; + return false; +} +APT_PURE bool indexRecords::IsNeverTrusted() const +{ + if (Trusted == NEVER_TRUSTED) + return true; + return false; +} +void indexRecords::SetTrusted(bool const Trusted) { + if (Trusted == true) + this->Trusted = ALWAYS_TRUSTED; + else + this->Trusted = NEVER_TRUSTED; } +#if APT_PKG_ABI >= 413 +indexRecords::indexRecords(const string &ExpectedDist) : + Trusted(CHECK_TRUST), d(NULL), ExpectedDist(ExpectedDist), ValidUntil(0), + SupportsAcquireByHash(false) +{ +} +#else +indexRecords::indexRecords() : + Trusted(CHECK_TRUST), d(NULL), ExpectedDist(""), ValidUntil(0), + SupportsAcquireByHash(false) +{ +} indexRecords::indexRecords(const string ExpectedDist) : - ExpectedDist(ExpectedDist), ValidUntil(0) + Trusted(CHECK_TRUST), d(NULL), ExpectedDist(ExpectedDist), ValidUntil(0), + SupportsAcquireByHash(false) { } +#endif + +indexRecords::~indexRecords() {} diff --git a/apt-pkg/indexrecords.h b/apt-pkg/indexrecords.h index e31f889ad..6ed5f0c2b 100644 --- a/apt-pkg/indexrecords.h +++ b/apt-pkg/indexrecords.h @@ -21,45 +21,76 @@ class indexRecords { - bool parseSumData(const char *&Start, const char *End, std::string &Name, + APT_HIDDEN bool parseSumData(const char *&Start, const char *End, std::string &Name, std::string &Hash, unsigned long long &Size); public: struct checkSum; std::string ErrorText; - + + private: + enum APT_HIDDEN { ALWAYS_TRUSTED, NEVER_TRUSTED, CHECK_TRUST } Trusted; + // dpointer (for later) + void * d; + protected: std::string Dist; std::string Suite; std::string ExpectedDist; + time_t Date; time_t ValidUntil; + bool SupportsAcquireByHash; std::map<std::string,checkSum *> Entries; public: - +#if APT_PKG_ABI >= 413 + indexRecords(const std::string &ExpectedDist = ""); +#else indexRecords(); indexRecords(const std::string ExpectedDist); +#endif // Lookup function - virtual const checkSum *Lookup(const std::string MetaKey); + virtual checkSum *Lookup(const std::string MetaKey); /** \brief tests if a checksum for this file is available */ bool Exists(std::string const &MetaKey) const; std::vector<std::string> MetaKeys(); virtual bool Load(std::string Filename); + virtual bool CheckDist(const std::string MaybeDist) const; + std::string GetDist() const; std::string GetSuite() const; + bool GetSupportsAcquireByHash() const; time_t GetValidUntil() const; - virtual bool CheckDist(const std::string MaybeDist) const; + time_t GetDate() const; std::string GetExpectedDist() const; - virtual ~indexRecords(){}; + + /** \brief check if source is marked as always trusted */ + bool IsAlwaysTrusted() const; + /** \brief check if source is marked as never trusted */ + bool IsNeverTrusted() const; + + /** \brief sets an explicit trust value + * + * \b true means that the source should always be considered trusted, + * 
while \b false marks a source as always untrusted, even if we have + * a valid signature and everything. + */ + void SetTrusted(bool const Trusted); + + virtual ~indexRecords(); }; +APT_IGNORE_DEPRECATED_PUSH struct indexRecords::checkSum { std::string MetaKeyFilename; - HashString Hash; + HashStringList Hashes; unsigned long long Size; + + APT_DEPRECATED HashString Hash; }; +APT_IGNORE_DEPRECATED_POP #endif diff --git a/apt-pkg/init.cc b/apt-pkg/init.cc index 241628632..f756eab26 100644 --- a/apt-pkg/init.cc +++ b/apt-pkg/init.cc @@ -88,9 +88,19 @@ bool pkgInitConfig(Configuration &Cnf) Cnf.Set("Dir::Ignore-Files-Silently::", "\\.orig$"); Cnf.Set("Dir::Ignore-Files-Silently::", "\\.distUpgrade$"); + // Repository security + // FIXME: this is set to "true" for backward compatibility, once + // jessie is out we want to change this to "false" to + // improve security + Cnf.CndSet("Acquire::AllowInsecureRepositories", true); + Cnf.CndSet("Acquire::AllowDowngradeToInsecureRepositories", false); + // Default cdrom mount point Cnf.CndSet("Acquire::cdrom::mount", "/media/cdrom/"); + // The default user we drop to in the methods + Cnf.CndSet("APT::Sandbox::User", "_apt"); + bool Res = true; // Read an alternate config file diff --git a/apt-pkg/install-progress.cc b/apt-pkg/install-progress.cc index cf6c85912..5ea8bf4d0 100644 --- a/apt-pkg/install-progress.cc +++ b/apt-pkg/install-progress.cc @@ -21,6 +21,8 @@ namespace APT { namespace Progress { +PackageManager::PackageManager() : d(NULL), percentage(0.0), last_reported_progress(-1) {} +PackageManager::~PackageManager() {} /* Return a APT::Progress::PackageManager based on the global * apt configuration (i.e. APT::Status-Fd and APT::Status-deb822-Fd) diff --git a/apt-pkg/install-progress.h b/apt-pkg/install-progress.h index 5d1a20e9b..d8b4a5c82 100644 --- a/apt-pkg/install-progress.h +++ b/apt-pkg/install-progress.h @@ -26,9 +26,8 @@ namespace Progress { int last_reported_progress; public: - PackageManager() - : percentage(0.0), last_reported_progress(-1) {}; - virtual ~PackageManager() {}; + PackageManager(); + virtual ~PackageManager(); /* Global Start/Stop */ virtual void Start(int /*child_pty*/=-1) {}; @@ -120,7 +119,7 @@ namespace Progress { class PackageManagerFancy : public PackageManager { private: - static void staticSIGWINCH(int); + APT_HIDDEN static void staticSIGWINCH(int); static std::vector<PackageManagerFancy*> instances; APT_HIDDEN bool DrawStatusLine(); diff --git a/apt-pkg/metaindex.cc b/apt-pkg/metaindex.cc new file mode 100644 index 000000000..31a8ec009 --- /dev/null +++ b/apt-pkg/metaindex.cc @@ -0,0 +1,40 @@ +// Include Files /*{{{*/ +#include <apt-pkg/indexfile.h> +#include <apt-pkg/metaindex.h> + +#include <stddef.h> + +#include <string> +#include <vector> + /*}}}*/ + +#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) +std::string metaIndex::LocalFileName() const { return ""; } +#else +#include <apt-pkg/debmetaindex.h> +std::string metaIndex::LocalFileName() const +{ + debReleaseIndex const * deb = dynamic_cast<debReleaseIndex const*>(this); + if (deb != NULL) + return deb->LocalFileName(); + + return ""; +} +#endif + +metaIndex::metaIndex(std::string const &URI, std::string const &Dist, + char const * const Type) +: Indexes(NULL), Type(Type), URI(URI), Dist(Dist), Trusted(false) +{ + /* nothing */ +} + +metaIndex::~metaIndex() +{ + if (Indexes == 0) + return; + for (std::vector<pkgIndexFile *>::iterator I = (*Indexes).begin(); + I != (*Indexes).end(); ++I) + delete *I; + delete Indexes; +} diff --git a/apt-pkg/metaindex.h 
b/apt-pkg/metaindex.h index ffabaadbf..6c3d2880b 100644 --- a/apt-pkg/metaindex.h +++ b/apt-pkg/metaindex.h @@ -40,33 +40,22 @@ class metaIndex virtual const char* GetType() const {return Type;} // interface to to query it -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) - // returns the path of the local file (or "" if its not available) - virtual std::string LocalFileName() const {return "";}; +#if APT_PKG_ABI >= 413 + /** \return the path of the local file (or "" if its not available) */ + virtual std::string LocalFileName() const; +#else + std::string LocalFileName() const; #endif // Interface for acquire virtual std::string ArchiveURI(std::string const& File) const = 0; virtual bool GetIndexes(pkgAcquire *Owner, bool const &GetAll=false) const = 0; - virtual std::vector<pkgIndexFile *> *GetIndexFiles() = 0; + virtual std::vector<pkgIndexFile *> *GetIndexFiles() = 0; virtual bool IsTrusted() const = 0; - metaIndex(std::string const &URI, std::string const &Dist, - char const * const Type) - : Indexes(NULL), Type(Type), URI(URI), Dist(Dist) - { - /* nothing */ - } - - virtual ~metaIndex() - { - if (Indexes == 0) - return; - for (std::vector<pkgIndexFile *>::iterator I = (*Indexes).begin(); - I != (*Indexes).end(); ++I) - delete *I; - delete Indexes; - } + metaIndex(std::string const &URI, std::string const &Dist, + char const * const Type); + virtual ~metaIndex(); }; #endif diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc index 5d6bc6bd2..d137dc75a 100644 --- a/apt-pkg/packagemanager.cc +++ b/apt-pkg/packagemanager.cc @@ -28,6 +28,7 @@ #include <apt-pkg/pkgcache.h> #include <apt-pkg/cacheiterators.h> #include <apt-pkg/strutl.h> +#include <apt-pkg/install-progress.h> #include <stddef.h> #include <list> @@ -261,7 +262,7 @@ bool pkgPackageManager::CheckRConflicts(PkgIterator Pkg,DepIterator D, if (Cache.VS().CheckDep(Ver,D->CompareOp,D.TargetVer()) == false) continue; - if (EarlyRemove(D.ParentPkg()) == false) + if (EarlyRemove(D.ParentPkg(), &D) == false) return _error->Error("Reverse conflicts early remove for package '%s' failed", Pkg.FullName().c_str()); } @@ -313,18 +314,41 @@ bool pkgPackageManager::ConfigureAll() return true; } /*}}}*/ +// PM::NonLoopingSmart - helper to avoid loops while calling Smart methods /*{{{*/ +// ----------------------------------------------------------------------- +/* ensures that a loop of the form A depends B, B depends A (and similar) + is not leading us down into infinite recursion segfault land */ +bool pkgPackageManager::NonLoopingSmart(SmartAction const action, pkgCache::PkgIterator &Pkg, + pkgCache::PkgIterator DepPkg, int const Depth, bool const PkgLoop, + bool * const Bad, bool * const Changed) +{ + if (PkgLoop == false) + List->Flag(Pkg,pkgOrderList::Loop); + bool success = false; + switch(action) + { + case UNPACK_IMMEDIATE: success = SmartUnPack(DepPkg, true, Depth + 1); break; + case UNPACK: success = SmartUnPack(DepPkg, false, Depth + 1); break; + case CONFIGURE: success = SmartConfigure(DepPkg, Depth + 1); break; + } + if (PkgLoop == false) + List->RmFlag(Pkg,pkgOrderList::Loop); + + if (success == false) + return false; + + if (Bad != NULL) + *Bad = false; + if (Changed != NULL && List->IsFlag(DepPkg,pkgOrderList::Loop) == false) + *Changed = true; + return true; +} + /*}}}*/ // PM::SmartConfigure - Perform immediate configuration of the pkg /*{{{*/ // --------------------------------------------------------------------- /* This function tries to put the system in a state where Pkg can be configured. 
- This involves checking each of Pkg's dependanies and unpacking and - configuring packages where needed. - - Note on failure: This method can fail, without causing any problems. - This can happen when using Immediate-Configure-All, SmartUnPack may call - SmartConfigure, it may fail because of a complex dependency situation, but - a error will only be reported if ConfigureAll fails. This is why some of the - messages this function reports on failure (return false;) as just warnings - only shown when debuging*/ + This involves checking each of Pkg's dependencies and unpacking and + configuring packages where needed. */ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) { // If this is true, only check and correct and dependencies without the Loop flag @@ -339,9 +363,9 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) } VerIterator const instVer = Cache[Pkg].InstVerIter(Cache); - - /* Because of the ordered list, most dependencies should be unpacked, - however if there is a loop (A depends on B, B depends on A) this will not + + /* Because of the ordered list, most dependencies should be unpacked, + however if there is a loop (A depends on B, B depends on A) this will not be the case, so check for dependencies before configuring. */ bool Bad = false, Changed = false; const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 5000); @@ -349,6 +373,8 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) std::list<DepIterator> needConfigure; do { + // Check each dependency and see if anything needs to be done + // so that it can be configured Changed = false; for (DepIterator D = instVer.DependsList(); D.end() == false; ) { @@ -360,7 +386,8 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) continue; Bad = true; - // Check for dependencies that have not been unpacked, probably due to loops. + // the first pass checks if we its all good, i.e. if we have + // to do anything at all for (DepIterator Cur = Start; true; ++Cur) { SPtrArray<Version *> VList = Cur.AllTargets(); @@ -373,7 +400,8 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) // Check if the current version of the package is available and will satisfy this dependency if (DepPkg.CurrentVer() == Ver && List->IsNow(DepPkg) == true && List->IsFlag(DepPkg,pkgOrderList::Removed) == false && - DepPkg.State() == PkgIterator::NeedsNothing) + DepPkg.State() == PkgIterator::NeedsNothing && + (Cache[DepPkg].iFlags & pkgDepCache::ReInstall) != pkgDepCache::ReInstall) { Bad = false; break; @@ -386,27 +414,71 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) if (PkgLoop == true) { if (Debug) - std::clog << OutputInDepth(Depth) << "Package " << Pkg << " loops in SmartConfigure" << std::endl; - Bad = false; - break; + std::clog << OutputInDepth(Depth) << "Package " << Pkg << " loops in SmartConfigure"; + if (List->IsFlag(DepPkg,pkgOrderList::UnPacked)) + Bad = false; + else if (Debug) + std::clog << ", but it isn't unpacked yet"; + if (Debug) + std::clog << std::endl; + } + } + + if (Cur == End || Bad == false) + break; + } + + // this dependency is in a good state, so we can stop + if (Bad == false) + { + if (Debug) + std::clog << OutputInDepth(Depth) << "Found ok dep " << Start.TargetPkg() << std::endl; + continue; + } + + // Check for dependencies that have not been unpacked, + // probably due to loops. 
+ for (DepIterator Cur = Start; true; ++Cur) + { + SPtrArray<Version *> VList = Cur.AllTargets(); + + for (Version **I = VList; *I != 0; ++I) + { + VerIterator Ver(Cache,*I); + PkgIterator DepPkg = Ver.ParentPkg(); + + // Check if the current version of the package is available and will satisfy this dependency + if (DepPkg.CurrentVer() == Ver && List->IsNow(DepPkg) == true && + List->IsFlag(DepPkg,pkgOrderList::Removed) == false && + DepPkg.State() == PkgIterator::NeedsNothing && + (Cache[DepPkg].iFlags & pkgDepCache::ReInstall) != pkgDepCache::ReInstall) + continue; + + // Check if the version that is going to be installed will satisfy the dependency + if (Cache[DepPkg].InstallVer != *I || List->IsNow(DepPkg) == false) + continue; + + if (PkgLoop == true) + { + if (Debug) + std::clog << OutputInDepth(Depth) << "Package " << Pkg << " loops in SmartConfigure"; + if (List->IsFlag(DepPkg,pkgOrderList::UnPacked)) + Bad = false; + else if (Debug) + std::clog << ", but it isn't unpacked yet"; + if (Debug) + std::clog << std::endl; } else { if (Debug) clog << OutputInDepth(Depth) << "Unpacking " << DepPkg.FullName() << " to avoid loop " << Cur << endl; - if (PkgLoop == false) - List->Flag(Pkg,pkgOrderList::Loop); - if (SmartUnPack(DepPkg, true, Depth + 1) == true) - { - Bad = false; - if (List->IsFlag(DepPkg,pkgOrderList::Loop) == false) - Changed = true; - } - if (PkgLoop == false) - List->RmFlag(Pkg,pkgOrderList::Loop); - if (Bad == false) - break; + if (NonLoopingSmart(UNPACK_IMMEDIATE, Pkg, DepPkg, Depth, PkgLoop, &Bad, &Changed) == false) + return false; } + // at this point we either unpacked a Dep or we are in a loop, + // no need to unpack a second one + break; } if (Cur == End || Bad == false) @@ -422,6 +494,7 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) return _error->Error("Internal error: MaxLoopCount reached in SmartUnPack (1) for %s, aborting", Pkg.FullName().c_str()); } while (Changed == true); + // now go over anything that needs configuring Bad = false, Changed = false, i = 0; do { @@ -461,25 +534,12 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) Bad = false; break; } - /* Check for a loop to prevent one forming - If A depends on B and B depends on A, SmartConfigure will - just hop between them if this is not checked. Dont remove the - loop flag after finishing however as loop is already set. - This means that there is another SmartConfigure call for this - package and it will remove the loop flag */ - if (PkgLoop == false) - List->Flag(Pkg,pkgOrderList::Loop); - if (SmartConfigure(DepPkg, Depth + 1) == true) - { - Bad = false; - if (List->IsFlag(DepPkg,pkgOrderList::Loop) == false) - Changed = true; - } - if (PkgLoop == false) - List->RmFlag(Pkg,pkgOrderList::Loop); - // If SmartConfigure was succesfull, Bad is false, so break - if (Bad == false) - break; + if (Debug) + std::clog << OutputInDepth(Depth) << "Configure already unpacked " << DepPkg << std::endl; + if (NonLoopingSmart(CONFIGURE, Pkg, DepPkg, Depth, PkgLoop, &Bad, &Changed) == false) + return false; + break; + } else if (List->IsFlag(DepPkg,pkgOrderList::Configured)) { @@ -498,19 +558,16 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) if (i++ > max_loops) return _error->Error("Internal error: MaxLoopCount reached in SmartUnPack (2) for %s, aborting", Pkg.FullName().c_str()); } while (Changed == true); - - if (Bad) { - if (Debug) - _error->Warning(_("Could not configure '%s'. 
"),Pkg.FullName().c_str()); - return false; - } - + + if (Bad == true) + return _error->Error(_("Could not configure '%s'. "),Pkg.FullName().c_str()); + if (PkgLoop) return true; static std::string const conf = _config->Find("PackageManager::Configure","all"); static bool const ConfigurePkgs = (conf == "all" || conf == "smart"); - if (List->IsFlag(Pkg,pkgOrderList::Configured)) + if (List->IsFlag(Pkg,pkgOrderList::Configured)) return _error->Error("Internal configure error on '%s'.", Pkg.FullName().c_str()); if (ConfigurePkgs == true && Configure(Pkg) == false) @@ -527,7 +584,8 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) Cache[P].InstallVer == 0 || (P.CurrentVer() == Cache[P].InstallVer && (Cache[Pkg].iFlags & pkgDepCache::ReInstall) != pkgDepCache::ReInstall)) continue; - SmartConfigure(P, (Depth +1)); + if (SmartConfigure(P, (Depth +1)) == false) + return false; } // Sanity Check @@ -542,28 +600,36 @@ bool pkgPackageManager::SmartConfigure(PkgIterator Pkg, int const Depth) /* This is called to deal with conflicts arising from unpacking */ bool pkgPackageManager::EarlyRemove(PkgIterator Pkg) { + return EarlyRemove(Pkg, NULL); +} +bool pkgPackageManager::EarlyRemove(PkgIterator Pkg, DepIterator const * const Dep) +{ if (List->IsNow(Pkg) == false) return true; - + // Already removed it if (List->IsFlag(Pkg,pkgOrderList::Removed) == true) return true; - + // Woops, it will not be re-installed! if (List->IsFlag(Pkg,pkgOrderList::InList) == false) return false; + // these breaks on M-A:same packages can be dealt with. They 'loop' by design + if (Dep != NULL && (*Dep)->Type == pkgCache::Dep::DpkgBreaks && Dep->IsMultiArchImplicit() == true) + return true; + // Essential packages get special treatment bool IsEssential = false; if ((Pkg->Flags & pkgCache::Flag::Essential) != 0 || (Pkg->Flags & pkgCache::Flag::Important) != 0) IsEssential = true; - /* Check for packages that are the dependents of essential packages and + /* Check for packages that are the dependents of essential packages and promote them too */ if (Pkg->CurrentVer != 0) { - for (DepIterator D = Pkg.RevDependsList(); D.end() == false && + for (pkgCache::DepIterator D = Pkg.RevDependsList(); D.end() == false && IsEssential == false; ++D) if (D->Type == pkgCache::Dep::Depends || D->Type == pkgCache::Dep::PreDepends) if ((D.ParentPkg()->Flags & pkgCache::Flag::Essential) != 0 || @@ -580,11 +646,14 @@ bool pkgPackageManager::EarlyRemove(PkgIterator Pkg) "but if you really want to do it, activate the " "APT::Force-LoopBreak option."),Pkg.FullName().c_str()); } - + // dpkg will auto-deconfigure it, no need for the big remove hammer + else if (Dep != NULL && (*Dep)->Type == pkgCache::Dep::DpkgBreaks) + return true; + bool Res = SmartRemove(Pkg); if (Cache[Pkg].Delete() == false) List->Flag(Pkg,pkgOrderList::Removed,pkgOrderList::States); - + return Res; } /*}}}*/ @@ -629,13 +698,14 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c VerIterator const instVer = Cache[Pkg].InstVerIter(Cache); - /* PreUnpack Checks: This loop checks and attempts to rectify and problems that would prevent the package being unpacked. + /* PreUnpack Checks: This loop checks and attempts to rectify any problems that would prevent the package being unpacked. It addresses: PreDepends, Conflicts, Obsoletes and Breaks (DpkgBreaks). 
Any resolutions that do not require it should avoid configuration (calling SmartUnpack with Immediate=true), this is because when unpacking some packages with - complex dependency structures, trying to configure some packages while breaking the loops can complicate things . + complex dependency structures, trying to configure some packages while breaking the loops can complicate things. This will be either dealt with if the package is configured as a dependency of Pkg (if and when Pkg is configured), or by the ConfigureAll call at the end of the for loop in OrderInstall. */ - bool Changed = false; + bool SomethingBad = false, Changed = false; + bool couldBeTemporaryRemoved = Depth != 0 && List->IsFlag(Pkg,pkgOrderList::Removed) == false; const unsigned int max_loops = _config->FindI("APT::pkgPackageManager::MaxLoopCount", 5000); unsigned int i = 0; do @@ -664,7 +734,8 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c // See if the current version is ok if (Pkg.CurrentVer() == Ver && List->IsNow(Pkg) == true && - Pkg.State() == PkgIterator::NeedsNothing) + Pkg.State() == PkgIterator::NeedsNothing && + (Cache[Pkg].iFlags & pkgDepCache::ReInstall) != pkgDepCache::ReInstall) { Bad = false; if (Debug) @@ -683,184 +754,155 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c for (Version **I = VList; *I != 0; ++I) { VerIterator Ver(Cache,*I); - PkgIterator Pkg = Ver.ParentPkg(); + PkgIterator DepPkg = Ver.ParentPkg(); // Not the install version - if (Cache[Pkg].InstallVer != *I || - (Cache[Pkg].Keep() == true && Pkg.State() == PkgIterator::NeedsNothing)) + if (Cache[DepPkg].InstallVer != *I) + continue; + + if (Cache[DepPkg].Keep() == true && DepPkg.State() == PkgIterator::NeedsNothing && + (Cache[DepPkg].iFlags & pkgDepCache::ReInstall) != pkgDepCache::ReInstall) continue; - if (List->IsFlag(Pkg,pkgOrderList::Configured)) + if (List->IsFlag(DepPkg,pkgOrderList::Configured)) { Bad = false; break; } // check if it needs unpack or if if configure is enough - if (List->IsFlag(Pkg,pkgOrderList::UnPacked) == false) + if (List->IsFlag(DepPkg,pkgOrderList::UnPacked) == false) { - if (Debug) - clog << OutputInDepth(Depth) << "Trying to SmartUnpack " << Pkg.FullName() << endl; - // SmartUnpack with the ImmediateFlag to ensure its really ready - if (SmartUnPack(Pkg, true, Depth + 1) == true) + // two packages pre-depending on each other can't be handled sanely + if (List->IsFlag(DepPkg,pkgOrderList::Loop) && PkgLoop) { - Bad = false; - if (List->IsFlag(Pkg,pkgOrderList::Loop) == false) - Changed = true; - break; + // this isn't an error as there is potential for something else to satisfy it + // (like a provides or an or-group member) + if (Debug) + clog << OutputInDepth(Depth) << "Unpack loop detected between " << DepPkg.FullName() << " and " << Pkg.FullName() << endl; + continue; } + + if (Debug) + clog << OutputInDepth(Depth) << "Trying to SmartUnpack " << DepPkg.FullName() << endl; + if (NonLoopingSmart(UNPACK_IMMEDIATE, Pkg, DepPkg, Depth, PkgLoop, &Bad, &Changed) == false) + return false; } else { if (Debug) - clog << OutputInDepth(Depth) << "Trying to SmartConfigure " << Pkg.FullName() << endl; - if (SmartConfigure(Pkg, Depth + 1) == true) - { - Bad = false; - if (List->IsFlag(Pkg,pkgOrderList::Loop) == false) - Changed = true; - break; - } + clog << OutputInDepth(Depth) << "Trying to SmartConfigure " << DepPkg.FullName() << endl; + if (NonLoopingSmart(CONFIGURE, Pkg, DepPkg, Depth, PkgLoop, &Bad, &Changed) == false) + return false; } 
+ break; } } if (Bad == true) - { - if (Start == End) - return _error->Error("Couldn't configure pre-depend %s for %s, " - "probably a dependency cycle.", - End.TargetPkg().FullName().c_str(),Pkg.FullName().c_str()); - } - else - continue; + SomethingBad = true; } else if (End->Type == pkgCache::Dep::Conflicts || - End->Type == pkgCache::Dep::Obsoletes) + End->Type == pkgCache::Dep::Obsoletes || + End->Type == pkgCache::Dep::DpkgBreaks) { - /* Look for conflicts. Two packages that are both in the install - state cannot conflict so we don't check.. */ SPtrArray<Version *> VList = End.AllTargets(); - for (Version **I = VList; *I != 0; I++) + for (Version **I = VList; *I != 0; ++I) { VerIterator Ver(Cache,*I); PkgIterator ConflictPkg = Ver.ParentPkg(); - VerIterator InstallVer(Cache,Cache[ConflictPkg].InstallVer); + if (ConflictPkg.CurrentVer() != Ver) + { + if (Debug) + std::clog << OutputInDepth(Depth) << "Ignore not-installed version " << Ver.VerStr() << " of " << ConflictPkg.FullName() << " for " << End << std::endl; + continue; + } - // See if the current version is conflicting - if (ConflictPkg.CurrentVer() == Ver && List->IsNow(ConflictPkg)) + if (List->IsNow(ConflictPkg) == false) { if (Debug) - clog << OutputInDepth(Depth) << Pkg.FullName() << " conflicts with " << ConflictPkg.FullName() << endl; - /* If a loop is not present or has not yet been detected, attempt to unpack packages - to resolve this conflict. If there is a loop present, remove packages to resolve this conflict */ - if (List->IsFlag(ConflictPkg,pkgOrderList::Loop) == false) - { - if (Cache[ConflictPkg].Keep() == 0 && Cache[ConflictPkg].InstallVer != 0) - { - if (Debug) - clog << OutputInDepth(Depth) << OutputInDepth(Depth) << "Unpacking " << ConflictPkg.FullName() << " to prevent conflict" << endl; - List->Flag(Pkg,pkgOrderList::Loop); - if (SmartUnPack(ConflictPkg,false, Depth + 1) == true) - if (List->IsFlag(ConflictPkg,pkgOrderList::Loop) == false) - Changed = true; - // Remove loop to allow it to be used later if needed - List->RmFlag(Pkg,pkgOrderList::Loop); - } - else if (EarlyRemove(ConflictPkg) == false) - return _error->Error("Internal Error, Could not early remove %s (1)",ConflictPkg.FullName().c_str()); - } - else if (List->IsFlag(ConflictPkg,pkgOrderList::Removed) == false) + std::clog << OutputInDepth(Depth) << "Ignore already dealt-with version " << Ver.VerStr() << " of " << ConflictPkg.FullName() << " for " << End << std::endl; + continue; + } + + if (List->IsFlag(ConflictPkg,pkgOrderList::Removed) == true) + { + if (Debug) + clog << OutputInDepth(Depth) << "Ignoring " << End << " as " << ConflictPkg.FullName() << "was temporarily removed" << endl; + continue; + } + + if (List->IsFlag(ConflictPkg,pkgOrderList::Loop) && PkgLoop) + { + if (End->Type == pkgCache::Dep::DpkgBreaks && End.IsMultiArchImplicit() == true) { if (Debug) - clog << OutputInDepth(Depth) << "Because of conficts knot, removing " << ConflictPkg.FullName() << " to conflict violation" << endl; - if (EarlyRemove(ConflictPkg) == false) - return _error->Error("Internal Error, Could not early remove %s (2)",ConflictPkg.FullName().c_str()); + clog << OutputInDepth(Depth) << "Because dependency is MultiArchImplicit we ignored looping on: " << ConflictPkg << endl; + continue; } - } - } - } - else if (End->Type == pkgCache::Dep::DpkgBreaks) - { - SPtrArray<Version *> VList = End.AllTargets(); - for (Version **I = VList; *I != 0; ++I) - { - VerIterator Ver(Cache,*I); - PkgIterator BrokenPkg = Ver.ParentPkg(); - if (BrokenPkg.CurrentVer() != Ver) - 
{ if (Debug) - std::clog << OutputInDepth(Depth) << " Ignore not-installed version " << Ver.VerStr() << " of " << Pkg.FullName() << " for " << End << std::endl; + { + if (End->Type == pkgCache::Dep::DpkgBreaks) + clog << OutputInDepth(Depth) << "Because of breaks knot, deconfigure " << ConflictPkg.FullName() << " temporarily" << endl; + else + clog << OutputInDepth(Depth) << "Because of conflict knot, removing " << ConflictPkg.FullName() << " temporarily" << endl; + } + if (EarlyRemove(ConflictPkg, &End) == false) + return _error->Error("Internal Error, Could not early remove %s (2)",ConflictPkg.FullName().c_str()); + SomethingBad = true; continue; } - // Check if it needs to be unpacked - if (List->IsFlag(BrokenPkg,pkgOrderList::InList) && Cache[BrokenPkg].Delete() == false && - List->IsNow(BrokenPkg)) + if (Cache[ConflictPkg].Delete() == false) { - if (List->IsFlag(BrokenPkg,pkgOrderList::Loop) && PkgLoop) + if (Debug) { - // This dependency has already been dealt with by another SmartUnPack on Pkg - break; + clog << OutputInDepth(Depth) << "Unpacking " << ConflictPkg.FullName() << " to avoid " << End; + if (PkgLoop == true) + clog << " (Looping)"; + clog << std::endl; } - else + // we would like to avoid temporary removals and all that at best via a simple unpack + _error->PushToStack(); + if (NonLoopingSmart(UNPACK, Pkg, ConflictPkg, Depth, PkgLoop, NULL, &Changed) == false) { - // Found a break, so see if we can unpack the package to avoid it - // but do not set loop if another SmartUnPack already deals with it - // Also, avoid it if the package we would unpack pre-depends on this one - VerIterator InstallVer(Cache,Cache[BrokenPkg].InstallVer); - bool circle = false; - for (pkgCache::DepIterator D = InstallVer.DependsList(); D.end() == false; ++D) + // but if it fails ignore this failure and look for alternative ways of solving + if (Debug) { - if (D->Type != pkgCache::Dep::PreDepends) - continue; - SPtrArray<Version *> VL = D.AllTargets(); - for (Version **I = VL; *I != 0; ++I) - { - VerIterator V(Cache,*I); - PkgIterator P = V.ParentPkg(); - // we are checking for installation as an easy 'protection' against or-groups and (unchosen) providers - if (P != Pkg || (P.CurrentVer() != V && Cache[P].InstallVer != V)) - continue; - circle = true; - break; - } - if (circle == true) - break; + clog << OutputInDepth(Depth) << "Avoidance unpack of " << ConflictPkg.FullName() << " failed for " << End << " ignoring:" << std::endl; + _error->DumpErrors(std::clog); } - if (circle == true) + _error->RevertToStack(); + // ignorance can only happen if a) one of the offenders is already gone + if (List->IsFlag(ConflictPkg,pkgOrderList::Removed) == true) { if (Debug) - clog << OutputInDepth(Depth) << " Avoiding " << End << " avoided as " << BrokenPkg.FullName() << " has a pre-depends on " << Pkg.FullName() << std::endl; - continue; + clog << OutputInDepth(Depth) << "But " << ConflictPkg.FullName() << " was temporarily removed in the meantime to satisfy " << End << endl; } + else if (List->IsFlag(Pkg,pkgOrderList::Removed) == true) + { + if (Debug) + clog << OutputInDepth(Depth) << "But " << Pkg.FullName() << " was temporarily removed in the meantime to satisfy " << End << endl; + } + // or b) we can make one go (removal or dpkg auto-deconfigure) else { if (Debug) - { - clog << OutputInDepth(Depth) << " Unpacking " << BrokenPkg.FullName() << " to avoid " << End; - if (PkgLoop == true) - clog << " (Looping)"; - clog << std::endl; - } - if (PkgLoop == false) - List->Flag(Pkg,pkgOrderList::Loop); - if 
(SmartUnPack(BrokenPkg, false, Depth + 1) == true) - { - if (List->IsFlag(BrokenPkg,pkgOrderList::Loop) == false) - Changed = true; - } - if (PkgLoop == false) - List->RmFlag(Pkg,pkgOrderList::Loop); + clog << OutputInDepth(Depth) << "So temprorary remove/deconfigure " << ConflictPkg.FullName() << " to satisfy " << End << endl; + if (EarlyRemove(ConflictPkg, &End) == false) + return _error->Error("Internal Error, Could not early remove %s (2)",ConflictPkg.FullName().c_str()); } } + else + _error->MergeWithStack(); } - // Check if a package needs to be removed - else if (Cache[BrokenPkg].Delete() == true && List->IsFlag(BrokenPkg,pkgOrderList::Configured) == false) + else { if (Debug) - clog << OutputInDepth(Depth) << " Removing " << BrokenPkg.FullName() << " to avoid " << End << endl; - SmartRemove(BrokenPkg); + clog << OutputInDepth(Depth) << "Removing " << ConflictPkg.FullName() << " now to avoid " << End << endl; + // no earlyremove() here as user has already agreed to the permanent removal + if (SmartRemove(Pkg) == false) + return _error->Error("Internal Error, Could not early remove %s (1)",ConflictPkg.FullName().c_str()); } } } @@ -868,7 +910,17 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c if (i++ > max_loops) return _error->Error("Internal error: APT::pkgPackageManager::MaxLoopCount reached in SmartConfigure for %s, aborting", Pkg.FullName().c_str()); } while (Changed == true); - + + if (SomethingBad == true) + return _error->Error("Couldn't configure %s, probably a dependency cycle.", Pkg.FullName().c_str()); + + if (couldBeTemporaryRemoved == true && List->IsFlag(Pkg,pkgOrderList::Removed) == true) + { + if (Debug) + std::clog << OutputInDepth(Depth) << "Prevent unpack as " << Pkg << " is currently temporarily removed" << std::endl; + return true; + } + // Check for reverse conflicts. if (CheckRConflicts(Pkg,Pkg.RevDependsList(), instVer.VerStr()) == false) @@ -929,7 +981,7 @@ bool pkgPackageManager::SmartUnPack(PkgIterator Pkg, bool const Immediate, int c if (Immediate == true) { // Perform immedate configuration of the package. if (SmartConfigure(Pkg, Depth + 1) == false) - _error->Warning(_("Could not perform immediate configuration on '%s'. " + _error->Error(_("Could not perform immediate configuration on '%s'. " "Please see man 5 apt.conf under APT::Immediate-Configure for details. 
(%d)"),Pkg.FullName().c_str(),2); } @@ -1033,7 +1085,7 @@ pkgPackageManager::OrderResult pkgPackageManager::OrderInstall() // PM::DoInstallPostFork - compat /*{{{*/ // --------------------------------------------------------------------- /*}}}*/ -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) +#if APT_PKG_ABI >= 413 pkgPackageManager::OrderResult pkgPackageManager::DoInstallPostFork(int statusFd) { @@ -1054,7 +1106,7 @@ pkgPackageManager::DoInstallPostFork(APT::Progress::PackageManager *progress) return Failed; return Res; -}; +} #else pkgPackageManager::OrderResult pkgPackageManager::DoInstallPostFork(int statusFd) @@ -1070,7 +1122,7 @@ pkgPackageManager::DoInstallPostFork(int statusFd) // PM::DoInstall - Does the installation /*{{{*/ // --------------------------------------------------------------------- /* compat */ -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) +#if APT_PKG_ABI >= 413 pkgPackageManager::OrderResult pkgPackageManager::DoInstall(int statusFd) { @@ -1094,7 +1146,7 @@ pkgPackageManager::OrderResult pkgPackageManager::DoInstall(int statusFd) // --------------------------------------------------------------------- /* This uses the filenames in FileNames and the information in the DepCache to perform the installation of packages.*/ -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) +#if APT_PKG_ABI >= 413 pkgPackageManager::OrderResult pkgPackageManager::DoInstall(APT::Progress::PackageManager *progress) { diff --git a/apt-pkg/packagemanager.h b/apt-pkg/packagemanager.h index 344ed9192..fce0ad301 100644 --- a/apt-pkg/packagemanager.h +++ b/apt-pkg/packagemanager.h @@ -44,6 +44,11 @@ class pkgDepCache; class pkgSourceList; class pkgOrderList; class pkgRecords; +namespace APT { + namespace Progress { + class PackageManager; + } +} class pkgPackageManager : protected pkgCache::Namespace @@ -79,22 +84,22 @@ class pkgPackageManager : protected pkgCache::Namespace // Install helpers bool ConfigureAll(); - bool SmartConfigure(PkgIterator Pkg, int const Depth); + bool SmartConfigure(PkgIterator Pkg, int const Depth) APT_MUSTCHECK; //FIXME: merge on abi break - bool SmartUnPack(PkgIterator Pkg); - bool SmartUnPack(PkgIterator Pkg, bool const Immediate, int const Depth); - bool SmartRemove(PkgIterator Pkg); - bool EarlyRemove(PkgIterator Pkg); - + bool SmartUnPack(PkgIterator Pkg) APT_MUSTCHECK; + bool SmartUnPack(PkgIterator Pkg, bool const Immediate, int const Depth) APT_MUSTCHECK; + bool SmartRemove(PkgIterator Pkg) APT_MUSTCHECK; + bool EarlyRemove(PkgIterator Pkg, DepIterator const * const Dep) APT_MUSTCHECK; + APT_DEPRECATED bool EarlyRemove(PkgIterator Pkg) APT_MUSTCHECK; + // The Actual installation implementation virtual bool Install(PkgIterator /*Pkg*/,std::string /*File*/) {return false;}; virtual bool Configure(PkgIterator /*Pkg*/) {return false;}; virtual bool Remove(PkgIterator /*Pkg*/,bool /*Purge*/=false) {return false;}; -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) +#if APT_PKG_ABI >= 413 virtual bool Go(APT::Progress::PackageManager * /*progress*/) {return true;}; -#else - virtual bool Go(int /*statusFd*/=-1) {return true;}; #endif + virtual bool Go(int /*statusFd*/=-1) {return true;}; virtual void Reset() {}; @@ -107,8 +112,8 @@ class pkgPackageManager : protected pkgCache::Namespace bool GetArchives(pkgAcquire *Owner,pkgSourceList *Sources, pkgRecords *Recs); - // Do the installation -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) + // Do the installation +#if APT_PKG_ABI >= 413 OrderResult DoInstall(APT::Progress::PackageManager *progress); // compat 
APT_DEPRECATED OrderResult DoInstall(int statusFd=-1); @@ -122,7 +127,7 @@ class pkgPackageManager : protected pkgCache::Namespace Res = OrderInstall(); return Res; }; -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) +#if APT_PKG_ABI >= 413 // stuff that needs to be done after the fork OrderResult DoInstallPostFork(APT::Progress::PackageManager *progress); // compat @@ -139,6 +144,12 @@ class pkgPackageManager : protected pkgCache::Namespace pkgPackageManager(pkgDepCache *Cache); virtual ~pkgPackageManager(); + + private: + enum APT_HIDDEN SmartAction { UNPACK_IMMEDIATE, UNPACK, CONFIGURE }; + APT_HIDDEN bool NonLoopingSmart(SmartAction const action, pkgCache::PkgIterator &Pkg, + pkgCache::PkgIterator DepPkg, int const Depth, bool const PkgLoop, + bool * const Bad, bool * const Changed) APT_MUSTCHECK; }; #endif diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc index 91b75f52e..864ae0f60 100644 --- a/apt-pkg/pkgcache.cc +++ b/apt-pkg/pkgcache.cc @@ -54,12 +54,8 @@ pkgCache::Header::Header() /* Whenever the structures change the major version should be bumped, whenever the generator changes the minor version should be bumped. */ - MajorVersion = 8; -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) - MinorVersion = 2; -#else - MinorVersion = 1; -#endif + MajorVersion = 10; + MinorVersion = 0; Dirty = false; HeaderSz = sizeof(pkgCache::Header); @@ -86,11 +82,13 @@ pkgCache::Header::Header() MaxDescFileSize = 0; FileList = 0; - StringList = 0; +#if APT_PKG_ABI < 413 + APT_IGNORE_DEPRECATED(StringList = 0;) +#endif VerSysName = 0; Architecture = 0; - memset(PkgHashTable,0,sizeof(PkgHashTable)); - memset(GrpHashTable,0,sizeof(GrpHashTable)); + SetArchitectures(0); + SetHashTableSize(_config->FindI("APT::Cache-HashTableSize", 10 * 1048)); memset(Pools,0,sizeof(Pools)); CacheFileSize = 0; @@ -119,6 +117,7 @@ bool pkgCache::Header::CheckSizes(Header &Against) const // Cache::pkgCache - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ +APT_IGNORE_DEPRECATED_PUSH pkgCache::pkgCache(MMap *Map, bool DoMap) : Map(*Map) { // call getArchitectures() with cached=false to ensure that the @@ -128,6 +127,7 @@ pkgCache::pkgCache(MMap *Map, bool DoMap) : Map(*Map) if (DoMap == true) ReMap(); } +APT_IGNORE_DEPRECATED_POP /*}}}*/ // Cache::ReMap - Reopen the cache file /*{{{*/ // --------------------------------------------------------------------- @@ -145,7 +145,6 @@ bool pkgCache::ReMap(bool const &Errorchecks) DescP = (Description *)Map.Data(); ProvideP = (Provides *)Map.Data(); DepP = (Dependency *)Map.Data(); - StringItemP = (StringItem *)Map.Data(); StrP = (char *)Map.Data(); if (Errorchecks == false) @@ -168,15 +167,23 @@ bool pkgCache::ReMap(bool const &Errorchecks) if (Map.Size() < HeaderP->CacheFileSize) return _error->Error(_("The package cache file is corrupted, it is too small")); + if (HeaderP->VerSysName == 0 || HeaderP->Architecture == 0 || HeaderP->GetArchitectures() == 0) + return _error->Error(_("The package cache file is corrupted")); + // Locate our VS.. 
- if (HeaderP->VerSysName == 0 || - (VS = pkgVersioningSystem::GetVS(StrP + HeaderP->VerSysName)) == 0) + if ((VS = pkgVersioningSystem::GetVS(StrP + HeaderP->VerSysName)) == 0) return _error->Error(_("This APT does not support the versioning system '%s'"),StrP + HeaderP->VerSysName); - // Chcek the arhcitecture - if (HeaderP->Architecture == 0 || - _config->Find("APT::Architecture") != StrP + HeaderP->Architecture) - return _error->Error(_("The package cache was built for a different architecture")); + // Check the architecture + std::vector<std::string> archs = APT::Configuration::getArchitectures(); + std::vector<std::string>::const_iterator a = archs.begin(); + std::string list = *a; + for (++a; a != archs.end(); ++a) + list.append(",").append(*a); + if (_config->Find("APT::Architecture") != StrP + HeaderP->Architecture || + list != StrP + HeaderP->GetArchitectures()) + return _error->Error(_("The package cache was built for different architectures: %s vs %s"), StrP + HeaderP->GetArchitectures(), list.c_str()); + return true; } /*}}}*/ @@ -185,20 +192,20 @@ bool pkgCache::ReMap(bool const &Errorchecks) /* This is used to generate the hash entries for the HashTable. With my package list from bo this function gets 94% table usage on a 512 item table (480 used items) */ -unsigned long pkgCache::sHash(const string &Str) const +map_id_t pkgCache::sHash(const string &Str) const { unsigned long Hash = 0; for (string::const_iterator I = Str.begin(); I != Str.end(); ++I) Hash = 41 * Hash + tolower_ascii(*I); - return Hash % _count(HeaderP->PkgHashTable); + return Hash % HeaderP->GetHashTableSize(); } -unsigned long pkgCache::sHash(const char *Str) const +map_id_t pkgCache::sHash(const char *Str) const { unsigned long Hash = tolower_ascii(*Str); for (const char *I = Str + 1; *I != 0; ++I) Hash = 41 * Hash + tolower_ascii(*I); - return Hash % _count(HeaderP->PkgHashTable); + return Hash % HeaderP->GetHashTableSize(); } /*}}}*/ // Cache::SingleArchFindPkg - Locate a package by name /*{{{*/ @@ -209,13 +216,10 @@ unsigned long pkgCache::sHash(const char *Str) const pkgCache::PkgIterator pkgCache::SingleArchFindPkg(const string &Name) { // Look at the hash bucket - Package *Pkg = PkgP + HeaderP->PkgHashTable[Hash(Name)]; + Package *Pkg = PkgP + HeaderP->PkgHashTableP()[Hash(Name)]; for (; Pkg != PkgP; Pkg = PkgP + Pkg->NextPackage) { - if (unlikely(Pkg->Name == 0)) - continue; - - int const cmp = strcasecmp(Name.c_str(), StrP + Pkg->Name); + int const cmp = strcmp(Name.c_str(), StrP + (GrpP + Pkg->Group)->Name); if (cmp == 0) return PkgIterator(*this, Pkg); else if (cmp < 0) @@ -230,12 +234,7 @@ pkgCache::PkgIterator pkgCache::SingleArchFindPkg(const string &Name) pkgCache::PkgIterator pkgCache::FindPkg(const string &Name) { size_t const found = Name.find(':'); if (found == string::npos) - { - if (MultiArchCache() == false) - return SingleArchFindPkg(Name); - else - return FindPkg(Name, "native"); - } + return FindPkg(Name, "native"); string const Arch = Name.substr(found+1); /* Beware: This is specialcased to handle pkg:any in dependencies as these are linked to virtual pkg:any named packages with all archs. 
@@ -249,13 +248,6 @@ pkgCache::PkgIterator pkgCache::FindPkg(const string &Name) { // --------------------------------------------------------------------- /* Returns 0 on error, pointer to the package otherwise */ pkgCache::PkgIterator pkgCache::FindPkg(const string &Name, string const &Arch) { - if (MultiArchCache() == false && Arch != "none") { - if (Arch == "native" || Arch == "all" || Arch == "any" || - Arch == NativeArch()) - return SingleArchFindPkg(Name); - else - return PkgIterator(*this,0); - } /* We make a detour via the GrpIterator here as on a multi-arch environment a group is easier to find than a package (less entries in the buckets) */ @@ -274,12 +266,9 @@ pkgCache::GrpIterator pkgCache::FindGrp(const string &Name) { return GrpIterator(*this,0); // Look at the hash bucket for the group - Group *Grp = GrpP + HeaderP->GrpHashTable[sHash(Name)]; + Group *Grp = GrpP + HeaderP->GrpHashTableP()[sHash(Name)]; for (; Grp != GrpP; Grp = GrpP + Grp->Next) { - if (unlikely(Grp->Name == 0)) - continue; - - int const cmp = strcasecmp(Name.c_str(), StrP + Grp->Name); + int const cmp = strcmp(Name.c_str(), StrP + Grp->Name); if (cmp == 0) return GrpIterator(*this, Grp); else if (cmp < 0) @@ -356,19 +345,15 @@ pkgCache::PkgIterator pkgCache::GrpIterator::FindPkg(string Arch) const { last one we check, so we do it now. */ if (Arch == "native" || Arch == myArch || Arch == "all") { pkgCache::Package *Pkg = Owner->PkgP + S->LastPackage; - if (strcasecmp(myArch, Owner->StrP + Pkg->Arch) == 0) + if (strcmp(myArch, Owner->StrP + Pkg->Arch) == 0) return PkgIterator(*Owner, Pkg); Arch = myArch; } - /* Iterate over the list to find the matching arch - unfortunately this list includes "package noise" - (= different packages with same calculated hash), - so we need to check the name also */ + // Iterate over the list to find the matching arch for (pkgCache::Package *Pkg = PackageList(); Pkg != Owner->PkgP; Pkg = Owner->PkgP + Pkg->NextPackage) { - if (S->Name == Pkg->Name && - stringcasecmp(Arch, Owner->StrP + Pkg->Arch) == 0) + if (stringcmp(Arch, Owner->StrP + Pkg->Arch) == 0) return PkgIterator(*Owner, Pkg); if ((Owner->PkgP + S->LastPackage) == Pkg) break; @@ -428,10 +413,10 @@ void pkgCache::GrpIterator::operator ++(int) S = Owner->GrpP + S->Next; // Follow the hash table - while (S == Owner->GrpP && (HashIndex+1) < (signed)_count(Owner->HeaderP->GrpHashTable)) + while (S == Owner->GrpP && (HashIndex+1) < (signed)Owner->HeaderP->GetHashTableSize()) { HashIndex++; - S = Owner->GrpP + Owner->HeaderP->GrpHashTable[HashIndex]; + S = Owner->GrpP + Owner->HeaderP->GrpHashTableP()[HashIndex]; } } /*}}}*/ @@ -445,10 +430,10 @@ void pkgCache::PkgIterator::operator ++(int) S = Owner->PkgP + S->NextPackage; // Follow the hash table - while (S == Owner->PkgP && (HashIndex+1) < (signed)_count(Owner->HeaderP->PkgHashTable)) + while (S == Owner->PkgP && (HashIndex+1) < (signed)Owner->HeaderP->GetHashTableSize()) { HashIndex++; - S = Owner->PkgP + Owner->HeaderP->PkgHashTable[HashIndex]; + S = Owner->PkgP + Owner->HeaderP->PkgHashTableP()[HashIndex]; } } /*}}}*/ @@ -524,7 +509,10 @@ operator<<(std::ostream& out, pkgCache::PkgIterator Pkg) out << " -> " << candidate; if ( newest != "none" && candidate != newest) out << " | " << newest; - out << " > ( " << string(Pkg.Section()==0?"none":Pkg.Section()) << " )"; + if (Pkg->VersionList == 0) + out << " > ( none )"; + else + out << " > ( " << string(Pkg.VersionList().Section()==0?"unknown":Pkg.VersionList().Section()) << " )"; return out; } /*}}}*/ @@ -822,7 +810,7 @@ 
int pkgCache::VerIterator::CompareVer(const VerIterator &B) const // VerIterator::Downloadable - Checks if the version is downloadable /*{{{*/ // --------------------------------------------------------------------- /* */ -bool pkgCache::VerIterator::Downloadable() const +APT_PURE bool pkgCache::VerIterator::Downloadable() const { VerFileIterator Files = FileList(); for (; Files.end() == false; ++Files) @@ -835,7 +823,7 @@ bool pkgCache::VerIterator::Downloadable() const // --------------------------------------------------------------------- /* This checks to see if any of the versions files are not NotAutomatic. True if this version is selectable for automatic installation. */ -bool pkgCache::VerIterator::Automatic() const +APT_PURE bool pkgCache::VerIterator::Automatic() const { VerFileIterator Files = FileList(); for (; Files.end() == false; ++Files) @@ -1031,7 +1019,7 @@ bool pkgCache::PrvIterator::IsMultiArchImplicit() const { pkgCache::PkgIterator const Owner = OwnerPkg(); pkgCache::PkgIterator const Parent = ParentPkg(); - if (strcmp(Owner.Arch(), Parent.Arch()) != 0 || Owner->Name == Parent->Name) + if (strcmp(Owner.Arch(), Parent.Arch()) != 0 || Owner.Group()->Name == Parent.Group()->Name) return true; return false; } diff --git a/apt-pkg/pkgcache.h b/apt-pkg/pkgcache.h index 5e8a9630a..2ba23c5c0 100644 --- a/apt-pkg/pkgcache.h +++ b/apt-pkg/pkgcache.h @@ -79,11 +79,52 @@ #include <string> #include <time.h> +#include <stdint.h> #ifndef APT_8_CLEANER_HEADERS using std::string; #endif +#if APT_PKG_ABI >= 413 +// storing file sizes of indexes, which are way below 4 GB for now +typedef uint32_t map_filesize_t; +typedef map_filesize_t should_be_map_filesize_t; +#else +typedef unsigned long map_filesize_t; +typedef unsigned int should_be_map_filesize_t; +#endif +#if APT_PKG_ABI >= 413 +// each package/group/dependency gets an id +typedef uint32_t map_id_t; +typedef map_id_t should_be_map_id_t; +#else +typedef unsigned long map_id_t; +typedef unsigned int should_be_map_id_t; +#endif +#if APT_PKG_ABI >= 413 +// some files get an id, too, but in far less absolute numbers +typedef uint16_t map_fileid_t; +typedef map_fileid_t should_be_map_fileid_t; +#else +typedef unsigned long map_fileid_t; +typedef unsigned int should_be_map_fileid_t; +#endif +#if APT_PKG_ABI >= 413 +// relative pointer from cache start +typedef uint32_t map_pointer_t; +#else +typedef unsigned int map_pointer_t; +#endif +// same as the previous, but documented to be to a string item +typedef map_pointer_t map_stringitem_t; +#if APT_PKG_ABI >= 413 +typedef uint64_t should_be_uint64_t; +typedef uint64_t should_be_uint64_small_t; +#else +typedef unsigned long long should_be_uint64_t; +typedef unsigned long should_be_uint64_small_t; +#endif + class pkgVersioningSystem; class pkgCache /*{{{*/ { @@ -138,7 +179,7 @@ class pkgCache /*{{{*/ /** \brief priority of a package version Zero is used for unparsable or absent Priority fields. 
*/ - enum VerPriority {Important=1,Required=2,Standard=3,Optional=4,Extra=5}; + enum VerPriority {Required=1,Important=2,Standard=3,Optional=4,Extra=5}; enum PkgSelectedState {Unknown=0,Install=1,Hold=2,DeInstall=3,Purge=4}; enum PkgInstState {Ok=0,ReInstReq=1,HoldInst=2,HoldReInstReq=3}; enum PkgCurrentState {NotInstalled=0,UnPacked=1,HalfConfigured=2, @@ -158,8 +199,8 @@ class pkgCache /*{{{*/ std::string CacheFile; MMap &Map; - unsigned long sHash(const std::string &S) const APT_PURE; - unsigned long sHash(const char *S) const APT_PURE; + map_id_t sHash(const std::string &S) const APT_PURE; + map_id_t sHash(const char *S) const APT_PURE; public: @@ -174,7 +215,7 @@ class pkgCache /*{{{*/ Description *DescP; Provides *ProvideP; Dependency *DepP; - StringItem *StringItemP; + APT_DEPRECATED StringItem *StringItemP; char *StrP; virtual bool ReMap(bool const &Errorchecks = true); @@ -183,8 +224,8 @@ class pkgCache /*{{{*/ inline void *DataEnd() {return ((unsigned char *)Map.Data()) + Map.Size();} // String hashing function (512 range) - inline unsigned long Hash(const std::string &S) const {return sHash(S);} - inline unsigned long Hash(const char *S) const {return sHash(S);} + inline map_id_t Hash(const std::string &S) const {return sHash(S);} + inline map_id_t Hash(const char *S) const {return sHash(S);} // Useful transformation things const char *Priority(unsigned char Priority); @@ -218,7 +259,7 @@ class pkgCache /*{{{*/ private: bool MultiArchEnabled; - PkgIterator SingleArchFindPkg(const std::string &Name); + APT_HIDDEN PkgIterator SingleArchFindPkg(const std::string &Name); }; /*}}}*/ // Header structure /*{{{*/ @@ -263,35 +304,36 @@ struct pkgCache::Header These indicate the number of each structure contained in the cache. PackageCount is especially useful for generating user state structures. See Package::Id for more info. */ - unsigned long GroupCount; - unsigned long PackageCount; - unsigned long VersionCount; - unsigned long DescriptionCount; - unsigned long DependsCount; - unsigned long PackageFileCount; - unsigned long VerFileCount; - unsigned long DescFileCount; - unsigned long ProvidesCount; + map_id_t GroupCount; + map_id_t PackageCount; + map_id_t VersionCount; + map_id_t DescriptionCount; + map_id_t DependsCount; + map_fileid_t PackageFileCount; + map_fileid_t VerFileCount; + map_fileid_t DescFileCount; + map_id_t ProvidesCount; /** \brief index of the first PackageFile structure The PackageFile structures are singly linked lists that represent all package files that have been merged into the cache. */ - map_ptrloc FileList; - /** \brief index of the first StringItem structure - - The cache contains a list of all the unique strings (StringItems). 
- The parser reads this list into memory so it can match strings - against it.*/ - map_ptrloc StringList; + map_pointer_t FileList; +#if APT_PKG_ABI < 413 + APT_DEPRECATED map_pointer_t StringList; +#endif /** \brief String representing the version system used */ - map_ptrloc VerSysName; - /** \brief Architecture(s) the cache was built against */ - map_ptrloc Architecture; + map_pointer_t VerSysName; + /** \brief native architecture the cache was built against */ + map_pointer_t Architecture; +#if APT_PKG_ABI >= 413 + /** \brief all architectures the cache was built against */ + map_pointer_t Architectures; +#endif /** \brief The maximum size of a raw entry from the original Package file */ - unsigned long MaxVerFileSize; + map_filesize_t MaxVerFileSize; /** \brief The maximum size of a raw entry from the original Translation file */ - unsigned long MaxDescFileSize; + map_filesize_t MaxDescFileSize; /** \brief The Pool structures manage the allocation pools that the generator uses @@ -302,23 +344,37 @@ struct pkgCache::Header stores this information so future additions can make use of any unused pool blocks. */ DynamicMMap::Pool Pools[9]; - + /** \brief hash tables providing rapid group/package name lookup - Each group/package name is inserted into the hash table using pkgCache::Hash(const &string) + Each group/package name is inserted into a hash table using pkgCache::Hash(const &string) By iterating over each entry in the hash table it is possible to iterate over the entire list of packages. Hash Collisions are handled with a singly linked list of packages based at the hash item. The linked list contains only packages that match the hashing function. In the PkgHashTable is it possible that multiple packages have the same name - these packages are stored as a sequence in the list. - - Beware: The Hashmethod assumes that the hash table sizes are equal */ + The size of both tables is the same. 
*/ +#if APT_PKG_ABI >= 413 + unsigned int HashTableSize; + unsigned int GetHashTableSize() const { return HashTableSize; } + void SetHashTableSize(unsigned int const sz) { HashTableSize = sz; } + map_pointer_t GetArchitectures() const { return Architectures; } + void SetArchitectures(map_pointer_t const idx) { Architectures = idx; } +#else + // BEWARE: these tables are pretty much empty and just here for abi compat map_ptrloc PkgHashTable[2*1048]; map_ptrloc GrpHashTable[2*1048]; + unsigned int GetHashTableSize() const { return PkgHashTable[0]; } + void SetHashTableSize(unsigned int const sz) { PkgHashTable[0] = sz; } + map_pointer_t GetArchitectures() const { return PkgHashTable[1]; } + void SetArchitectures(map_pointer_t const idx) { PkgHashTable[1] = idx; } +#endif + map_pointer_t * PkgHashTableP() const { return (map_pointer_t*) (this + 1); } + map_pointer_t * GrpHashTableP() const { return PkgHashTableP() + GetHashTableSize(); } /** \brief Size of the complete cache file */ - unsigned long CacheFileSize; + should_be_uint64_small_t CacheFileSize; bool CheckSizes(Header &Against) const APT_PURE; Header(); @@ -334,17 +390,17 @@ struct pkgCache::Header struct pkgCache::Group { /** \brief Name of the group */ - map_ptrloc Name; // StringItem + map_stringitem_t Name; // Linked List /** \brief Link to the first package which belongs to the group */ - map_ptrloc FirstPackage; // Package + map_pointer_t FirstPackage; // Package /** \brief Link to the last package which belongs to the group */ - map_ptrloc LastPackage; // Package + map_pointer_t LastPackage; // Package /** \brief Link to the next Group */ - map_ptrloc Next; // Group + map_pointer_t Next; // Group /** \brief unique sequel ID */ - unsigned int ID; + should_be_map_id_t ID; }; /*}}}*/ @@ -362,10 +418,13 @@ struct pkgCache::Group */ struct pkgCache::Package { - /** \brief Name of the package */ - map_ptrloc Name; // StringItem + /** \brief Name of the package + * Note that the access method Name() will remain. It is just this data member + * deprecated as this information is already stored and available via the + * associated Group – so it is wasting precious binary cache space */ + APT_DEPRECATED map_stringitem_t Name; /** \brief Architecture of the package */ - map_ptrloc Arch; // StringItem + map_stringitem_t Arch; /** \brief Base of a singly linked list of versions Each structure represents a unique version of the package. @@ -375,24 +434,25 @@ struct pkgCache::Package versions of a package can be cleanly handled by the system. Furthermore, this linked list is guaranteed to be sorted from Highest version to lowest version with no duplicate entries. */ - map_ptrloc VersionList; // Version + map_pointer_t VersionList; // Version /** \brief index to the installed version */ - map_ptrloc CurrentVer; // Version - /** \brief indicates the deduced section - - Should be the index to the string "Unknown" or to the section - of the last parsed item. */ - map_ptrloc Section; // StringItem + map_pointer_t CurrentVer; // Version + /** \brief indicates nothing (consistently) + This field used to contain ONE section the package belongs to, + if those differs between versions it is a RANDOM one. + The Section() method tries to reproduce it, but the only sane + thing to do is use the Section field from the version! 
*/ + APT_DEPRECATED map_ptrloc Section; // StringItem /** \brief index of the group this package belongs to */ - map_ptrloc Group; // Group the Package belongs to + map_pointer_t Group; // Group the Package belongs to // Linked list /** \brief Link to the next package in the same bucket */ - map_ptrloc NextPackage; // Package + map_pointer_t NextPackage; // Package /** \brief List of all dependencies on this package */ - map_ptrloc RevDepends; // Dependency + map_pointer_t RevDepends; // Dependency /** \brief List of all "packages" this package provide */ - map_ptrloc ProvidesList; // Provides + map_pointer_t ProvidesList; // Provides // Install/Remove/Purge etc /** \brief state that the user wishes the package to be in */ @@ -412,7 +472,7 @@ struct pkgCache::Package This allows clients to create an array of size PackageCount and use it to store state information for the package map. For instance the status file emitter uses this to track which packages have been emitted already. */ - unsigned int ID; + should_be_map_id_t ID; /** \brief some useful indicators of the package's state */ unsigned long Flags; }; @@ -426,30 +486,30 @@ struct pkgCache::Package struct pkgCache::PackageFile { /** \brief physical disk file that this PackageFile represents */ - map_ptrloc FileName; // StringItem + map_stringitem_t FileName; /** \brief the release information Please see the files document for a description of what the release information means. */ - map_ptrloc Archive; // StringItem - map_ptrloc Codename; // StringItem - map_ptrloc Component; // StringItem - map_ptrloc Version; // StringItem - map_ptrloc Origin; // StringItem - map_ptrloc Label; // StringItem - map_ptrloc Architecture; // StringItem + map_stringitem_t Archive; + map_stringitem_t Codename; + map_stringitem_t Component; + map_stringitem_t Version; + map_stringitem_t Origin; + map_stringitem_t Label; + map_stringitem_t Architecture; /** \brief The site the index file was fetched from */ - map_ptrloc Site; // StringItem + map_stringitem_t Site; /** \brief indicates what sort of index file this is @TODO enumerate at least the possible indexes */ - map_ptrloc IndexType; // StringItem + map_stringitem_t IndexType; /** \brief Size of the file Used together with the modification time as a simple check to ensure that the Packages file has not been altered since Cache generation. 
*/ - unsigned long Size; + map_filesize_t Size; /** \brief Modification time for the file */ time_t mtime; @@ -458,9 +518,9 @@ struct pkgCache::PackageFile // Linked list /** \brief Link to the next PackageFile in the Cache */ - map_ptrloc NextFile; // PackageFile + map_pointer_t NextFile; // PackageFile /** \brief unique sequel ID */ - unsigned int ID; + should_be_map_fileid_t ID; }; /*}}}*/ // VerFile structure /*{{{*/ @@ -471,13 +531,13 @@ struct pkgCache::PackageFile struct pkgCache::VerFile { /** \brief index of the package file that this version was found in */ - map_ptrloc File; // PackageFile + map_pointer_t File; // PackageFile /** \brief next step in the linked list */ - map_ptrloc NextFile; // PkgVerFile + map_pointer_t NextFile; // PkgVerFile /** \brief position in the package file */ - map_ptrloc Offset; // File offset + should_be_map_filesize_t Offset; // File offset /** @TODO document pkgCache::VerFile::Size */ - unsigned long Size; + map_filesize_t Size; }; /*}}}*/ // DescFile structure /*{{{*/ @@ -485,13 +545,13 @@ struct pkgCache::VerFile struct pkgCache::DescFile { /** \brief index of the file that this description was found in */ - map_ptrloc File; // PackageFile + map_pointer_t File; // PackageFile /** \brief next step in the linked list */ - map_ptrloc NextFile; // PkgVerFile + map_pointer_t NextFile; // PkgVerFile /** \brief position in the file */ - map_ptrloc Offset; // File offset + should_be_map_filesize_t Offset; // File offset /** @TODO document pkgCache::DescFile::Size */ - unsigned long Size; + map_filesize_t Size; }; /*}}}*/ // Version structure /*{{{*/ @@ -503,9 +563,17 @@ struct pkgCache::DescFile struct pkgCache::Version { /** \brief complete version string */ - map_ptrloc VerStr; // StringItem + map_stringitem_t VerStr; /** \brief section this version is filled in */ - map_ptrloc Section; // StringItem + map_stringitem_t Section; +#if APT_PKG_ABI >= 413 + /** \brief source package name this version comes from + Always contains the name, even if it is the same as the binary name */ + map_stringitem_t SourcePkgName; + /** \brief source version this version comes from + Always contains the version string, even if it is the same as the binary version */ + map_stringitem_t SourceVerStr; +#endif /** \brief Multi-Arch capabilities of a package version */ enum VerMultiArch { None = 0, /*!< is the default and doesn't trigger special behaviour */ @@ -527,33 +595,33 @@ struct pkgCache::Version applies to. If FileList is 0 then this is a blank version. The structure should also have a 0 in all other fields excluding pkgCache::Version::VerStr and Possibly pkgCache::Version::NextVer. */ - map_ptrloc FileList; // VerFile + map_pointer_t FileList; // VerFile /** \brief next (lower or equal) version in the linked list */ - map_ptrloc NextVer; // Version + map_pointer_t NextVer; // Version /** \brief next description in the linked list */ - map_ptrloc DescriptionList; // Description + map_pointer_t DescriptionList; // Description /** \brief base of the dependency list */ - map_ptrloc DependsList; // Dependency + map_pointer_t DependsList; // Dependency /** \brief links to the owning package This allows reverse dependencies to determine the package */ - map_ptrloc ParentPkg; // Package + map_pointer_t ParentPkg; // Package /** \brief list of pkgCache::Provides */ - map_ptrloc ProvidesList; // Provides + map_pointer_t ProvidesList; // Provides /** \brief archive size for this version For Debian this is the size of the .deb file. 
*/ - unsigned long long Size; // These are the .deb size + should_be_uint64_t Size; // These are the .deb size /** \brief uncompressed size for this version */ - unsigned long long InstalledSize; + should_be_uint64_t InstalledSize; /** \brief characteristic value representing this version No two packages in existence should have the same VerStr and Hash with different contents. */ unsigned short Hash; /** \brief unique sequel ID */ - unsigned int ID; + should_be_map_id_t ID; /** \brief parsed priority value */ unsigned char Priority; }; @@ -566,22 +634,22 @@ struct pkgCache::Description If the value has a 0 length then this is read using the Package file else the Translation-CODE file is used. */ - map_ptrloc language_code; // StringItem + map_stringitem_t language_code; /** \brief MD5sum of the original description Used to map Translations of a description to a version and to check that the Translation is up-to-date. */ - map_ptrloc md5sum; // StringItem + map_stringitem_t md5sum; /** @TODO document pkgCache::Description::FileList */ - map_ptrloc FileList; // DescFile + map_pointer_t FileList; // DescFile /** \brief next translation for this description */ - map_ptrloc NextDesc; // Description + map_pointer_t NextDesc; // Description /** \brief the text is a description of this package */ - map_ptrloc ParentPkg; // Package + map_pointer_t ParentPkg; // Package /** \brief unique sequel ID */ - unsigned int ID; + should_be_map_id_t ID; }; /*}}}*/ // Dependency structure /*{{{*/ @@ -594,21 +662,21 @@ struct pkgCache::Description struct pkgCache::Dependency { /** \brief string of the version the dependency is applied against */ - map_ptrloc Version; // StringItem + map_stringitem_t Version; /** \brief index of the package this depends applies to The generator will - if the package does not already exist - create a blank (no version records) package. */ - map_ptrloc Package; // Package + map_pointer_t Package; // Package /** \brief next dependency of this version */ - map_ptrloc NextDepends; // Dependency + map_pointer_t NextDepends; // Dependency /** \brief next reverse dependency of this package */ - map_ptrloc NextRevDepends; // Dependency + map_pointer_t NextRevDepends; // Dependency /** \brief version of the package which has the reverse depends */ - map_ptrloc ParentVer; // Version + map_pointer_t ParentVer; // Version /** \brief unique sequel ID */ - map_ptrloc ID; + should_be_map_id_t ID; /** \brief Dependency type - Depends, Recommends, Conflicts, etc */ unsigned char Type; /** \brief comparison operator specified on the depends line @@ -629,31 +697,23 @@ struct pkgCache::Dependency struct pkgCache::Provides { /** \brief index of the package providing this */ - map_ptrloc ParentPkg; // Package + map_pointer_t ParentPkg; // Package /** \brief index of the version this provide line applies to */ - map_ptrloc Version; // Version + map_pointer_t Version; // Version /** \brief version in the provides line (if any) This version allows dependencies to depend on specific versions of a Provides, as well as allowing Provides to override existing packages. This is experimental. 
Note that Debian doesn't allow versioned provides */ - map_ptrloc ProvideVersion; // StringItem + map_stringitem_t ProvideVersion; /** \brief next provides (based of package) */ - map_ptrloc NextProvides; // Provides + map_pointer_t NextProvides; // Provides /** \brief next provides (based of version) */ - map_ptrloc NextPkgProv; // Provides + map_pointer_t NextPkgProv; // Provides }; /*}}}*/ -// StringItem structure /*{{{*/ -/** \brief used for generating single instances of strings - - Some things like Section Name are are useful to have as unique tags. - It is part of a linked list based at pkgCache::Header::StringList - - All strings are simply inlined any place in the file that is natural - for the writer. The client should make no assumptions about the positioning - of strings. All StringItems should be null-terminated. */ -struct pkgCache::StringItem +// UNUSED StringItem structure /*{{{*/ +struct APT_DEPRECATED pkgCache::StringItem { /** \brief string this refers to */ map_ptrloc String; // StringItem @@ -662,7 +722,6 @@ struct pkgCache::StringItem }; /*}}}*/ - inline char const * pkgCache::NativeArch() { return StrP + HeaderP->Architecture; } diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc index 810f0b022..ba454f057 100644 --- a/apt-pkg/pkgcachegen.cc +++ b/apt-pkg/pkgcachegen.cc @@ -57,8 +57,7 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) : FoundFileDeps(0) { CurrentFile = 0; - memset(UniqHash,0,sizeof(UniqHash)); - + if (_error->PendingError() == true) return; @@ -73,14 +72,35 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) : // Starting header *Cache.HeaderP = pkgCache::Header(); - map_ptrloc const idxVerSysName = WriteStringInMap(_system->VS->Label); + + // make room for the hashtables for packages and groups + if (Map.RawAllocate(2 * (Cache.HeaderP->GetHashTableSize() * sizeof(map_pointer_t))) == 0) + return; + + map_stringitem_t const idxVerSysName = WriteStringInMap(_system->VS->Label); + if (unlikely(idxVerSysName == 0)) + return; Cache.HeaderP->VerSysName = idxVerSysName; - // this pointer is set in ReMap, but we need it now for WriteUniqString - Cache.StringItemP = (pkgCache::StringItem *)Map.Data(); - map_ptrloc const idxArchitecture = WriteUniqString(_config->Find("APT::Architecture")); - Cache.HeaderP->Architecture = idxArchitecture; - if (unlikely(idxVerSysName == 0 || idxArchitecture == 0)) + map_stringitem_t const idxArchitecture = StoreString(MIXED, _config->Find("APT::Architecture")); + if (unlikely(idxArchitecture == 0)) return; + Cache.HeaderP->Architecture = idxArchitecture; + + std::vector<std::string> archs = APT::Configuration::getArchitectures(); + if (archs.size() > 1) + { + std::vector<std::string>::const_iterator a = archs.begin(); + std::string list = *a; + for (++a; a != archs.end(); ++a) + list.append(",").append(*a); + map_stringitem_t const idxArchitectures = WriteStringInMap(list); + if (unlikely(idxArchitectures == 0)) + return; + Cache.HeaderP->SetArchitectures(idxArchitectures); + } + else + Cache.HeaderP->SetArchitectures(idxArchitecture); + Cache.ReMap(); } else @@ -92,9 +112,9 @@ pkgCacheGenerator::pkgCacheGenerator(DynamicMMap *pMap,OpProgress *Prog) : { _error->Error(_("Cache has an incompatible versioning system")); return; - } + } } - + Cache.HeaderP->Dirty = true; Map.Sync(0,sizeof(pkgCache::Header)); } @@ -126,10 +146,6 @@ void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newM CurrentFile += (pkgCache::PackageFile const * const) newMap - 
(pkgCache::PackageFile const * const) oldMap; - for (size_t i = 0; i < _count(UniqHash); ++i) - if (UniqHash[i] != 0) - UniqHash[i] += (pkgCache::StringItem const * const) newMap - (pkgCache::StringItem const * const) oldMap; - for (std::vector<pkgCache::GrpIterator*>::const_iterator i = Dynamic<pkgCache::GrpIterator>::toReMap.begin(); i != Dynamic<pkgCache::GrpIterator>::toReMap.end(); ++i) (*i)->ReMap(oldMap, newMap); @@ -153,27 +169,27 @@ void pkgCacheGenerator::ReMap(void const * const oldMap, void const * const newM (*i)->ReMap(oldMap, newMap); } /*}}}*/ // CacheGenerator::WriteStringInMap /*{{{*/ -map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String, +map_stringitem_t pkgCacheGenerator::WriteStringInMap(const char *String, const unsigned long &Len) { void const * const oldMap = Map.Data(); - map_ptrloc const index = Map.WriteString(String, Len); + map_stringitem_t const index = Map.WriteString(String, Len); if (index != 0) ReMap(oldMap, Map.Data()); return index; } /*}}}*/ // CacheGenerator::WriteStringInMap /*{{{*/ -map_ptrloc pkgCacheGenerator::WriteStringInMap(const char *String) { +map_stringitem_t pkgCacheGenerator::WriteStringInMap(const char *String) { void const * const oldMap = Map.Data(); - map_ptrloc const index = Map.WriteString(String); + map_stringitem_t const index = Map.WriteString(String); if (index != 0) ReMap(oldMap, Map.Data()); return index; } /*}}}*/ -map_ptrloc pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/ +map_pointer_t pkgCacheGenerator::AllocateInMap(const unsigned long &size) {/*{{{*/ void const * const oldMap = Map.Data(); - map_ptrloc const index = Map.Allocate(size); + map_pointer_t const index = Map.Allocate(size); if (index != 0) ReMap(oldMap, Map.Data()); return index; @@ -253,16 +269,16 @@ bool pkgCacheGenerator::MergeList(ListParser &List, } } - if (Cache.HeaderP->PackageCount >= (1ULL<<sizeof(Cache.PkgP->ID)*8)-1) + if (Cache.HeaderP->PackageCount >= std::numeric_limits<map_id_t>::max()) return _error->Error(_("Wow, you exceeded the number of package " "names this APT is capable of.")); - if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1) + if (Cache.HeaderP->VersionCount >= std::numeric_limits<map_id_t>::max()) return _error->Error(_("Wow, you exceeded the number of versions " "this APT is capable of.")); - if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1) + if (Cache.HeaderP->DescriptionCount >= std::numeric_limits<map_id_t>::max()) return _error->Error(_("Wow, you exceeded the number of descriptions " "this APT is capable of.")); - if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL) + if (Cache.HeaderP->DependsCount >= std::numeric_limits<map_id_t>::max()) return _error->Error(_("Wow, you exceeded the number of dependencies " "this APT is capable of.")); @@ -302,10 +318,9 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, pkgCache::PkgIterator // Find the right version to write the description MD5SumValue CurMd5 = List.Description_md5(); - if (CurMd5.Value().empty() == true || List.Description().empty() == true) + if (CurMd5.Value().empty() == true && List.Description("").empty() == true) return true; - std::string CurLang = List.DescriptionLanguage(); - + std::vector<std::string> availDesc = List.AvailableDescriptionLanguages(); for (Ver = Pkg.VersionList(); Ver.end() == false; ++Ver) { pkgCache::DescIterator VerDesc = Ver.DescriptionList(); @@ -314,31 +329,16 @@ bool pkgCacheGenerator::MergeListPackage(ListParser &List, 
pkgCache::PkgIterator if (VerDesc.end() == true || MD5SumValue(VerDesc.md5()) != CurMd5) continue; - // don't add a new description if we have one for the given - // md5 && language - if (IsDuplicateDescription(VerDesc, CurMd5, CurLang) == true) - continue; - - pkgCache::DescIterator Desc; - Dynamic<pkgCache::DescIterator> DynDesc(Desc); - - map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, VerDesc->md5sum); - if (unlikely(descindex == 0 && _error->PendingError())) - return _error->Error(_("Error occurred while processing %s (%s%d)"), - Pkg.Name(), "NewDescription", 1); - - Desc->ParentPkg = Pkg.Index(); - - // we add at the end, so that the start is constant as we need - // that to be able to efficiently share these lists - VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap - for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc); - map_ptrloc * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc; - *LastNextDesc = descindex; + map_stringitem_t md5idx = VerDesc->md5sum; + for (std::vector<std::string>::const_iterator CurLang = availDesc.begin(); CurLang != availDesc.end(); ++CurLang) + { + // don't add a new description if we have one for the given + // md5 && language + if (IsDuplicateDescription(VerDesc, CurMd5, *CurLang) == true) + continue; - if (NewFileDesc(Desc,List) == false) - return _error->Error(_("Error occurred while processing %s (%s%d)"), - Pkg.Name(), "NewFileDesc", 1); + AddNewDescription(List, Ver, *CurLang, CurMd5, md5idx); + } // we can stop here as all "same" versions will share the description break; @@ -353,7 +353,7 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator { pkgCache::VerIterator Ver = Pkg.VersionList(); Dynamic<pkgCache::VerIterator> DynVer(Ver); - map_ptrloc *LastVer = &Pkg->VersionList; + map_pointer_t *LastVer = &Pkg->VersionList; void const * oldMap = Map.Data(); unsigned short const Hash = List.VersionHash(); @@ -362,7 +362,7 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator /* We know the list is sorted so we use that fact in the search. 
Insertion of new versions is done with correct sorting */ int Res = 1; - for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++) + for (; Ver.end() == false; LastVer = &Ver->NextVer, ++Ver) { Res = Cache.VS->CmpVersion(Version,Ver.VerStr()); // Version is higher as current version - insert here @@ -398,13 +398,13 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator } // Add a new version - map_ptrloc const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer); + map_pointer_t const verindex = NewVersion(Ver, Version, Pkg.Index(), Hash, *LastVer); if (verindex == 0 && _error->PendingError()) return _error->Error(_("Error occurred while processing %s (%s%d)"), Pkg.Name(), "NewVersion", 1); if (oldMap != Map.Data()) - LastVer += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap; + LastVer += (map_pointer_t const * const) Map.Data() - (map_pointer_t const * const) oldMap; *LastVer = verindex; if (unlikely(List.NewVersion(Ver) == false)) @@ -465,7 +465,7 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator D.ParentPkg().Group() == Grp) continue; - map_ptrloc *OldDepLast = NULL; + map_pointer_t *OldDepLast = NULL; pkgCache::VerIterator ConVersion = D.ParentVer(); Dynamic<pkgCache::VerIterator> DynV(ConVersion); // duplicate the Conflicts/Breaks/Replaces for :none arch @@ -486,11 +486,10 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator return true; } - /* Record the Description (it is not translated) */ + /* Record the Description(s) based on their master md5sum */ MD5SumValue CurMd5 = List.Description_md5(); - if (CurMd5.Value().empty() == true || List.Description().empty() == true) + if (CurMd5.Value().empty() == true && List.Description("").empty() == true) return true; - std::string CurLang = List.DescriptionLanguage(); /* Before we add a new description we first search in the group for a version with a description of the same MD5 - if so we reuse this @@ -501,28 +500,44 @@ bool pkgCacheGenerator::MergeListVersion(ListParser &List, pkgCache::PkgIterator for (pkgCache::VerIterator V = P.VersionList(); V.end() == false; ++V) { - if (IsDuplicateDescription(V.DescriptionList(), CurMd5, "") == false) + if (V->DescriptionList == 0 || MD5SumValue(V.DescriptionList().md5()) != CurMd5) continue; Ver->DescriptionList = V->DescriptionList; - return true; } } - // We haven't found reusable descriptions, so add the first description - pkgCache::DescIterator Desc = Ver.DescriptionList(); + // We haven't found reusable descriptions, so add the first description(s) + map_stringitem_t md5idx = Ver->DescriptionList == 0 ? 
0 : Ver.DescriptionList()->md5sum; + std::vector<std::string> availDesc = List.AvailableDescriptionLanguages(); + for (std::vector<std::string>::const_iterator CurLang = availDesc.begin(); CurLang != availDesc.end(); ++CurLang) + if (AddNewDescription(List, Ver, *CurLang, CurMd5, md5idx) == false) + return false; + return true; +} + /*}}}*/ +bool pkgCacheGenerator::AddNewDescription(ListParser &List, pkgCache::VerIterator &Ver, std::string const &lang, MD5SumValue const &CurMd5, map_stringitem_t &md5idx) /*{{{*/ +{ + pkgCache::DescIterator Desc; Dynamic<pkgCache::DescIterator> DynDesc(Desc); - map_ptrloc const descindex = NewDescription(Desc, CurLang, CurMd5, 0); + map_pointer_t const descindex = NewDescription(Desc, lang, CurMd5, md5idx); if (unlikely(descindex == 0 && _error->PendingError())) return _error->Error(_("Error occurred while processing %s (%s%d)"), - Pkg.Name(), "NewDescription", 2); + Ver.ParentPkg().Name(), "NewDescription", 1); - Desc->ParentPkg = Pkg.Index(); - Ver->DescriptionList = descindex; + md5idx = Desc->md5sum; + Desc->ParentPkg = Ver.ParentPkg().Index(); + + // we add at the end, so that the start is constant as we need + // that to be able to efficiently share these lists + pkgCache::DescIterator VerDesc = Ver.DescriptionList(); // old value might be invalid after ReMap + for (;VerDesc.end() == false && VerDesc->NextDesc != 0; ++VerDesc); + map_pointer_t * const LastNextDesc = (VerDesc.end() == true) ? &Ver->DescriptionList : &VerDesc->NextDesc; + *LastNextDesc = descindex; if (NewFileDesc(Desc,List) == false) return _error->Error(_("Error occurred while processing %s (%s%d)"), - Pkg.Name(), "NewFileDesc", 2); + Ver.ParentPkg().Name(), "NewFileDesc", 1); return true; } @@ -589,19 +604,19 @@ bool pkgCacheGenerator::NewGroup(pkgCache::GrpIterator &Grp, const string &Name) return true; // Get a structure - map_ptrloc const Group = AllocateInMap(sizeof(pkgCache::Group)); + map_pointer_t const Group = AllocateInMap(sizeof(pkgCache::Group)); if (unlikely(Group == 0)) return false; Grp = pkgCache::GrpIterator(Cache, Cache.GrpP + Group); - map_ptrloc const idxName = WriteStringInMap(Name); + map_stringitem_t const idxName = StoreString(PKGNAME, Name); if (unlikely(idxName == 0)) return false; Grp->Name = idxName; // Insert it into the hash table unsigned long const Hash = Cache.Hash(Name); - map_ptrloc *insertAt = &Cache.HeaderP->GrpHashTable[Hash]; + map_pointer_t *insertAt = &Cache.HeaderP->GrpHashTableP()[Hash]; while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + *insertAt)->Name) > 0) insertAt = &(Cache.GrpP + *insertAt)->Next; Grp->Next = *insertAt; @@ -626,7 +641,7 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name return true; // Get a structure - map_ptrloc const Package = AllocateInMap(sizeof(pkgCache::Package)); + map_pointer_t const Package = AllocateInMap(sizeof(pkgCache::Package)); if (unlikely(Package == 0)) return false; Pkg = pkgCache::PkgIterator(Cache,Cache.PkgP + Package); @@ -636,9 +651,9 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name { Grp->FirstPackage = Package; // Insert it into the hash table - unsigned long const Hash = Cache.Hash(Name); - map_ptrloc *insertAt = &Cache.HeaderP->PkgHashTable[Hash]; - while (*insertAt != 0 && strcasecmp(Name.c_str(), Cache.StrP + (Cache.PkgP + *insertAt)->Name) > 0) + map_id_t const Hash = Cache.Hash(Name); + map_pointer_t *insertAt = &Cache.HeaderP->PkgHashTableP()[Hash]; + while (*insertAt != 0 && 
strcasecmp(Name.c_str(), Cache.StrP + (Cache.GrpP + (Cache.PkgP + *insertAt)->Group)->Name) > 0) insertAt = &(Cache.PkgP + *insertAt)->NextPackage; Pkg->NextPackage = *insertAt; *insertAt = Package; @@ -653,10 +668,10 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name Grp->LastPackage = Package; // Set the name, arch and the ID - Pkg->Name = Grp->Name; + APT_IGNORE_DEPRECATED(Pkg->Name = Grp->Name;) Pkg->Group = Grp.Index(); // all is mapped to the native architecture - map_ptrloc const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : WriteUniqString(Arch.c_str()); + map_stringitem_t const idxArch = (Arch == "all") ? Cache.HeaderP->Architecture : StoreString(MIXED, Arch); if (unlikely(idxArch == 0)) return false; Pkg->Arch = idxArch; @@ -673,14 +688,14 @@ bool pkgCacheGenerator::AddImplicitDepends(pkgCache::GrpIterator &G, // copy P.Arch() into a string here as a cache remap // in NewDepends() later may alter the pointer location string Arch = P.Arch() == NULL ? "" : P.Arch(); - map_ptrloc *OldDepLast = NULL; + map_pointer_t *OldDepLast = NULL; /* MultiArch handling introduces a lot of implicit Dependencies: - MultiArch: same → Co-Installable if they have the same version - All others conflict with all other group members */ bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same); pkgCache::PkgIterator D = G.PackageList(); Dynamic<pkgCache::PkgIterator> DynD(D); - map_ptrloc const VerStrIdx = V->VerStr; + map_stringitem_t const VerStrIdx = V->VerStr; for (; D.end() != true; D = G.NextPkg(D)) { if (Arch == D.Arch() || D->VersionList == 0) @@ -713,11 +728,11 @@ bool pkgCacheGenerator::AddImplicitDepends(pkgCache::VerIterator &V, /* MultiArch handling introduces a lot of implicit Dependencies: - MultiArch: same → Co-Installable if they have the same version - All others conflict with all other group members */ - map_ptrloc *OldDepLast = NULL; + map_pointer_t *OldDepLast = NULL; bool const coInstall = ((V->MultiArch & pkgCache::Version::Same) == pkgCache::Version::Same); if (coInstall == true) { - map_ptrloc const VerStrIdx = V->VerStr; + map_stringitem_t const VerStrIdx = V->VerStr; // Replaces: ${self}:other ( << ${binary:Version}) NewDepends(D, V, VerStrIdx, pkgCache::Dep::Less, pkgCache::Dep::Replaces, @@ -746,15 +761,15 @@ bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver, return true; // Get a structure - map_ptrloc const VerFile = AllocateInMap(sizeof(pkgCache::VerFile)); + map_pointer_t const VerFile = AllocateInMap(sizeof(pkgCache::VerFile)); if (VerFile == 0) - return 0; + return false; pkgCache::VerFileIterator VF(Cache,Cache.VerFileP + VerFile); VF->File = CurrentFile - Cache.PkgFileP; // Link it to the end of the list - map_ptrloc *Last = &Ver->FileList; + map_pointer_t *Last = &Ver->FileList; for (pkgCache::VerFileIterator V = Ver.FileList(); V.end() == false; ++V) Last = &V->NextFile; VF->NextFile = *Last; @@ -772,14 +787,14 @@ bool pkgCacheGenerator::NewFileVer(pkgCache::VerIterator &Ver, // CacheGenerator::NewVersion - Create a new Version /*{{{*/ // --------------------------------------------------------------------- /* This puts a version structure in the linked list */ -unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver, +map_pointer_t pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver, const string &VerStr, - map_ptrloc const ParentPkg, - unsigned long const Hash, - unsigned long Next) + map_pointer_t const ParentPkg, + unsigned short const Hash, + 
map_pointer_t const Next) { // Get a structure - map_ptrloc const Version = AllocateInMap(sizeof(pkgCache::Version)); + map_pointer_t const Version = AllocateInMap(sizeof(pkgCache::Version)); if (Version == 0) return 0; @@ -814,7 +829,7 @@ unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver, } } // haven't found the version string, so create - map_ptrloc const idxVerStr = WriteStringInMap(VerStr); + map_stringitem_t const idxVerStr = StoreString(VERSIONNUMBER, VerStr); if (unlikely(idxVerStr == 0)) return 0; Ver->VerStr = idxVerStr; @@ -831,7 +846,7 @@ bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc, return true; // Get a structure - map_ptrloc const DescFile = AllocateInMap(sizeof(pkgCache::DescFile)); + map_pointer_t const DescFile = AllocateInMap(sizeof(pkgCache::DescFile)); if (DescFile == 0) return false; @@ -839,7 +854,7 @@ bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc, DF->File = CurrentFile - Cache.PkgFileP; // Link it to the end of the list - map_ptrloc *Last = &Desc->FileList; + map_pointer_t *Last = &Desc->FileList; for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; ++D) Last = &D->NextFile; @@ -858,20 +873,20 @@ bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc, // CacheGenerator::NewDescription - Create a new Description /*{{{*/ // --------------------------------------------------------------------- /* This puts a description structure in the linked list */ -map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc, +map_pointer_t pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc, const string &Lang, const MD5SumValue &md5sum, - map_ptrloc idxmd5str) + map_stringitem_t const idxmd5str) { // Get a structure - map_ptrloc const Description = AllocateInMap(sizeof(pkgCache::Description)); + map_pointer_t const Description = AllocateInMap(sizeof(pkgCache::Description)); if (Description == 0) return 0; // Fill it in Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description); Desc->ID = Cache.HeaderP->DescriptionCount++; - map_ptrloc const idxlanguage_code = WriteUniqString(Lang); + map_stringitem_t const idxlanguage_code = StoreString(MIXED, Lang); if (unlikely(idxlanguage_code == 0)) return 0; Desc->language_code = idxlanguage_code; @@ -880,7 +895,7 @@ map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc, Desc->md5sum = idxmd5str; else { - map_ptrloc const idxmd5sum = WriteStringInMap(md5sum.Value()); + map_stringitem_t const idxmd5sum = WriteStringInMap(md5sum.Value()); if (unlikely(idxmd5sum == 0)) return 0; Desc->md5sum = idxmd5sum; @@ -898,9 +913,9 @@ bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg, string const &Version, unsigned int const &Op, unsigned int const &Type, - map_ptrloc* &OldDepLast) + map_stringitem_t* &OldDepLast) { - map_ptrloc index = 0; + map_stringitem_t index = 0; if (Version.empty() == false) { int const CmpOp = Op & 0x0F; @@ -911,25 +926,25 @@ bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg, if (index == 0) { void const * const oldMap = Map.Data(); - index = WriteStringInMap(Version); + index = StoreString(VERSIONNUMBER, Version); if (unlikely(index == 0)) return false; if (OldDepLast != 0 && oldMap != Map.Data()) - OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap; + OldDepLast += (map_pointer_t const * const) Map.Data() - (map_pointer_t const * const) oldMap; } } return NewDepends(Pkg, Ver, index, Op, Type, OldDepLast); } bool 
pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg, pkgCache::VerIterator &Ver, - map_ptrloc const Version, + map_pointer_t const Version, unsigned int const &Op, unsigned int const &Type, - map_ptrloc* &OldDepLast) + map_pointer_t* &OldDepLast) { void const * const oldMap = Map.Data(); // Get a structure - map_ptrloc const Dependency = AllocateInMap(sizeof(pkgCache::Dependency)); + map_pointer_t const Dependency = AllocateInMap(sizeof(pkgCache::Dependency)); if (unlikely(Dependency == 0)) return false; @@ -954,7 +969,7 @@ bool pkgCacheGenerator::NewDepends(pkgCache::PkgIterator &Pkg, for (pkgCache::DepIterator D = Ver.DependsList(); D.end() == false; ++D) OldDepLast = &D->NextDepends; } else if (oldMap != Map.Data()) - OldDepLast += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap; + OldDepLast += (map_pointer_t const * const) Map.Data() - (map_pointer_t const * const) oldMap; Dep->NextDepends = *OldDepLast; *OldDepLast = Dep.Index(); @@ -1019,7 +1034,7 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver, return true; // Get a structure - map_ptrloc const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides)); + map_pointer_t const Provides = Owner->AllocateInMap(sizeof(pkgCache::Provides)); if (unlikely(Provides == 0)) return false; Cache.HeaderP->ProvidesCount++; @@ -1031,7 +1046,7 @@ bool pkgCacheGenerator::ListParser::NewProvides(pkgCache::VerIterator &Ver, Prv->NextPkgProv = Ver->ProvidesList; Ver->ProvidesList = Prv.Index(); if (Version.empty() == false) { - map_ptrloc const idxProvideVersion = WriteString(Version); + map_stringitem_t const idxProvideVersion = WriteString(Version); Prv->ProvideVersion = idxProvideVersion; if (unlikely(idxProvideVersion == 0)) return false; @@ -1066,14 +1081,14 @@ bool pkgCacheGenerator::SelectFile(const string &File,const string &Site, unsigned long Flags) { // Get some space for the structure - map_ptrloc const idxFile = AllocateInMap(sizeof(*CurrentFile)); + map_pointer_t const idxFile = AllocateInMap(sizeof(*CurrentFile)); if (unlikely(idxFile == 0)) return false; CurrentFile = Cache.PkgFileP + idxFile; // Fill it in - map_ptrloc const idxFileName = WriteStringInMap(File); - map_ptrloc const idxSite = WriteUniqString(Site); + map_stringitem_t const idxFileName = WriteStringInMap(File); + map_stringitem_t const idxSite = StoreString(MIXED, Site); if (unlikely(idxFileName == 0 || idxSite == 0)) return false; CurrentFile->FileName = idxFileName; @@ -1081,7 +1096,7 @@ bool pkgCacheGenerator::SelectFile(const string &File,const string &Site, CurrentFile->NextFile = Cache.HeaderP->FileList; CurrentFile->Flags = Flags; CurrentFile->ID = Cache.HeaderP->PackageFileCount; - map_ptrloc const idxIndexType = WriteUniqString(Index.GetType()->Label); + map_stringitem_t const idxIndexType = StoreString(MIXED, Index.GetType()->Label); if (unlikely(idxIndexType == 0)) return false; CurrentFile->IndexType = idxIndexType; @@ -1098,57 +1113,27 @@ bool pkgCacheGenerator::SelectFile(const string &File,const string &Site, // --------------------------------------------------------------------- /* This is used to create handles to strings. 
Given the same text it always returns the same number */ -unsigned long pkgCacheGenerator::WriteUniqString(const char *S, +map_stringitem_t pkgCacheGenerator::StoreString(enum StringType const type, const char *S, unsigned int Size) { - /* We use a very small transient hash table here, this speeds up generation - by a fair amount on slower machines */ - pkgCache::StringItem *&Bucket = UniqHash[(S[0]*5 + S[1]) % _count(UniqHash)]; - if (Bucket != 0 && - stringcmp(S,S+Size,Cache.StrP + Bucket->String) == 0) - return Bucket->String; - - // Search for an insertion point - pkgCache::StringItem *I = Cache.StringItemP + Cache.HeaderP->StringList; - int Res = 1; - map_ptrloc *Last = &Cache.HeaderP->StringList; - for (; I != Cache.StringItemP; Last = &I->NextItem, - I = Cache.StringItemP + I->NextItem) - { - Res = stringcmp(S,S+Size,Cache.StrP + I->String); - if (Res >= 0) - break; - } - - // Match - if (Res == 0) - { - Bucket = I; - return I->String; - } - - // Get a structure - void const * const oldMap = Map.Data(); - map_ptrloc const Item = AllocateInMap(sizeof(pkgCache::StringItem)); - if (Item == 0) - return 0; - - map_ptrloc const idxString = WriteStringInMap(S,Size); - if (unlikely(idxString == 0)) - return 0; - if (oldMap != Map.Data()) { - Last += (map_ptrloc const * const) Map.Data() - (map_ptrloc const * const) oldMap; - I += (pkgCache::StringItem const * const) Map.Data() - (pkgCache::StringItem const * const) oldMap; + std::string const key(S, Size); + + std::map<std::string,map_stringitem_t> * strings; + switch(type) { + case MIXED: strings = &strMixed; break; + case PKGNAME: strings = &strPkgNames; break; + case VERSIONNUMBER: strings = &strVersions; break; + case SECTION: strings = &strSections; break; + default: _error->Fatal("Unknown enum type used for string storage of '%s'", key.c_str()); return 0; } - *Last = Item; - // Fill in the structure - pkgCache::StringItem *ItemP = Cache.StringItemP + Item; - ItemP->NextItem = I - Cache.StringItemP; - ItemP->String = idxString; + std::map<std::string,map_stringitem_t>::const_iterator const item = strings->find(key); + if (item != strings->end()) + return item->second; - Bucket = ItemP; - return ItemP->String; + map_stringitem_t const idxString = WriteStringInMap(S,Size); + strings->insert(std::make_pair(key, idxString)); + return idxString; } /*}}}*/ // CheckValidity - Check that a cache is up-to-date /*{{{*/ @@ -1258,9 +1243,9 @@ static bool CheckValidity(const string &CacheFile, // --------------------------------------------------------------------- /* Size is kind of an abstract notion that is only used for the progress meter */ -static unsigned long ComputeSize(FileIterator Start,FileIterator End) +static map_filesize_t ComputeSize(FileIterator Start,FileIterator End) { - unsigned long TotalSize = 0; + map_filesize_t TotalSize = 0; for (; Start < End; ++Start) { if ((*Start)->HasPackages() == false) @@ -1275,7 +1260,7 @@ static unsigned long ComputeSize(FileIterator Start,FileIterator End) /* */ static bool BuildCache(pkgCacheGenerator &Gen, OpProgress *Progress, - unsigned long &CurrentSize,unsigned long TotalSize, + map_filesize_t &CurrentSize,map_filesize_t TotalSize, FileIterator Start, FileIterator End) { FileIterator I; @@ -1294,7 +1279,7 @@ static bool BuildCache(pkgCacheGenerator &Gen, continue; } - unsigned long Size = (*I)->Size(); + map_filesize_t Size = (*I)->Size(); if (Progress != NULL) Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Reading package lists")); CurrentSize += Size; @@ -1311,7 +1296,7 @@ static 
bool BuildCache(pkgCacheGenerator &Gen, CurrentSize = 0; for (I = Start; I != End; ++I) { - unsigned long Size = (*I)->Size(); + map_filesize_t Size = (*I)->Size(); if (Progress != NULL) Progress->OverallProgress(CurrentSize,TotalSize,Size,_("Collecting File Provides")); CurrentSize += Size; @@ -1325,9 +1310,9 @@ static bool BuildCache(pkgCacheGenerator &Gen, /*}}}*/ // CacheGenerator::CreateDynamicMMap - load an mmap with configuration options /*{{{*/ DynamicMMap* pkgCacheGenerator::CreateDynamicMMap(FileFd *CacheF, unsigned long Flags) { - unsigned long const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024); - unsigned long const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024); - unsigned long const MapLimit = _config->FindI("APT::Cache-Limit", 0); + map_filesize_t const MapStart = _config->FindI("APT::Cache-Start", 24*1024*1024); + map_filesize_t const MapGrow = _config->FindI("APT::Cache-Grow", 1*1024*1024); + map_filesize_t const MapLimit = _config->FindI("APT::Cache-Limit", 0); Flags |= MMap::Moveable; if (_config->FindB("APT::Cache-Fallback", false) == true) Flags |= MMap::Fallback; @@ -1365,7 +1350,7 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress Files.push_back (*j); } - unsigned long const EndOfSource = Files.size(); + map_filesize_t const EndOfSource = Files.size(); if (_system->AddStatusFiles(Files) == false) return false; @@ -1455,8 +1440,8 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress } // Lets try the source cache. - unsigned long CurrentSize = 0; - unsigned long TotalSize = 0; + map_filesize_t CurrentSize = 0; + map_filesize_t TotalSize = 0; if (CheckValidity(SrcCacheFile, List, Files.begin(), Files.begin()+EndOfSource) == true) { @@ -1464,7 +1449,7 @@ bool pkgCacheGenerator::MakeStatusCache(pkgSourceList &List,OpProgress *Progress std::clog << "srcpkgcache.bin is valid - populate MMap with it." 
<< std::endl; // Preload the map with the source cache FileFd SCacheF(SrcCacheFile,FileFd::ReadOnly); - unsigned long const alloc = Map->RawAllocate(SCacheF.Size()); + map_pointer_t const alloc = Map->RawAllocate(SCacheF.Size()); if ((alloc == 0 && _error->PendingError()) || SCacheF.Read((unsigned char *)Map->Data() + alloc, SCacheF.Size()) == false) @@ -1551,13 +1536,13 @@ APT_DEPRECATED bool pkgMakeOnlyStatusCache(OpProgress &Progress,DynamicMMap **Ou bool pkgCacheGenerator::MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap) { std::vector<pkgIndexFile *> Files; - unsigned long EndOfSource = Files.size(); + map_filesize_t EndOfSource = Files.size(); if (_system->AddStatusFiles(Files) == false) return false; SPtr<DynamicMMap> Map = CreateDynamicMMap(NULL); - unsigned long CurrentSize = 0; - unsigned long TotalSize = 0; + map_filesize_t CurrentSize = 0; + map_filesize_t TotalSize = 0; TotalSize = ComputeSize(Files.begin()+EndOfSource,Files.end()); diff --git a/apt-pkg/pkgcachegen.h b/apt-pkg/pkgcachegen.h index 1e1a71026..c4ace713d 100644 --- a/apt-pkg/pkgcachegen.h +++ b/apt-pkg/pkgcachegen.h @@ -27,21 +27,25 @@ #include <vector> #include <string> +#include <map> class FileFd; class pkgSourceList; class OpProgress; class pkgIndexFile; -class pkgCacheGenerator /*{{{*/ +class APT_HIDDEN pkgCacheGenerator /*{{{*/ { private: + APT_HIDDEN map_stringitem_t WriteStringInMap(std::string const &String) { return WriteStringInMap(String.c_str()); }; + APT_HIDDEN map_stringitem_t WriteStringInMap(const char *String); + APT_HIDDEN map_stringitem_t WriteStringInMap(const char *String, const unsigned long &Len); + APT_HIDDEN map_pointer_t AllocateInMap(const unsigned long &size); - pkgCache::StringItem *UniqHash[26]; - APT_HIDDEN map_ptrloc WriteStringInMap(std::string const &String) { return WriteStringInMap(String.c_str()); }; - APT_HIDDEN map_ptrloc WriteStringInMap(const char *String); - APT_HIDDEN map_ptrloc WriteStringInMap(const char *String, const unsigned long &Len); - APT_HIDDEN map_ptrloc AllocateInMap(const unsigned long &size); + std::map<std::string,map_stringitem_t> strMixed; + std::map<std::string,map_stringitem_t> strSections; + std::map<std::string,map_stringitem_t> strPkgNames; + std::map<std::string,map_stringitem_t> strVersions; public: @@ -78,21 +82,22 @@ class pkgCacheGenerator /*{{{*/ bool NewFileDesc(pkgCache::DescIterator &Desc,ListParser &List); bool NewDepends(pkgCache::PkgIterator &Pkg, pkgCache::VerIterator &Ver, std::string const &Version, unsigned int const &Op, - unsigned int const &Type, map_ptrloc* &OldDepLast); + unsigned int const &Type, map_pointer_t* &OldDepLast); bool NewDepends(pkgCache::PkgIterator &Pkg, pkgCache::VerIterator &Ver, - map_ptrloc const Version, unsigned int const &Op, - unsigned int const &Type, map_ptrloc* &OldDepLast); - unsigned long NewVersion(pkgCache::VerIterator &Ver,const std::string &VerStr,unsigned long Next) APT_DEPRECATED + map_pointer_t const Version, unsigned int const &Op, + unsigned int const &Type, map_pointer_t* &OldDepLast); + map_pointer_t NewVersion(pkgCache::VerIterator &Ver,const std::string &VerStr,map_pointer_t const Next) APT_DEPRECATED { return NewVersion(Ver, VerStr, 0, 0, Next); } - unsigned long NewVersion(pkgCache::VerIterator &Ver,const std::string &VerStr, - map_ptrloc const ParentPkg, unsigned long const Hash, - unsigned long Next); - map_ptrloc NewDescription(pkgCache::DescIterator &Desc,const std::string &Lang,const MD5SumValue &md5sum,map_ptrloc Next); + map_pointer_t NewVersion(pkgCache::VerIterator 
&Ver,const std::string &VerStr, + map_pointer_t const ParentPkg, unsigned short const Hash, + map_pointer_t const Next); + map_pointer_t NewDescription(pkgCache::DescIterator &Desc,const std::string &Lang,const MD5SumValue &md5sum,map_stringitem_t const idxmd5str); public: - unsigned long WriteUniqString(const char *S,unsigned int Size); - inline unsigned long WriteUniqString(const std::string &S) {return WriteUniqString(S.c_str(),S.length());}; + enum StringType { MIXED, PKGNAME, VERSIONNUMBER, SECTION }; + map_stringitem_t StoreString(StringType const type, const char * S, unsigned int const Size); + inline map_stringitem_t StoreString(enum StringType const type, const std::string &S) {return StoreString(type, S.c_str(),S.length());}; void DropProgress() {Progress = 0;}; bool SelectFile(const std::string &File,const std::string &Site,pkgIndexFile const &Index, @@ -106,10 +111,10 @@ class pkgCacheGenerator /*{{{*/ bool MergeFileProvides(ListParser &List); bool FinishCache(OpProgress *Progress) APT_DEPRECATED APT_CONST; - static bool MakeStatusCache(pkgSourceList &List,OpProgress *Progress, + APT_PUBLIC static bool MakeStatusCache(pkgSourceList &List,OpProgress *Progress, MMap **OutMap = 0,bool AllowMem = false); - static bool MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap); - static DynamicMMap* CreateDynamicMMap(FileFd *CacheF, unsigned long Flags = 0); + APT_PUBLIC static bool MakeOnlyStatusCache(OpProgress *Progress,DynamicMMap **OutMap); + APT_PUBLIC static DynamicMMap* CreateDynamicMMap(FileFd *CacheF, unsigned long Flags = 0); void ReMap(void const * const oldMap, void const * const newMap); @@ -125,27 +130,31 @@ class pkgCacheGenerator /*{{{*/ APT_HIDDEN bool AddImplicitDepends(pkgCache::GrpIterator &G, pkgCache::PkgIterator &P, pkgCache::VerIterator &V); APT_HIDDEN bool AddImplicitDepends(pkgCache::VerIterator &V, pkgCache::PkgIterator &D); + + APT_HIDDEN bool AddNewDescription(ListParser &List, pkgCache::VerIterator &Ver, + std::string const &lang, MD5SumValue const &CurMd5, map_stringitem_t &md5idx); }; /*}}}*/ // This is the abstract package list parser class. 
/*{{{*/ -class pkgCacheGenerator::ListParser +class APT_HIDDEN pkgCacheGenerator::ListParser { pkgCacheGenerator *Owner; friend class pkgCacheGenerator; // Some cache items pkgCache::VerIterator OldDepVer; - map_ptrloc *OldDepLast; + map_pointer_t *OldDepLast; // Flag file dependencies bool FoundFileDeps; protected: - inline unsigned long WriteUniqString(std::string S) {return Owner->WriteUniqString(S);}; - inline unsigned long WriteUniqString(const char *S,unsigned int Size) {return Owner->WriteUniqString(S,Size);}; - inline unsigned long WriteString(const std::string &S) {return Owner->WriteStringInMap(S);}; - inline unsigned long WriteString(const char *S,unsigned int Size) {return Owner->WriteStringInMap(S,Size);}; + inline map_stringitem_t StoreString(pkgCacheGenerator::StringType const type, std::string const &S) {return Owner->StoreString(type, S);}; + inline map_stringitem_t StoreString(pkgCacheGenerator::StringType const type, const char *S,unsigned int Size) {return Owner->StoreString(type, S, Size);}; + + inline map_stringitem_t WriteString(const std::string &S) {return Owner->WriteStringInMap(S);}; + inline map_stringitem_t WriteString(const char *S,unsigned int Size) {return Owner->WriteStringInMap(S,Size);}; bool NewDepends(pkgCache::VerIterator &Ver,const std::string &Package, const std::string &Arch, const std::string &Version,unsigned int Op, unsigned int Type); @@ -160,8 +169,8 @@ class pkgCacheGenerator::ListParser virtual bool ArchitectureAll() = 0; virtual std::string Version() = 0; virtual bool NewVersion(pkgCache::VerIterator &Ver) = 0; - virtual std::string Description() = 0; - virtual std::string DescriptionLanguage() = 0; + virtual std::string Description(std::string const &lang) = 0; + virtual std::vector<std::string> AvailableDescriptionLanguages() = 0; virtual MD5SumValue Description_md5() = 0; virtual unsigned short VersionHash() = 0; /** compare currently parsed version with given version @@ -169,14 +178,14 @@ class pkgCacheGenerator::ListParser * \param Hash of the currently parsed version * \param Ver to compare with */ -#if (APT_PKG_MAJOR >= 4 && APT_PKG_MINOR >= 13) +#if APT_PKG_ABI >= 413 virtual #endif APT_PURE bool SameVersion(unsigned short const Hash, pkgCache::VerIterator const &Ver); virtual bool UsePackage(pkgCache::PkgIterator &Pkg, pkgCache::VerIterator &Ver) = 0; - virtual unsigned long Offset() = 0; - virtual unsigned long Size() = 0; + virtual map_filesize_t Offset() = 0; + virtual map_filesize_t Size() = 0; virtual bool Step() = 0; @@ -184,7 +193,7 @@ class pkgCacheGenerator::ListParser virtual bool CollectFileProvides(pkgCache &/*Cache*/, pkgCache::VerIterator &/*Ver*/) {return true;}; - ListParser() : FoundFileDeps(false) {}; + ListParser() : Owner(NULL), OldDepLast(NULL), FoundFileDeps(false) {}; virtual ~ListParser() {}; }; /*}}}*/ diff --git a/apt-pkg/pkgrecords.cc b/apt-pkg/pkgrecords.cc index c403e4dc3..859af3a09 100644 --- a/apt-pkg/pkgrecords.cc +++ b/apt-pkg/pkgrecords.cc @@ -26,7 +26,7 @@ // Records::pkgRecords - Constructor /*{{{*/ // --------------------------------------------------------------------- /* This will create the necessary structures to access the status files */ -pkgRecords::pkgRecords(pkgCache &Cache) : d(NULL), Cache(Cache), +pkgRecords::pkgRecords(pkgCache &aCache) : d(NULL), Cache(aCache), Files(Cache.HeaderP->PackageFileCount) { for (pkgCache::PkgFileIterator I = Cache.FileBegin(); diff --git a/apt-pkg/pkgrecords.h b/apt-pkg/pkgrecords.h index b5237b3a0..bcc05baba 100644 --- a/apt-pkg/pkgrecords.h +++ 
b/apt-pkg/pkgrecords.h @@ -18,6 +18,8 @@ #define PKGLIB_PKGRECORDS_H #include <apt-pkg/pkgcache.h> +#include <apt-pkg/hashes.h> +#include <apt-pkg/macros.h> #include <string> #include <vector> @@ -56,17 +58,46 @@ class pkgRecords::Parser /*{{{*/ // These refer to the archive file for the Version virtual std::string FileName() {return std::string();}; - virtual std::string MD5Hash() {return std::string();}; - virtual std::string SHA1Hash() {return std::string();}; - virtual std::string SHA256Hash() {return std::string();}; - virtual std::string SHA512Hash() {return std::string();}; virtual std::string SourcePkg() {return std::string();}; virtual std::string SourceVer() {return std::string();}; + /** return all known hashes in this record. + * + * For authentication purposes packages come with hashsums which + * this method is supposed to parse and return so that clients can + * choose the hash to be used. + */ + virtual HashStringList Hashes() const { return HashStringList(); }; +#if APT_PKG_ABI >= 413 + APT_DEPRECATED std::string MD5Hash() const { return GetHashFromHashes("MD5Sum"); }; + APT_DEPRECATED std::string SHA1Hash() const { return GetHashFromHashes("SHA1"); }; + APT_DEPRECATED std::string SHA256Hash() const { return GetHashFromHashes("SHA256"); }; + APT_DEPRECATED std::string SHA512Hash() const { return GetHashFromHashes("SHA512"); }; +#else + APT_DEPRECATED std::string MD5Hash() { return GetHashFromHashes("MD5Sum"); }; + APT_DEPRECATED std::string SHA1Hash() { return GetHashFromHashes("SHA1"); }; + APT_DEPRECATED std::string SHA256Hash() { return GetHashFromHashes("SHA256"); }; + APT_DEPRECATED std::string SHA512Hash() { return GetHashFromHashes("SHA512"); }; +#endif + // These are some general stats about the package virtual std::string Maintainer() {return std::string();}; - virtual std::string ShortDesc() {return std::string();}; - virtual std::string LongDesc() {return std::string();}; + /** return short description in language from record. + * + * @see #LongDesc + */ + virtual std::string ShortDesc(std::string const &/*lang*/) {return std::string();}; + /** return long description in language from record. + * + * If \b lang is empty the "best" available language will be + * returned as determined by the APT::Languages configuration. + * If a (requested) language can't be found in this record an empty + * string will be returned. + */ + virtual std::string LongDesc(std::string const &/*lang*/) {return std::string();}; + std::string ShortDesc() {return ShortDesc("");}; + std::string LongDesc() {return LongDesc("");}; + virtual std::string Name() {return std::string();}; virtual std::string Homepage() {return std::string();} @@ -77,6 +108,14 @@ class pkgRecords::Parser /*{{{*/ virtual void GetRec(const char *&Start,const char *&Stop) {Start = Stop = 0;}; virtual ~Parser() {}; + + private: + APT_HIDDEN std::string GetHashFromHashes(char const * const type) const + { + HashStringList const hashes = Hashes(); + HashString const * const hs = hashes.find(type); + return hs != NULL ? hs->HashValue() : ""; + }; }; /*}}}*/ #endif diff --git a/apt-pkg/pkgsystem.h b/apt-pkg/pkgsystem.h index 6e33c67ed..f88ffa7c8 100644 --- a/apt-pkg/pkgsystem.h +++ b/apt-pkg/pkgsystem.h @@ -85,10 +85,12 @@ class pkgSystem virtual bool AddStatusFiles(std::vector<pkgIndexFile *> &List) = 0; virtual bool FindIndex(pkgCache::PkgFileIterator File, pkgIndexFile *&Found) const = 0; - + /* Evaluate how 'right' we are for this system based on the filesystem etc..
*/ - virtual signed Score(Configuration const &/*Cnf*/) {return 0;}; + virtual signed Score(Configuration const &/*Cnf*/) { + return 0; + }; pkgSystem(); virtual ~pkgSystem() {}; diff --git a/apt-pkg/policy.cc b/apt-pkg/policy.cc index 3cfc32829..00693ce54 100644 --- a/apt-pkg/policy.cc +++ b/apt-pkg/policy.cc @@ -4,22 +4,12 @@ /* ###################################################################### Package Version Policy implementation - + This is just a really simple wrapper around pkgVersionMatch with some added goodies to manage the list of things.. - - Priority Table: - - 1000 -> inf = Downgradeable priorities - 1000 = The 'no downgrade' pseduo-status file - 100 -> 1000 = Standard priorities - 990 = Config file override package files - 989 = Start for preference auto-priorities - 500 = Default package files - 100 = The status file and ButAutomaticUpgrades sources - 0 -> 100 = NotAutomatic sources like experimental - -inf -> 0 = Never selected - + + See man apt_preferences for what value means what. + ##################################################################### */ /*}}}*/ // Include Files /*{{{*/ @@ -56,7 +46,7 @@ using namespace std; file matches the V0 policy engine. */ pkgPolicy::pkgPolicy(pkgCache *Owner) : Pins(0), PFPriority(0), Cache(Owner) { - if (Owner == 0 || &(Owner->Head()) == 0) + if (Owner == 0) return; PFPriority = new signed short[Owner->Head().PackageFileCount]; Pins = new Pin[Owner->Head().PackageCount]; @@ -107,37 +97,29 @@ bool pkgPolicy::InitDefaults() // Apply the defaults.. SPtrArray<bool> Fixed = new bool[Cache->HeaderP->PackageFileCount]; memset(Fixed,0,sizeof(*Fixed)*Cache->HeaderP->PackageFileCount); - signed Cur = 989; StatusOverride = false; - for (vector<Pin>::const_iterator I = Defaults.begin(); I != Defaults.end(); - ++I, --Cur) + for (vector<Pin>::const_iterator I = Defaults.begin(); I != Defaults.end(); ++I) { pkgVersionMatch Match(I->Data,I->Type); for (pkgCache::PkgFileIterator F = Cache->FileBegin(); F != Cache->FileEnd(); ++F) { - if (Match.FileMatch(F) == true && Fixed[F->ID] == false) + if (Fixed[F->ID] == false && Match.FileMatch(F) == true) { - if (I->Priority != 0 && I->Priority > 0) - Cur = I->Priority; - - if (I->Priority < 0) - PFPriority[F->ID] = I->Priority; - else - PFPriority[F->ID] = Cur; - - if (PFPriority[F->ID] > 1000) + PFPriority[F->ID] = I->Priority; + + if (PFPriority[F->ID] >= 1000) StatusOverride = true; - + Fixed[F->ID] = true; - } - } + } + } } if (_config->FindB("Debug::pkgPolicy",false) == true) for (pkgCache::PkgFileIterator F = Cache->FileBegin(); F != Cache->FileEnd(); ++F) - std::clog << "Prio of " << F.FileName() << ' ' << PFPriority[F->ID] << std::endl; - - return true; + std::clog << "Prio of " << F.FileName() << ' ' << PFPriority[F->ID] << std::endl; + + return true; } /*}}}*/ // Policy::GetCandidateVer - Get the candidate install version /*{{{*/ @@ -166,9 +148,7 @@ pkgCache::VerIterator pkgPolicy::GetCandidateVer(pkgCache::PkgIterator const &Pk effectively excludes everything <= 0 which are the non-automatic priorities.. The status file is given a prio of 100 which will exclude not-automatic sources, except in a single shot not-installed mode. - The second pseduo-status file is at prio 1000, above which will permit - the user to force-downgrade things. - + The user pin is subject to the same priority rules as default selections. 
Thus there are two ways to create a pin - a pin that tracks the default when the default is taken away, and a permanent @@ -218,9 +198,9 @@ pkgCache::VerIterator pkgPolicy::GetCandidateVer(pkgCache::PkgIterator const &Pk Pref = Ver; PrefSeen = true; } - /* Elevate our current selection (or the status file itself) - to the Pseudo-status priority. */ - Max = 1000; + /* Elevate our current selection (or the status file itself) so that only + a downgrade can override it from now on */ + Max = 999; // Fast path optimize. if (StatusOverride == false) @@ -336,13 +316,7 @@ pkgCache::VerIterator pkgPolicy::GetMatch(pkgCache::PkgIterator const &Pkg) APT_PURE signed short pkgPolicy::GetPriority(pkgCache::PkgIterator const &Pkg) { if (Pins[Pkg->ID].Type != pkgVersionMatch::None) - { - // In this case 0 means default priority - if (Pins[Pkg->ID].Priority == 0) - return 989; return Pins[Pkg->ID].Priority; - } - return 0; } APT_PURE signed short pkgPolicy::GetPriority(pkgCache::PkgFileIterator const &File) diff --git a/apt-pkg/sourcelist.cc b/apt-pkg/sourcelist.cc index e37899ec6..8b960572b 100644 --- a/apt-pkg/sourcelist.cc +++ b/apt-pkg/sourcelist.cc @@ -119,7 +119,7 @@ bool pkgSourceList::Type::ParseStanza(vector<metaIndex *> &List, std::vector<std::string> list_section = StringSplit(Section, " "); for (std::vector<std::string>::const_iterator U = list_uris.begin(); - U != list_uris.end(); U++) + U != list_uris.end(); ++U) { std::string URI = (*U); if (!FixupURI(URI)) @@ -129,10 +129,10 @@ bool pkgSourceList::Type::ParseStanza(vector<metaIndex *> &List, } for (std::vector<std::string>::const_iterator I = list_dist.begin(); - I != list_dist.end(); I++) + I != list_dist.end(); ++I) { for (std::vector<std::string>::const_iterator J = list_section.begin(); - J != list_section.end(); J++) + J != list_section.end(); ++J) { if (CreateItem(List, URI, (*I), (*J), Options) == false) { @@ -331,7 +331,7 @@ bool pkgSourceList::ParseFileOldStyle(string File) { // Open the stream for reading ifstream F(File.c_str(),ios::in /*| ios::nocreate*/); - if (!F != 0) + if (F.fail() == true) return _error->Errno("ifstream::ifstream",_("Opening %s"),File.c_str()); // CNC:2003-12-10 - 300 is too short. 
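The policy.cc hunks above drop the hard-coded priority table in favour of the rules documented in apt_preferences(5), stop remapping a pin priority of 0 to 989, and lower the status-file elevation from 1000 to 999 so that from now on only an explicit downgrade priority can override it. A minimal sketch of how a caller observes the result through the public pkgPolicy interface follows; the pkgCacheFile setup helper and the function name are illustrative assumptions (not part of this patch) and the usual libapt-pkg initialisation (pkgInitConfig/pkgInitSystem) is taken as already done:

#include <apt-pkg/cachefile.h>
#include <apt-pkg/policy.h>
#include <apt-pkg/pkgcache.h>
#include <iostream>

// Print the effective pin priority and the candidate version of one package.
// With this change GetPriority() returns the pin value as-is instead of
// substituting 989 for a pin priority of 0.
static void ShowCandidate(pkgCacheFile &CacheFile, const char *Name)
{
   pkgCache *Cache = CacheFile.GetPkgCache();
   pkgPolicy *Policy = CacheFile.GetPolicy();
   if (Cache == NULL || Policy == NULL)
      return;
   pkgCache::PkgIterator Pkg = Cache->FindPkg(Name);
   if (Pkg.end() == true)
      return;
   std::cout << Name << " pin: " << Policy->GetPriority(Pkg) << std::endl;
   pkgCache::VerIterator Cand = Policy->GetCandidateVer(Pkg);
   if (Cand.end() == false)
      std::cout << Name << " candidate: " << Cand.VerStr() << std::endl;
}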
@@ -408,7 +408,7 @@ int pkgSourceList::ParseFileDeb822(string File) string const types = Tags.FindS("Types"); std::vector<std::string> list_types = StringSplit(types, " "); for (std::vector<std::string>::const_iterator I = list_types.begin(); - I != list_types.end(); I++) + I != list_types.end(); ++I) { Type *Parse = Type::GetType((*I).c_str()); if (Parse == 0) diff --git a/apt-pkg/sourcelist.h b/apt-pkg/sourcelist.h index 9df0c1d74..998357509 100644 --- a/apt-pkg/sourcelist.h +++ b/apt-pkg/sourcelist.h @@ -55,12 +55,12 @@ class metaIndex; class pkgSourceList { public: - + // List of supported source list types class Type { public: - + // Global list of Items supported static Type **GlobalList; static unsigned long GlobalListLen; @@ -83,9 +83,9 @@ class pkgSourceList Type(); virtual ~Type() {}; }; - + typedef std::vector<metaIndex *>::const_iterator const_iterator; - + protected: std::vector<metaIndex *> SrcList; diff --git a/apt-pkg/srcrecords.cc b/apt-pkg/srcrecords.cc index 775cf2e5f..3175ee75f 100644 --- a/apt-pkg/srcrecords.cc +++ b/apt-pkg/srcrecords.cc @@ -14,6 +14,7 @@ #include<config.h> #include <apt-pkg/srcrecords.h> +#include <apt-pkg/debsrcrecords.h> #include <apt-pkg/error.h> #include <apt-pkg/sourcelist.h> #include <apt-pkg/metaindex.h> @@ -81,6 +82,27 @@ bool pkgSrcRecords::Restart() return true; } /*}}}*/ +// SrcRecords::Step - Step to the next Source Record /*{{{*/ +// --------------------------------------------------------------------- +/* Step to the next source package record */ +const pkgSrcRecords::Parser* pkgSrcRecords::Step() +{ + if (Current == Files.end()) + return 0; + + // Step to the next record, possibly switching files + while ((*Current)->Step() == false) + { + if (_error->PendingError() == true) + return 0; + ++Current; + if (Current == Files.end()) + return 0; + } + + return *Current; +} + /*}}}*/ // SrcRecords::Find - Find the first source package with the given name /*{{{*/ // --------------------------------------------------------------------- /* This searches on both source package names and output binary names and @@ -88,21 +110,11 @@ bool pkgSrcRecords::Restart() function to be called multiple times to get successive entries */ pkgSrcRecords::Parser *pkgSrcRecords::Find(const char *Package,bool const &SrcOnly) { - if (Current == Files.end()) - return 0; - while (true) { - // Step to the next record, possibly switching files - while ((*Current)->Step() == false) - { - if (_error->PendingError() == true) - return 0; - ++Current; - if (Current == Files.end()) - return 0; - } - + if(Step() == 0) + return 0; + // IO error somehow if (_error->PendingError() == true) return 0; @@ -136,5 +148,33 @@ const char *pkgSrcRecords::Parser::BuildDepType(unsigned char const &Type) return fields[Type]; } /*}}}*/ +bool pkgSrcRecords::Parser::Files2(std::vector<pkgSrcRecords::File2> &F2)/*{{{*/ +{ + debSrcRecordParser * const deb = dynamic_cast<debSrcRecordParser*>(this); + if (deb != NULL) + return deb->Files2(F2); - + std::vector<pkgSrcRecords::File> F; + if (Files(F) == false) + return false; + for (std::vector<pkgSrcRecords::File>::const_iterator f = F.begin(); f != F.end(); ++f) + { + pkgSrcRecords::File2 f2; +#if __GNUC__ >= 4 + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#endif + f2.MD5Hash = f->MD5Hash; + f2.Size = f->Size; + f2.Hashes.push_back(HashString("MD5Sum", f->MD5Hash)); + f2.FileSize = f->Size; +#if __GNUC__ >= 4 + #pragma GCC diagnostic pop +#endif + f2.Path = f->Path; + f2.Type = f->Type; + 
F2.push_back(f2); + } + return true; +} + /*}}}*/ diff --git a/apt-pkg/srcrecords.h b/apt-pkg/srcrecords.h index 9915debfe..c931e17b7 100644 --- a/apt-pkg/srcrecords.h +++ b/apt-pkg/srcrecords.h @@ -14,6 +14,7 @@ #define PKGLIB_SRCRECORDS_H #include <apt-pkg/macros.h> +#include <apt-pkg/hashes.h> #include <string> #include <vector> @@ -29,15 +30,22 @@ class pkgSrcRecords { public: +APT_IGNORE_DEPRECATED_PUSH // Describes a single file struct File { - std::string MD5Hash; - unsigned long Size; + APT_DEPRECATED std::string MD5Hash; + APT_DEPRECATED unsigned long Size; std::string Path; std::string Type; }; - + struct File2 : public File + { + unsigned long long FileSize; + HashStringList Hashes; + }; +APT_IGNORE_DEPRECATED_POP + // Abstract parser for each source record class Parser { @@ -77,6 +85,7 @@ class pkgSrcRecords static const char *BuildDepType(unsigned char const &Type) APT_PURE; virtual bool Files(std::vector<pkgSrcRecords::File> &F) = 0; + bool Files2(std::vector<pkgSrcRecords::File2> &F); Parser(const pkgIndexFile *Index) : iIndex(Index) {}; virtual ~Parser() {}; @@ -95,8 +104,13 @@ class pkgSrcRecords // Reset the search bool Restart(); - // Locate a package by name - Parser *Find(const char *Package,bool const &SrcOnly = false); + // Step to the next SourcePackage and return pointer to the + // next SourceRecord. The pointer is owned by libapt. + const Parser* Step(); + + // Locate a package by name and return pointer to the Parser. + // The pointer is owned by libapt. + Parser* Find(const char *Package,bool const &SrcOnly = false); pkgSrcRecords(pkgSourceList &List); virtual ~pkgSrcRecords(); diff --git a/apt-pkg/tagfile-order.c b/apt-pkg/tagfile-order.c new file mode 100644 index 000000000..10c61ab94 --- /dev/null +++ b/apt-pkg/tagfile-order.c @@ -0,0 +1,109 @@ +/* In this file is the order defined in which e.g. apt-ftparchive will write stanzas in. + Other commands might or might not use this. 'apt-cache show' e.g. does NOT! + + The order we chose here is inspired by both dpkg and dak. + The testcase test/integration/test-apt-tagfile-fields-order intends to ensure that + this file isn't lacking (too far) behind dpkg over time. 
*/ + +static const char *iTFRewritePackageOrder[] = { + "Package", + "Package-Type", + "Architecture", + "Subarchitecture", // Used only by d-i + "Version", + "Revision", // Obsolete (warning in dpkg) + "Package-Revision", // Obsolete (warning in dpkg) + "Package_Revision", // Obsolete (warning in dpkg) + "Kernel-Version", // Used only by d-i + "Built-Using", + "Built-For-Profiles", + "Multi-Arch", + "Status", + "Priority", + "Class", // dpkg nickname for Priority + "Essential", + "Installer-Menu-Item", // Used only by d-i + "Section", + "Source", + "Origin", + "Maintainer", + "Original-Maintainer", // unknown in dpkg order + "Bugs", + "Config-Version", // Internal of dpkg + "Conffiles", + "Triggers-Awaited", + "Triggers-Pending", + "Installed-Size", + "Provides", + "Pre-Depends", + "Depends", + "Recommends", + "Recommended", // dpkg nickname for Recommends + "Suggests", + "Optional", // dpkg nickname for Suggests + "Conflicts", + "Breaks", + "Replaces", + "Enhances", + "Filename", + "MSDOS-Filename", // Obsolete (used by dselect) + "Size", + "MD5sum", + "SHA1", + "SHA256", + "SHA512", + "Homepage", + "Description", + "Tag", + "Task", + 0 +}; +static const char *iTFRewriteSourceOrder[] = { + "Package", + "Source", // dsc file, renamed to Package in Sources + "Format", + "Binary", + "Architecture", + "Version", + "Priority", + "Class", // dpkg nickname for Priority + "Section", + "Origin", + "Maintainer", + "Original-Maintainer", // unknown in dpkg order + "Uploaders", + "Dm-Upload-Allowed", // Obsolete (ignored by dak) + "Standards-Version", + "Build-Depends", + "Build-Depends-Arch", + "Build-Depends-Indep", + "Build-Conflicts", + "Build-Conflicts-Arch", + "Build-Conflicts-Indep", + "Testsuite", + "Homepage", + "Vcs-Browser", + "Vcs-Browse", // dak only (nickname?) + "Vcs-Arch", + "Vcs-Bzr", + "Vcs-Cvs", + "Vcs-Darcs", + "Vcs-Git", + "Vcs-Hg", + "Vcs-Mtn", + "Vcs-Svn", + "Directory", + "Package-List", + "Files", + "Checksums-Md5", + "Checksums-Sha1", + "Checksums-Sha256", + "Checksums-Sha512", + 0 +}; + +/* Two levels of initialization are used because gcc will set the symbol + size of an array to the length of the array, causing dynamic relinking + errors. 
Doing this makes the symbol size constant */ +const char **TFRewritePackageOrder = iTFRewritePackageOrder; +const char **TFRewriteSourceOrder = iTFRewriteSourceOrder; diff --git a/apt-pkg/tagfile.cc b/apt-pkg/tagfile.cc index 91d176e3c..5ff495fbd 100644 --- a/apt-pkg/tagfile.cc +++ b/apt-pkg/tagfile.cc @@ -47,16 +47,60 @@ public: unsigned long long Size; }; +class pkgTagSectionPrivate +{ +public: + pkgTagSectionPrivate() + { + } + struct TagData { + unsigned int StartTag; + unsigned int EndTag; + unsigned int StartValue; + unsigned int NextInBucket; + + TagData(unsigned int const StartTag) : StartTag(StartTag), EndTag(0), StartValue(0), NextInBucket(0) {} + }; + std::vector<TagData> Tags; +}; + +static unsigned long AlphaHash(const char *Text, size_t Length) /*{{{*/ +{ + /* This very simple hash function for the last 8 letters gives + very good performance on the debian package files */ + if (Length > 8) + { + Text += (Length - 8); + Length = 8; + } + unsigned long Res = 0; + for (size_t i = 0; i < Length; ++i) + Res = ((unsigned long)(Text[i]) & 0xDF) ^ (Res << 1); + return Res & 0xFF; +} + /*}}}*/ + // TagFile::pkgTagFile - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ pkgTagFile::pkgTagFile(FileFd *pFd,unsigned long long Size) + : d(NULL) +{ + Init(pFd, Size); +} + +void pkgTagFile::Init(FileFd *pFd,unsigned long long Size) { /* The size is increased by 4 because if we start with the Size of the filename we need to try to read 1 char more to see an EOF faster, 1 char the end-pointer can be on and maybe 2 newlines need to be added to the end of the file -> 4 extra chars */ Size += 4; + if(d != NULL) + { + free(d->Buffer); + delete d; + } d = new pkgTagFilePrivate(pFd, Size); if (d->Fd.IsOpen() == false) @@ -128,18 +172,23 @@ bool pkgTagFile::Resize(unsigned long long const newSize) */ bool pkgTagFile::Step(pkgTagSection &Tag) { - while (Tag.Scan(d->Start,d->End - d->Start) == false) + if(Tag.Scan(d->Start,d->End - d->Start) == false) { - if (Fill() == false) - return false; - - if(Tag.Scan(d->Start,d->End - d->Start)) - break; + do + { + if (Fill() == false) + return false; + + if(Tag.Scan(d->Start,d->End - d->Start, false)) + break; - if (Resize() == false) - return _error->Error(_("Unable to parse package file %s (1)"), - d->Fd.Name().c_str()); + if (Resize() == false) + return _error->Error(_("Unable to parse package file %s (%d)"), + d->Fd.Name().c_str(), 1); + + } while (Tag.Scan(d->Start,d->End - d->Start, false) == false); } + d->Start += Tag.size(); d->iOffset += Tag.size(); @@ -233,8 +282,8 @@ bool pkgTagFile::Jump(pkgTagSection &Tag,unsigned long long Offset) if (Fill() == false) return false; - if (Tag.Scan(d->Start, d->End - d->Start) == false) - return _error->Error(_("Unable to parse package file %s (2)"),d->Fd.Name().c_str()); + if (Tag.Scan(d->Start, d->End - d->Start, false) == false) + return _error->Error(_("Unable to parse package file %s (%d)"),d->Fd.Name().c_str(), 2); return true; } @@ -242,28 +291,64 @@ bool pkgTagFile::Jump(pkgTagSection &Tag,unsigned long long Offset) // pkgTagSection::pkgTagSection - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ +APT_IGNORE_DEPRECATED_PUSH pkgTagSection::pkgTagSection() - : Section(0), TagCount(0), d(NULL), Stop(0) + : Section(0), d(NULL), Stop(0) { + d = new pkgTagSectionPrivate(); +#if APT_PKG_ABI < 413 + TagCount = 0; memset(&Indexes, 0, sizeof(Indexes)); +#endif memset(&AlphaIndexes, 0, sizeof(AlphaIndexes)); } 
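The tagfile.cc changes above move the per-section tag index into pkgTagSectionPrivate, hash a tag over the last eight characters of its real name, and let Step()/Jump() resume a scan (Restart = false) after the buffer has been refilled instead of starting over. The calling pattern for consumers stays the usual one sketched below; the function name, the file path parameter and the chosen fields are only an illustration under those assumptions, with FindB() being the accessor newly added by this change:

#include <apt-pkg/error.h>
#include <apt-pkg/fileutl.h>
#include <apt-pkg/tagfile.h>
#include <iostream>

// Iterate over a deb822-style file section by section and read a few fields.
static bool DumpPackages(const char *Path)
{
   FileFd Fd(Path, FileFd::ReadOnly);
   if (_error->PendingError() == true)
      return false;
   pkgTagFile TagFile(&Fd);
   pkgTagSection Section;
   while (TagFile.Step(Section) == true)
   {
      std::cout << Section.FindS("Package")
                << " " << Section.FindULL("Size", 0)
                << " essential=" << Section.FindB("Essential", false)
                << std::endl;
   }
   return true;
}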
+APT_IGNORE_DEPRECATED_POP /*}}}*/ // TagSection::Scan - Scan for the end of the header information /*{{{*/ -// --------------------------------------------------------------------- -/* This looks for the first double new line in the data stream. - It also indexes the tags in the section. */ +#if APT_PKG_ABI < 413 bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength) { + return Scan(Start, MaxLength, true); +} +#endif +bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength, bool const Restart) +{ + Section = Start; const char *End = Start + MaxLength; - Stop = Section = Start; - memset(AlphaIndexes,0,sizeof(AlphaIndexes)); + + if (Restart == false && d->Tags.empty() == false) + { + Stop = Section + d->Tags.back().StartTag; + if (End <= Stop) + return false; + Stop = (const char *)memchr(Stop,'\n',End - Stop); + if (Stop == NULL) + return false; + ++Stop; + } + else + { + Stop = Section; + if (d->Tags.empty() == false) + { + memset(&AlphaIndexes, 0, sizeof(AlphaIndexes)); + d->Tags.clear(); + } + d->Tags.reserve(0x100); + } +#if APT_PKG_ABI >= 413 + unsigned int TagCount = d->Tags.size(); +#else + APT_IGNORE_DEPRECATED(TagCount = d->Tags.size();) +#endif if (Stop == 0) return false; - TagCount = 0; - while (TagCount+1 < sizeof(Indexes)/sizeof(Indexes[0]) && Stop < End) + pkgTagSectionPrivate::TagData lastTagData(0); + lastTagData.EndTag = 0; + unsigned long lastTagHash = 0; + while (Stop < End) { TrimRecord(true,End); @@ -275,12 +360,45 @@ bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength) // Start a new index and add it to the hash if (isspace(Stop[0]) == 0) { - Indexes[TagCount++] = Stop - Section; - AlphaIndexes[AlphaHash(Stop,End)] = TagCount; + // store the last found tag + if (lastTagData.EndTag != 0) + { + if (AlphaIndexes[lastTagHash] != 0) + lastTagData.NextInBucket = AlphaIndexes[lastTagHash]; + APT_IGNORE_DEPRECATED_PUSH + AlphaIndexes[lastTagHash] = TagCount; +#if APT_PKG_ABI < 413 + if (d->Tags.size() < sizeof(Indexes)/sizeof(Indexes[0])) + Indexes[d->Tags.size()] = lastTagData.StartTag; +#endif + APT_IGNORE_DEPRECATED_POP + d->Tags.push_back(lastTagData); + } + + APT_IGNORE_DEPRECATED(++TagCount;) + lastTagData = pkgTagSectionPrivate::TagData(Stop - Section); + // find the colon separating tag and value + char const * Colon = (char const *) memchr(Stop, ':', End - Stop); + if (Colon == NULL) + return false; + // find the end of the tag (which might or might not be the colon) + char const * EndTag = Colon; + --EndTag; + for (; EndTag > Stop && isspace(*EndTag) != 0; --EndTag) + ; + ++EndTag; + lastTagData.EndTag = EndTag - Section; + lastTagHash = AlphaHash(Stop, EndTag - Stop); + // find the beginning of the value + Stop = Colon + 1; + for (; isspace(*Stop) != 0; ++Stop); + if (Stop >= End) + return false; + lastTagData.StartValue = Stop - Section; } Stop = (const char *)memchr(Stop,'\n',End - Stop); - + if (Stop == 0) return false; @@ -291,7 +409,22 @@ bool pkgTagSection::Scan(const char *Start,unsigned long MaxLength) // Double newline marks the end of the record if (Stop+1 < End && Stop[1] == '\n') { - Indexes[TagCount] = Stop - Section; + if (lastTagData.EndTag != 0) + { + if (AlphaIndexes[lastTagHash] != 0) + lastTagData.NextInBucket = AlphaIndexes[lastTagHash]; + APT_IGNORE_DEPRECATED(AlphaIndexes[lastTagHash] = TagCount;) +#if APT_PKG_ABI < 413 + APT_IGNORE_DEPRECATED(Indexes[d->Tags.size()] = lastTagData.StartTag;) +#endif + d->Tags.push_back(lastTagData); + } + + pkgTagSectionPrivate::TagData const td(Stop - Section); +#if 
APT_PKG_ABI < 413 + APT_IGNORE_DEPRECATED(Indexes[d->Tags.size()] = td.StartTag;) +#endif + d->Tags.push_back(td); TrimRecord(false,End); return true; } @@ -320,8 +453,12 @@ void pkgTagSection::Trim() for (; Stop > Section + 2 && (Stop[-2] == '\n' || Stop[-2] == '\r'); Stop--); } /*}}}*/ -// TagSection::Exists - return True if a tag exists /*{{{*/ +// TagSection::Exists - return True if a tag exists /*{{{*/ +#if APT_PKG_ABI >= 413 +bool pkgTagSection::Exists(const char* const Tag) const +#else bool pkgTagSection::Exists(const char* const Tag) +#endif { unsigned int tmp; return Find(Tag, tmp); @@ -332,78 +469,46 @@ bool pkgTagSection::Exists(const char* const Tag) /* This searches the section for a tag that matches the given string. */ bool pkgTagSection::Find(const char *Tag,unsigned int &Pos) const { - unsigned int Length = strlen(Tag); - unsigned int I = AlphaIndexes[AlphaHash(Tag)]; - if (I == 0) + size_t const Length = strlen(Tag); + unsigned int Bucket = AlphaIndexes[AlphaHash(Tag, Length)]; + if (Bucket == 0) return false; - I--; - - for (unsigned int Counter = 0; Counter != TagCount; Counter++, - I = (I+1)%TagCount) + + for (; Bucket != 0; Bucket = d->Tags[Bucket - 1].NextInBucket) { - const char *St; - St = Section + Indexes[I]; - if (strncasecmp(Tag,St,Length) != 0) + if ((d->Tags[Bucket - 1].EndTag - d->Tags[Bucket - 1].StartTag) != Length) continue; - // Make sure the colon is in the right place - const char *C = St + Length; - for (; isspace(*C) != 0; C++); - if (*C != ':') + char const * const St = Section + d->Tags[Bucket - 1].StartTag; + if (strncasecmp(Tag,St,Length) != 0) continue; - Pos = I; + + Pos = Bucket - 1; return true; } Pos = 0; return false; } - /*}}}*/ -// TagSection::Find - Locate a tag /*{{{*/ -// --------------------------------------------------------------------- -/* This searches the section for a tag that matches the given string. 
*/ bool pkgTagSection::Find(const char *Tag,const char *&Start, const char *&End) const { - unsigned int Length = strlen(Tag); - unsigned int I = AlphaIndexes[AlphaHash(Tag)]; - if (I == 0) + unsigned int Pos; + if (Find(Tag, Pos) == false) return false; - I--; - - for (unsigned int Counter = 0; Counter != TagCount; Counter++, - I = (I+1)%TagCount) - { - const char *St; - St = Section + Indexes[I]; - if (strncasecmp(Tag,St,Length) != 0) - continue; - - // Make sure the colon is in the right place - const char *C = St + Length; - for (; isspace(*C) != 0; C++); - if (*C != ':') - continue; - // Strip off the gunk from the start end - Start = C; - End = Section + Indexes[I+1]; - if (Start >= End) - return _error->Error("Internal parsing error"); - - for (; (isspace(*Start) != 0 || *Start == ':') && Start < End; Start++); - for (; isspace(End[-1]) != 0 && End > Start; End--); - - return true; - } - - Start = End = 0; - return false; + Start = Section + d->Tags[Pos].StartValue; + // Strip off the gunk from the end + End = Section + d->Tags[Pos + 1].StartTag; + if (unlikely(Start > End)) + return _error->Error("Internal parsing error"); + + for (; isspace(End[-1]) != 0 && End > Start; --End); + + return true; } /*}}}*/ // TagSection::FindS - Find a string /*{{{*/ -// --------------------------------------------------------------------- -/* */ string pkgTagSection::FindS(const char *Tag) const { const char *Start; @@ -413,6 +518,24 @@ string pkgTagSection::FindS(const char *Tag) const return string(Start,End); } /*}}}*/ +// TagSection::FindRawS - Find a string /*{{{*/ +string pkgTagSection::FindRawS(const char *Tag) const +{ + unsigned int Pos; + if (Find(Tag, Pos) == false) + return ""; + + char const *Start = (char const *) memchr(Section + d->Tags[Pos].EndTag, ':', d->Tags[Pos].StartValue - d->Tags[Pos].EndTag); + ++Start; + char const *End = Section + d->Tags[Pos + 1].StartTag; + if (unlikely(Start > End)) + return ""; + + for (; isspace(End[-1]) != 0 && End > Start; --End); + + return std::string(Start, End - Start); +} + /*}}}*/ // TagSection::FindI - Find an integer /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -461,6 +584,17 @@ unsigned long long pkgTagSection::FindULL(const char *Tag, unsigned long long co return Result; } /*}}}*/ +// TagSection::FindB - Find boolean value /*{{{*/ +// --------------------------------------------------------------------- +/* */ +bool pkgTagSection::FindB(const char *Tag, bool const &Default) const +{ + const char *Start, *Stop; + if (Find(Tag, Start, Stop) == false) + return Default; + return StringToBool(string(Start, Stop)); +} + /*}}}*/ // TagSection::FindFlag - Locate a yes/no type flag /*{{{*/ // --------------------------------------------------------------------- /* The bits marked in Flag are masked on/off in Flags */ @@ -493,71 +627,153 @@ bool pkgTagSection::FindFlag(unsigned long &Flags, unsigned long Flag, return true; } /*}}}*/ +void pkgTagSection::Get(const char *&Start,const char *&Stop,unsigned int I) const +{ + Start = Section + d->Tags[I].StartTag; + Stop = Section + d->Tags[I+1].StartTag; +} +APT_PURE unsigned int pkgTagSection::Count() const { /*{{{*/ + if (d->Tags.empty() == true) + return 0; + // the last element is just marking the end and isn't a real one + return d->Tags.size() - 1; +} + /*}}}*/ +// TagSection::Write - Ordered (re)writing of fields /*{{{*/ +pkgTagSection::Tag pkgTagSection::Tag::Remove(std::string const &Name) +{ + return Tag(REMOVE, Name, ""); +} +pkgTagSection::Tag 
pkgTagSection::Tag::Rename(std::string const &OldName, std::string const &NewName) +{ + return Tag(RENAME, OldName, NewName); +} +pkgTagSection::Tag pkgTagSection::Tag::Rewrite(std::string const &Name, std::string const &Data) +{ + if (Data.empty() == true) + return Tag(REMOVE, Name, ""); + else + return Tag(REWRITE, Name, Data); +} +static bool WriteTag(FileFd &File, std::string Tag, std::string const &Value) +{ + if (Value.empty() || isspace(Value[0]) != 0) + Tag.append(":"); + else + Tag.append(": "); + Tag.append(Value); + Tag.append("\n"); + return File.Write(Tag.c_str(), Tag.length()); +} +static bool RewriteTags(FileFd &File, pkgTagSection const * const This, char const * const Tag, + std::vector<pkgTagSection::Tag>::const_iterator &R, + std::vector<pkgTagSection::Tag>::const_iterator const &REnd) +{ + size_t const TagLen = strlen(Tag); + for (; R != REnd; ++R) + { + std::string data; + if (R->Name.length() == TagLen && strncasecmp(R->Name.c_str(), Tag, R->Name.length()) == 0) + { + if (R->Action != pkgTagSection::Tag::REWRITE) + break; + data = R->Data; + } + else if(R->Action == pkgTagSection::Tag::RENAME && R->Data.length() == TagLen && + strncasecmp(R->Data.c_str(), Tag, R->Data.length()) == 0) + data = This->FindRawS(R->Name.c_str()); + else + continue; + + return WriteTag(File, Tag, data); + } + return true; +} +bool pkgTagSection::Write(FileFd &File, char const * const * const Order, std::vector<Tag> const &Rewrite) const +{ + // first pass: Write everything we have an order for + if (Order != NULL) + { + for (unsigned int I = 0; Order[I] != 0; ++I) + { + std::vector<Tag>::const_iterator R = Rewrite.begin(); + if (RewriteTags(File, this, Order[I], R, Rewrite.end()) == false) + return false; + if (R != Rewrite.end()) + continue; + + if (Exists(Order[I]) == false) + continue; + + if (WriteTag(File, Order[I], FindRawS(Order[I])) == false) + return false; + } + } + // second pass: See if we have tags which aren't ordered + if (d->Tags.empty() == false) + { + for (std::vector<pkgTagSectionPrivate::TagData>::const_iterator T = d->Tags.begin(); T != d->Tags.end() - 1; ++T) + { + char const * const fieldname = Section + T->StartTag; + size_t fieldnamelen = T->EndTag - T->StartTag; + if (Order != NULL) + { + unsigned int I = 0; + for (; Order[I] != 0; ++I) + { + if (fieldnamelen == strlen(Order[I]) && strncasecmp(fieldname, Order[I], fieldnamelen) == 0) + break; + } + if (Order[I] != 0) + continue; + } + + std::string const name(fieldname, fieldnamelen); + std::vector<Tag>::const_iterator R = Rewrite.begin(); + if (RewriteTags(File, this, name.c_str(), R, Rewrite.end()) == false) + return false; + if (R != Rewrite.end()) + continue; + + if (WriteTag(File, name, FindRawS(name.c_str())) == false) + return false; + } + } + // last pass: see if there are any rewrites remaining we haven't done yet + for (std::vector<Tag>::const_iterator R = Rewrite.begin(); R != Rewrite.end(); ++R) + { + if (R->Action == Tag::REMOVE) + continue; + std::string const name = ((R->Action == Tag::RENAME) ? R->Data : R->Name); + if (Exists(name.c_str())) + continue; + if (Order != NULL) + { + unsigned int I = 0; + for (; Order[I] != 0; ++I) + { + if (strncasecmp(name.c_str(), Order[I], name.length()) == 0 && name.length() == strlen(Order[I])) + break; + } + if (Order[I] != 0) + continue; + } + + if (WriteTag(File, name, ((R->Action == Tag::RENAME) ? 
FindRawS(R->Name.c_str()) : R->Data)) == false) + return false; + } + return true; +} + /*}}}*/ + +#include "tagfile-order.c" + // TFRewrite - Rewrite a control record /*{{{*/ // --------------------------------------------------------------------- /* This writes the control record to stdout rewriting it as necessary. The override map item specificies the rewriting rules to follow. This also takes the time to sort the feild list. */ - -/* The order of this list is taken from dpkg source lib/parse.c the fieldinfos - array. */ -static const char *iTFRewritePackageOrder[] = { - "Package", - "Essential", - "Status", - "Priority", - "Section", - "Installed-Size", - "Maintainer", - "Original-Maintainer", - "Architecture", - "Source", - "Version", - "Revision", // Obsolete - "Config-Version", // Obsolete - "Replaces", - "Provides", - "Depends", - "Pre-Depends", - "Recommends", - "Suggests", - "Conflicts", - "Breaks", - "Conffiles", - "Filename", - "Size", - "MD5Sum", - "SHA1", - "SHA256", - "SHA512", - "MSDOS-Filename", // Obsolete - "Description", - 0}; -static const char *iTFRewriteSourceOrder[] = {"Package", - "Source", - "Binary", - "Version", - "Priority", - "Section", - "Maintainer", - "Original-Maintainer", - "Build-Depends", - "Build-Depends-Indep", - "Build-Conflicts", - "Build-Conflicts-Indep", - "Architecture", - "Standards-Version", - "Format", - "Directory", - "Files", - 0}; - -/* Two levels of initialization are used because gcc will set the symbol - size of an array to the length of the array, causing dynamic relinking - errors. Doing this makes the symbol size constant */ -const char **TFRewritePackageOrder = iTFRewritePackageOrder; -const char **TFRewriteSourceOrder = iTFRewriteSourceOrder; - +APT_IGNORE_DEPRECATED_PUSH bool TFRewrite(FILE *Output,pkgTagSection const &Tags,const char *Order[], TFRewriteData *Rewrite) { @@ -681,4 +897,7 @@ bool TFRewrite(FILE *Output,pkgTagSection const &Tags,const char *Order[], return true; } +APT_IGNORE_DEPRECATED_POP /*}}}*/ + +pkgTagSection::~pkgTagSection() { delete d; } diff --git a/apt-pkg/tagfile.h b/apt-pkg/tagfile.h index d5b62e76d..118954541 100644 --- a/apt-pkg/tagfile.h +++ b/apt-pkg/tagfile.h @@ -25,69 +25,123 @@ #include <stdio.h> #include <string> +#include <vector> +#include <list> #ifndef APT_8_CLEANER_HEADERS #include <apt-pkg/fileutl.h> #endif class FileFd; +class pkgTagSectionPrivate; class pkgTagSection { const char *Section; - // We have a limit of 256 tags per section. 
- unsigned int Indexes[256]; + // We have a limit of 256 tags per section with the old abi +#if APT_PKG_ABI < 413 + APT_DEPRECATED unsigned int Indexes[256]; +#endif unsigned int AlphaIndexes[0x100]; - unsigned int TagCount; - // dpointer placeholder (for later in case we need it) - void *d; +#if APT_PKG_ABI < 413 + APT_DEPRECATED unsigned int TagCount; +#endif - /* This very simple hash function for the last 8 letters gives - very good performance on the debian package files */ - inline static unsigned long AlphaHash(const char *Text, const char *End = 0) - { - unsigned long Res = 0; - for (; Text != End && *Text != ':' && *Text != 0; Text++) - Res = ((unsigned long)(*Text) & 0xDF) ^ (Res << 1); - return Res & 0xFF; - } + // dpointer placeholder (for later in case we need it) + pkgTagSectionPrivate *d; protected: const char *Stop; public: - inline bool operator ==(const pkgTagSection &rhs) {return Section == rhs.Section;}; inline bool operator !=(const pkgTagSection &rhs) {return Section != rhs.Section;}; - bool Find(const char *Tag,const char *&Start, const char *&End) const; bool Find(const char *Tag,unsigned int &Pos) const; std::string FindS(const char *Tag) const; - signed int FindI(const char *Tag,signed long Default = 0) const ; + std::string FindRawS(const char *Tag) const; + signed int FindI(const char *Tag,signed long Default = 0) const; + bool FindB(const char *Tag, bool const &Default = false) const; unsigned long long FindULL(const char *Tag, unsigned long long const &Default = 0) const; bool FindFlag(const char *Tag,unsigned long &Flags, unsigned long Flag) const; bool static FindFlag(unsigned long &Flags, unsigned long Flag, const char* Start, const char* Stop); - bool Scan(const char *Start,unsigned long MaxLength); + + /** \brief searches the boundaries of the current section + * + * While parameter Start marks the beginning of the section, this method + * will search for the first double newline in the data stream which marks + * the end of the section. It also does a first pass over the content of + * the section parsing it as encountered for processing later on by Find + * + * @param Start is the beginning of the section + * @param MaxLength is the size of valid data in the stream pointed to by Start + * @param Restart if enabled internal state will be cleared, otherwise it is + * assumed that more data is now available in the stream and the parsing will + * start where it encountered insufficient data the last time. + * + * @return \b true if section end was found, \b false otherwise. + * Beware that internal state will be inconsistent if \b false is returned! + */ +#if APT_PKG_ABI >= 413 + APT_MUSTCHECK bool Scan(const char *Start, unsigned long MaxLength, bool const Restart = true); +#else + APT_MUSTCHECK bool Scan(const char *Start, unsigned long MaxLength, bool const Restart); + APT_MUSTCHECK bool Scan(const char *Start, unsigned long MaxLength); +#endif + inline unsigned long size() const {return Stop - Section;}; void Trim(); virtual void TrimRecord(bool BeforeRecord, const char* &End); - - inline unsigned int Count() const {return TagCount;}; + + /** \brief number of Tags in the current section + * + * Note: if a Tag is mentioned repeatedly it will be counted multiple + * times, but only the last occurrence is available via Find methods.
+ */ + unsigned int Count() const; +#if APT_PKG_ABI >= 413 + bool Exists(const char* const Tag) const; +#else bool Exists(const char* const Tag); - - inline void Get(const char *&Start,const char *&Stop,unsigned int I) const - {Start = Section + Indexes[I]; Stop = Section + Indexes[I+1];} - +#endif + + void Get(const char *&Start,const char *&Stop,unsigned int I) const; + inline void GetSection(const char *&Start,const char *&Stop) const { Start = Section; Stop = this->Stop; }; - + pkgTagSection(); - virtual ~pkgTagSection() {}; + virtual ~pkgTagSection(); + + struct Tag + { + enum ActionType { REMOVE, RENAME, REWRITE } Action; + std::string Name; + std::string Data; + + static Tag Remove(std::string const &Name); + static Tag Rename(std::string const &OldName, std::string const &NewName); + static Tag Rewrite(std::string const &Name, std::string const &Data); + private: + Tag(ActionType const Action, std::string const &Name, std::string const &Data) : + Action(Action), Name(Name), Data(Data) {} + }; + + /** Write this section (with optional rewrites) to a file + * + * @param File to write the section to + * @param Order in which tags should appear in the file + * @param Rewrite is a set of tags to be renamed, rewritten and/or removed + * @return \b true if successful, otherwise \b false + */ + bool Write(FileFd &File, char const * const * const Order = NULL, std::vector<Tag> const &Rewrite = std::vector<Tag>()) const; }; class pkgTagFilePrivate; @@ -105,24 +159,25 @@ class pkgTagFile unsigned long Offset(); bool Jump(pkgTagSection &Tag,unsigned long long Offset); + void Init(FileFd *F,unsigned long long Size = 32*1024); + pkgTagFile(FileFd *F,unsigned long long Size = 32*1024); virtual ~pkgTagFile(); }; -/* This is the list of things to rewrite. The rewriter - goes through and changes or adds each of these headers - to suit. A zero forces the header to be erased, an empty string - causes the old value to be used.
(rewrite rule ignored) */ -struct TFRewriteData +extern const char **TFRewritePackageOrder; +extern const char **TFRewriteSourceOrder; + +// Use pkgTagSection::Tag and pkgTagSection::Write() instead +APT_IGNORE_DEPRECATED_PUSH +struct APT_DEPRECATED TFRewriteData { const char *Tag; const char *Rewrite; const char *NewTag; }; -extern const char **TFRewritePackageOrder; -extern const char **TFRewriteSourceOrder; - -bool TFRewrite(FILE *Output,pkgTagSection const &Tags,const char *Order[], +APT_DEPRECATED bool TFRewrite(FILE *Output,pkgTagSection const &Tags,const char *Order[], TFRewriteData *Rewrite); +APT_IGNORE_DEPRECATED_POP #endif diff --git a/apt-pkg/update.cc b/apt-pkg/update.cc index 5d5b19626..2908a4820 100644 --- a/apt-pkg/update.cc +++ b/apt-pkg/update.cc @@ -27,8 +27,8 @@ bool ListUpdate(pkgAcquireStatus &Stat, pkgSourceList &List, int PulseInterval) { - pkgAcquire Fetcher; - if (Fetcher.Setup(&Stat, _config->FindDir("Dir::State::Lists")) == false) + pkgAcquire Fetcher(&Stat); + if (Fetcher.GetLock(_config->FindDir("Dir::State::Lists")) == false) return false; // Populate it with the source selection diff --git a/apt-pkg/update.h b/apt-pkg/update.h index 3835644de..e35cd14f6 100644 --- a/apt-pkg/update.h +++ b/apt-pkg/update.h @@ -11,7 +11,8 @@ #define PKGLIB_UPDATE_H class pkgAcquireStatus; - +class pkgSourceList; +class pkgAcquire; bool ListUpdate(pkgAcquireStatus &progress, pkgSourceList &List, int PulseInterval=0); bool AcquireUpdate(pkgAcquire &Fetcher, int const PulseInterval = 0, diff --git a/apt-pkg/upgrade.cc b/apt-pkg/upgrade.cc index 7926845c2..6c8721da8 100644 --- a/apt-pkg/upgrade.cc +++ b/apt-pkg/upgrade.cc @@ -24,13 +24,14 @@ The problem resolver is used to resolve the problems. */ -bool pkgDistUpgrade(pkgDepCache &Cache) +static bool pkgDistUpgrade(pkgDepCache &Cache, OpProgress * const Progress) { std::string const solver = _config->Find("APT::Solver", "internal"); - if (solver != "internal") { - OpTextProgress Prog(*_config); - return EDSP::ResolveExternal(solver.c_str(), Cache, false, true, false, &Prog); - } + if (solver != "internal") + return EDSP::ResolveExternal(solver.c_str(), Cache, false, true, false, Progress); + + if (Progress != NULL) + Progress->OverallProgress(0, 100, 1, _("Calculating upgrade")); pkgDepCache::ActionGroup group(Cache); @@ -41,12 +42,18 @@ bool pkgDistUpgrade(pkgDepCache &Cache) if (I->CurrentVer != 0) Cache.MarkInstall(I, false, 0, false); + if (Progress != NULL) + Progress->Progress(10); + /* Auto upgrade all installed packages, this provides the basis for the installation */ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I) if (I->CurrentVer != 0) Cache.MarkInstall(I, true, 0, false); + if (Progress != NULL) + Progress->Progress(50); + /* Now, install each essential package which is not installed (and not provided by another package in the same name group) */ std::string essential = _config->Find("pkgCacheGen::Essential", "all"); @@ -77,15 +84,24 @@ bool pkgDistUpgrade(pkgDepCache &Cache) for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I) if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential) Cache.MarkInstall(I, true, 0, false); - + + if (Progress != NULL) + Progress->Progress(55); + /* We do it again over all previously installed packages to force conflict resolution on them all. 
*/ for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I) if (I->CurrentVer != 0) Cache.MarkInstall(I, false, 0, false); + if (Progress != NULL) + Progress->Progress(65); + pkgProblemResolver Fix(&Cache); + if (Progress != NULL) + Progress->Progress(95); + // Hold back held packages. if (_config->FindB("APT::Ignore-Hold",false) == false) { @@ -98,18 +114,26 @@ bool pkgDistUpgrade(pkgDepCache &Cache) } } } - - return Fix.Resolve(); + + bool const success = Fix.Resolve(false, Progress); + if (Progress != NULL) + Progress->Done(); + return success; +} +bool pkgDistUpgrade(pkgDepCache &Cache) +{ + return pkgDistUpgrade(Cache, NULL); } /*}}}*/ // AllUpgradeNoNewPackages - Upgrade but no removals or new pkgs /*{{{*/ -static bool pkgAllUpgradeNoNewPackages(pkgDepCache &Cache) +static bool pkgAllUpgradeNoNewPackages(pkgDepCache &Cache, OpProgress * const Progress) { std::string const solver = _config->Find("APT::Solver", "internal"); - if (solver != "internal") { - OpTextProgress Prog(*_config); - return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, &Prog); - } + if (solver != "internal") + return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, Progress); + + if (Progress != NULL) + Progress->OverallProgress(0, 100, 1, _("Calculating upgrade")); pkgDepCache::ActionGroup group(Cache); @@ -131,8 +155,15 @@ static bool pkgAllUpgradeNoNewPackages(pkgDepCache &Cache) if (I->CurrentVer != 0 && Cache[I].InstallVer != 0) Cache.MarkInstall(I, false, 0, false); } - - return Fix.ResolveByKeep(); + + if (Progress != NULL) + Progress->Progress(50); + + // resolve remaining issues via keep + bool const success = Fix.ResolveByKeep(Progress); + if (Progress != NULL) + Progress->Done(); + return success; } /*}}}*/ // AllUpgradeWithNewInstalls - Upgrade + install new packages as needed /*{{{*/ @@ -141,8 +172,15 @@ static bool pkgAllUpgradeNoNewPackages(pkgDepCache &Cache) * Upgrade as much as possible without deleting anything (useful for * stable systems) */ -static bool pkgAllUpgradeWithNewPackages(pkgDepCache &Cache) +static bool pkgAllUpgradeWithNewPackages(pkgDepCache &Cache, OpProgress * const Progress) { + std::string const solver = _config->Find("APT::Solver", "internal"); + if (solver != "internal") + return EDSP::ResolveExternal(solver.c_str(), Cache, true, false, false, Progress); + + if (Progress != NULL) + Progress->OverallProgress(0, 100, 1, _("Calculating upgrade")); + pkgDepCache::ActionGroup group(Cache); pkgProblemResolver Fix(&Cache); @@ -164,18 +202,30 @@ static bool pkgAllUpgradeWithNewPackages(pkgDepCache &Cache) } } + if (Progress != NULL) + Progress->Progress(10); + // then let auto-install loose for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I) if (Cache[I].Install()) Cache.MarkInstall(I, true, 0, false); + if (Progress != NULL) + Progress->Progress(50); + // ... but it may remove stuff, we we need to clean up afterwards again for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; ++I) if (Cache[I].Delete() == true) Cache.MarkKeep(I, false, false); + if (Progress != NULL) + Progress->Progress(60); + // resolve remaining issues via keep - return Fix.ResolveByKeep(); + bool const success = Fix.ResolveByKeep(Progress); + if (Progress != NULL) + Progress->Done(); + return success; } /*}}}*/ // AllUpgrade - Upgrade as many packages as possible /*{{{*/ @@ -183,9 +233,13 @@ static bool pkgAllUpgradeWithNewPackages(pkgDepCache &Cache) /* Right now the system must be consistent before this can be called. 
It also will not change packages marked for install, it only tries to install packages not marked for install */ +static bool pkgAllUpgrade(pkgDepCache &Cache, OpProgress * const Progress) +{ + return pkgAllUpgradeNoNewPackages(Cache, Progress); +} bool pkgAllUpgrade(pkgDepCache &Cache) { - return pkgAllUpgradeNoNewPackages(Cache); + return pkgAllUpgrade(Cache, NULL); } /*}}}*/ // MinimizeUpgrade - Minimizes the set of packages to be upgraded /*{{{*/ @@ -233,24 +287,25 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache) return true; } /*}}}*/ -// APT::Upgrade::Upgrade - Upgrade using a specific strategy /*{{{*/ +// APT::Upgrade::Upgrade - Upgrade using a specific strategy /*{{{*/ +#if APT_PKG_ABI < 413 bool APT::Upgrade::Upgrade(pkgDepCache &Cache, int mode) { - if (mode == 0) - { - return pkgDistUpgrade(Cache); - } + return Upgrade(Cache, mode, NULL); +} +#endif +bool APT::Upgrade::Upgrade(pkgDepCache &Cache, int mode, OpProgress * const Progress) +{ +APT_IGNORE_DEPRECATED_PUSH + if (mode == ALLOW_EVERYTHING) + return pkgDistUpgrade(Cache, Progress); else if ((mode & ~FORBID_REMOVE_PACKAGES) == 0) - { - return pkgAllUpgradeWithNewPackages(Cache); - } + return pkgAllUpgradeWithNewPackages(Cache, Progress); else if ((mode & ~(FORBID_REMOVE_PACKAGES|FORBID_INSTALL_NEW_PACKAGES)) == 0) - { - return pkgAllUpgradeNoNewPackages(Cache); - } + return pkgAllUpgradeNoNewPackages(Cache, Progress); else _error->Error("pkgAllUpgrade called with unsupported mode %i", mode); - +APT_IGNORE_DEPRECATED_POP return false; } /*}}}*/ diff --git a/apt-pkg/upgrade.h b/apt-pkg/upgrade.h index aa883df10..18b6aac7b 100644 --- a/apt-pkg/upgrade.h +++ b/apt-pkg/upgrade.h @@ -10,23 +10,32 @@ #ifndef PKGLIB_UPGRADE_H #define PKGLIB_UPGRADE_H +#include <stddef.h> +#include <apt-pkg/macros.h> + class pkgDepCache; +class OpProgress; namespace APT { namespace Upgrade { // FIXME: make this "enum class UpgradeMode {" once we enable c++11 enum UpgradeMode { FORBID_REMOVE_PACKAGES = 1, - FORBID_INSTALL_NEW_PACKAGES = 2 + FORBID_INSTALL_NEW_PACKAGES = 2, + ALLOW_EVERYTHING = 0 }; +#if APT_PKG_ABI >= 413 + bool Upgrade(pkgDepCache &Cache, int UpgradeMode, OpProgress * const Progress = NULL); +#else bool Upgrade(pkgDepCache &Cache, int UpgradeMode); + bool Upgrade(pkgDepCache &Cache, int UpgradeMode, OpProgress * const Progress); +#endif } } // please use APT::Upgrade::Upgrade() instead -bool pkgDistUpgrade(pkgDepCache &Cache); -bool pkgAllUpgrade(pkgDepCache &Cache); -bool pkgMinimizeUpgrade(pkgDepCache &Cache); - +APT_DEPRECATED bool pkgDistUpgrade(pkgDepCache &Cache); +APT_DEPRECATED bool pkgAllUpgrade(pkgDepCache &Cache); +bool pkgMinimizeUpgrade(pkgDepCache &Cache); #endif diff --git a/apt-pkg/vendorlist.cc b/apt-pkg/vendorlist.cc index fb33ff17d..db5b87fc0 100644 --- a/apt-pkg/vendorlist.cc +++ b/apt-pkg/vendorlist.cc @@ -11,10 +11,8 @@ #include <apti18n.h> -#if __GNUC__ >= 4 - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wdeprecated-declarations" -#endif +// The whole vendor system is deprecated +APT_IGNORE_DEPRECATED_PUSH #include <apt-pkg/vendor.h> #include <apt-pkg/vendorlist.h> @@ -163,6 +161,4 @@ const Vendor* pkgVendorList::FindVendor(const std::vector<string> GPGVOutput) /* } /*}}}*/ -#if __GNUC__ >= 4 - #pragma GCC diagnostic pop -#endif +APT_IGNORE_DEPRECATED_POP |
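Taken together, the upgrade.{cc,h} changes above let every upgrade strategy report progress: the internal resolvers emit coarse Progress() steps and the external-solver path forwards the same OpProgress object to EDSP::ResolveExternal. A minimal caller sketch under the assumption that a ready pkgDepCache is obtained elsewhere (its construction is outside this diff) and that the function name is purely illustrative:

#include <apt-pkg/configuration.h>
#include <apt-pkg/depcache.h>
#include <apt-pkg/progress.h>
#include <apt-pkg/upgrade.h>

// Run a dist-upgrade style calculation with textual progress reporting.
// ALLOW_EVERYTHING and the OpProgress overload are introduced by this change.
static bool CalculateUpgrade(pkgDepCache &Cache)
{
   OpTextProgress Progress(*_config);
   return APT::Upgrade::Upgrade(Cache, APT::Upgrade::ALLOW_EVERYTHING, &Progress);
}

On the old ABI the two-argument Upgrade() is kept for compatibility and simply forwards to the three-argument form with a NULL progress object.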