author     Arch Librarian <arch@canonical.com>    2004-09-20 17:00:47 +0000
committer  Arch Librarian <arch@canonical.com>    2004-09-20 17:00:47 +0000
commit     dc738e7ae6a9c14992279dc2c52f71b14ced53aa (patch)
tree       f499d824a6da6d4fcce2a927192ba30fdbebd143 /ftparchive
parent     bac2e715204b341d7818444c4a4ad2e3e4e5b18d (diff)
Lots and lots of i18n updates.
Author: doogie
Date: 2003-02-10 07:34:41 GMT
Lots and lots of i18n updates.
Diffstat (limited to 'ftparchive')
-rw-r--r--   ftparchive/apt-ftparchive.cc   | 32
-rw-r--r--   ftparchive/cachedb.cc          | 15
-rw-r--r--   ftparchive/contents.cc         |  7
-rw-r--r--   ftparchive/multicompress.cc    | 31
-rw-r--r--   ftparchive/override.cc         | 23
-rw-r--r--   ftparchive/writer.cc           | 52
6 files changed, 82 insertions(+), 78 deletions(-)
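The pattern applied throughout the diff below is the usual gettext one: each user-visible string is wrapped in _() (pulled in via the new <apti18n.h> include) so that xgettext can extract it into a message catalog and a translation can be substituted at runtime. A minimal standalone sketch of that mechanism, using plain libintl rather than APT's wrapper header and an illustrative "demo" domain, would look like this:

// Minimal gettext sketch (illustrative, not APT code); the "demo" domain
// and locale directory are placeholders.
#include <libintl.h>
#include <locale.h>
#include <cstdio>

#define _(str) gettext(str)     // same shorthand convention as <apti18n.h>

int main()
{
   setlocale(LC_ALL,"");                          // honour the user's locale
   bindtextdomain("demo","/usr/share/locale");    // where .mo catalogs would live
   textdomain("demo");

   // Marked string: extracted by xgettext, translated at runtime when a
   // catalog is installed, printed as-is otherwise.
   printf("%s\n",_("Package extension list is too long"));
   return 0;
}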
diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc
index b150d4092..480d32805 100644
--- a/ftparchive/apt-ftparchive.cc
+++ b/ftparchive/apt-ftparchive.cc
@@ -1,6 +1,6 @@
 // -*- mode: cpp; mode: fold -*-
 // Description				/*{{{*/
-// $Id: apt-ftparchive.cc,v 1.5 2002/11/11 04:27:51 doogie Exp $
+// $Id: apt-ftparchive.cc,v 1.6 2003/02/10 07:34:41 doogie Exp $
 /* ######################################################################

    apt-scanpackages - Efficient work-alike for dpkg-scanpackages
@@ -160,9 +160,9 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
 			   flCombine(OverrideDir,BinOverride),
 			   flCombine(OverrideDir,ExtraOverride));
    if (PkgExt.empty() == false && Packages.SetExts(PkgExt) == false)
-      return _error->Error("Package extension list is too long");
+      return _error->Error(_("Package extension list is too long"));
    if (_error->PendingError() == true)
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());

    Packages.PathPrefix = PathPrefix;
    Packages.DirStrip = ArchiveDir;
@@ -176,7 +176,7 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
 		      PkgCompress,Permissions);
    Packages.Output = Comp.Input;
    if (_error->PendingError() == true)
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());

    c0out << ' ' << BaseDir << ":" << flush;
@@ -199,7 +199,7 @@ bool PackageMap::GenPackages(Configuration &Setup,struct CacheDB::Stats &Stats)
    if (Comp.Finalize(Size) == false)
    {
       c0out << endl;
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
    }

    if (Size != 0)
@@ -246,9 +246,9 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
 			  flCombine(OverrideDir,SrcOverride),
 			  flCombine(OverrideDir,SrcExtraOverride));
    if (SrcExt.empty() == false && Sources.SetExts(SrcExt) == false)
-      return _error->Error("Source extension list is too long");
+      return _error->Error(_("Source extension list is too long"));
    if (_error->PendingError() == true)
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());

    Sources.PathPrefix = PathPrefix;
    Sources.DirStrip = ArchiveDir;
@@ -262,7 +262,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
 		      SrcCompress,Permissions);
    Sources.Output = Comp.Input;
    if (_error->PendingError() == true)
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());

    c0out << ' ' << BaseDir << ":" << flush;
@@ -284,7 +284,7 @@ bool PackageMap::GenSources(Configuration &Setup,struct CacheDB::Stats &Stats)
    if (Comp.Finalize(Size) == false)
    {
       c0out << endl;
-      return _error->Error("Error Processing directory %s",BaseDir.c_str());
+      return _error->Error(_("Error Processing directory %s"),BaseDir.c_str());
    }

    if (Size != 0)
@@ -333,7 +333,7 @@ bool PackageMap::GenContents(Configuration &Setup,
    // Create a package writer object.
    ContentsWriter Contents("");
    if (PkgExt.empty() == false && Contents.SetExts(PkgExt) == false)
-      return _error->Error("Package extension list is too long");
+      return _error->Error(_("Package extension list is too long"));
    if (_error->PendingError() == true)
       return false;
@@ -363,7 +363,7 @@ bool PackageMap::GenContents(Configuration &Setup,
	 return false;

      if (fwrite(Buf,1,ToRead,Comp.Input) != ToRead)
-	 return _error->Errno("fwrite","Error writing header to contents file");
+	 return _error->Errno("fwrite",_("Error writing header to contents file"));

      Size -= ToRead;
   }
@@ -393,7 +393,7 @@ bool PackageMap::GenContents(Configuration &Setup,
   if (Comp.Finalize(Size) == false || _error->PendingError() == true)
   {
      c0out << endl;
-     return _error->Error("Error Processing Contents %s",
+     return _error->Error(_("Error Processing Contents %s"),
			   this->Contents.c_str());
   }
@@ -547,7 +547,7 @@ bool ShowHelp(CommandLine &CmdL)
       return true;

    cout <<
-    "Usage: apt-ftparchive [options] command\n"
+    _("Usage: apt-ftparchive [options] command\n"
      "Commands: packges binarypath [overridefile [pathprefix]]\n"
      "          sources srcpath [overridefile [pathprefix]]\n"
      "          contents path\n"
@@ -583,7 +583,7 @@ bool ShowHelp(CommandLine &CmdL)
      "  --no-delink Enable delinking debug mode\n"
      "  --contents  Control contents file generation\n"
      "  -c=?  Read this configuration file\n"
-     "  -o=?  Set an arbitary configuration option" << endl;
+     "  -o=?  Set an arbitary configuration option") << endl;

    return true;
 }
@@ -728,7 +728,7 @@ bool Generate(CommandLine &CmdL)
    if (RegexChoice(List,CmdL.FileList + 2,CmdL.FileList + CmdL.FileSize()) == 0)
    {
       delete [] List;
-      return _error->Error("No selections matched");
+      return _error->Error(_("No selections matched"));
    }
    _error->DumpErrors();
@@ -801,7 +801,7 @@ bool Generate(CommandLine &CmdL)
    {
       if (MultiCompress::GetStat(flCombine(ArchiveDir,I->PkgFile),I->PkgCompress,B) == false)
       {
-	 _error->Warning("Some files are missing in the package file group `%s'",I->PkgFile.c_str());
+	 _error->Warning(_("Some files are missing in the package file group `%s'"),I->PkgFile.c_str());
	 continue;
       }
diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc
index b25d61d3b..440a90665 100644
--- a/ftparchive/cachedb.cc
+++ b/ftparchive/cachedb.cc
@@ -1,6 +1,6 @@
 // -*- mode: cpp; mode: fold -*-
 // Description				/*{{{*/
-// $Id: cachedb.cc,v 1.5 2002/11/22 18:02:08 doogie Exp $
+// $Id: cachedb.cc,v 1.6 2003/02/10 07:34:41 doogie Exp $
 /* ######################################################################

    CacheDB
@@ -16,6 +16,7 @@
 #include "cachedb.h"

+#include <apti18n.h>
 #include <apt-pkg/error.h>
 #include <apt-pkg/md5.h>
 #include <apt-pkg/strutl.h>
@@ -39,7 +40,7 @@ bool CacheDB::ReadyDB(string DB)
       corrupted DB */
    if (DBFailed() == true)
    {
-      _error->Warning("DB was corrupted, file renamed to %s.old",DBFile.c_str());
+      _error->Warning(_("DB was corrupted, file renamed to %s.old"),DBFile.c_str());
       rename(DBFile.c_str(),(DBFile+".old").c_str());
    }
@@ -55,7 +56,7 @@ bool CacheDB::ReadyDB(string DB)
 		     0644,0,0,&Dbp)) != 0)
    {
       Dbp = 0;
-      return _error->Errno("db_open","Unable to open DB2 file %s",DB.c_str());
+      return _error->Errno("db_open",_("Unable to open DB2 file %s"),DB.c_str());
    }

    DBFile = DB;
@@ -95,7 +96,7 @@ bool CacheDB::SetFile(string FileName,struct stat St,FileFd *Fd)
    {
       CurStat.mtime = htonl(St.st_mtime);
       CurStat.Flags = 0;
-      _error->Warning("File date has changed %s",FileName.c_str());
+      _error->Warning(_("File date has changed %s"),FileName.c_str());
    }
    else
@@ -136,7 +137,7 @@ bool CacheDB::LoadControl()
       return false;

    if (Control.Control == 0)
-      return _error->Error("Archive has no control record");
+      return _error->Error(_("Archive has no control record"));

    // Write back the control information
    InitQuery("cl");
@@ -249,11 +250,11 @@ bool CacheDB::Clean()
 #if DB_VERSION_MAJOR >= 2 && DB_VERSION_MINOR >= 7
    DBC *Cursor;
    if ((errno = Dbp->cursor(Dbp,0,&Cursor,0)) != 0)
-      return _error->Error("Unable to get a cursor");
+      return _error->Error(_("Unable to get a cursor"));
 #else
    DBC *Cursor;
    if ((errno = Dbp->cursor(Dbp,0,&Cursor)) != 0)
-      return _error->Error("Unable to get a cursor");
+      return _error->Error(_("Unable to get a cursor"));
 #endif

    DBT Key;
diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc
index b6de47b58..4f2b1d163 100644
--- a/ftparchive/contents.cc
+++ b/ftparchive/contents.cc
@@ -1,6 +1,6 @@
 // -*- mode: cpp; mode: fold -*-
 // Description				/*{{{*/
-// $Id: contents.cc,v 1.3 2001/02/27 04:24:09 jgg Exp $
+// $Id: contents.cc,v 1.4 2003/02/10 07:34:41 doogie Exp $
 /* ######################################################################

    contents - Archive contents generator
@@ -35,6 +35,7 @@
 // Include Files							/*{{{*/
 #include "contents.h"

+#include <apti18n.h>
 #include <apt-pkg/extracttar.h>
 #include <apt-pkg/error.h>
 #include <stdio.h>
@@ -342,7 +343,7 @@ bool ContentsExtract::DoItem(Item &Itm,int &Fd)
	 MaxSize = 512*1024/2;
      char *NewData = (char *)realloc(Data,MaxSize*2);
      if (NewData == 0)
-	 return _error->Error("realloc - Failed to allocate memory");
+	 return _error->Error(_("realloc - Failed to allocate memory"));
      Data = NewData;
      MaxSize *= 2;
   }
@@ -373,7 +374,7 @@ bool ContentsExtract::TakeContents(const void *NewData,unsigned long Length)

      char *NewData = (char *)realloc(Data,MaxSize*2);
      if (NewData == 0)
-	 return _error->Error("realloc - Failed to allocate memory");
+	 return _error->Error(_("realloc - Failed to allocate memory"));
      Data = NewData;
      MaxSize *= 2;
   }
diff --git a/ftparchive/multicompress.cc b/ftparchive/multicompress.cc
index 56a1a41a2..fabd460f8 100644
--- a/ftparchive/multicompress.cc
+++ b/ftparchive/multicompress.cc
@@ -1,6 +1,6 @@
 // -*- mode: cpp; mode: fold -*-
 // Description				/*{{{*/
-// $Id: multicompress.cc,v 1.3 2001/05/29 03:48:27 jgg Exp $
+// $Id: multicompress.cc,v 1.4 2003/02/10 07:34:41 doogie Exp $
 /* ######################################################################

    MultiCompressor
@@ -20,6 +20,7 @@
 #include "multicompress.h"

+#include <apti18n.h>
 #include <apt-pkg/strutl.h>
 #include <apt-pkg/error.h>
 #include <apt-pkg/md5.h>
@@ -71,7 +72,7 @@ MultiCompress::MultiCompress(string Output,string Compress,
      // Hmm.. unknown.
      if (Comp->Name == 0)
      {
-	 _error->Warning("Unknown Compresison Algorithm '%s'",string(Start,I).c_str());
+	 _error->Warning(_("Unknown Compresison Algorithm '%s'"),string(Start,I).c_str());
	 continue;
      }
@@ -101,7 +102,7 @@ MultiCompress::MultiCompress(string Output,string Compress,
    if (Outputs == 0)
    {
-      _error->Error("Compressed output %s needs a compression set",Output.c_str());
+      _error->Error(_("Compressed output %s needs a compression set"),Output.c_str());
       return;
    }
@@ -168,7 +169,7 @@ bool MultiCompress::Start()
    // Create a data pipe
    int Pipe[2] = {-1,-1};
    if (pipe(Pipe) != 0)
-      return _error->Errno("pipe","Failed to create IPC pipe to subprocess");
+      return _error->Errno("pipe",_("Failed to create IPC pipe to subprocess"));
    for (int I = 0; I != 2; I++)
       SetCloseExec(Pipe[I],true);
@@ -194,10 +195,10 @@ bool MultiCompress::Start()
    close(Pipe[0]);
    Input = fdopen(Pipe[1],"w");
    if (Input == 0)
-      return _error->Errno("fdopen","Failed to create FILE*");
+      return _error->Errno("fdopen",_("Failed to create FILE*"));

    if (Outputter == -1)
-      return _error->Errno("fork","Failed to fork");
+      return _error->Errno("fork",_("Failed to fork"));
    return true;
 }
 									/*}}}*/
@@ -211,7 +212,7 @@ bool MultiCompress::Die()
    fclose(Input);
    Input = 0;

-   bool Res = ExecWait(Outputter,"Compress Child",false);
+   bool Res = ExecWait(Outputter,_("Compress Child"),false);
    Outputter = -1;
    return Res;
 }
@@ -234,7 +235,7 @@ bool MultiCompress::Finalize(unsigned long &OutSize)
    {
       struct stat St;
       if (stat(I->Output.c_str(),&St) != 0)
-	 return _error->Error("Internal Error, Failed to create %s",
+	 return _error->Error(_("Internal Error, Failed to create %s"),
			      I->Output.c_str());

      if (I->OldMTime != St.st_mtime)
@@ -285,7 +286,7 @@ bool MultiCompress::OpenCompress(const CompType *Prog,int &Pid,int FileFd,
    // Create a data pipe
    int Pipe[2] = {-1,-1};
    if (pipe(Pipe) != 0)
-      return _error->Errno("pipe","Failed to create subprocess IPC");
+      return _error->Errno("pipe",_("Failed to create subprocess IPC"));
    for (int J = 0; J != 2; J++)
       SetCloseExec(Pipe[J],true);
@@ -320,7 +321,7 @@ bool MultiCompress::OpenCompress(const CompType *Prog,int &Pid,int FileFd,
	 Args[1] = Prog->UnCompArgs;
      Args[2] = 0;
      execvp(Args[0],(char **)Args);
-     cerr << "Failed to exec compressor " << Args[0] << endl;
+     cerr << _("Failed to exec compressor ") << Args[0] << endl;
      _exit(100);
   };
   if (Comp == true)
@@ -359,7 +360,7 @@ bool MultiCompress::CloseOld(int Fd,int Proc)
 {
    close(Fd);
    if (Proc != -1)
-      if (ExecWait(Proc,"decompressor",false) == false)
+      if (ExecWait(Proc,_("decompressor"),false) == false)
	 return false;
    return true;
 }
@@ -402,7 +403,7 @@ bool MultiCompress::Child(int FD)
      {
	 if (write(I->Fd,Buffer,Res) != Res)
	 {
-	    _error->Errno("write","IO to subprocess/file failed");
+	    _error->Errno("write",_("IO to subprocess/file failed"));
	    break;
	 }
      }
@@ -454,7 +455,7 @@ bool MultiCompress::Child(int FD)
      if (Res == 0)
	 break;
      if (Res < 0)
-	 return _error->Errno("read","Failed to read while computing MD5");
+	 return _error->Errno("read",_("Failed to read while computing MD5"));
      NewFileSize += Res;
      OldMD5.Add(Buffer,Res);
   }
@@ -471,7 +472,7 @@ bool MultiCompress::Child(int FD)
      {
	 I->TmpFile.Close();
	 if (unlink(I->TmpFile.Name().c_str()) != 0)
-	    _error->Errno("unlink","Problem unlinking %s",
+	    _error->Errno("unlink",_("Problem unlinking %s"),
			  I->TmpFile.Name().c_str());
      }
      return !_error->PendingError();
@@ -486,7 +487,7 @@ bool MultiCompress::Child(int FD)
      fchmod(I->TmpFile.Fd(),Permissions);

      if (rename(I->TmpFile.Name().c_str(),I->Output.c_str()) != 0)
-	 _error->Errno("rename","Failed to rename %s to %s",
+	 _error->Errno("rename",_("Failed to rename %s to %s"),
		       I->TmpFile.Name().c_str(),I->Output.c_str());
      I->TmpFile.Close();
   }
diff --git a/ftparchive/override.cc b/ftparchive/override.cc
index 40047c5b0..669ce25bf 100644
--- a/ftparchive/override.cc
+++ b/ftparchive/override.cc
@@ -1,6 +1,6 @@
 // -*- mode: cpp; mode: fold -*-
 // Description				/*{{{*/
-// $Id: override.cc,v 1.3 2001/06/26 02:50:27 jgg Exp $
+// $Id: override.cc,v 1.4 2003/02/10 07:34:41 doogie Exp $
 /* ######################################################################

    Override
@@ -16,6 +16,7 @@
 #include "override.h"

+#include <apti18n.h>
 #include <apt-pkg/strutl.h>
 #include <apt-pkg/error.h>
@@ -34,7 +35,7 @@ bool Override::ReadOverride(string File,bool Source)

    FILE *F = fopen(File.c_str(),"r");
    if (F == 0)
-      return _error->Errno("fopen","Unable to open %s",File.c_str());
+      return _error->Errno("fopen",_("Unable to open %s"),File.c_str());

    char Line[500];
    unsigned long Counter = 0;
@@ -60,7 +61,7 @@ bool Override::ReadOverride(string File,bool Source)
      for (; isspace(*End) == 0 && *End != 0; End++);
      if (*End == 0)
      {
-	 _error->Warning("Malformed override %s line %lu #1",File.c_str(),
+	 _error->Warning(_("Malformed override %s line %lu #1"),File.c_str(),
			 Counter);
	 continue;
      }
@@ -74,7 +75,7 @@ bool Override::ReadOverride(string File,bool Source)
      for (; isspace(*End) == 0 && *End != 0; End++);
      if (*End == 0)
      {
-	 _error->Warning("Malformed override %s line %lu #2",File.c_str(),
+	 _error->Warning(_("Malformed override %s line %lu #2"),File.c_str(),
			 Counter);
	 continue;
      }
@@ -88,7 +89,7 @@ bool Override::ReadOverride(string File,bool Source)
      for (; isspace(*End) == 0 && *End != 0; End++);
      if (*End == 0)
      {
-	 _error->Warning("Malformed override %s line %lu #3",File.c_str(),
+	 _error->Warning(_("Malformed override %s line %lu #3"),File.c_str(),
			 Counter);
	 continue;
      }
@@ -127,7 +128,7 @@ bool Override::ReadOverride(string File,bool Source)
   }

   if (ferror(F))
-      _error->Errno("fgets","Failed to read the override file %s",File.c_str());
+      _error->Errno("fgets",_("Failed to read the override file %s"),File.c_str());
   fclose(F);
   return true;
 }
@@ -142,7 +143,7 @@ bool Override::ReadExtraOverride(string File,bool Source)

    FILE *F = fopen(File.c_str(),"r");
    if (F == 0)
-      return _error->Errno("fopen","Unable to open %s",File.c_str());
+      return _error->Errno("fopen",_("Unable to open %s"),File.c_str());

    char Line[500];
    unsigned long Counter = 0;
@@ -166,7 +167,7 @@ bool Override::ReadExtraOverride(string File,bool Source)
      for (; isspace(*End) == 0 && *End != 0; End++);
      if (*End == 0)
      {
-	 _error->Warning("Malformed override %s line %lu #1",File.c_str(),
+	 _error->Warning(_("Malformed override %s line %lu #1"),File.c_str(),
			 Counter);
	 continue;
      }
@@ -178,7 +179,7 @@ bool Override::ReadExtraOverride(string File,bool Source)
      for (; isspace(*End) == 0 && *End != 0; End++);
      if (*End == 0)
      {
-	 _error->Warning("Malformed override %s line %lu #2",File.c_str(),
+	 _error->Warning(_("Malformed override %s line %lu #2"),File.c_str(),
			 Counter);
	 continue;
      }
@@ -191,7 +192,7 @@ bool Override::ReadExtraOverride(string File,bool Source)
      for (; isspace(*(End-1)) && End > Value; End--);
      if (End == Value)
      {
-	 _error->Warning("Malformed override %s line %lu #3",File.c_str(),
+	 _error->Warning(_("Malformed override %s line %lu #3"),File.c_str(),
			 Counter);
	 continue;
      }
@@ -201,7 +202,7 @@ bool Override::ReadExtraOverride(string File,bool Source)
   }

   if (ferror(F))
-      _error->Errno("fgets","Failed to read the override file %s",File.c_str());
+      _error->Errno("fgets",_("Failed to read the override file %s"),File.c_str());
   fclose(F);
   return true;
 }
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index 9f053bd2c..94d88388a 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
@@ -1,6 +1,6 @@
 // -*- mode: cpp; mode: fold -*-
 // Description				/*{{{*/
-// $Id: writer.cc,v 1.6 2002/11/11 04:27:51 doogie Exp $
+// $Id: writer.cc,v 1.7 2003/02/10 07:34:41 doogie Exp $
 /* ######################################################################

    Writer
@@ -17,6 +17,7 @@
 #include "writer.h"

+#include <apti18n.h>
 #include <apt-pkg/strutl.h>
 #include <apt-pkg/error.h>
 #include <apt-pkg/configuration.h>
@@ -74,12 +75,12 @@ int FTWScanner::Scanner(const char *File,const struct stat *sb,int Flag)
    if (Flag == FTW_DNR)
    {
       Owner->NewLine(1);
-      c1out << "W: Unable to read directory " << File << endl;
+      ioprintf(c1out, _("W: Unable to read directory %s\n"), File);
    }
    if (Flag == FTW_NS)
    {
       Owner->NewLine(1);
-      c1out << "W: Unable to stat " << File << endl;
+      ioprintf(c1out, _("W: Unable to stat %s\n"), File);
    }
    if (Flag != FTW_F)
       return 0;
@@ -118,16 +119,16 @@ int FTWScanner::Scanner(const char *File,const struct stat *sb,int Flag)
      bool Type = _error->PopMessage(Err);
      if (Type == true)
-	 cerr << "E: " << Err << endl;
+	 cerr << _("E: ") << Err << endl;
      else
-	 cerr << "W: " << Err << endl;
+	 cerr << _("W: ") << Err << endl;

      if (Err.find(File) != string::npos)
	 SeenPath = true;
   }

   if (SeenPath == false)
-      cerr << "E: Errors apply to file '" << File << "'" << endl;
+      cerr << _("E: Errors apply to file ") << "'" << File << "'" << endl;
   return 0;
 }
@@ -144,7 +145,7 @@ bool FTWScanner::RecursiveScan(string Dir)
    if (InternalPrefix.empty() == true)
    {
       if (realpath(Dir.c_str(),RealPath) == 0)
-	 return _error->Errno("realpath","Failed to resolve %s",Dir.c_str());
+	 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
       InternalPrefix = RealPath;
    }
@@ -156,7 +157,7 @@ bool FTWScanner::RecursiveScan(string Dir)
    if (Res != 0)
    {
       if (_error->PendingError() == false)
-	 _error->Errno("ftw","Tree walking failed");
+	 _error->Errno("ftw",_("Tree walking failed"));
       return false;
    }
@@ -174,14 +175,14 @@ bool FTWScanner::LoadFileList(string Dir,string File)
    if (InternalPrefix.empty() == true)
    {
       if (realpath(Dir.c_str(),RealPath) == 0)
-	 return _error->Errno("realpath","Failed to resolve %s",Dir.c_str());
+	 return _error->Errno("realpath",_("Failed to resolve %s"),Dir.c_str());
       InternalPrefix = RealPath;
    }

    Owner = this;
    FILE *List = fopen(File.c_str(),"r");
    if (List == 0)
-      return _error->Errno("fopen","Failed to open %s",File.c_str());
+      return _error->Errno("fopen",_("Failed to open %s"),File.c_str());

    /* We are a tad tricky here.. We prefix the buffer with the directory
       name, that way if we need a full path with just use line.. Sneaky and
@@ -238,25 +239,26 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
	 cout << endl;

	 NewLine(1);
-	 c1out << " DeLink " << (OriginalPath + InternalPrefix.length())
-	       << " [" << SizeToStr(St.st_size) << "B]" << endl << flush;
+	 ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
+		  SizeToStr(St.st_size).c_str());
+	 c1out << flush;

	 if (NoLinkAct == false)
	 {
	    char OldLink[400];
	    if (readlink(OriginalPath,OldLink,sizeof(OldLink)) == -1)
-	       _error->Errno("readlink","Failed to readlink %s",OriginalPath);
+	       _error->Errno("readlink",_("Failed to readlink %s"),OriginalPath);
	    else
	    {
	       if (unlink(OriginalPath) != 0)
-		  _error->Errno("unlink","Failed to unlink %s",OriginalPath);
+		  _error->Errno("unlink",_("Failed to unlink %s"),OriginalPath);
	       else
	       {
		  if (link(FileName.c_str(),OriginalPath) != 0)
		  {
		     // Panic! Restore the symlink
		     symlink(OldLink,OriginalPath);
-		     return _error->Errno("link","*** Failed to link %s to %s",
+		     return _error->Errno("link",_("*** Failed to link %s to %s"),
					  FileName.c_str(),
					  OriginalPath);
		  }
@@ -266,7 +268,7 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,

	 DeLinkBytes += St.st_size;
	 if (DeLinkBytes/1024 >= DeLinkLimit)
-	    c1out << " DeLink limit of " << SizeToStr(DeLinkBytes) << "B hit." << endl;
+	    ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
      }

      FileName = OriginalPath;
@@ -333,7 +335,7 @@ bool PackagesWriter::DoPackage(string FileName)
    // Stat the file for later
    struct stat St;
    if (fstat(F.Fd(),&St) != 0)
-      return _error->Errno("fstat","Failed to stat %s",FileName.c_str());
+      return _error->Errno("fstat",_("Failed to stat %s"),FileName.c_str());

    // Pull all the data we need form the DB
    string MD5Res;
@@ -353,7 +355,7 @@ bool PackagesWriter::DoPackage(string FileName)
    Override::Item *OverItem = Over.GetItem(Package);

    if (Package.empty() == true)
-      return _error->Error("Archive had no package field");
+      return _error->Error(_("Archive had no package field"));

    // If we need to do any rewriting of the header do it now..
    if (OverItem == 0)
@@ -361,7 +363,7 @@ bool PackagesWriter::DoPackage(string FileName)
      if (NoOverride == false)
      {
	 NewLine(1);
-	 c1out << " " << Package << " has no override entry" << endl;
+	 ioprintf(c1out, _(" %s has no override entry\n"), Package.c_str());
      }

      OverItem = &Tmp;
@@ -404,9 +406,8 @@ bool PackagesWriter::DoPackage(string FileName)
      if (NoOverride == false)
      {
	 NewLine(1);
-	 c1out << " " << Package << " maintainer is " <<
-	       Tags.FindS("Maintainer") << " not " <<
-	       OverItem->OldMaint << endl;
+	 ioprintf(c1out, _(" %s maintainer is %s not %s\n"),
+		  Package.c_str(), Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
      }
   }
@@ -572,7 +573,7 @@ bool SourcesWriter::DoPackage(string FileName)
      if (NoOverride == false)
      {
	 NewLine(1);
-	 c1out << " " << Tags.FindS("Source") << " has no override entry" << endl;
+	 ioprintf(c1out, _(" %s has no override entry\n"), Tags.FindS("Source").c_str());
      }

      OverItem = &Tmp;
@@ -654,9 +655,8 @@ bool SourcesWriter::DoPackage(string FileName)
      if (NoOverride == false)
      {
	 NewLine(1);
-	 c1out << " " << Package << " maintainer is " <<
-	       Tags.FindS("Maintainer") << " not " <<
-	       OverItem->OldMaint << endl;
+	 ioprintf(c1out, _(" %s maintainer is %s not %s\n"), Package.c_str(),
+		  Tags.FindS("Maintainer").c_str(), OverItem->OldMaint.c_str());
      }
   }
   if (NewMaint.empty() == false)
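Where a message was previously assembled from several stream insertions (for example the DeLink and maintainer-mismatch notices in writer.cc), the diff switches to a single printf-style format string passed to ioprintf, so translators see one complete sentence with reorderable placeholders instead of fixed-order fragments. The helper below is a sketch reconstructed from how ioprintf is used in this diff, not from APT's headers; the name, signature, and buffer size are illustrative and the real declaration may differ.

// Sketch of a printf-style ostream helper in the spirit of APT's ioprintf;
// names and buffer size are assumptions for illustration only.
#include <cstdarg>
#include <cstdio>
#include <iostream>
#include <string>

static void ioprintf_sketch(std::ostream &out,const char *format,...)
{
   char S[1024];
   va_list args;
   va_start(args,format);
   vsnprintf(S,sizeof(S),format,args);   // format exactly like printf
   va_end(args);
   out << S;                             // then hand the result to the stream
}

int main()
{
   std::string Package = "apt";
   // One translatable sentence with placeholders a translator can reorder.
   ioprintf_sketch(std::cout," %s has no override entry\n",Package.c_str());
   return 0;
}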