author     Michael Vogt <michael.vogt@ubuntu.com>   2012-01-16 22:17:23 +0100
committer  Michael Vogt <michael.vogt@ubuntu.com>   2012-01-16 22:17:23 +0100
commit     e70c1d067335bfa686fa0c6e75e32afb095e1aaa (patch)
tree       e985e74ed4d613126b24839d710e4b49db753824 /methods
parent     e75aa33384d52635fba502bed628bc68f9cb5066 (diff)
parent     85e1885525977809ff6a3b70abb84d3a63e41817 (diff)
merged from debian-experimental2
Diffstat (limited to 'methods')
-rw-r--r--   methods/cdrom.cc    2
-rw-r--r--   methods/copy.cc     2
-rw-r--r--   methods/file.cc     2
-rw-r--r--   methods/ftp.cc      2
-rw-r--r--   methods/gzip.cc     2
-rw-r--r--   methods/http.cc    24
-rw-r--r--   methods/https.cc    2
-rw-r--r--   methods/rred.cc    91
-rw-r--r--   methods/rsh.cc      2
9 files changed, 41 insertions, 88 deletions
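
The common thread of this merge is that the acquire methods now hand a FileFd straight to Hashes::AddFD() instead of first extracting the raw descriptor and the size. A minimal sketch of the new pattern, assuming the apt-pkg headers are available (the helper name and path are illustrative, not part of the commit):

    #include <apt-pkg/error.h>
    #include <apt-pkg/fileutl.h>
    #include <apt-pkg/hashes.h>
    #include <string>

    // Hash a fetched file as the updated methods do: the FileFd overload
    // of AddFD replaces the old AddFD(Fd.Fd(), Fd.Size()) call.
    bool HashFetchedFile(std::string const &Path, Hashes &Hash)
    {
       FileFd Fd(Path, FileFd::ReadOnly);
       if (_error->PendingError() == true)
          return false;
       return Hash.AddFD(Fd);
    }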
diff --git a/methods/cdrom.cc b/methods/cdrom.cc
index e7114b168..22d4b9164 100644
--- a/methods/cdrom.cc
+++ b/methods/cdrom.cc
@@ -268,7 +268,7 @@ bool CDROMMethod::Fetch(FetchItem *Itm)
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd.Fd(), Fd.Size());
+ Hash.AddFD(Fd);
Res.TakeHashes(Hash);
URIDone(Res);
diff --git a/methods/copy.cc b/methods/copy.cc
index f8d58e479..e81d0022b 100644
--- a/methods/copy.cc
+++ b/methods/copy.cc
@@ -85,7 +85,7 @@ bool CopyMethod::Fetch(FetchItem *Itm)
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd.Fd(), Fd.Size());
+ Hash.AddFD(Fd);
Res.TakeHashes(Hash);
URIDone(Res);
diff --git a/methods/file.cc b/methods/file.cc
index 5025c996d..7ed4e6f60 100644
--- a/methods/file.cc
+++ b/methods/file.cc
@@ -83,7 +83,7 @@ bool FileMethod::Fetch(FetchItem *Itm)
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd.Fd(), Fd.Size());
+ Hash.AddFD(Fd);
Res.TakeHashes(Hash);
URIDone(Res);
return true;
diff --git a/methods/ftp.cc b/methods/ftp.cc
index 2ca0ac6f7..ad8a7b828 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -868,7 +868,7 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
if (Resume != 0)
{
- if (Hash.AddFD(To.Fd(),Resume) == false)
+ if (Hash.AddFD(To,Resume) == false)
{
_error->Errno("read",_("Problem hashing file"));
return false;
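
When a transfer is resumed, ftp.cc (and rsh.cc at the end of this diff) only needs to hash the part of the file that is already on disk; the optional length argument of the FileFd overload covers that. A hedged sketch with a hypothetical helper name:

    #include <apt-pkg/fileutl.h>
    #include <apt-pkg/hashes.h>

    // Seed the hash with the first Resume bytes of the destination file,
    // mirroring the AddFD(To, Resume) call in the hunk above.
    bool SeedHashForResume(FileFd &To, Hashes &Hash, unsigned long long Resume)
    {
       if (Resume == 0)
          return true;               // nothing downloaded yet, nothing to hash
       return Hash.AddFD(To, Resume);
    }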
diff --git a/methods/gzip.cc b/methods/gzip.cc
index a51497948..6ab6548ef 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -48,7 +48,7 @@ bool GzipMethod::Fetch(FetchItem *Itm)
URIStart(Res);
// Open the source and destination files
- FileFd From(Path,FileFd::ReadOnlyGzip);
+ FileFd From(Path,FileFd::ReadOnly, FileFd::Gzip);
if(From.FileSize() == 0)
return _error->Error(_("Empty files can't be valid archives"));
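
The gzip method now names the compressor in a separate argument instead of using the combined ReadOnlyGzip open mode. A minimal sketch of opening a compressed file that way, assuming apt-pkg is available (the helper name and path are illustrative):

    #include <apt-pkg/error.h>
    #include <apt-pkg/fileutl.h>
    #include <string>

    // Open mode and compressor are passed separately; the empty-file
    // check mirrors the one in the hunk above.
    bool CheckCompressedSource(std::string const &Path)
    {
       FileFd From(Path, FileFd::ReadOnly, FileFd::Gzip);
       if (_error->PendingError() == true)
          return false;
       if (From.FileSize() == 0)
          return _error->Error("Empty files can't be valid archives");
       return true;
    }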
diff --git a/methods/http.cc b/methods/http.cc
index 0d81c73ed..b8ed43cd2 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -1007,31 +1007,21 @@ HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv)
FailFile.c_str(); // Make sure we dont do a malloc in the signal handler
FailFd = File->Fd();
FailTime = Srv->Date;
-
- // Set the expected size
- if (Srv->StartPos >= 0)
- {
- Res.ResumePoint = Srv->StartPos;
- if (ftruncate(File->Fd(),Srv->StartPos) < 0)
- _error->Errno("ftruncate", _("Failed to truncate file"));
- }
-
- // Set the start point
- lseek(File->Fd(),0,SEEK_END);
delete Srv->In.Hash;
Srv->In.Hash = new Hashes;
-
- // Fill the Hash if the file is non-empty (resume)
- if (Srv->StartPos > 0)
+
+ // Set the expected size and read file for the hashes
+ if (Srv->StartPos >= 0)
{
- lseek(File->Fd(),0,SEEK_SET);
- if (Srv->In.Hash->AddFD(File->Fd(),Srv->StartPos) == false)
+ Res.ResumePoint = Srv->StartPos;
+ File->Truncate(Srv->StartPos);
+
+ if (Srv->In.Hash->AddFD(*File,Srv->StartPos) == false)
{
_error->Errno("read",_("Problem hashing file"));
return ERROR_NOT_FROM_SERVER;
}
- lseek(File->Fd(),0,SEEK_END);
}
SetNonBlock(File->Fd(),true);
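
For a resumed HTTP transfer the handler now truncates through the FileFd and re-hashes the kept prefix with the FileFd overload, instead of juggling ftruncate() and lseek() on the raw descriptor. A condensed sketch of that sequence, lifted out of its original class context (the helper name is illustrative):

    #include <apt-pkg/error.h>
    #include <apt-pkg/fileutl.h>
    #include <apt-pkg/hashes.h>

    // Cut the partially downloaded file back to the agreed start position
    // and feed that prefix into the hash, as the hunk above does.
    bool PrepareResume(FileFd &File, Hashes &Hash, unsigned long long StartPos)
    {
       if (File.Truncate(StartPos) == false)
          return _error->Errno("ftruncate", "Failed to truncate file");
       if (Hash.AddFD(File, StartPos) == false)
          return _error->Errno("read", "Problem hashing file");
       return true;
    }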
diff --git a/methods/https.cc b/methods/https.cc
index 335699907..317c8a587 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -314,7 +314,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// take hashes
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd.Fd(), Fd.Size());
+ Hash.AddFD(Fd);
Res.TakeHashes(Hash);
// keep apt updated
diff --git a/methods/rred.cc b/methods/rred.cc
index ef00fcaa3..e37a12ed9 100644
--- a/methods/rred.cc
+++ b/methods/rred.cc
@@ -37,13 +37,10 @@ class RredMethod : public pkgAcqMethod {
// return values
enum State {ED_OK, ED_ORDERING, ED_PARSER, ED_FAILURE, MMAP_FAILED};
- State applyFile(gzFile &ed_cmds, FILE *in_file, FILE *out_file,
+ State applyFile(FileFd &ed_cmds, FileFd &in_file, FileFd &out_file,
unsigned long &line, char *buffer, Hashes *hash) const;
- void ignoreLineInFile(FILE *fin, char *buffer) const;
- void ignoreLineInFile(gzFile &fin, char *buffer) const;
- void copyLinesFromFileToFile(FILE *fin, FILE *fout, unsigned int lines,
- Hashes *hash, char *buffer) const;
- void copyLinesFromFileToFile(gzFile &fin, FILE *fout, unsigned int lines,
+ void ignoreLineInFile(FileFd &fin, char *buffer) const;
+ void copyLinesFromFileToFile(FileFd &fin, FileFd &fout, unsigned int lines,
Hashes *hash, char *buffer) const;
State patchFile(FileFd &Patch, FileFd &From, FileFd &out_file, Hashes *hash) const;
@@ -72,10 +69,10 @@ public:
* \param hash the created file for correctness
* \return the success State of the ed command executor
*/
-RredMethod::State RredMethod::applyFile(gzFile &ed_cmds, FILE *in_file, FILE *out_file,
+RredMethod::State RredMethod::applyFile(FileFd &ed_cmds, FileFd &in_file, FileFd &out_file,
unsigned long &line, char *buffer, Hashes *hash) const {
// get the current command and parse it
- if (gzgets(ed_cmds, buffer, BUF_SIZE) == NULL) {
+ if (ed_cmds.ReadLine(buffer, BUF_SIZE) == NULL) {
if (Debug == true)
std::clog << "rred: encounter end of file - we can start patching now." << std::endl;
line = 0;
@@ -130,7 +127,7 @@ RredMethod::State RredMethod::applyFile(gzFile &ed_cmds, FILE *in_file, FILE *ou
unsigned char mode = *idx;
// save the current position
- unsigned const long pos = gztell(ed_cmds);
+ unsigned const long long pos = ed_cmds.Tell();
// if this is add or change then go to the next full stop
unsigned int data_length = 0;
@@ -164,7 +161,7 @@ RredMethod::State RredMethod::applyFile(gzFile &ed_cmds, FILE *in_file, FILE *ou
// include data from ed script
if (mode == MODE_CHANGED || mode == MODE_ADDED) {
- gzseek(ed_cmds, pos, SEEK_SET);
+ ed_cmds.Seek(pos);
copyLinesFromFileToFile(ed_cmds, out_file, data_length, hash, buffer);
}
@@ -178,44 +175,24 @@ RredMethod::State RredMethod::applyFile(gzFile &ed_cmds, FILE *in_file, FILE *ou
return ED_OK;
}
/*}}}*/
-void RredMethod::copyLinesFromFileToFile(FILE *fin, FILE *fout, unsigned int lines,/*{{{*/
- Hashes *hash, char *buffer) const {
- while (0 < lines--) {
- do {
- fgets(buffer, BUF_SIZE, fin);
- size_t const written = fwrite(buffer, 1, strlen(buffer), fout);
- hash->Add((unsigned char*)buffer, written);
- } while (strlen(buffer) == (BUF_SIZE - 1) &&
- buffer[BUF_SIZE - 2] != '\n');
- }
-}
- /*}}}*/
-void RredMethod::copyLinesFromFileToFile(gzFile &fin, FILE *fout, unsigned int lines,/*{{{*/
+void RredMethod::copyLinesFromFileToFile(FileFd &fin, FileFd &fout, unsigned int lines,/*{{{*/
Hashes *hash, char *buffer) const {
while (0 < lines--) {
do {
- gzgets(fin, buffer, BUF_SIZE);
- size_t const written = fwrite(buffer, 1, strlen(buffer), fout);
- hash->Add((unsigned char*)buffer, written);
+ fin.ReadLine(buffer, BUF_SIZE);
+ unsigned long long const towrite = strlen(buffer);
+ fout.Write(buffer, towrite);
+ hash->Add((unsigned char*)buffer, towrite);
} while (strlen(buffer) == (BUF_SIZE - 1) &&
buffer[BUF_SIZE - 2] != '\n');
}
}
/*}}}*/
-void RredMethod::ignoreLineInFile(FILE *fin, char *buffer) const { /*{{{*/
- fgets(buffer, BUF_SIZE, fin);
+void RredMethod::ignoreLineInFile(FileFd &fin, char *buffer) const { /*{{{*/
+ fin.ReadLine(buffer, BUF_SIZE);
while (strlen(buffer) == (BUF_SIZE - 1) &&
buffer[BUF_SIZE - 2] != '\n') {
- fgets(buffer, BUF_SIZE, fin);
- buffer[0] = ' ';
- }
-}
- /*}}}*/
-void RredMethod::ignoreLineInFile(gzFile &fin, char *buffer) const { /*{{{*/
- gzgets(fin, buffer, BUF_SIZE);
- while (strlen(buffer) == (BUF_SIZE - 1) &&
- buffer[BUF_SIZE - 2] != '\n') {
- gzgets(fin, buffer, BUF_SIZE);
+ fin.ReadLine(buffer, BUF_SIZE);
buffer[0] = ' ';
}
}
@@ -223,21 +200,18 @@ void RredMethod::ignoreLineInFile(gzFile &fin, char *buffer) const { /*{{{*/
RredMethod::State RredMethod::patchFile(FileFd &Patch, FileFd &From, /*{{{*/
FileFd &out_file, Hashes *hash) const {
char buffer[BUF_SIZE];
- FILE* fFrom = fdopen(From.Fd(), "r");
- gzFile fPatch = Patch.gzFd();
- FILE* fTo = fdopen(out_file.Fd(), "w");
/* we do a tail recursion to read the commands in the right order */
unsigned long line = -1; // assign highest possible value
- State const result = applyFile(fPatch, fFrom, fTo, line, buffer, hash);
+ State const result = applyFile(Patch, From, out_file, line, buffer, hash);
/* read the rest from infile */
if (result == ED_OK) {
- while (fgets(buffer, BUF_SIZE, fFrom) != NULL) {
- size_t const written = fwrite(buffer, 1, strlen(buffer), fTo);
- hash->Add((unsigned char*)buffer, written);
+ while (From.ReadLine(buffer, BUF_SIZE) != NULL) {
+ unsigned long long const towrite = strlen(buffer);
+ out_file.Write(buffer, towrite);
+ hash->Add((unsigned char*)buffer, towrite);
}
- fflush(fTo);
}
return result;
}
@@ -258,23 +232,12 @@ struct EdCommand {
RredMethod::State RredMethod::patchMMap(FileFd &Patch, FileFd &From, /*{{{*/
FileFd &out_file, Hashes *hash) const {
#ifdef _POSIX_MAPPED_FILES
- MMap ed_cmds(MMap::ReadOnly);
- if (Patch.gzFd() != NULL) {
- unsigned long long mapSize = Patch.Size();
- DynamicMMap* dyn = new DynamicMMap(0, mapSize, 0);
- if (dyn->validData() == false) {
- delete dyn;
- return MMAP_FAILED;
- }
- dyn->AddSize(mapSize);
- gzread(Patch.gzFd(), dyn->Data(), mapSize);
- ed_cmds = *dyn;
- } else
- ed_cmds = MMap(Patch, MMap::ReadOnly);
-
+ MMap ed_cmds(Patch, MMap::ReadOnly);
MMap in_file(From, MMap::ReadOnly);
- if (ed_cmds.Size() == 0 || in_file.Size() == 0)
+ unsigned long long const ed_size = ed_cmds.Size();
+ unsigned long long const in_size = in_file.Size();
+ if (ed_size == 0 || in_size == 0)
return MMAP_FAILED;
EdCommand* commands = 0;
@@ -283,10 +246,10 @@ RredMethod::State RredMethod::patchMMap(FileFd &Patch, FileFd &From, /*{{{*/
const char* begin = (char*) ed_cmds.Data();
const char* end = begin;
- const char* ed_end = (char*) ed_cmds.Data() + ed_cmds.Size();
+ const char* ed_end = (char*) ed_cmds.Data() + ed_size;
const char* input = (char*) in_file.Data();
- const char* input_end = (char*) in_file.Data() + in_file.Size();
+ const char* input_end = (char*) in_file.Data() + in_size;
size_t i;
@@ -489,7 +452,7 @@ bool RredMethod::Fetch(FetchItem *Itm) /*{{{*/
// Open the source and destination files (the d'tor of FileFd will do
// the cleanup/closing of the fds)
FileFd From(Path,FileFd::ReadOnly);
- FileFd Patch(Path+".ed",FileFd::ReadOnlyGzip);
+ FileFd Patch(Path+".ed",FileFd::ReadOnly, FileFd::Gzip);
FileFd To(Itm->DestFile,FileFd::WriteAtomic);
To.EraseOnFailure();
if (_error->PendingError() == true)
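
rred previously had to keep parallel FILE* and gzFile code paths; with FileFd::ReadLine() a single loop now copies and hashes lines whether or not the patch is gzip-compressed. A minimal sketch of that loop; BUF_SIZE here is an assumed stand-in for the constant rred.cc defines:

    #include <cstring>
    #include <apt-pkg/fileutl.h>
    #include <apt-pkg/hashes.h>

    static const size_t BUF_SIZE = 1024;   // assumption: stands in for rred.cc's own BUF_SIZE

    // Copy the remaining lines of From into To and hash them on the way:
    // one loop instead of separate fgets() and gzgets() variants.
    static void CopyAndHashRest(FileFd &From, FileFd &To, Hashes *hash)
    {
       char buffer[BUF_SIZE];
       while (From.ReadLine(buffer, sizeof(buffer)) != NULL)
       {
          unsigned long long const towrite = strlen(buffer);
          To.Write(buffer, towrite);
          hash->Add((unsigned char*)buffer, towrite);
       }
    }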
diff --git a/methods/rsh.cc b/methods/rsh.cc
index da9777fc4..d249ae961 100644
--- a/methods/rsh.cc
+++ b/methods/rsh.cc
@@ -305,7 +305,7 @@ bool RSHConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
return false;
if (Resume != 0) {
- if (Hash.AddFD(To.Fd(),Resume) == false) {
+ if (Hash.AddFD(To,Resume) == false) {
_error->Errno("read",_("Problem hashing file"));
return false;
}