Diffstat (limited to 'methods')
-rw-r--r--  methods/copy.cc       |  9
-rw-r--r--  methods/ftp.cc        | 25
-rw-r--r--  methods/ftp.h         |  3
-rw-r--r--  methods/gpgv.cc       | 58
-rw-r--r--  methods/gzip.cc       | 11
-rw-r--r--  methods/http.cc       | 15
-rw-r--r--  methods/http.h        |  4
-rw-r--r--  methods/http_main.cc  |  4
-rw-r--r--  methods/https.cc      | 34
-rw-r--r--  methods/https.h       | 14
-rw-r--r--  methods/rred.cc       |  4
-rw-r--r--  methods/server.cc     | 72
-rw-r--r--  methods/server.h      | 10
13 files changed, 196 insertions, 67 deletions
diff --git a/methods/copy.cc b/methods/copy.cc
index 40f8f85ec..a23c0316c 100644
--- a/methods/copy.cc
+++ b/methods/copy.cc
@@ -67,6 +67,14 @@ bool CopyMethod::Fetch(FetchItem *Itm)
Res.LastModified = Buf.st_mtime;
Res.IMSHit = false;
URIStart(Res);
+
+ // when the files are identical, just compute the hashes
+ if(File == Itm->DestFile)
+ {
+ CalculateHashes(Res);
+ URIDone(Res);
+ return true;
+ }
// just calc the hashes if the source and destination are identical
if (File == Itm->DestFile)
@@ -116,5 +124,6 @@ int main()
setlocale(LC_ALL, "");
CopyMethod Mth;
+
return Mth.Run();
}
diff --git a/methods/ftp.cc b/methods/ftp.cc
index 66787a7be..0504e5872 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -75,9 +75,10 @@ time_t FtpMethod::FailTime = 0;
// FTPConn::FTPConn - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
-FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
+FTPConn::FTPConn(URI Srv) : Len(0), ServerFd(-1), DataFd(-1),
DataListenFd(-1), ServerName(Srv),
- ForceExtended(false), TryPassive(true)
+ ForceExtended(false), TryPassive(true),
+ PeerAddrLen(0), ServerAddrLen(0)
{
Debug = _config->FindB("Debug::Acquire::Ftp",false);
PasvAddr = 0;
@@ -848,7 +849,8 @@ bool FTPConn::Finalize()
/* This opens a data connection, sends REST and RETR and then
transfers the file over. */
bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &Hash,bool &Missing)
+ Hashes &Hash,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner)
{
Missing = false;
if (CreateDataFd() == false)
@@ -921,7 +923,14 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
{
Close();
return false;
- }
+ }
+
+ if (MaximumSize > 0 && To.Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ To.Tell(), MaximumSize);
+ }
}
// All done
@@ -979,6 +988,10 @@ bool FtpMethod::Configuration(string Message)
return false;
TimeOut = _config->FindI("Acquire::Ftp::Timeout",TimeOut);
+
+ // no more active ftp, sorry
+ DropPrivsOrDie();
+
return true;
}
/*}}}*/
@@ -1062,7 +1075,7 @@ bool FtpMethod::Fetch(FetchItem *Itm)
FailFd = Fd.Fd();
bool Missing;
- if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing) == false)
+ if (Server->Get(File,Fd,Res.ResumePoint,Hash,Missing,Itm->MaximumSize,this) == false)
{
Fd.Close();
@@ -1131,6 +1144,6 @@ int main(int, const char *argv[])
}
FtpMethod Mth;
-
+
return Mth.Run();
}
diff --git a/methods/ftp.h b/methods/ftp.h
index dd92f0086..2efd28ec6 100644
--- a/methods/ftp.h
+++ b/methods/ftp.h
@@ -62,7 +62,8 @@ class FTPConn
bool Size(const char *Path,unsigned long long &Size);
bool ModTime(const char *Path, time_t &Time);
bool Get(const char *Path,FileFd &To,unsigned long long Resume,
- Hashes &MD5,bool &Missing);
+ Hashes &MD5,bool &Missing, unsigned long long MaximumSize,
+ pkgAcqMethod *Owner);
FTPConn(URI Srv);
~FTPConn();
diff --git a/methods/gpgv.cc b/methods/gpgv.cc
index ae521a2ed..41f138be6 100644
--- a/methods/gpgv.cc
+++ b/methods/gpgv.cc
@@ -5,6 +5,7 @@
#include <apt-pkg/error.h>
#include <apt-pkg/gpgv.h>
#include <apt-pkg/strutl.h>
+#include <apt-pkg/fileutl.h>
#include <ctype.h>
#include <errno.h>
@@ -43,12 +44,22 @@ class GPGVMethod : public pkgAcqMethod
protected:
virtual bool Fetch(FetchItem *Itm);
-
+ virtual bool Configuration(string Message);
public:
GPGVMethod() : pkgAcqMethod("1.0",SingleInstance | SendConfig) {};
};
+bool GPGVMethod::Configuration(string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
+
string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
vector<string> &GoodSigners,
vector<string> &BadSigners,
@@ -74,34 +85,13 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
FILE *pipein = fdopen(fd[0], "r");
- // Loop over the output of gpgv, and check the signatures.
- size_t buffersize = 64;
- char *buffer = (char *) malloc(buffersize);
- size_t bufferoff = 0;
+ // Loop over the output of apt-key (which really is gnupg), and check the signatures.
+ size_t buffersize = 0;
+ char *buffer = NULL;
while (1)
{
- int c;
-
- // Read a line. Sigh.
- while ((c = getc(pipein)) != EOF && c != '\n')
- {
- if (bufferoff == buffersize)
- {
- char* newBuffer = (char *) realloc(buffer, buffersize *= 2);
- if (newBuffer == NULL)
- {
- free(buffer);
- return "Couldn't allocate a buffer big enough for reading";
- }
- buffer = newBuffer;
- }
- *(buffer+bufferoff) = c;
- bufferoff++;
- }
- if (bufferoff == 0 && c == EOF)
- break;
- *(buffer+bufferoff) = '\0';
- bufferoff = 0;
+ if (getline(&buffer, &buffersize, pipein) == -1)
+ break;
if (Debug == true)
std::clog << "Read: " << buffer << std::endl;
@@ -115,7 +105,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
std::clog << "Got BADSIG! " << std::endl;
BadSigners.push_back(string(buffer+sizeof(GNUPGPREFIX)));
}
-
+
if (strncmp(buffer, GNUPGNOPUBKEY, sizeof(GNUPGNOPUBKEY)-1) == 0)
{
if (Debug == true)
@@ -159,7 +149,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
waitpid(pid, &status, 0);
if (Debug == true)
{
- std::clog << "gpgv exited\n";
+ ioprintf(std::clog, "gpgv exited with status %i\n", WEXITSTATUS(status));
}
if (WEXITSTATUS(status) == 0)
@@ -171,7 +161,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
else if (WEXITSTATUS(status) == 1)
return _("At least one invalid signature was encountered.");
else if (WEXITSTATUS(status) == 111)
- return _("Could not execute 'gpgv' to verify signature (is gpgv installed?)");
+ return _("Could not execute 'apt-key' to verify signature (is gnupg installed?)");
else if (WEXITSTATUS(status) == 112)
{
// acquire system checks for "NODATA" to generate GPG errors (the others are only warnings)
@@ -181,7 +171,7 @@ string GPGVMethod::VerifyGetSigners(const char *file, const char *outfile,
return errmsg;
}
else
- return _("Unknown error executing gpgv");
+ return _("Unknown error executing apt-key");
}
bool GPGVMethod::Fetch(FetchItem *Itm)
@@ -199,7 +189,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
Res.Filename = Itm->DestFile;
URIStart(Res);
- // Run gpgv on file, extract contents and get the key ID of the signer
+ // Run apt-key on file, extract contents and get the key ID of the signer
string msg = VerifyGetSigners(Path.c_str(), Itm->DestFile.c_str(),
GoodSigners, BadSigners, WorthlessSigners,
NoPubKeySigners);
@@ -251,7 +241,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
if (_config->FindB("Debug::Acquire::gpgv", false))
{
- std::clog << "gpgv succeeded\n";
+ std::clog << "apt-key succeeded\n";
}
return true;
@@ -261,7 +251,7 @@ bool GPGVMethod::Fetch(FetchItem *Itm)
int main()
{
setlocale(LC_ALL, "");
-
+
GPGVMethod Mth;
return Mth.Run();
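
The rewritten read loop above relies on POSIX getline(3), which grows the buffer itself and returns -1 at end of input, so the hand-rolled realloc logic can go away. A stand-alone sketch of that pattern (illustrative only, not apt code; read_lines is a made-up name):

    // getline(3) allocates and grows 'buffer' as needed; it returns -1 on
    // EOF or error, so the caller frees the buffer exactly once, at the end.
    #include <cstdio>
    #include <cstdlib>

    static void read_lines(FILE *pipein)
    {
       char *buffer = NULL;
       size_t buffersize = 0;
       while (getline(&buffer, &buffersize, pipein) != -1)
          fputs(buffer, stdout);   // each line still carries its trailing '\n'
       free(buffer);
    }
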
diff --git a/methods/gzip.cc b/methods/gzip.cc
index df3f8828f..387c05f2e 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -33,12 +33,22 @@ const char *Prog;
class GzipMethod : public pkgAcqMethod
{
virtual bool Fetch(FetchItem *Itm);
+ virtual bool Configuration(std::string Message);
public:
GzipMethod() : pkgAcqMethod("1.1",SingleInstance | SendConfig) {};
};
+bool GzipMethod::Configuration(std::string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
// GzipMethod::Fetch - Decompress the passed URI /*{{{*/
// ---------------------------------------------------------------------
@@ -139,5 +149,6 @@ int main(int, char *argv[])
++Prog;
GzipMethod Mth;
+
return Mth.Run();
}
diff --git a/methods/http.cc b/methods/http.cc
index 1b996db98..ad1347d36 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -64,7 +64,8 @@ const unsigned int CircleBuf::BW_HZ=10;
// CircleBuf::CircleBuf - Circular input buffer /*{{{*/
// ---------------------------------------------------------------------
/* */
-CircleBuf::CircleBuf(unsigned long long Size) : Size(Size), Hash(0)
+CircleBuf::CircleBuf(unsigned long long Size)
+ : Size(Size), Hash(0), TotalWriten(0)
{
Buf = new unsigned char[Size];
Reset();
@@ -80,6 +81,7 @@ void CircleBuf::Reset()
InP = 0;
OutP = 0;
StrPos = 0;
+ TotalWriten = 0;
MaxGet = (unsigned long long)-1;
OutQueue = string();
if (Hash != 0)
@@ -217,6 +219,8 @@ bool CircleBuf::Write(int Fd)
return false;
}
+
+ TotalWriten += Res;
if (Hash != 0)
Hash->Add(Buf + (OutP%Size),Res);
@@ -653,6 +657,13 @@ bool HttpServerState::Go(bool ToFile, FileFd * const File)
return _error->Errno("write",_("Error writing to output file"));
}
+ if (MaximumSize > 0 && File && File->Tell() > MaximumSize)
+ {
+ Owner->SetFailReason("MaximumSizeExceeded");
+ return _error->Error("Writing more data than expected (%llu > %llu)",
+ File->Tell(), MaximumSize);
+ }
+
// Handle commands from APT
if (FD_ISSET(STDIN_FILENO,&rfds))
{
@@ -761,6 +772,8 @@ bool HttpMethod::Configuration(string Message)
if (ServerMethod::Configuration(Message) == false)
return false;
+ DropPrivsOrDie();
+
AllowRedirect = _config->FindB("Acquire::http::AllowRedirect",true);
PipelineDepth = _config->FindI("Acquire::http::Pipeline-Depth",
PipelineDepth);
diff --git a/methods/http.h b/methods/http.h
index 1df9fa07d..40a88a7be 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -63,6 +63,8 @@ class CircleBuf
public:
Hashes *Hash;
+ // total amount of data that got written so far
+ unsigned long long TotalWriten;
// Read data in
bool Read(int Fd);
@@ -81,8 +83,8 @@ class CircleBuf
bool ReadSpace() const {return Size - (InP - OutP) > 0;};
bool WriteSpace() const {return InP - OutP > 0;};
- // Dump everything
void Reset();
+ // Dump everything
void Stats();
CircleBuf(unsigned long long Size);
diff --git a/methods/http_main.cc b/methods/http_main.cc
index 3b346a514..cd52c42e8 100644
--- a/methods/http_main.cc
+++ b/methods/http_main.cc
@@ -1,5 +1,6 @@
#include <config.h>
-
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/error.h>
#include <signal.h>
#include "http.h"
@@ -13,5 +14,6 @@ int main()
signal(SIGPIPE, SIG_IGN);
HttpMethod Mth;
+
return Mth.Loop();
}
diff --git a/methods/https.cc b/methods/https.cc
index 3a5981b58..37a8ff5fd 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -37,6 +37,16 @@
/*}}}*/
using namespace std;
+bool HttpsMethod::Configuration(std::string Message)
+{
+ if (pkgAcqMethod::Configuration(Message) == false)
+ return false;
+
+ DropPrivsOrDie();
+
+ return true;
+}
+
size_t
HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
{
@@ -85,21 +95,33 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
if (me->Server->JunkSize != 0)
return buffer_size;
- if (me->ReceivedData == false)
+ if (me->Server->ReceivedData == false)
{
me->URIStart(me->Res);
- me->ReceivedData = true;
+ me->Server->ReceivedData = true;
}
if(me->File->Write(buffer, buffer_size) != true)
- return false;
+ return 0;
+
+ if(me->Queue->MaximumSize > 0)
+ {
+ unsigned long long const TotalWritten = me->File->Tell();
+ if (TotalWritten > me->Queue->MaximumSize)
+ {
+ me->SetFailReason("MaximumSizeExceeded");
+ _error->Error("Writing more data than expected (%llu > %llu)",
+ TotalWritten, me->Queue->MaximumSize);
+ return 0;
+ }
+ }
return buffer_size;
}
int
HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/,
- double /*ultotal*/, double /*ulnow*/)
+ double /*ultotal*/, double /*ulnow*/)
{
HttpsMethod *me = (HttpsMethod *)clientp;
if(dltotal > 0 && me->Res.Size == 0) {
@@ -112,6 +134,7 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double /*dlnow*/,
HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * /*Owner*/) : ServerState(Srv, NULL)
{
TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
+ ReceivedData = false;
Reset();
}
/*}}}*/
@@ -179,11 +202,10 @@ void HttpsMethod::SetupProxy() /*{{{*/
bool HttpsMethod::Fetch(FetchItem *Itm)
{
struct stat SBuf;
- struct curl_slist *headers=NULL;
+ struct curl_slist *headers=NULL;
char curl_errorstr[CURL_ERROR_SIZE];
URI Uri = Itm->Uri;
string remotehost = Uri.Host;
- ReceivedData = false;
// TODO:
// - http::Pipeline-Depth
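
In write_data above, the size check sits inside a libcurl write callback; per the CURLOPT_WRITEFUNCTION contract, returning anything other than the number of bytes handed in (here 0) makes curl abort the transfer, which the caller sees as CURLE_WRITE_ERROR. A minimal sketch of that pattern, with made-up Sink/write_cb names and a hypothetical max_size limit:

    #include <cstdio>
    #include <curl/curl.h>

    struct Sink
    {
       FILE *out;
       unsigned long long written;
       unsigned long long max_size;   // 0 means "no limit"
    };

    static size_t write_cb(void *buffer, size_t size, size_t nmemb, void *userp)
    {
       Sink *s = static_cast<Sink *>(userp);
       size_t const n = size * nmemb;
       if (fwrite(buffer, 1, n, s->out) != n)
          return 0;                                    // write failure: abort
       s->written += n;
       if (s->max_size > 0 && s->written > s->max_size)
          return 0;                                    // oversized: abort
       return n;                                       // consumed everything
    }
    // wired up via curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_cb)
    // and curl_easy_setopt(curl, CURLOPT_WRITEDATA, &sink)
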
diff --git a/methods/https.h b/methods/https.h
index 411b71440..6917a6ff6 100644
--- a/methods/https.h
+++ b/methods/https.h
@@ -50,6 +50,8 @@ class HttpsServerState : public ServerState
HttpsServerState(URI Srv, HttpsMethod *Owner);
virtual ~HttpsServerState() {Close();};
+
+ bool ReceivedData;
};
class HttpsMethod : public pkgAcqMethod
@@ -58,22 +60,22 @@ class HttpsMethod : public pkgAcqMethod
static const int DL_MIN_SPEED = 10;
virtual bool Fetch(FetchItem *);
+ virtual bool Configuration(std::string Message);
+
static size_t parse_header(void *buffer, size_t size, size_t nmemb, void *userp);
static size_t write_data(void *buffer, size_t size, size_t nmemb, void *userp);
- static int progress_callback(void *clientp, double dltotal, double dlnow,
- double ultotal, double ulnow);
+ static int progress_callback(void *clientp, double dltotal, double dlnow,
+ double ultotal, double ulnow);
void SetupProxy();
CURL *curl;
FetchResult Res;
HttpsServerState *Server;
- bool ReceivedData;
public:
FileFd *File;
-
- HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), File(NULL)
+
+ HttpsMethod() : pkgAcqMethod("1.2",Pipeline | SendConfig), Server(NULL), File(NULL)
{
- File = 0;
curl = curl_easy_init();
};
diff --git a/methods/rred.cc b/methods/rred.cc
index cabb3c456..774b58a40 100644
--- a/methods/rred.cc
+++ b/methods/rred.cc
@@ -150,11 +150,11 @@ class FileChanges {
std::list<struct Change>::iterator where;
size_t pos; // line number is as far left of iterator as possible
- bool pos_is_okay(void)
+ bool pos_is_okay(void) const
{
#ifdef POSDEBUG
size_t cpos = 0;
- std::list<struct Change>::iterator x;
+ std::list<struct Change>::const_iterator x;
for (x = changes.begin(); x != where; ++x) {
assert(x != changes.end());
cpos += x->offset + x->add_cnt;
diff --git a/methods/server.cc b/methods/server.cc
index e321e0230..c17f27f73 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -328,10 +328,10 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
failure */
if (Server->Result < 200 || Server->Result >= 300)
{
- char err[255];
- snprintf(err,sizeof(err)-1,"HttpError%i",Server->Result);
+ std::string err;
+ strprintf(err, "HttpError%u", Server->Result);
SetFailReason(err);
- _error->Error("%u %s",Server->Result,Server->Code);
+ _error->Error("%u %s", Server->Result, Server->Code);
if (Server->HaveContent == true)
return ERROR_WITH_CONTENT_PAGE;
return ERROR_UNRECOVERABLE;
@@ -397,9 +397,16 @@ bool ServerMethod::Fetch(FetchItem *)
for (FetchItem *I = Queue; I != 0 && Depth < (signed)PipelineDepth;
I = I->Next, Depth++)
{
- // If pipelining is disabled, we only queue 1 request
- if (Server->Pipeline == false && Depth >= 0)
- break;
+ if (Depth >= 0)
+ {
+ // If pipelining is disabled, we only queue 1 request
+ if (Server->Pipeline == false)
+ break;
+ // if we have no hashes, do at most one such request
+ // as we can't fixup pipeling misbehaviors otherwise
+ else if (I->ExpectedHashes.usable() == false)
+ break;
+ }
// Make sure we stick with the same server
if (Server->Comp(I->Uri) == false)
@@ -529,6 +536,13 @@ int ServerMethod::Loop()
// Run the data
bool Result = true;
+
+ // ensure we don't fetch too much
+ // we could do "Server->MaximumSize = Queue->MaximumSize" here
+ // but that would break the clever pipeline messup detection
+ // so instead we use the size of the biggest item in the queue
+ Server->MaximumSize = FindMaximumObjectSizeInQueue();
+
if (Server->HaveContent)
Result = Server->RunData(File);
@@ -551,7 +565,38 @@ int ServerMethod::Loop()
// Send status to APT
if (Result == true)
{
- Res.TakeHashes(*Server->GetHashes());
+ Hashes * const resultHashes = Server->GetHashes();
+ HashStringList const hashList = resultHashes->GetHashStringList();
+ if (PipelineDepth != 0 && Queue->ExpectedHashes.usable() == true && Queue->ExpectedHashes != hashList)
+ {
+ // we did not get the expected hash… mhhh:
+ // could it be that server/proxy messed up pipelining?
+ FetchItem * BeforeI = Queue;
+ for (FetchItem *I = Queue->Next; I != 0 && I != QueueBack; I = I->Next)
+ {
+ if (I->ExpectedHashes.usable() == true && I->ExpectedHashes == hashList)
+ {
+ // yes, he did! Disable pipelining and rewrite queue
+ if (Server->Pipeline == true)
+ {
+ // FIXME: fake a warning message as we have no proper way of communicating here
+ std::string out;
+ strprintf(out, _("Automatically disabled %s due to incorrect response from server/proxy. (man 5 apt.conf)"), "Acquire::http::PipelineDepth");
+ std::cerr << "W: " << out << std::endl;
+ Server->Pipeline = false;
+ // we keep the PipelineDepth value so that the rest of the queue can be fixed up as well
+ }
+ Rename(Res.Filename, I->DestFile);
+ Res.Filename = I->DestFile;
+ BeforeI->Next = I->Next;
+ I->Next = Queue;
+ Queue = I;
+ break;
+ }
+ BeforeI = I;
+ }
+ }
+ Res.TakeHashes(*resultHashes);
URIDone(Res);
}
else
@@ -571,7 +616,10 @@ int ServerMethod::Loop()
QueueBack = Queue;
}
else
+ {
+ Server->Close();
Fail(true);
+ }
}
break;
}
@@ -666,3 +714,13 @@ int ServerMethod::Loop()
return 0;
}
/*}}}*/
+ /*{{{*/
+unsigned long long
+ServerMethod::FindMaximumObjectSizeInQueue() const
+{
+ unsigned long long MaxSizeInQueue = 0;
+ for (FetchItem *I = Queue; I != 0 && I != QueueBack; I = I->Next)
+ MaxSizeInQueue = std::max(MaxSizeInQueue, I->MaximumSize);
+ return MaxSizeInQueue;
+}
+ /*}}}*/
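
The queue rewrite above handles a server or proxy that answers pipelined requests out of order: if the data just received matches the expected hashes of a later queued item, that item is moved to the front and its destination file adopted, instead of failing the transfer. A simplified, stand-alone illustration of the idea (Item and fixup_reordered_response are invented for this sketch; apt's real queue is the FetchItem list shown above):

    #include <algorithm>
    #include <list>
    #include <string>

    struct Item
    {
       std::string DestFile;
       std::string ExpectedHash;
    };

    // If the received data hashes to what a *later* item expected, the
    // responses were reordered; rotate that item to the front so it is
    // treated as the one just completed.
    static bool fixup_reordered_response(std::list<Item> &queue,
                                         std::string const &received_hash)
    {
       auto const match = std::find_if(queue.begin(), queue.end(),
             [&](Item const &i) { return i.ExpectedHash == received_hash; });
       if (match == queue.end() || match == queue.begin())
          return false;                           // no mix-up detected
       queue.splice(queue.begin(), queue, match); // move the match up front
       return true;
    }
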
diff --git a/methods/server.h b/methods/server.h
index 1b81e3549..b974ec89a 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -50,6 +50,8 @@ struct ServerState
URI Proxy;
unsigned long TimeOut;
+ unsigned long long MaximumSize;
+
protected:
ServerMethod *Owner;
@@ -74,7 +76,7 @@ struct ServerState
bool Comp(URI Other) const {return Other.Host == ServerName.Host && Other.Port == ServerName.Port;};
virtual void Reset() {Major = 0; Minor = 0; Result = 0; Code[0] = '\0'; Size = 0; JunkSize = 0;
StartPos = 0; Encoding = Closes; time(&Date); HaveContent = false;
- State = Header; Persistent = false; Pipeline = true;};
+ State = Header; Persistent = false; Pipeline = true; MaximumSize = 0;};
virtual bool WriteResponse(std::string const &Data) = 0;
/** \brief Transfer the data from the socket */
@@ -105,6 +107,10 @@ class ServerMethod : public pkgAcqMethod
unsigned long PipelineDepth;
bool AllowRedirect;
+ // Find the biggest item in the fetch queue for the checking of the maximum
+ // size
+ unsigned long long FindMaximumObjectSizeInQueue() const APT_PURE;
+
public:
bool Debug;
@@ -141,7 +147,7 @@ class ServerMethod : public pkgAcqMethod
virtual ServerState * CreateServerState(URI uri) = 0;
virtual void RotateDNS() = 0;
- ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(0), AllowRedirect(false), Debug(false) {};
+ ServerMethod(const char *Ver,unsigned long Flags = 0) : pkgAcqMethod(Ver, Flags), Server(NULL), File(NULL), PipelineDepth(10), AllowRedirect(false), Debug(false) {};
virtual ~ServerMethod() {};
};