# A high-level interface to the pycurl extension
#
# ** mfx NOTE: the CGI class uses "black magic" using COOKIEFILE in
#    combination with a non-existent file name. See the libcurl docs
#    for more info.
#
# By Eric S. Raymond, April 2003.

import os, sys, urllib, exceptions, mimetools, pycurl
try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

try:
    import signal
    from signal import SIGPIPE, SIG_IGN
    signal.signal(signal.SIGPIPE, signal.SIG_IGN)
except ImportError:
    pass

class Curl:
    "High-level interface to pycurl functions."
    def __init__(self, base_url="", fakeheaders=[]):
        self.handle = pycurl.Curl()
        # These members might be set.
        self.set_url(base_url)
        self.verbosity = 0
        self.fakeheaders = fakeheaders
        # Nothing past here should be modified by the caller.
        self.payload = ""
        self.hdr = ""
        # Verify that we've got the right site; harmless on a non-SSL connect.
        self.set_option(pycurl.SSL_VERIFYHOST, 2)
        # Follow redirects in case it wants to take us to a CGI...
        self.set_option(pycurl.FOLLOWLOCATION, 1)
        self.set_option(pycurl.MAXREDIRS, 5)
        self.set_option(pycurl.NOSIGNAL, 1)
        # Setting this option with even a nonexistent file makes libcurl
        # handle cookie capture and playback automatically.
        self.set_option(pycurl.COOKIEFILE, "/dev/null")
        # Set timeouts to avoid hanging too long.
        self.set_timeout(30)
        # Use password identification from .netrc automatically.
        self.set_option(pycurl.NETRC, 1)
        # Set up a callback to capture the payload...
        def payload_callback(x):
            self.payload += x
        self.set_option(pycurl.WRITEFUNCTION, payload_callback)
        # ...and another to capture the response headers.
        def header_callback(x):
            self.hdr += x
        self.set_option(pycurl.HEADERFUNCTION, header_callback)

    def set_timeout(self, timeout):
        "Set timeout for retrieving an object."
        self.set_option(pycurl.TIMEOUT, timeout)

    def set_url(self, url):
        "Set the base URL to be retrieved."
        self.base_url = url
        self.set_option(pycurl.URL, self.base_url)

    def set_option(self, *args):
        "Set an option on the retrieval."
        self.handle.setopt(*args)

    def set_verbosity(self, level):
        "Set verbosity to 1 to see transactions."
        self.set_option(pycurl.VERBOSE, level)

    def __request(self, relative_url=None):
        "Perform the pending request."
        if self.fakeheaders:
            self.set_option(pycurl.HTTPHEADER, self.fakeheaders)
        if relative_url:
            self.set_option(pycurl.URL, os.path.join(self.base_url, relative_url))
        self.payload = ""
        self.hdr = ""
        self.handle.perform()
        return self.payload

    def get(self, url="", params=None):
        "Ship a GET request for a specified URL, capture the response."
        if params:
            url += "?" + urllib.urlencode(params)
        self.set_option(pycurl.HTTPGET, 1)
        return self.__request(url)

    def post(self, cgi, params):
        "Ship a POST request to a specified CGI, capture the response."
        self.set_option(pycurl.POST, 1)
        self.set_option(pycurl.POSTFIELDS, urllib.urlencode(params))
        return self.__request(cgi)

    def body(self):
        "Return the body from the last response."
        return self.payload

    def header(self):
        "Return the header from the last response."
        return self.hdr

    def get_info(self, *args):
        "Get information about retrieval."
        return self.handle.getinfo(*args)

    def info(self):
        "Return a dictionary with all info on the last response."
        m = {}
        m['effective-url'] = self.handle.getinfo(pycurl.EFFECTIVE_URL)
        m['http-code'] = self.handle.getinfo(pycurl.HTTP_CODE)
        m['total-time'] = self.handle.getinfo(pycurl.TOTAL_TIME)
        m['namelookup-time'] = self.handle.getinfo(pycurl.NAMELOOKUP_TIME)
        m['connect-time'] = self.handle.getinfo(pycurl.CONNECT_TIME)
        m['pretransfer-time'] = self.handle.getinfo(pycurl.PRETRANSFER_TIME)
        m['redirect-time'] = self.handle.getinfo(pycurl.REDIRECT_TIME)
        m['redirect-count'] = self.handle.getinfo(pycurl.REDIRECT_COUNT)
        m['size-upload'] = self.handle.getinfo(pycurl.SIZE_UPLOAD)
        m['size-download'] = self.handle.getinfo(pycurl.SIZE_DOWNLOAD)
        m['speed-upload'] = self.handle.getinfo(pycurl.SPEED_UPLOAD)
        m['header-size'] = self.handle.getinfo(pycurl.HEADER_SIZE)
        m['request-size'] = self.handle.getinfo(pycurl.REQUEST_SIZE)
        m['content-length-download'] = self.handle.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD)
        m['content-length-upload'] = self.handle.getinfo(pycurl.CONTENT_LENGTH_UPLOAD)
        m['content-type'] = self.handle.getinfo(pycurl.CONTENT_TYPE)
        m['response-code'] = self.handle.getinfo(pycurl.RESPONSE_CODE)
        m['speed-download'] = self.handle.getinfo(pycurl.SPEED_DOWNLOAD)
        m['ssl-verifyresult'] = self.handle.getinfo(pycurl.SSL_VERIFYRESULT)
        m['filetime'] = self.handle.getinfo(pycurl.INFO_FILETIME)
        m['starttransfer-time'] = self.handle.getinfo(pycurl.STARTTRANSFER_TIME)
        m['http-connectcode'] = self.handle.getinfo(pycurl.HTTP_CONNECTCODE)
        m['httpauth-avail'] = self.handle.getinfo(pycurl.HTTPAUTH_AVAIL)
        m['proxyauth-avail'] = self.handle.getinfo(pycurl.PROXYAUTH_AVAIL)
        m['os-errno'] = self.handle.getinfo(pycurl.OS_ERRNO)
        m['num-connects'] = self.handle.getinfo(pycurl.NUM_CONNECTS)
        m['ssl-engines'] = self.handle.getinfo(pycurl.SSL_ENGINES)
        m['cookielist'] = self.handle.getinfo(pycurl.INFO_COOKIELIST)
        m['lastsocket'] = self.handle.getinfo(pycurl.LASTSOCKET)
        m['ftp-entry-path'] = self.handle.getinfo(pycurl.FTP_ENTRY_PATH)
        return m

    def answered(self, check):
        "Did a given check string occur in the last payload?"
        return self.payload.find(check) >= 0

    def close(self):
        "Close a session, freeing resources."
        if self.handle:
            self.handle.close()
        self.handle = None
        self.hdr = ""
        self.payload = ""

    def __del__(self):
        self.close()

if __name__ == "__main__":
    if len(sys.argv) < 2:
        url = 'http://curl.haxx.se'
    else:
        url = sys.argv[1]
    c = Curl()
    c.get(url)
    print c.body()
    print '='*74 + '\n'
    import pprint
    pprint.pprint(c.info())
    print c.get_info(pycurl.OS_ERRNO)
    print c.info()['os-errno']
    c.close()
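
# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of driving the Curl class above for a POST request and
# then inspecting the parsed response metadata via info() and answered().
# The host, CGI path, form fields, and the "Welcome" marker string below are
# hypothetical placeholders; only the method calls mirror this file.
#
#   c = Curl("http://www.example.com/")
#   c.set_verbosity(1)
#   body = c.post("cgi-bin/login.cgi", {"user": "alice", "session": "42"})
#   if c.answered("Welcome"):
#       print c.header()
#       print c.info()['content-type'], c.info()['http-code']
#   c.close()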