Server IP : 103.119.228.120 / Your IP : 3.131.13.196 Web Server : Apache System : Linux v8.techscape8.com 3.10.0-1160.119.1.el7.tuxcare.els2.x86_64 #1 SMP Mon Jul 15 12:09:18 UTC 2024 x86_64 User : nobody ( 99) PHP Version : 5.6.40 Disable Function : shell_exec,symlink,system,exec,proc_get_status,proc_nice,proc_terminate,define_syslog_variables,syslog,openlog,closelog,escapeshellcmd,passthru,ocinum cols,ini_alter,leak,listen,chgrp,apache_note,apache_setenv,debugger_on,debugger_off,ftp_exec,dl,dll,myshellexec,proc_open,socket_bind,proc_close,escapeshellarg,parse_ini_filepopen,fpassthru,exec,passthru,escapeshellarg,escapeshellcmd,proc_close,proc_open,ini_alter,popen,show_source,proc_nice,proc_terminate,proc_get_status,proc_close,pfsockopen,leak,apache_child_terminate,posix_kill,posix_mkfifo,posix_setpgid,posix_setsid,posix_setuid,dl,symlink,shell_exec,system,dl,passthru,escapeshellarg,escapeshellcmd,myshellexec,c99_buff_prepare,c99_sess_put,fpassthru,getdisfunc,fx29exec,fx29exec2,is_windows,disp_freespace,fx29sh_getupdate,fx29_buff_prepare,fx29_sess_put,fx29shexit,fx29fsearch,fx29ftpbrutecheck,fx29sh_tools,fx29sh_about,milw0rm,imagez,sh_name,myshellexec,checkproxyhost,dosyayicek,c99_buff_prepare,c99_sess_put,c99getsource,c99sh_getupdate,c99fsearch,c99shexit,view_perms,posix_getpwuid,posix_getgrgid,posix_kill,parse_perms,parsesort,view_perms_color,set_encoder_input,ls_setcheckboxall,ls_reverse_all,rsg_read,rsg_glob,selfURL,dispsecinfo,unix2DosTime,addFile,system,get_users,view_size,DirFiles,DirFilesWide,DirPrintHTMLHeaders,GetFilesTotal,GetTitles,GetTimeTotal,GetMatchesCount,GetFileMatchesCount,GetResultFiles,fs_copy_dir,fs_copy_obj,fs_move_dir,fs_move_obj,fs_rmdir,SearchText,getmicrotime MySQL : ON | cURL : ON | WGET : ON | Perl : ON | Python : ON | Sudo : ON | Pkexec : ON Directory : /usr/lib/mysqlsh/lib/python3.9/site-packages/setuptools/tests/config/downloads/ |
Upload File : |
"""Helpers to fetch and cache test files on demand under ``DOWNLOAD_DIR``."""
import re
import shutil
import time
from pathlib import Path
from urllib.error import HTTPError
from urllib.request import urlopen

__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"]

# URL fragments stripped when deriving a local file name from a URL.
NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
DOWNLOAD_DIR = Path(__file__).parent


# ----------------------------------------------------------------------
# Please update ./preload.py accordingly when modifying this file
# ----------------------------------------------------------------------


def output_file(url: str, download_dir: Path = DOWNLOAD_DIR) -> Path:
    """Return the local path under *download_dir* used to cache *url*.

    The file name is derived from the URL by removing the fragments in
    ``NAME_REMOVE`` and replacing remaining unsafe characters with ``_``.
    """
    file_name = url.strip()
    for part in NAME_REMOVE:
        file_name = file_name.replace(part, '').strip().strip('/:').strip()
    # [^\w.-]+ is equivalent to the original [^\-_\.\w\d]+:
    # \w already covers digits and underscore.
    return Path(download_dir, re.sub(r"[^\w.-]+", "_", file_name))


def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5) -> Path:
    """Download *url* into *download_dir*, skipping files already cached.

    On ``HTTPError`` (e.g. transient rate limiting) the download is retried
    once after *wait* seconds; a second failure propagates to the caller.
    Returns the local path of the (possibly pre-existing) file.
    """
    path = output_file(url, download_dir)
    if path.exists():
        print(f"Skipping {url} (already exists: {path})")
    else:
        download_dir.mkdir(exist_ok=True, parents=True)
        print(f"Downloading {url} to {path}")
        try:
            download(url, path)
        except HTTPError:
            time.sleep(wait)  # wait a few seconds and try again.
            download(url, path)
    return path


def urls_from_file(list_file: Path) -> list[str]:
    """``list_file`` should be a text file where each line corresponds to a
    URL to download.

    Blank lines and ``#`` comment lines are ignored (the original returned
    blank lines as empty "URLs").
    """
    print(f"file: {list_file}")
    content = list_file.read_text(encoding="utf-8")
    return [
        url
        for url in (line.strip() for line in content.splitlines())
        if url and not url.startswith("#")
    ]


def download(url: str, dest: Path):
    """Stream the body of *url* into *dest*.

    Uses ``shutil.copyfileobj`` so large files are not buffered entirely in
    memory, and verifies the result with an explicit check instead of
    ``assert`` (asserts are stripped under ``python -O``).
    """
    with urlopen(url) as src, open(dest, "wb") as dst:
        shutil.copyfileobj(src, dst)
    if not Path(dest).exists():
        raise OSError(f"download failed: {dest} was not created")