#
# Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#/

#
# This class extends {@link BufferedTokenStream} with functionality to filter
# token streams to tokens on a particular channel (tokens where
# {@link Token#getChannel} returns a particular value).
#
# <p>
# This token stream provides access to all tokens by index or when calling
# methods like {@link #getText}. The channel filtering is only used for code
# accessing tokens via the lookahead methods {@link #LA}, {@link #LT}, and
# {@link #LB}.</p>
#
# <p>
# By default, tokens are placed on the default channel
# ({@link Token#DEFAULT_CHANNEL}), but may be reassigned by using the
# {@code ->channel(HIDDEN)} lexer command, or by using an embedded action to
# call {@link Lexer#setChannel}.
# </p>
#
# <p>
# Note: lexer rules which use the {@code ->skip} lexer command or call
# {@link Lexer#skip} do not produce tokens at all, so input text matched by
# such a rule will not be available as part of the token stream, regardless of
# channel.</p>
#/

from antlr4.BufferedTokenStream import BufferedTokenStream
from antlr4.Lexer import Lexer
from antlr4.Token import Token


class CommonTokenStream(BufferedTokenStream):
    __slots__ = 'channel'

    def __init__(self, lexer:Lexer, channel:int=Token.DEFAULT_CHANNEL):
        super().__init__(lexer)
        self.channel = channel

    def adjustSeekIndex(self, i:int):
        return self.nextTokenOnChannel(i, self.channel)

    def LB(self, k:int):
        if k == 0 or (self.index - k) < 0:
            return None
        i = self.index
        n = 1
        # find k good tokens looking backwards
        while n <= k:
            # skip off-channel tokens
            i = self.previousTokenOnChannel(i - 1, self.channel)
            n += 1
        if i < 0:
            return None
        return self.tokens[i]

    def LT(self, k:int):
        self.lazyInit()
        if k == 0:
            return None
        if k < 0:
            return self.LB(-k)
        i = self.index
        n = 1 # we know tokens[pos] is a good one
        # find k good tokens
        while n < k:
            # skip off-channel tokens, but make sure to not look past EOF
            if self.sync(i + 1):
                i = self.nextTokenOnChannel(i + 1, self.channel)
            n += 1
        return self.tokens[i]

    # Count EOF just once.#/
    def getNumberOfOnChannelTokens(self):
        n = 0
        self.fill()
        for i in range(0, len(self.tokens)):
            t = self.tokens[i]
            if t.channel == self.channel:
                n += 1
            if t.type == Token.EOF:
                break
        return n
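

# ---------------------------------------------------------------------------
# Usage sketch (not part of the original runtime source): a minimal, hedged
# example of channel filtering without a generated lexer. It assumes the
# standard antlr4 Python3 runtime's ListTokenSource and CommonToken behave as
# documented: ListTokenSource replays a prepared token list and synthesizes an
# EOF token on the default channel. Channel 1 stands in for a HIDDEN-style
# off-channel token; LT() should skip it, while index-based access still sees
# every token.
if __name__ == '__main__':
    from antlr4.ListTokenSource import ListTokenSource
    from antlr4.Token import CommonToken

    def make_token(ttype, text, channel=Token.DEFAULT_CHANNEL):
        # Hypothetical helper for this sketch only: build a detached token
        # with an explicit text and channel.
        tok = CommonToken(type=ttype, channel=channel)
        tok.text = text
        return tok

    toks = [
        make_token(1, 'a'),
        make_token(2, ' ', channel=1),   # off-channel token (e.g. whitespace)
        make_token(1, 'b'),
    ]
    stream = CommonTokenStream(ListTokenSource(toks))
    print(stream.LT(1).text)    # expected: 'a'
    print(stream.LT(2).text)    # expected: 'b' (the channel-1 token is skipped)
    # EOF is synthesized on the default channel, so it should be counted here,
    # giving 3 on-channel tokens: 'a', 'b', and EOF.
    print(stream.getNumberOfOnChannelTokens())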