Current File : /usr/local/ssl/lib/mysqlsh/lib/python3.9/site-packages/antlr4/atn/ATN.py
# Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
from antlr4.IntervalSet import IntervalSet
from antlr4.RuleContext import RuleContext
from antlr4.Token import Token
from antlr4.atn.ATNType import ATNType
from antlr4.atn.ATNState import ATNState, DecisionState


class ATN(object):
    __slots__ = (
        'grammarType', 'maxTokenType', 'states', 'decisionToState',
        'ruleToStartState', 'ruleToStopState', 'modeNameToStartState',
        'ruleToTokenType', 'lexerActions', 'modeToStartState'
    )

    INVALID_ALT_NUMBER = 0

    # Used for runtime deserialization of ATNs from strings.
    def __init__(self, grammarType:ATNType , maxTokenType:int ):
        # The type of the ATN.
        self.grammarType = grammarType
        # The maximum value for any symbol recognized by a transition in the ATN.
        self.maxTokenType = maxTokenType
        self.states = []
        # Each subrule/rule is a decision point and we must track them so we
        #  can go back later and build DFA predictors for them.  This includes
        #  all the rules, subrules, optional blocks, ()+, ()* etc...
        self.decisionToState = []
        # Maps from rule index to starting state number.
        self.ruleToStartState = []
        # Maps from rule index to stop state number.
        self.ruleToStopState = None
        self.modeNameToStartState = dict()
        # For lexer ATNs, this maps the rule index to the resulting token type.
        # For parser ATNs, this maps the rule index to the generated bypass token
        # type if the
        # {@link ATNDeserializationOptions#isGenerateRuleBypassTransitions}
        # deserialization option was specified; otherwise, this is {@code null}.
        self.ruleToTokenType = None
        # For lexer ATNs, this is an array of {@link LexerAction} objects which may
        # be referenced by action transitions in the ATN.
        self.lexerActions = None
        self.modeToStartState = []
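
    # Note (editorial, not in the upstream file): ATN instances are normally
    # produced by ATNDeserializer.deserialize() from the serialized ATN string
    # embedded in a generated lexer or parser; user code rarely constructs an
    # ATN directly.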

    # Compute the set of valid tokens that can occur starting in state {@code s}.
    #  If {@code ctx} is null, the set of tokens will not include what can follow
    #  the rule surrounding {@code s}. In other words, the set will be
    #  restricted to tokens reachable staying within {@code s}'s rule.
    def nextTokensInContext(self, s:ATNState, ctx:RuleContext):
        from antlr4.LL1Analyzer import LL1Analyzer
        anal = LL1Analyzer(self)
        return anal.LOOK(s, ctx=ctx)

    # Compute the set of valid tokens that can occur starting in {@code s} and
    # staying in same rule. {@link Token#EPSILON} is in set if we reach end of
    # rule.
    def nextTokensNoContext(self, s:ATNState):
        if s.nextTokenWithinRule is not None:
            return s.nextTokenWithinRule
        s.nextTokenWithinRule = self.nextTokensInContext(s, None)
        s.nextTokenWithinRule.readonly = True
        return s.nextTokenWithinRule

    def nextTokens(self, s:ATNState, ctx:RuleContext = None):
        if ctx is None:
            return self.nextTokensNoContext(s)
        else:
            return self.nextTokensInContext(s, ctx)
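
    # Illustrative usage (editorial sketch, not part of the original runtime):
    # given a generated parser `p`, the set of token types that can follow the
    # parser's current ATN state could be queried roughly as:
    #
    #     following = p.atn.nextTokens(p.atn.states[p.state])
    #
    # The no-context form caches its result on the state, so repeated queries
    # for the same state are cheap.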

    def addState(self, state:ATNState):
        if state is not None:
            state.atn = self
            state.stateNumber = len(self.states)
        self.states.append(state)

    def removeState(self, state:ATNState):
        self.states[state.stateNumber] = None # just free mem, don't shift states in list

    def defineDecisionState(self, s:DecisionState):
        self.decisionToState.append(s)
        s.decision = len(self.decisionToState)-1
        return s.decision

    def getDecisionState(self, decision:int):
        if len(self.decisionToState)==0:
            return None
        else:
            return self.decisionToState[decision]

    # Computes the set of input symbols which could follow ATN state number
    # {@code stateNumber} in the specified full {@code context}. This method
    # considers the complete parser context, but does not evaluate semantic
    # predicates (i.e. all predicates encountered during the calculation are
    # assumed true). If a path in the ATN exists from the starting state to the
    # {@link RuleStopState} of the outermost context without matching any
    # symbols, {@link Token#EOF} is added to the returned set.
    #
    # <p>If {@code context} is {@code null}, it is treated as
    # {@link ParserRuleContext#EMPTY}.</p>
    #
    # @param stateNumber the ATN state number
    # @param context the full parse context
    # @return The set of potentially valid input symbols which could follow the
    # specified state in the specified context.
    # @throws IllegalArgumentException if the ATN does not contain a state with
    # number {@code stateNumber}
    def getExpectedTokens(self, stateNumber:int, ctx:RuleContext ):
        if stateNumber < 0 or stateNumber >= len(self.states):
            raise Exception("Invalid state number.")
        s = self.states[stateNumber]
        following = self.nextTokens(s)
        if Token.EPSILON not in following:
            return following
        expected = IntervalSet()
        expected.addSet(following)
        expected.removeOne(Token.EPSILON)
        while ctx is not None and ctx.invokingState >= 0 and Token.EPSILON in following:
            invokingState = self.states[ctx.invokingState]
            rt = invokingState.transitions[0]
            following = self.nextTokens(rt.followState)
            expected.addSet(following)
            expected.removeOne(Token.EPSILON)
            ctx = ctx.parentCtx
        if Token.EPSILON in following:
            expected.addOne(Token.EOF)
        return expected
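

# ---------------------------------------------------------------------------
# Editorial sketch (not part of the original ANTLR runtime file): how the
# expected-token computation above is typically reached from error-handling
# code.  `recognizer` is assumed to be a generated Parser instance; the
# runtime's Parser.getExpectedTokens() performs essentially this call.
def _example_expected_tokens(recognizer):
    # Ask the ATN which token types would be valid at the parser's current
    # state, taking the full rule-invocation context into account.
    atn = recognizer._interp.atn
    return atn.getExpectedTokens(recognizer.state, recognizer._ctx)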

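
# ---------------------------------------------------------------------------
# Editorial sketch (hypothetical, for illustration only): assembling a tiny ATN
# by hand to show how addState() numbers states and defineDecisionState()
# assigns decision indices.
def _example_build_minimal_atn():
    from antlr4.atn.ATNState import BasicState, BasicBlockStartState

    atn = ATN(ATNType.PARSER, maxTokenType=10)
    start = BasicBlockStartState()
    atn.addState(start)                        # start.stateNumber == 0
    other = BasicState()
    atn.addState(other)                        # other.stateNumber == 1
    decision = atn.defineDecisionState(start)  # first decision index is 0
    return atn, decision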