# coding: utf-8
# Copyright (c) 2016, 2024, Oracle and/or its affiliates.  All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.

# NOTE: This class is auto generated by OracleSDKGenerator. DO NOT EDIT. API Version: 20231130

from .training_config import TrainingConfig
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel  # noqa: F401
from oci.decorators import init_model_state_from_kwargs


@init_model_state_from_kwargs
class LoraTrainingConfig(TrainingConfig):
    """
    The LoRA training method hyperparameters.
    """

    def __init__(self, **kwargs):
        """
        Initializes a new LoraTrainingConfig object with values from keyword arguments. The default value of the :py:attr:`~oci.generative_ai.models.LoraTrainingConfig.training_config_type` attribute
        of this class is ``LORA_TRAINING_CONFIG`` and it should not be changed.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):

        :param training_config_type:
            The value to assign to the training_config_type property of this LoraTrainingConfig.
            Allowed values for this property are: "TFEW_TRAINING_CONFIG", "VANILLA_TRAINING_CONFIG", "LORA_TRAINING_CONFIG"
        :type training_config_type: str

        :param total_training_epochs:
            The value to assign to the total_training_epochs property of this LoraTrainingConfig.
        :type total_training_epochs: int

        :param learning_rate:
            The value to assign to the learning_rate property of this LoraTrainingConfig.
        :type learning_rate: float

        :param training_batch_size:
            The value to assign to the training_batch_size property of this LoraTrainingConfig.
        :type training_batch_size: int

        :param early_stopping_patience:
            The value to assign to the early_stopping_patience property of this LoraTrainingConfig.
        :type early_stopping_patience: int

        :param early_stopping_threshold:
            The value to assign to the early_stopping_threshold property of this LoraTrainingConfig.
        :type early_stopping_threshold: float

        :param log_model_metrics_interval_in_steps:
            The value to assign to the log_model_metrics_interval_in_steps property of this LoraTrainingConfig.
        :type log_model_metrics_interval_in_steps: int

        :param lora_r:
            The value to assign to the lora_r property of this LoraTrainingConfig.
        :type lora_r: int

        :param lora_alpha:
            The value to assign to the lora_alpha property of this LoraTrainingConfig.
        :type lora_alpha: int

        :param lora_dropout:
            The value to assign to the lora_dropout property of this LoraTrainingConfig.
        :type lora_dropout: float

        """
        self.swagger_types = {
            'training_config_type': 'str',
            'total_training_epochs': 'int',
            'learning_rate': 'float',
            'training_batch_size': 'int',
            'early_stopping_patience': 'int',
            'early_stopping_threshold': 'float',
            'log_model_metrics_interval_in_steps': 'int',
            'lora_r': 'int',
            'lora_alpha': 'int',
            'lora_dropout': 'float'
        }

        self.attribute_map = {
            'training_config_type': 'trainingConfigType',
            'total_training_epochs': 'totalTrainingEpochs',
            'learning_rate': 'learningRate',
            'training_batch_size': 'trainingBatchSize',
            'early_stopping_patience': 'earlyStoppingPatience',
            'early_stopping_threshold': 'earlyStoppingThreshold',
            'log_model_metrics_interval_in_steps': 'logModelMetricsIntervalInSteps',
            'lora_r': 'loraR',
            'lora_alpha': 'loraAlpha',
            'lora_dropout': 'loraDropout'
        }

        self._training_config_type = None
        self._total_training_epochs = None
        self._learning_rate = None
        self._training_batch_size = None
        self._early_stopping_patience = None
        self._early_stopping_threshold = None
        self._log_model_metrics_interval_in_steps = None
        self._lora_r = None
        self._lora_alpha = None
        self._lora_dropout = None
        self._training_config_type = 'LORA_TRAINING_CONFIG'

    @property
    def lora_r(self):
        """
        Gets the lora_r of this LoraTrainingConfig.
        This parameter represents the LoRA rank of the update matrices.


        :return: The lora_r of this LoraTrainingConfig.
        :rtype: int
        """
        return self._lora_r

    @lora_r.setter
    def lora_r(self, lora_r):
        """
        Sets the lora_r of this LoraTrainingConfig.
        This parameter represents the LoRA rank of the update matrices.


        :param lora_r: The lora_r of this LoraTrainingConfig.
        :type: int
        """
        self._lora_r = lora_r

    @property
    def lora_alpha(self):
        """
        Gets the lora_alpha of this LoraTrainingConfig.
        This parameter represents the scaling factor for the weight matrices in LoRA.


        :return: The lora_alpha of this LoraTrainingConfig.
        :rtype: int
        """
        return self._lora_alpha

    @lora_alpha.setter
    def lora_alpha(self, lora_alpha):
        """
        Sets the lora_alpha of this LoraTrainingConfig.
        This parameter represents the scaling factor for the weight matrices in LoRA.


        :param lora_alpha: The lora_alpha of this LoraTrainingConfig.
        :type: int
        """
        self._lora_alpha = lora_alpha
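
    # Editorial note: in the standard LoRA formulation (Hu et al., 2021), the
    # low-rank update is scaled by lora_alpha / lora_r, so lora_alpha is
    # typically chosen relative to the rank lora_r; the SDK itself passes
    # both values through unchanged.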

    @property
    def lora_dropout(self):
        """
        Gets the lora_dropout of this LoraTrainingConfig.
        This parameter indicates the dropout probability for LoRA layers.


        :return: The lora_dropout of this LoraTrainingConfig.
        :rtype: float
        """
        return self._lora_dropout

    @lora_dropout.setter
    def lora_dropout(self, lora_dropout):
        """
        Sets the lora_dropout of this LoraTrainingConfig.
        This parameter indicates the dropout probability for LoRA layers.


        :param lora_dropout: The lora_dropout of this LoraTrainingConfig.
        :type: float
        """
        self._lora_dropout = lora_dropout
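
    # Editorial note: lora_dropout is the dropout probability applied within
    # the LoRA layers during fine-tuning; a value of 0.0 disables dropout.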

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        if other is None:
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
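
# ---------------------------------------------------------------------------
# Example usage (illustrative sketch, not part of the generated SDK file).
# The constructor accepts the keyword arguments documented above; the
# hyperparameter values below are hypothetical choices, not SDK defaults.
#
#     from oci.generative_ai.models import LoraTrainingConfig
#
#     config = LoraTrainingConfig(
#         total_training_epochs=3,
#         learning_rate=0.0002,
#         training_batch_size=8,
#         lora_r=8,
#         lora_alpha=16,
#         lora_dropout=0.1,
#     )
#     print(config)  # __repr__ renders the model via formatted_flat_dict
# ---------------------------------------------------------------------------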
