
"""Contains the Dropout layer."""

import numbers

import tensorflow.compat.v2 as tf

from tf_keras.src import backend
from tf_keras.src.engine import base_layer
from tf_keras.src.utils import control_flow_util

from tensorflow.python.util.tf_export import keras_export


@keras_export("keras.layers.Dropout")
class Dropout(base_layer.BaseRandomLayer):
    """Applies Dropout to the input.

    The Dropout layer randomly sets input units to 0 with a frequency of `rate`
    at each step during training time, which helps prevent overfitting.
    Inputs not set to 0 are scaled up by `1 / (1 - rate)` such that the
    expected sum over all inputs is unchanged. For example, with `rate=0.2`
    the kept units are scaled by `1 / (1 - 0.2) = 1.25`, as in the example
    below.

    Note that the Dropout layer only applies dropout when `training` is set
    to True, so no values are dropped during inference. When using `model.fit`,
    `training` will be appropriately set to True automatically, and in other
    contexts, you can set the kwarg explicitly to True when calling the layer.

    (This is in contrast to setting `trainable=False` for a Dropout layer.
    `trainable` does not affect the layer's behavior, as Dropout does
    not have any variables/weights that can be frozen during training.)

    >>> tf.random.set_seed(0)
    >>> layer = tf.keras.layers.Dropout(.2, input_shape=(2,))
    >>> data = np.arange(10).reshape(5, 2).astype(np.float32)
    >>> print(data)
    [[0. 1.]
     [2. 3.]
     [4. 5.]
     [6. 7.]
     [8. 9.]]
    >>> outputs = layer(data, training=True)
    >>> print(outputs)
    tf.Tensor(
    [[ 0.    1.25]
     [ 2.5   3.75]
     [ 5.    6.25]
     [ 7.5   8.75]
     [10.    0.  ]], shape=(5, 2), dtype=float32)
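
    The layer is only active when called with `training=True`; as a quick
    illustrative check (reusing `layer` and `data` from the example above),
    calling it with `training=False` simply returns the inputs unchanged:

    >>> unchanged = layer(data, training=False)
    >>> np.array_equal(unchanged.numpy(), data)
    True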

    Args:
      rate: Float between 0 and 1. Fraction of the input units to drop.
      noise_shape: 1D integer tensor representing the shape of the
        binary dropout mask that will be multiplied with the input.
        For instance, if your inputs have shape
        `(batch_size, timesteps, features)` and
        you want the dropout mask to be the same for all timesteps,
        you can use `noise_shape=(batch_size, 1, features)` (see the sketch
        below).
      seed: A Python integer to use as random seed.
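
    A minimal sketch of `noise_shape` with inputs of shape
    `(batch_size, timesteps, features)`: a `None` entry follows the runtime
    input shape, while a `1` broadcasts the mask along that axis, so every
    timestep shares the same dropped features (the dropped values depend on
    the seed, so only the output shape is checked here).

    >>> layer = tf.keras.layers.Dropout(.5, noise_shape=(None, 1, 4), seed=0)
    >>> data = np.ones((2, 3, 4), dtype=np.float32)
    >>> outputs = layer(data, training=True)
    >>> outputs.shape
    TensorShape([2, 3, 4])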

    Call arguments:
      inputs: Input tensor (of any rank).
      training: Python boolean indicating whether the layer should behave in
        training mode (adding dropout) or in inference mode (doing nothing).
    """

    def __init__(self, rate, noise_shape=None, seed=None, **kwargs):
        super().__init__(seed=seed, **kwargs)
        if isinstance(rate, (int, float)) and not 0 <= rate <= 1:
            raise ValueError(
                f"Invalid value {rate} received for "
                "`rate`, expected a value between 0 and 1."
            )
        self.rate = rate
        self.noise_shape = noise_shape
        self.seed = seed
        self.supports_masking = True

    def _get_noise_shape(self, inputs):
        # Subclasses of `Dropout` implement `_get_noise_shape(self, inputs)`,
        # which will override `self.noise_shape`, and allows for custom noise
        # shapes with dynamically sized inputs.
        if self.noise_shape is None:
            return None

        concrete_inputs_shape = tf.shape(inputs)
        noise_shape = []
        for i, value in enumerate(self.noise_shape):
            noise_shape.append(
                concrete_inputs_shape[i] if value is None else value
            )
        return tf.convert_to_tensor(noise_shape)

    def call(self, inputs, training=None):
        # A rate of exactly 0 makes the layer a no-op.
        if isinstance(self.rate, numbers.Real) and self.rate == 0:
            return tf.identity(inputs)

        if training is None:
            training = backend.learning_phase()

        def dropped_inputs():
            return self._random_generator.dropout(
                inputs, self.rate, noise_shape=self._get_noise_shape(inputs)
            )

        # Apply dropout only in training mode; otherwise pass inputs through.
        output = control_flow_util.smart_cond(
            training, dropped_inputs, lambda: tf.identity(inputs)
        )
        return output

    def compute_output_shape(self, input_shape):
        return input_shape

    def get_config(self):
        config = {
            "rate": self.rate,
            "noise_shape": self.noise_shape,
            "seed": self.seed,
        }
        base_config = super().get_config()
        return dict(list(base_config.items()) + list(config.items()))