import math

import tensorflow as tf
from packaging.version import parse


try:
    import tf_keras as keras
except (ModuleNotFoundError, ImportError):
    import keras

    if parse(keras.__version__).major > 2:
        raise ValueError(
            "Your currently installed version of Keras is Keras 3, but this is not yet supported in "
            "Transformers. Please install the backwards-compatible tf-keras package with "
            "`pip install tf-keras`."
        )


def _gelu(x):
    """
    Gaussian Error Linear Unit. Original Implementation of the gelu activation function in Google Bert repo when
    initially created. For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) Also see
    https://arxiv.org/abs/1606.08415
          ?      ?       @)tfconvert_to_tensormatherfcastsqrtdtype)xcdf r   o/var/www/html/construction_image-detection-poc/venv/lib/python3.10/site-packages/transformers/activations_tf.py_gelu"   s   
(r   c                 C   s`   t | } t tj| j}t d| j}ddt t d| | |t | d     }| | S )z
    Gaussian Error Linear Unit. This is a smoother version of the GELU. Original paper: https://arxiv.org/abs/1606.08415

    Args:
        x: float Tensor to perform activation

    Returns:
        `x` with the GELU activation applied.
    """
    x = tf.convert_to_tensor(x)
    pi = tf.cast(math.pi, x.dtype)
    coeff = tf.cast(0.044715, x.dtype)
    x = 0.5 * x * (1.0 + tf.tanh(tf.sqrt(2.0 / pi) * (x + coeff * tf.pow(x, 3))))

    return x


def mish(x):
    x = tf.convert_to_tensor(x)

    return x * tf.tanh(tf.math.softplus(x))


def gelu_fast(x):
    x = tf.convert_to_tensor(x)
    coeff1 = tf.cast(0.044715, x.dtype)
    coeff2 = tf.cast(0.7978845608, x.dtype)

    return 0.5 * x * (1.0 + tf.tanh(x * coeff2 * (1.0 + coeff1 * x * x)))


def quick_gelu(x):
    x = tf.convert_to_tensor(x)
    coeff = tf.cast(1.702, x.dtype)

    return x * tf.math.sigmoid(coeff * x)


def gelu_10(x):
    """
    Clip the range of possible GeLU outputs between [-10, 10]. This is especially useful for quantization purposes, as
    it allows mapping 2 negative values in the GeLU spectrum. For more information on this trick, please refer to
    https://arxiv.org/abs/2004.09602

    Gaussian Error Linear Unit. Original Implementation of the gelu activation function in Google Bert repo when
    initially created. For information: OpenAI GPT's gelu is slightly different (and gives slightly different results):
    0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) Also see
    https://arxiv.org/abs/1606.08415
    """
    return tf.clip_by_value(_gelu(x), -10, 10)


def glu(x, axis=-1):
    """
    Gated Linear Unit. Implementation as defined in the original paper (see https://arxiv.org/abs/1612.08083), where
    the input `x` is split in two halves across a dimension (`axis`), A and B, returning A * sigmoid(B).

    Args:
        `x`: float Tensor to perform activation
        `axis`: dimension across which `x` be split in half

    Returns:
        `x` with the GLU activation applied (with its size halved across the dimension `axis`).
    """
    a, b = tf.split(x, 2, axis=axis)

    return a * tf.math.sigmoid(b)


if parse(tf.version.VERSION) >= parse("2.4"):

    def approximate_gelu_wrap(x):
        return keras.activations.gelu(x, approximate=True)

    gelu = keras.activations.gelu
    gelu_new = approximate_gelu_wrap
else:
    gelu = _gelu
    gelu_new = _gelu_new


ACT2FN = {
    "gelu": gelu,
    "gelu_10": gelu_10,
    "gelu_fast": gelu_fast,
    "gelu_new": gelu_new,
    "glu": glu,
    "mish": mish,
    "quick_gelu": quick_gelu,
    "relu": keras.activations.relu,
    "sigmoid": keras.activations.sigmoid,
    "silu": keras.activations.swish,
    "swish": keras.activations.swish,
    "tanh": keras.activations.tanh,
}


def get_tf_activation(activation_string):
    if activation_string in ACT2FN:
        return ACT2FN[activation_string]
    else:
        raise KeyError(f"function {activation_string} not found in ACT2FN mapping {list(ACT2FN.keys())}")
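

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only): it shows how an activation is
# looked up by name via `get_tf_activation` and applied to a tensor, and how
# `glu` halves the last dimension. The tensor values and the "gelu_10" choice
# below are arbitrary examples, not fixtures from the library.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    x = tf.constant([[-3.0, -1.0, 0.0, 1.0, 3.0, 30.0]])

    act = get_tf_activation("gelu_10")
    print(act(x))  # GELU output, clipped to the range [-10, 10]

    # `glu` splits the last axis in half: shape (1, 6) -> (1, 3)
    print(glu(x).shape)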