"""
Integration with GGML / The file is copied and adapted from https://github.com/99991/pygguf
with extra methods being exposed
"""

from array import array

import numpy as np
from tokenizers import Tokenizer, decoders, normalizers, pre_tokenizers, processors
from tokenizers.models import BPE, Unigram

from .. import AddedToken
from ..convert_slow_tokenizer import GemmaConverter, GPT2Converter, LlamaConverter, Qwen2Converter, T5Converter
from ..utils import logging
from ..utils.logging import tqdm


logger = logging.get_logger(__name__)
GGUF_CONFIG_MAPPING = {
    "general": {
        "architecture": "model_type",
        "name": "_model_name_or_path",
    },
    "llama": {
        "context_length": "max_position_embeddings",
        "block_count": "num_hidden_layers",
        "feed_forward_length": "intermediate_size",
        "embedding_length": "hidden_size",
        "rope.dimension_count": "head_dim",
        "rope.freq_base": "rope_theta",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_rms_epsilon": "rms_norm_eps",
        "vocab_size": "vocab_size",
    },
    "mistral": {
        "context_length": "max_position_embeddings",
        "block_count": "num_hidden_layers",
        "feed_forward_length": "intermediate_size",
        "embedding_length": "hidden_size",
        "rope.dimension_count": None,
        "rope.freq_base": "rope_theta",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_rms_epsilon": "rms_norm_eps",
        "vocab_size": "vocab_size",
    },
    "qwen2": {
        "context_length": "max_position_embeddings",
        "block_count": "num_hidden_layers",
        "feed_forward_length": "intermediate_size",
        "embedding_length": "hidden_size",
        "rope.dimension_count": None,
        "rope.freq_base": "rope_theta",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_rms_epsilon": "rms_norm_eps",
        "vocab_size": "vocab_size",
    },
    "qwen2moe": {
        "context_length": "max_position_embeddings",
        "block_count": "num_hidden_layers",
        "feed_forward_length": "intermediate_size",
        "embedding_length": "hidden_size",
        "rope.dimension_count": None,
        "rope.freq_base": "rope_theta",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_rms_epsilon": "rms_norm_eps",
        "vocab_size": "vocab_size",
        "expert_count": "num_experts",
        "expert_used_count": "num_experts_per_tok",
    },
    "falcon": {
        "context_length": "max_position_embeddings",
        "block_count": "num_hidden_layers",
        "feed_forward_length": "intermediate_size",
        "embedding_length": "hidden_size",
        "rope.dimension_count": None,
        "rope.freq_base": "rope_theta",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_rms_epsilon": "rms_norm_eps",
        "vocab_size": "vocab_size",
    },
    "tokenizer": {
        "ggml.bos_token_id": "bos_token_id",
        "ggml.eos_token_id": "eos_token_id",
        "ggml.unknown_token_id": "unk_token_id",
        "ggml.padding_token_id": "pad_token_id",
    },
    "phi3": {
        "context_length": "max_position_embeddings",
        "block_count": "num_hidden_layers",
        "feed_forward_length": "intermediate_size",
        "embedding_length": "hidden_size",
        "rope.dimension_count": None,
        "rope.freq_base": "rope_theta",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_rms_epsilon": "rms_norm_eps",
        "vocab_size": "vocab_size",
    },
    "bloom": {
        "block_count": "n_layer",
        "embedding_length": "hidden_size",
        "attention.head_count": "n_head",
        "vocab_size": "vocab_size",
        "attention.layer_norm_epsilon": "layer_norm_epsilon",
    },
    "t5": {
        "context_length": "n_positions",
        "block_count": "num_layers",
        "feed_forward_length": "d_ff",
        "embedding_length": "d_model",
        "attention.key_length": "d_kv",
        "attention.head_count": "num_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_epsilon": "layer_norm_epsilon",
        "attention.relative_buckets_count": "relative_attention_num_buckets",
        "decoder_start_token_id": "decoder_start_token_id",
        "vocab_size": "vocab_size",
    },
    "stablelm": {
        "context_length": "max_position_embeddings",
        "block_count": "num_hidden_layers",
        "feed_forward_length": "intermediate_size",
        "embedding_length": "hidden_size",
        "rope.dimension_count": "head_dim",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_epsilon": "layer_norm_eps",
        "vocab_size": "vocab_size",
    },
    "gpt2": {
        "block_count": "n_layer",
        "context_length": "n_ctx",
        "embedding_length": "n_embd",
        "feed_forward_length": "feed_forward_length",
        "attention.head_count": "n_head",
        "attention.layer_norm_epsilon": "layer_norm_epsilon",
    },
    "starcoder2": {
        "block_count": "num_hidden_layers",
        "context_length": "max_position_embeddings",
        "embedding_length": "hidden_size",
        "feed_forward_length": "intermediate_size",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_epsilon": "norm_epsilon",
    },
    "mamba": {
        "vocab_size": "vocab_size",
        "context_length": "max_position_embeddings",
        "embedding_length": "hidden_size",
        "attention.layer_norm_rms_epsilon": "layer_norm_epsilon",
        "block_count": "num_hidden_layers",
        "ssm.conv_kernel": "conv_kernel",
        "ssm.state_size": "state_size",
        "ssm.time_step_rank": "time_step_rank",
        "ssm.inner_size": "intermediate_size",
    },
    "nemotron": {
        "context_length": "max_position_embeddings",
        "block_count": "num_hidden_layers",
        "feed_forward_length": "intermediate_size",
        "embedding_length": "hidden_size",
        "rope.dimension_count": None,
        "rope.freq_base": "rope_theta",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_rms_epsilon": "norm_eps",
        "vocab_size": "vocab_size",
    },
    "gemma2": {
        "context_length": "max_position_embeddings",
        "block_count": "num_hidden_layers",
        "feed_forward_length": "intermediate_size",
        "embedding_length": "hidden_size",
        "rope.dimension_count": None,
        "rope.freq_base": "rope_theta",
        "attention.head_count": "num_attention_heads",
        "attention.head_count_kv": "num_key_value_heads",
        "attention.layer_norm_rms_epsilon": "rms_norm_eps",
        "vocab_size": "vocab_size",
    },
}
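# The mapping above is pure key renaming: the GGUF loader reads
# "<architecture>.<key>" entries from a checkpoint's metadata and stores each
# value under the corresponding transformers config attribute. A minimal
# commented sketch of that renaming step (`gguf_metadata` is a hypothetical
# example; the real reader lives elsewhere in the library, in
# modeling_gguf_pytorch_utils.py):
#
#     gguf_metadata = {"llama.context_length": 4096, "llama.block_count": 32}
#     config = {}
#     for full_key, value in gguf_metadata.items():
#         arch, key = full_key.split(".", 1)
#         if key in GGUF_CONFIG_MAPPING.get(arch, {}):
#             config[GGUF_CONFIG_MAPPING[arch][key]] = value
#     # config == {"max_position_embeddings": 4096, "num_hidden_layers": 32}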
GGUF_TOKENIZER_MAPPING = {
    "tokenizer": {
        "ggml.model": "tokenizer_type",
        "ggml.tokens": "tokens",
        "ggml.scores": "scores",
        "ggml.token_type": "token_type",
        "ggml.merges": "merges",
        "ggml.bos_token_id": "bos_token_id",
        "ggml.eos_token_id": "eos_token_id",
        "ggml.unknown_token_id": "unk_token_id",
        "ggml.padding_token_id": "pad_token_id",
        "ggml.add_space_prefix": "add_prefix_space",
    },
    "tokenizer_config": {
        "chat_template": "chat_template",
        "ggml.model": "model_type",
        "ggml.bos_token_id": "bos_token_id",
        "ggml.eos_token_id": "eos_token_id",
        "ggml.unknown_token_id": "unk_token_id",
        "ggml.padding_token_id": "pad_token_id",
    },
}


def _gguf_parse_value(_value, data_type):
    if not isinstance(data_type, list):
        data_type = [data_type]
    if len(data_type) == 1:
        data_type = data_type[0]
        array_data_type = None
    else:
        if data_type[0] != 9:
            raise ValueError("Received multiple types, therefore expected the first type to indicate an array.")
        data_type, array_data_type = data_type

    if data_type in [0, 1, 2, 3, 4, 5, 10, 11]:
        _value = int(_value[0])
    elif data_type in [6, 12]:
        _value = float(_value[0])
    elif data_type in [7]:
        _value = bool(_value[0])
    elif data_type in [8]:
        _value = array("B", list(_value)).tobytes().decode()
    elif data_type in [9]:
        _value = _gguf_parse_value(_value, array_data_type)
    return _value
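# GGUF metadata values arrive tagged with a numeric GGUF type code: 0-5 and
# 10-11 are integer types, 6 and 12 are float types, 7 is bool, 8 is a UTF-8
# string stored as raw bytes, and 9 is an array whose element type follows.
# Hypothetical examples of what `_gguf_parse_value` returns (illustration
# only, not part of the module):
#
#     _gguf_parse_value([4096], 4)      # -> 4096     (uint32 scalar)
#     _gguf_parse_value(b"llama", 8)    # -> "llama"  (raw bytes decoded to str)
#     _gguf_parse_value([0.5], [9, 6])  # -> 0.5      (array tag 9, float32 elements)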
\}}t| || qt| dst| drt| ds"td| j}| j  fddt|D td g }t	  D ]=\}}g }t
dt|D ]}	|d |	 ||	d  }
}|
|v rl||v rl||
||f qMt|fd	d
dd}|| q@t|dd
 dd}dd |D }|| _ndd | jD | _t| dsdd t
t| jD | _t| dsg | _t| dsd | _t| dr| jd u r| j| _d S d S d S )Nr`   r]   r^   z\tokens and scores need to be passed for a LLaMa tokenizer without merges to be instantiated.c                    s   i | ]	\}}| | qS r~   r~   ).0it)r^   r~   r   
<dictcomp>      z2GGUFTokenizerSkeleton.__init__.<locals>.<dictcomp>z:Merges were not in checkpoint, building merges on the fly.re   c                    s    | d   | d  fS )Nr   re   r~   )x)vocabr~   r   <lambda>  s    z0GGUFTokenizerSkeleton.__init__.<locals>.<lambda>T)keyreversec                 S   s   | d S )Nr
   r~   )valr~   r~   r   r     s    c                 S   s   g | ]
}|d  |d fqS )r   re   r~   )r   r   r~   r~   r   
<listcomp>  s    z2GGUFTokenizerSkeleton.__init__.<locals>.<listcomp>c                 S   s   g | ]	}t |d qS ) )tuplesplit)r   merger~   r~   r   r     r   c                 S   s   g | ]}d qS Nr~   )r   _r~   r~   r   r   !  s    added_tokensr8   unknown_token_id)itemssetattrhasattrrt   r]   r^   	enumerateloggerwarningr   rangers   appendsortedextendr`   r   r8   r   )selfdict_kvr]   r`   r   piece_scorelocalindexpiece_lpiece_rr~   )r^   r   r   __init__  sD   




zGGUFTokenizerSkeleton.__init__N)__name__
__module____qualname__r   r~   r~   r~   r   r     s    r   c                   @   s<   e Zd Zdd Zdd Zdd Zdd Zd	d
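# A minimal, hypothetical input for the skeleton (illustration only). When the
# GGUF checkpoint carries no "merges", __init__ above rebuilds them: every
# token is split at each position, splits whose halves are themselves tokens
# are kept, and the candidates are ranked by score:
#
#     skeleton = GGUFTokenizerSkeleton({"tokens": ["a", "b", "ab"], "scores": [-1.0, -2.0, -0.5]})
#     skeleton.merges        # -> [("a", "b")] -- recovered from "ab"
#     skeleton.unk_token_id  # -> None (defaulted, no unknown_token_id given)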
 Zdd ZdS )GGUFLlamaConverterc                 C   s0   t || _| j| _i | _t| jdddk| _d S )Nr\   r   )r   protooriginal_tokenizeradditional_kwargsgetattris_llama_3_tokenizerr   tokenizer_dictr~   r~   r   r   /  s   
zGGUFLlamaConverter.__init__c                 C      t t|j|jS r   rr   zipr]   r^   r   r   r~   r~   r   r   5     zGGUFLlamaConverter.vocabc                 C      |j S r   r`   r   r~   r~   r   r`   8     zGGUFLlamaConverter.mergesc                 C   s  |  | j}| | j}dd t|D }|jd ur |j|j nd }t|dd d ur0|j|j nd }t|dd d ur@|j|j nd }tt	|||ddd}g }	t
| jds|d urc|	t|ddd	 |d urq|	t|ddd	 |d ur|	t|ddd	 n!tt| jjd
kd }
|
D ]}|	t| jj| ddd	 qt|	dkr||	 t| jjdkr|dd | jjD  || jd< || jd< || jd< | jrd | jd< d| jd< d| jd< d| j_|S )Nc                 S      i | ]	\}\}}||qS r~   r~   r   r   word_scorer~   r~   r   r   >  r   z0GGUFLlamaConverter.tokenizer.<locals>.<dictcomp>r6   r7   T)	unk_tokenfuse_unkbyte_fallbackr_   F
normalizedspecialrg   r   c                 S   s   g | ]	}t |d d dqS )Fr   r   )r   added_tokenr~   r~   r   r   e  r   z0GGUFLlamaConverter.tokenizer.<locals>.<listcomp>r   	eos_token	bos_tokenra   clean_up_tokenization_spaceslegacy)r   r   r`   r   r8   r]   r   r6   r   r   r   r   r   npwherer   r_   rs   add_special_tokensr   
add_tokensr   r   r   r   )r   r   vocab_scoresr`   	bpe_vocabr   r   r   r5   special_tokensspecial_tokens_idxidxr~   r~   r   r5   ;  sT     







zGGUFLlamaConverter.tokenizerc                 C   sX   t  t  t ddg}| jr|t jddddg7 }|r'|t jdddg7 }t |S )N   ▁r   FTra   trim_offsets	use_regexre   contentleft)r   ByteFallbackFuseReplacer   	ByteLevelStripSequencer   replacementra   sequencer~   r~   r   decoderu  s   

zGGUFLlamaConverter.decoderc                 C   s   |  | j}| | j}|d ur||_d}d}t| jdr!| jj}| ||}|d ur.||_| |||_|  }|r>||_| j	rPt
jdddd|_tg |_|S )Nr   Tra   Fr   )r5   r   
normalizerr   r   ra   pre_tokenizerr   post_processorr   r   r   r   r   )r   r5   r   r   ra   r   r   r~   r~   r   	converted  s*   zGGUFLlamaConverter.convertedN)	r   r   r   r   r   r`   r5   r   r   r~   r~   r~   r   r   .  s    :r   c                       *   e Zd Zdd Zdef fddZ  ZS )GGUFQwen2Converterc                 C      t || _i | _d S r   r   r   r   r   r~   r~   r   r        

zGGUFQwen2Converter.__init__returnc              	      s^   dd t | jjD }| jj}t ||}|tddddtddddtddddg |S )	Nc                 S      i | ]\}}||qS r~   r~   r   r   r   r~   r~   r   r         z0GGUFQwen2Converter.converted.<locals>.<dictcomp><|endoftext|>FTr   z<|im_start|>z
<|im_end|>)r   r   r]   r`   superr   r   r   r   r   r`   r5   	__class__r~   r   r     s   zGGUFQwen2Converter.convertedr   r   r   r   r   r   __classcell__r~   r~   r   r   r         r   c                   @   sB   e Zd Zdd Zdd Zdd Zdd Zd	d
 ZdefddZ	dS )GGUFPhi3Converterc                 C   s   t || _| j| _i | _d S r   r   r   r   r   r   r~   r~   r   r     s   

zGGUFPhi3Converter.__init__c                 C   r   r   r   r   r~   r~   r   r     r   zGGUFPhi3Converter.vocabc                 C   r   r   r   r   r~   r~   r   r`     r   zGGUFPhi3Converter.mergesc                 C   sn  |  | j}| | j}dd t|D }tt||}|tddddddtddddtd	dddd
tddddd
tddddd
tddddd
tddddd
tddddd
tddddd
tddddd
tddddd
tddddd
g |jd ur|j	|j nd | j
d< |jd ur|j	|j nd | j
d< |jd ur|j	|j nd | j
d< |jd ur|j	|j nd | j
d< |S )Nc                 S   r   r~   r~   r   r~   r~   r   r     r   z/GGUFPhi3Converter.tokenizer.<locals>.<dictcomp></s>TF)rstriplstripr   r   r   r   z<|assistant|>)r   r   r   z<|placeholder1|>z<|placeholder2|>z<|placeholder3|>z<|placeholder4|>z
<|system|>z<|end|>z<|placeholder5|>z<|placeholder6|>z<|user|>r   r   r   	pad_token)r   r   r`   r   r   r   r   r   r8   r]   r   r7   r6   r9   )r   r   r   r`   r   r5   r~   r~   r   r5     s8   zGGUFPhi3Converter.tokenizerc                 C   s<   t  t  t |dg}|r|t jdddg7 }t |S )Nr   re   r   )r   r   r   r   r   r   r   r~   r~   r   r     s   

zGGUFPhi3Converter.decoderr   c                 C   s:   |  | j}d}d}t| jdr| jj}| |||_|S )Nr   Tra   )r5   r   r   r   ra   r   )r   r5   r   ra   r~   r~   r   r     s   zGGUFPhi3Converter.convertedN)
r   r   r   r   r   r`   r5   r   r   r   r~   r~   r~   r   r     s    'r   c                       r   )GGUFGPTConverterc                 C   r   r   r   r   r~   r~   r   r     r   zGGUFGPTConverter.__init__r   c                    s0   dd t | jjD }| jj}t ||}|S )Nc                 S   r   r~   r~   r   r~   r~   r   r     r   z.GGUFGPTConverter.converted.<locals>.<dictcomp>)r   r   r]   r`   r   r   r   r   r~   r   r     s   zGGUFGPTConverter.convertedr   r~   r~   r   r   r     r   r   c                   @   :   e Zd Zdd Zdd Zdd Zdd Zd	efd
dZdS )GGUFT5Converterc                 C   s>   dg|d< t || _dd t| jjD | _| j| _i | _d S )N
class GGUFT5Converter(T5Converter):
    def __init__(self, tokenizer_dict):
        # set dummy data to avoid unnecessary merges calculation
        tokenizer_dict["merges"] = ["dummy text"]

        self.proto = GGUFTokenizerSkeleton(tokenizer_dict)
        self.token2id = {k: v for v, k in enumerate(self.proto.tokens)}
        self.original_tokenizer = self.proto
        self.additional_kwargs = {}

    def vocab(self, proto):
        return list(zip(proto.tokens, proto.scores))

    def normalizer(self, proto):
        if getattr(self.original_tokenizer, "legacy", True):
            sequence = []
            if getattr(self.original_tokenizer, "add_prefix_space", True):
                sequence += [normalizers.Prepend(prepend="▁")]
            sequence += [normalizers.Replace(pattern=" ", content="▁")]
            return normalizers.Sequence(sequence)
        return None  # non-legacy, no normalizer

    def post_processor(self):
        return processors.TemplateProcessing(
            single=["$A", "</s>"],
            pair=["$A", "</s>", "$B", "</s>"],
            special_tokens=[
                ("</s>", self.token2id["</s>"]),
            ],
        )

    def converted(self) -> Tokenizer:
        vocab_scores = self.vocab(self.proto)
        tokenizer = Tokenizer(
            Unigram(
                vocab_scores,
                unk_id=self.proto.unk_token_id,
                byte_fallback=False,
            )
        )

        # Tokenizer assemble
        normalizer = self.normalizer(self.proto)
        if normalizer is not None:
            tokenizer.normalizer = normalizer

        replacement = "▁"
        add_prefix_space = True
        if hasattr(self.original_tokenizer, "add_prefix_space"):
            add_prefix_space = self.original_tokenizer.add_prefix_space

        pre_tokenizer = self.pre_tokenizer(replacement, add_prefix_space)
        if pre_tokenizer is not None:
            tokenizer.pre_tokenizer = pre_tokenizer

        tokenizer.decoder = self.decoder(replacement, add_prefix_space)
        post_processor = self.post_processor()
        if post_processor:
            tokenizer.post_processor = post_processor

        return tokenizer


class GGUFGemmaConverter(GemmaConverter):
    def __init__(self, tokenizer_dict):
        # set dummy data to avoid unnecessary merges calculation
        tokenizer_dict["merges"] = ["dummy text"]

        self.proto = GGUFTokenizerSkeleton(tokenizer_dict)
        self.original_tokenizer = self.proto
        self.additional_kwargs = {}

    def vocab(self, proto):
        original_vocab = list(zip(proto.tokens, proto.scores))
        updated_vocab = []

        for token, score in original_vocab:
            if token == "<0x09>":
                updated_vocab.append(("\t", score))
            elif " " in token and len(token.strip()) == 0:
                underscores = "▁" * len(token)
                updated_vocab.append((underscores, score))
            else:
                updated_vocab.append((token, score))

        return updated_vocab

    def normalizer(self, proto):
        return normalizers.Replace(" ", "▁")

    def decoder(self, replacement, add_prefix_space):
        sequence = [
            decoders.Replace("▁", " "),
            decoders.ByteFallback(),
            decoders.Fuse(),
        ]

        if add_prefix_space:
            sequence += [decoders.Strip(content=" ", left=1)]
        return decoders.Sequence(sequence)

    def converted(self) -> Tokenizer:
        vocab_scores = self.vocab(self.proto)
        tokenizer = Tokenizer(
            Unigram(
                vocab_scores,
                unk_id=self.proto.unk_token_id,
                byte_fallback=self.handle_byte_fallback,
            )
        )

        normalizer = self.normalizer(self.proto)
        if normalizer is not None:
            tokenizer.normalizer = normalizer

        replacement = "▁"
        add_prefix_space = True
        if hasattr(self.original_tokenizer, "add_prefix_space"):
            add_prefix_space = self.original_tokenizer.add_prefix_space

        tokenizer.decoder = self.decoder(replacement, add_prefix_space)
        pre_tokenizer = self.pre_tokenizer(replacement, add_prefix_space)
        if pre_tokenizer is not None:
            tokenizer.pre_tokenizer = pre_tokenizer

        return tokenizer


GGUF_TO_FAST_CONVERTERS = {
    "llama": GGUFLlamaConverter,
    "qwen2": GGUFQwen2Converter,
    "qwen2_moe": GGUFQwen2Converter,
    "phi3": GGUFPhi3Converter,
    "bloom": GGUFGPTConverter,
    "falcon": GGUFGPTConverter,
    "stablelm": GGUFGPTConverter,
    "gpt2": GGUFGPTConverter,
    "starcoder2": GGUFGPTConverter,
    "t5": GGUFT5Converter,
    "mamba": GGUFGPTConverter,
    "nemotron": GGUFGPTConverter,
    "gemma2": GGUFGemmaConverter,
}


def convert_gguf_tokenizer(architecture, tokenizer_dict) -> Tokenizer:
    """
    Utilities to convert a slow tokenizer instance into a fast tokenizer instance.

    Args:
        architecture (`str`): The model architecture derived from the gguf file.
        tokenizer_dict (`dict`):
            The tokenizer parameters extracted from the gguf file, used to build the backend tokenizer for
            [`~tokenization_utils_base.PreTrainedTokenizerFast`].

    Return:
        An instance of [`~tokenizers.Tokenizer`] to be used as the backend tokenizer of a
        [`~tokenization_utils_base.PreTrainedTokenizerFast`]
    """
    tokenizer_class_name = architecture
    converter = GGUF_TO_FAST_CONVERTERS[tokenizer_class_name](tokenizer_dict)
    fast_tokenizer = converter.converted()
    return fast_tokenizer, converter.additional_kwargs