
    U th                        d dl mZ d dlZd dlmZmZmZ d dlmZ d dl	m
Z
 d dlmZ ddlmZ d	d
lmZmZ d	dlmZ ddlmZ ddlmZ d	dlmZ erd dlmZ ddZ G d de          Z G d d          ZdS )    )annotationsN)TYPE_CHECKINGOptionalcast)ArgumentParser)partial)
Completion   )
get_client   )	NOT_GIVEN
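# Example invocation, assuming the standard `openai` console entry point has
# registered this subparser (the model name below is illustrative only):
#
#   openai api completions.create -m gpt-3.5-turbo-instruct -p "Say this is a test" -M 16
#   openai api completions.create -m gpt-3.5-turbo-instruct -p "Say this is a test" --stream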
def register(subparser: _SubParsersAction[ArgumentParser]) -> None:
    sub = subparser.add_parser("completions.create")

    # Required arguments
    sub.add_argument("-m", "--model", help="The model to use", required=True)

    # Optional arguments
    sub.add_argument("-p", "--prompt", help="An optional prompt to complete from")
    sub.add_argument("--stream", help="Stream tokens as they're ready.", action="store_true")
    sub.add_argument("-M", "--max-tokens", help="The maximum number of tokens to generate", type=int)
    sub.add_argument(
        "-t",
        "--temperature",
        help="""What sampling temperature to use. Higher values mean the model will take more risks. Try 0.9 for more creative applications, and 0 (argmax sampling) for ones with a well-defined answer.

Mutually exclusive with `top_p`.""",
        type=float,
    )
    sub.add_argument(
        "-P",
        "--top_p",
        help="""An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10%% probability mass are considered.

            Mutually exclusive with `temperature`.""",
        type=float,
    )
    sub.add_argument("-n", "--n", help="How many sub-completions to generate for each prompt.", type=int)
    sub.add_argument(
        "--logprobs",
        help="Include the log probabilities on the `logprobs` most likely tokens, as well as the chosen tokens. So for example, if `logprobs` is 10, the API will return a list of the 10 most likely tokens. If `logprobs` is 0, only the chosen tokens will have logprobs returned.",
        type=int,
    )
    sub.add_argument(
        "--best_of",
        help="Generates `best_of` completions server-side and returns the 'best' (the one with the highest log probability per token). Results cannot be streamed.",
        type=int,
    )
    sub.add_argument("--echo", help="Echo back the prompt in addition to the completion", action="store_true")
    sub.add_argument(
        "--frequency_penalty",
        help="Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.",
        type=float,
    )
    sub.add_argument(
        "--presence_penalty",
        help="Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.",
        type=float,
    )
    sub.add_argument("--suffix", help="The suffix that comes after a completion of inserted text.")
    sub.add_argument("--stop", help="A stop sequence at which to stop generating tokens.")
    sub.add_argument(
        "--user",
        help="A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.",
    )
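    # Attach the handler and its argument model to this subparser; the CLI
    # driver that owns the parser presumably reads `func` and `args_model`
    # back off the parsed namespace, validates the flags into the model, and
    # then calls the handler with it.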
    sub.set_defaults(func=CLICompletions.create, args_model=CLICompletionCreateArgs)


class CLICompletionCreateArgs(BaseModel):
    model: str
    stream: bool = False

    prompt: Optional[str] = None
    # NOT_GIVEN distinguishes "flag was not passed" from an explicit value, so
    # omitted flags are left out of the API request entirely.
    n: NotGivenOr[int] = NOT_GIVEN
    stop: NotGivenOr[str] = NOT_GIVEN
    user: NotGivenOr[str] = NOT_GIVEN
    echo: NotGivenOr[bool] = NOT_GIVEN
    suffix: NotGivenOr[str] = NOT_GIVEN
    best_of: NotGivenOr[int] = NOT_GIVEN
    top_p: NotGivenOr[float] = NOT_GIVEN
    logprobs: NotGivenOr[int] = NOT_GIVEN
    max_tokens: NotGivenOr[int] = NOT_GIVEN
    temperature: NotGivenOr[float] = NOT_GIVEN
    presence_penalty: NotGivenOr[float] = NOT_GIVEN
    frequency_penalty: NotGivenOr[float] = NOT_GIVEN


class CLICompletions:
    @staticmethod
    def create(args: CLICompletionCreateArgs) -> None:
        if is_given(args.n) and args.n > 1 and args.stream:
            raise CLIError("Can't stream completions with n>1 with the current CLI")

        # Build the request once so the streaming and non-streaming paths share
        # exactly the same parameters.
        make_request = partial(
            get_client().completions.create,
            n=args.n,
            echo=args.echo,
            stop=args.stop,
            user=args.user,
            model=args.model,
            top_p=args.top_p,
            prompt=args.prompt,
            suffix=args.suffix,
            best_of=args.best_of,
            logprobs=args.logprobs,
            max_tokens=args.max_tokens,
            temperature=args.temperature,
            presence_penalty=args.presence_penalty,
            frequency_penalty=args.frequency_penalty,
        )

        if args.stream:
            return CLICompletions._stream_create(
                # The partial's return type is not narrowed by `stream=True`,
                # hence the cast.
                cast(Stream[Completion], make_request(stream=True))
            )

        return CLICompletions._create(make_request())

    @staticmethod
    def _create(completion: Completion) -> None:
        # Only label each choice when more than one was requested.
        should_print_header = len(completion.choices) > 1
        for choice in completion.choices:
            if should_print_header:
                sys.stdout.write("===== Completion {} =====\n".format(choice.index))

            sys.stdout.write(choice.text)

            if should_print_header or not choice.text.endswith("\n"):
                sys.stdout.write("\n")

            sys.stdout.flush()

    @staticmethod
    def _stream_create(stream: Stream[Completion]) -> None:
        for completion in stream:
            should_print_header = len(completion.choices) > 1
            # Sort by index so output stays in a stable order when n > 1.
            for choice in sorted(completion.choices, key=lambda c: c.index):
                if should_print_header:
                    sys.stdout.write("===== Chat Completion {} =====\n".format(choice.index))

                sys.stdout.write(choice.text)

                if should_print_header:
                    sys.stdout.write("\n")

                sys.stdout.flush()

        sys.stdout.write("\n")