
"""Example FastAPI server for llama.cpp.

To run this example:

```bash
pip install fastapi uvicorn sse-starlette pydantic-settings
export MODEL=../models/7B/...
```

Then run:
```
uvicorn llama_cpp.server.app:create_app --reload
```

or

```
python3 -m llama_cpp.server
```

Then visit http://localhost:8000/docs to see the interactive API docs.
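
The server can also load its settings from a config file, supplied either through
the `CONFIG_FILE` environment variable or the `--config_file` flag (JSON, or YAML
when the filename ends in `.yaml`/`.yml`). A minimal sketch, assuming a hypothetical
`./server-config.yaml` in the working directory:

```bash
# hypothetical config path; JSON or YAML (.yaml/.yml) files are accepted
export CONFIG_FILE=./server-config.yaml
python3 -m llama_cpp.server
# or equivalently:
python3 -m llama_cpp.server --config_file ./server-config.yaml
```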

"""

from __future__ import annotations

import os
import sys
import argparse

import uvicorn

from llama_cpp.server.app import create_app
from llama_cpp.server.settings import (
    Settings,
    ServerSettings,
    ModelSettings,
    ConfigFileSettings,
)
from llama_cpp.server.cli import add_args_from_model, parse_model_from_args


def main():
    description = "🦙 Llama.cpp python server. Host your own LLMs!🚀"
    parser = argparse.ArgumentParser(description=description)
    add_args_from_model(parser, Settings)
    parser.add_argument(
        "--config_file",
        type=str,
        help="Path to a config file to load.",
    )
    server_settings: ServerSettings | None = None
    model_settings: list[ModelSettings] = []
    args = parser.parse_args()
    try:
        # Load settings from a config file if one was given, either via the
        # CONFIG_FILE environment variable or the --config_file argument.
        config_file = os.environ.get("CONFIG_FILE", args.config_file)
        if config_file:
            if not os.path.exists(config_file):
                raise ValueError(f"Config file {config_file} not found!")
            with open(config_file, "rb") as f:
                # YAML configs are converted to JSON before pydantic validation.
                if config_file.endswith(".yaml") or config_file.endswith(".yml"):
                    import yaml
                    import json

                    config_file_settings = ConfigFileSettings.model_validate_json(
                        json.dumps(yaml.safe_load(f))
                    )
                else:
                    config_file_settings = ConfigFileSettings.model_validate_json(
                        f.read()
                    )
                server_settings = ServerSettings.model_validate(config_file_settings)
                model_settings = config_file_settings.models
        else:
            # Otherwise build the settings from the parsed command-line arguments.
            server_settings = parse_model_from_args(ServerSettings, args)
            model_settings = [parse_model_from_args(ModelSettings, args)]
    except Exception as e:
        print(e, file=sys.stderr)
        parser.print_help()
        sys.exit(1)
    assert server_settings is not None
    assert model_settings is not None
    app = create_app(
        server_settings=server_settings,
        model_settings=model_settings,
    )
    uvicorn.run(
        app,
        host=os.getenv("HOST", server_settings.host),
        port=int(os.getenv("PORT", server_settings.port)),
        ssl_keyfile=server_settings.ssl_keyfile,
        ssl_certfile=server_settings.ssl_certfile,
    )


if __name__ == "__main__":
    main()