contextlib2
ninja
jinja2
rich
ray[default]==2.47.1
pyarrow
datasets
langchain==0.2.7
paramiko
einops
tqdm
loguru
pyjava>=0.6.21
tiktoken
fastapi
uvicorn
retrying
zhipuai
dashscope
tabulate
jupyter_client
prompt-toolkit
websocket-client
sqlmodel
jieba
json5
modelscope
huggingface_hub
wudao
openai>=1.59.8
anthropic
google-generativeai
protobuf
azure-cognitiveservices-speech

[default]
contextlib2
ninja
jinja2
rich
ray[default]==2.47.1
pyarrow
datasets
langchain==0.2.7
paramiko
einops
tqdm
loguru
pyjava>=0.6.21
tiktoken
fastapi
uvicorn
retrying
zhipuai
dashscope
tabulate
jupyter_client
prompt-toolkit
websocket-client
sqlmodel
jieba
json5
modelscope
huggingface_hub
wudao
openai>=1.59.8
anthropic
google-generativeai
protobuf
azure-cognitiveservices-speech
accelerate
bitsandbytes>=0.39.0
transformers>=4.35.0
torch>=2.1.2
sentence-transformers
transformers_stream_generator
optimum
sentencepiece

[local]
contextlib2
ninja
jinja2
rich
ray[default]==2.47.1
pyarrow
datasets
langchain==0.2.7
paramiko
einops
tqdm
loguru
pyjava>=0.6.21
tiktoken
fastapi
uvicorn
retrying
zhipuai
dashscope
tabulate
jupyter_client
prompt-toolkit
websocket-client
sqlmodel
jieba
json5
modelscope
huggingface_hub
wudao
openai>=1.59.8
anthropic
google-generativeai
protobuf
azure-cognitiveservices-speech
accelerate
bitsandbytes>=0.39.0
transformers>=4.35.0
torch>=2.1.2
sentence-transformers
transformers_stream_generator
optimum
sentencepiece

[saas]
contextlib2
ninja
jinja2
rich
ray[default]==2.47.1
pyarrow
datasets
langchain==0.2.7
paramiko
einops
tqdm
loguru
pyjava>=0.6.21
tiktoken
fastapi
uvicorn
retrying
zhipuai
dashscope
tabulate
jupyter_client
prompt-toolkit
websocket-client
sqlmodel
jieba
json5
modelscope
huggingface_hub
wudao
openai>=1.59.8
anthropic
google-generativeai
protobuf
azure-cognitiveservices-speech
