#!/usr/bin/env bash
# 00000099  -- original document/ticket number, kept as a comment (it is not a command)

# Install git-lfs first so `git clone` fetches the large model weight files
# instead of LFS pointer stubs. Use whichever package manager the distro has.
apt-get install -y git-lfs || yum install -y git-lfs
git lfs install   # activate the LFS smudge filter for subsequent clones

# Download the two embedding models used later for Xinference registration.
mkdir -p /home/models
cd /home/models || exit 1
git clone https://www.modelscope.cn/BAAI/bge-m3.git
git clone https://www.modelscope.cn/AI-ModelScope/m3e-large.git
# Download the installer and wheel via SFTP; contact an Iluvatar (天数智芯)
# engineer for the SFTP address and password.
# NOTE(review): the two `get` lines below are sftp-session commands, not shell --
# run them inside the sftp session opened per the instructions above.
get /MR_4.2.0/x86/sdk/corex-docker-installer-4.2.0-10.2-ubuntu20.04-py3.10-x86_64.run /home
get /MR_4.2.0/x86/apps/py3.10/transformers-4.45.2+corex.4.2.0-py3-none-any.whl /home
# Run the CoreX docker-installer package on the host. --silent: unattended;
# --disable-dkms: skip DKMS kernel-module rebuild (presumably the driver is
# already installed on the host -- TODO confirm with the vendor).
bash corex-docker-installer-4.2.0-10.2-ubuntu20.04-py3.10-x86_64.run --silent --disable-dkms
docker run --shm-size="32g" -it -v /usr/src:/usr/src -v /lib/modules:/lib/modules -v /dev:/dev -v /home:/home --name=test -p 9997:9997 --privileged --cap-add=ALL --pid=host corex:4.2.0
# Install Xinference with the transformers and vllm extras (inside the container).
# The requirement spec is quoted: unquoted [...] is a shell glob pattern and can
# be mis-expanded if a matching file exists in the working directory.
pip3 install 'xinference[transformers,vllm]==1.3.1.post1' -i https://pypi.tuna.tsinghua.edu.cn/simple
# Remove stock opencv-python pulled in as a dependency (presumably it conflicts
# with the CoreX environment -- TODO confirm the reason with the vendor).
pip3 uninstall -y opencv-python
pip3 install sentence-transformers
# Replace upstream transformers with the Iluvatar CoreX-patched wheel
# downloaded over SFTP earlier.
pip3 install '/home/transformers-4.45.2+corex.4.2.0-py3-none-any.whl'
cd /home/models || exit 1

# Write the custom-model definition files non-interactively. The original notes
# opened vim by hand; heredocs are reproducible and scriptable.
cat > bge_m3.json <<'EOF'
{
  "model_name": "bge-m3-local",
  "dimensions": 1024,
  "max_tokens": 8192,
  "language": ["en", "zh"],
  "model_id": "BAAI/bge-m3",
  "model_uri": "/home/models/bge-m3"
}
EOF

# NOTE(review): the original notes never showed m3e_large.json's content.
# The values below follow the m3e-large model card (768-dim embeddings) --
# TODO confirm dimensions/max_tokens before registering.
cat > m3e_large.json <<'EOF'
{
  "model_name": "m3e-large-local",
  "dimensions": 768,
  "max_tokens": 512,
  "language": ["en", "zh"],
  "model_id": "AI-ModelScope/m3e-large",
  "model_uri": "/home/models/m3e-large"
}
EOF

# Register both custom embedding models with Xinference; --persist keeps the
# registration across server restarts.
xinference register --model-type embedding --file bge_m3.json --persist
xinference register --model-type embedding --file m3e_large.json --persist
cd /home/models
# Smoke-test the bge-m3 embedding endpoint against the Xinference server
# published on port 9997 by the docker run above (model must be registered
# and launched first).
curl -X 'POST' \
'http://0.0.0.0:9997/v1/embeddings' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-d ' { "model": "bge-m3-local", "input": "What is the capital of China?" }
'
# Smoke-test the second (m3e-large) embedding endpoint. The original notes left
# the -d request body empty, which the server would reject with a validation
# error; a minimal valid body is supplied here.
# NOTE(review): the model name is assumed from the registration step --
# confirm it matches "model_name" in m3e_large.json.
curl -X 'POST' \
  'http://0.0.0.0:9997/v1/embeddings' \
  -H 'accept: application/json' \
  -H 'Content-Type: application/json' \
  -d '{ "model": "m3e-large-local", "input": "What is the capital of China?" }'