# Install the build dependencies needed to compile Python
yum -y install zlib-devel bzip2-devel openssl-devel ncurses-devel sqlite-devel readline-devel tk-devel gdbm-devel db4-devel libpcap-devel xz-devel libffi-devel
Without upgrading OpenSSL, pip install may not work: Python 3.10 requires OpenSSL 1.1.1 or newer, so build a newer OpenSSL first.
# Install the build dependencies for OpenSSL
yum install -y gcc gcc-c++ autoconf automake zlib zlib-devel pcre-devel make perl perl-CPAN
# Install the IPC::Cmd module; during installation, answer "manual" to the one prompt that asks for it and "yes" to everything else
cpan IPC::Cmd
# Download and extract the OpenSSL 3.0.2 source
cd /opt
wget https://www.openssl.org/source/openssl-3.0.2.tar.gz --no-check-certificate
tar -zxvf openssl-3.0.2.tar.gz
cd openssl-3.0.2/
# Run the configuration; the install prefix /usr/local/openssl is important, Python's build references it later
./Configure --prefix=/usr/local/openssl
# Compile and install
make && make install
# Enter the install directory
cd /usr/local/openssl
# Copy the library files (OpenSSL 3 installs them under lib64, some tools expect lib); this is important
cp -rf /usr/local/openssl/lib64 /usr/local/openssl/lib
# Register the library path with the dynamic linker
echo "/usr/local/openssl/lib64/" >> /etc/ld.so.conf
# Refresh the system's shared-library cache
ldconfig
# Move the old openssl binary out of the way
mv /usr/bin/openssl /usr/bin/openssl.bak
# Create a symlink to the new openssl
ln -s /usr/local/openssl/bin/openssl /usr/bin/openssl
# Check the version (should report OpenSSL 3.0.2)
openssl version
# Create a working directory back under /opt
cd /opt
mkdir python
cd python
# Download the source (if wget is missing, install it first: yum install -y wget)
wget https://www.python.org/ftp/python/3.10.6/Python-3.10.6.tgz
tar -xzf Python-3.10.6.tgz
cd Python-3.10.6
# Configure and build; note that --with-openssl=/usr/local/openssl must match the OpenSSL install prefix used above
./configure --prefix=/usr/local/python3 --with-openssl=/usr/local/openssl --with-openssl-rpath=auto
make && make install
# Remove any stale python3/pip3 links
rm -rf /usr/local/bin/pip3
rm -rf /usr/local/bin/python3
# Create symlinks to the new interpreter and pip
ln -s /usr/local/python3/bin/pip3 /usr/local/bin/
ln -s /usr/local/python3/bin/python3 /usr/local/bin/
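A quick sanity check (a sketch, run with the freshly installed python3): it confirms the interpreter was built against the upgraded OpenSSL; if the ssl module cannot be imported, pip install will not work.
# Verify the interpreter version and the OpenSSL it links against
import ssl
import sys
print(sys.version)          # expect 3.10.6
print(ssl.OPENSSL_VERSION)  # expect OpenSSL 3.0.2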
# Go back to the /opt directory
cd /opt
# Install virtualenv (optional here, since the built-in venv module is used below)
pip3 install virtualenv -i https://pypi.tuna.tsinghua.edu.cn/simple
# Create a virtual environment named pyai
python3 -m venv pyai
# Enter its bin directory
cd pyai/bin
# Activate the virtual environment
source activate
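An optional check (a sketch, assuming the pyai environment created above): confirm that the active interpreter really comes from the virtual environment and not the system Python.
# Inside the activated environment, both paths should point into /opt/pyai
import sys
print(sys.prefix)      # expect /opt/pyai
print(sys.executable)  # expect /opt/pyai/bin/python3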
# ChatGLM3 project on GitHub
https://github.com/THUDM/ChatGLM3
# chatglm3-6b model on Hugging Face
https://huggingface.co/THUDM/chatglm3-6b
Download the Hugging Face model with git
# Set up Git LFS (the git-lfs package itself must already be installed)
git lfs install
# Download the model (the weights are large, so this takes a while)
git clone https://huggingface.co/THUDM/chatglm3-6b
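A small check (a sketch; it assumes the clone landed in ./chatglm3-6b and that the weights ship as pytorch_model-*.bin shards) that Git LFS really fetched the weight files instead of leaving tiny pointer stubs behind:
# Each weight shard should be on the order of gigabytes, not a few hundred bytes
from pathlib import Path
for f in sorted(Path("chatglm3-6b").glob("pytorch_model*.bin")):
    print(f.name, f"{f.stat().st_size / 2**30:.2f} GB")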
The requirements.txt file:
protobuf
transformers>=4.30.2
cpm_kernels
torch>=2.0
gradio~=3.39
sentencepiece
accelerate
sse-starlette
streamlit>=1.24.0
fastapi>=0.95.1
uvicorn~=0.24.0
loguru~=0.7.2
Install the dependencies
# Use -i to point at a mirror, which is much faster
pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
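A quick way (a sketch) to confirm that the key packages from requirements.txt were installed into the environment and to see which versions pip resolved:
# Print the installed version of a few core dependencies
from importlib.metadata import version
for pkg in ("protobuf", "transformers", "torch", "gradio", "streamlit", "fastapi"):
    print(pkg, version(pkg))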
# Import the tokenizer and model classes
from transformers import AutoTokenizer, AutoModel
# Load the pre-downloaded chatglm3-6b model; ./model/chatglm3-6b is a custom local directory
tokenizer = AutoTokenizer.from_pretrained("./model/chatglm3-6b", trust_remote_code=True)
# GPU deployment
# model = AutoModel.from_pretrained("./model/chatglm3-6b", trust_remote_code=True).half().cuda()
# CPU deployment (full-precision float, much slower than GPU)
model = AutoModel.from_pretrained("./model/chatglm3-6b", trust_remote_code=True).float()
model = model.eval()
# Use the model
response, history = model.chat(tokenizer, "What is artificial intelligence technology?", history=[])
print(response)
response, history = model.chat(tokenizer, "Write a report on artificial intelligence technology", history=history)
print(response)
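Building on the same model.chat API shown above (model and tokenizer as defined in the previous snippet), a minimal interactive loop; this is only a sketch, press Ctrl+C to exit:
# Simple chat loop; history carries the conversation context between turns
history = []
while True:
    query = input("User: ")
    response, history = model.chat(tokenizer, query, history=history)
    print("ChatGLM3:", response)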