yum install docker
docker -v
service docker start
systemctl enable docker
systemctl stop docker
docker images
docker rmi 镜像id
docker ps -l # 查看最近运行的容器
docker ps -a # 查看所有运行过的容器
docker ps # 查看运行中的容器
# Pull the TF Serving image and run its bundled half_plus_two test model.
docker pull tensorflow/serving
git clone https://github.com/tensorflow/serving
TESTDATA="$(pwd)/serving/tensorflow_serving/servables/tensorflow/testdata"

# NB: ASCII quotes and trailing backslashes are required here — the original
# notes used curly Unicode quotes and newlines, which the shell treats as
# literal characters / separate commands.
docker run -t --rm -p 8501:8501 \
  -v "$TESTDATA/saved_model_half_plus_two_cpu:/models/half_plus_two" \
  -e MODEL_NAME=half_plus_two \
  tensorflow/serving &

curl -d '{"instances": [1.0, 2.0, 5.0]}' \
  -X POST http://localhost:8501/v1/models/half_plus_two:predict
# Returns => { "predictions": [2.5, 3.0, 4.5] }
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author ChenYongSheng
# date 20200525
import numpy as np
from keras.layers import Dense, Dropout
from keras.models import Sequential
import tensorflow as tf
import keras.backend as K
import os
def export_model(model, export_model_dir, model_version):
    """Export a trained Keras model as a TensorFlow SavedModel for TF Serving.

    Uses the TF 1.x ``SavedModelBuilder`` API (``tf.saved_model.builder``);
    for TF 2.x use ``tf.saved_model.save`` instead.

    :param model: compiled/trained Keras model whose ``input``/``output``
        symbolic tensors define the serving signature
    :param export_model_dir: str, base directory for the export; the version
        number is appended as a subdirectory (the layout TF Serving expects)
    :param model_version: int preferred; TF Serving serves the highest
        version directory found under ``export_model_dir``
    :return: None
    """
    with tf.get_default_graph().as_default():
        # Build the prediction signature from the model's symbolic tensors.
        tensor_info_input = tf.saved_model.utils.build_tensor_info(model.input)
        tensor_info_output = tf.saved_model.utils.build_tensor_info(model.output)
        prediction_signature = (
            tf.saved_model.signature_def_utils.build_signature_def(
                inputs={'text': tensor_info_input},
                outputs={'result': tensor_info_output},
                method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME))
        print('step1 => prediction_signature created successfully')

        # TF Serving requires the layout <export_model_dir>/<model_version>/.
        export_path_base = export_model_dir
        export_path = os.path.join(
            tf.compat.as_bytes(export_path_base),
            tf.compat.as_bytes(str(model_version)))
        builder = tf.saved_model.builder.SavedModelBuilder(export_path)
        builder.add_meta_graph_and_variables(
            # tags: SERVING, TRAINING, EVAL, GPU, TPU
            sess=K.get_session(),
            tags=[tf.saved_model.tag_constants.SERVING],
            signature_def_map={'serving_default': prediction_signature},
        )
        print('step2 => Export path(%s) ready to export trained model' % export_path,
              '\n starting to export model...')
        builder.save(as_text=True)
        print('Done exporting!')
# --- Demo pipeline: train a toy Keras model, save it, and export it. ---

# Toy data: a single random sample each for train/test, just to exercise
# the full train -> save -> export flow.
x_train = np.random.random((1, 1))
y_train = np.random.randint(2, size=(1, 1))
x_test = np.random.random((1, 1))
y_test = np.random.randint(2, size=(1, 1))

# Small MLP: 1 -> 64 -> 64 -> 1 with dropout, sigmoid output for binary labels.
model = Sequential([
    Dense(64, input_dim=1, activation='relu'),
    Dropout(0.5),
    Dense(64, activation='relu'),
    Dropout(0.5),
    Dense(1, activation='sigmoid'),
])
model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

model.fit(x_train, y_train, epochs=2, batch_size=128)
score = model.evaluate(x_test, y_test, batch_size=128)
print(score)

# Save to HDF5, then reload the weights to demonstrate a round-trip.
model_path = "model/keras_test.h5"
model.save(model_path)
model.load_weights('model/keras_test.h5')
model.summary()

# Export as SavedModel version 1 under model/export_model for TF Serving.
export_model(model, 'model/export_model', 1)
# (Alternative) copy the exported model into an already-running container:
docker ps -l   # find the id of the most recently run container
docker cp /bert/model/export_model/ dd33a8cdcf23:/models/keras_test
# Serve the exported model, mounting the export directory into the container.
# NB: ASCII quotes and trailing backslashes are required — the original notes
# used curly Unicode quotes, which the shell passes through as literal text.
docker run -t --rm -p 8501:8501 \
  -v "/bert/model/export_model:/models/keras_test" \
  -e MODEL_NAME=keras_test \
  tensorflow/serving &

# Query the REST predict endpoint with a 1x1 input batch.
curl -d '{"instances": [[0.85939513]]}' \
  -X POST http://localhost:8501/v1/models/keras_test:predict
# Check the model's status via the model-status API:
curl http://localhost:8501/v1/models/keras_test
# Inspect the model's metadata (signatures, input/output shapes) via the metadata API:
curl http://localhost:8501/v1/models/keras_test/metadata
import requests
from time import time
import numpy as np

# Client-side smoke test against the TF Serving REST predict endpoint.
url = 'http://localhost:8501/v1/models/keras_test:predict'
a = np.array([0.85939513])
# Let requests serialize the payload as proper JSON instead of hand-building
# the string with '%s' % list(a): the old form relies on numpy's repr and can
# emit non-JSON text (e.g. float32 reprs, '...' truncation for large arrays).
# [a.tolist()] reproduces the original nested shape {"instances": [[...]]}.
r = requests.post(url, json={'instances': [a.tolist()]})
print(r.content)