TensorFlow Serving
```python
from kashgari.tasks.classification import BiGRU_Model
from kashgari.corpus import SMP2018ECDTCorpus
from kashgari import utils

train_x, train_y = SMP2018ECDTCorpus.load_data()

model = BiGRU_Model()
model.fit(train_x, train_y)

# Save model
utils.convert_to_saved_model(model,
                             model_path='saved_model/bgru',
                             version=1)
```
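If you want to double-check what was exported before serving it, here is a minimal sketch (assuming TensorFlow 2.x, the `saved_model/bgru/1` directory produced by `version=1` above, and the default `serving_default` signature) that reloads the SavedModel and prints its serving signature:

```python
import tensorflow as tf

# Reload the exported SavedModel and inspect the default serving signature,
# so you can confirm the input/output tensor names TensorFlow Serving will expose.
loaded = tf.saved_model.load('saved_model/bgru/1')
infer = loaded.signatures['serving_default']
print(infer.structured_input_signature)  # expected input tensors
print(infer.structured_outputs)          # output tensor spec
```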
Then run TensorFlow Serving with Docker:
```bash
docker run -t --rm -p 8501:8501 -v "path_to/saved_model:/models/" -e MODEL_NAME=bgru tensorflow/serving
```
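Before sending predictions, you can verify that the container picked up the model via TensorFlow Serving's REST model-status endpoint. A quick sketch, assuming the container above is reachable on `localhost:8501`:

```python
import requests

# Query the model status endpoint; the served model should report state 'AVAILABLE'.
status = requests.get("http://localhost:8501/v1/models/bgru").json()
print(status)
# e.g. {'model_version_status': [{'version': '1', 'state': 'AVAILABLE', ...}]}
```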
Load the processor from the saved model, pre-process the input, then send a predict request to the serving endpoint.
```python
import requests
import numpy as np

from kashgari import utils

x = ['Hello', 'World']

# Pre-process data
processor = utils.load_processor(model_path='saved_model/bgru/1')
tensor = processor.process_x_dataset([x])
# array([[1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], dtype=int32)

# If you are using a BERT embedding, you need to reformat the tensor first
# ------ Only for BERT Embedding Start --------
tensor = [{
    "Input-Token:0": i.tolist(),
    "Input-Segment:0": np.zeros(i.shape).tolist()
} for i in tensor]
# ------ Only for BERT Embedding End ----------

# Predict
# For the non-BERT case tensor is a numpy array; for the BERT case it is
# already a plain list of dicts, so only call .tolist() on arrays.
instances = tensor.tolist() if isinstance(tensor, np.ndarray) else tensor
r = requests.post("http://localhost:8501/v1/models/bgru:predict",
                  json={"instances": instances})
preds = r.json()['predictions']

# Convert the result back to labels
labels = processor.reverse_numerize_label_sequences(np.array(preds).argmax(-1))
# labels = ['video']
```
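The same request can serve a whole batch of sentences at once. Below is a minimal sketch, assuming the non-BERT case, the same processor path and endpoint as above, and a hypothetical `predict_labels` helper name:

```python
import numpy as np
import requests

from kashgari import utils

# Endpoint and model path are the ones assumed in the example above.
SERVING_URL = "http://localhost:8501/v1/models/bgru:predict"
processor = utils.load_processor(model_path='saved_model/bgru/1')


def predict_labels(samples):
    """samples: a list of tokenized sentences, e.g. [['Hello', 'World'], ...]."""
    tensor = processor.process_x_dataset(samples)
    r = requests.post(SERVING_URL, json={"instances": tensor.tolist()})
    r.raise_for_status()
    preds = np.array(r.json()['predictions'])
    return processor.reverse_numerize_label_sequences(preds.argmax(-1))


print(predict_labels([['Hello', 'World'], ['Play', 'some', 'music']]))
```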
