Storing objects with the pickle module

import os
import pickle
import pprint

import tensorflow as tf

# The object to persist; max_words and train are assumed to be defined
# elsewhere in the surrounding program.
tokenizer = tf.keras.preprocessing.text.Tokenizer(num_words=max_words)
tokenizer.fit_on_texts(train)

# Save to file; FLAGS is likewise assumed to be defined by the surrounding program.
tokenizer_file_path = os.path.join(FLAGS.pre_model_path, FLAGS.tokenizer_file)
with open(tokenizer_file_path, 'wb') as tokenizer_file:
    pickle.dump(tokenizer, tokenizer_file, pickle.HIGHEST_PROTOCOL)  # -1 also selects the highest protocol
    pprint.pprint(tokenizer)

# Load from file
print("tokenizer reload:")
tokenizer_reload = None
try:
    with open(tokenizer_file_path, 'rb') as pkl_file:
        tokenizer_reload = pickle.load(pkl_file)
except EOFError:
    pass  # empty or truncated file; tokenizer_reload stays None
pprint.pprint(tokenizer_reload)
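
Once the object has round-tripped through pickle, a quick sanity check is to confirm that the reloaded tokenizer maps text to the same integer sequences as the original. A minimal sketch, assuming train is the list of texts the tokenizer was fitted on above:

# Sanity check: both tokenizers should produce identical sequences.
sample = train[:1]
assert tokenizer.texts_to_sequences(sample) == tokenizer_reload.texts_to_sequences(sample)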

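Pickle is convenient, but it is sensitive to Python/library versions and will execute arbitrary code when loading untrusted files. The Keras Tokenizer also supports a JSON round-trip via to_json() and tokenizer_from_json(), which avoids both issues. A minimal sketch; the file name tokenizer.json is a hypothetical choice:

# Alternative: serialize the tokenizer as JSON instead of pickle.
json_string = tokenizer.to_json()
with open('tokenizer.json', 'w') as f:  # hypothetical path
    f.write(json_string)

with open('tokenizer.json') as f:
    tokenizer_from_json = tf.keras.preprocessing.text.tokenizer_from_json(f.read())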