tensorflow2.0,keras函数式与子类式API实现wide&deep预测模型

wide&deep模型可以将其理解为在一个神经网络中将数据通过不同的方式处理后再结合起来的一种模型。这里只实现了一个简单的模型:将同样的数据通过线性处理的结果(文中无)和两个全连接层处理后的结果结合起来,再经过输出层输出。
数据集为sklearn提供的加利福尼亚房价预测数据集
tensorflow2.0,keras函数式与子类式API实现wide&deep预测模型_第1张图片
tensorflow2.0,keras函数式与子类式API实现wide&deep预测模型_第2张图片

import matplotlib.pyplot as plt
from tensorflow import keras
import tensorflow as tf
import matplotlib as mpl
import pandas as pd
import numpy as np
import sklearn
import time
import sys
import os

# Load the dataset (fetched from the network on first use, then cached locally).
from sklearn.datasets import fetch_california_housing
housing = fetch_california_housing()
print(housing.data.shape)   # (20640, 8)
print(housing.target.shape) # (20640, )

# Split into train / validation / test sets (default 75/25 split at each step).
from sklearn.model_selection import train_test_split
x_train_all, x_test, y_train_all, y_test = train_test_split(housing.data, housing.target, random_state = 7)
x_train, x_valid, y_train, y_valid = train_test_split(x_train_all, y_train_all, random_state = 11)
print(x_train.shape, y_train.shape) # (11610, 8) (11610,)
print(x_valid.shape, y_valid.shape) # (3870, 8) (3870,)
print(x_test.shape, y_test.shape)   # (5160, 8) (5160,)

# Standardize features: x = (x - u) / d.
# Fit the scaler on the training set only, then reuse it for valid/test
# so no statistics leak from the held-out data.
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)
x_valid_scaled = scaler.transform(x_valid)
x_test_scaled = scaler.transform(x_test)

# Build the model
#************************************************
# Option 1: the functional API
# input = keras.layers.Input(shape=x_train.shape[1:])
# hidden1 = keras.layers.Dense(30, activation='relu')(input)
# hidden2 = keras.layers.Dense(30, activation='relu')(hidden1)
# Function composition: f(x) = h(g(x))
# concat = keras.layers.concatenate([input, hidden2])
# output = keras.layers.Dense(1)(concat)
# Instantiate the model
# model = keras.models.Model(inputs = [input],outputs = [output])

# Option 2: the subclassing API (used below)
class WideDeepModel(keras.models.Model):
    """Wide & Deep regressor.

    The raw input (wide path) is concatenated with the output of two
    30-unit ReLU dense layers (deep path), then fed through a single
    linear output unit.
    """

    def __init__(self):
        super(WideDeepModel, self).__init__()
        # Deep-path layers plus the final regression head.
        self.hidden1_layer = keras.layers.Dense(30, activation='relu')
        self.hidden2_layer = keras.layers.Dense(30, activation='relu')
        self.output_layer = keras.layers.Dense(1)

    def call(self, input):
        """Forward pass: run the deep path, merge with the wide path, project to 1 value."""
        deep = self.hidden2_layer(self.hidden1_layer(input))
        merged = keras.layers.concatenate([input, deep])
        return self.output_layer(merged)
    
# Two ways to instantiate the model:
# model = keras.models.Sequential([WideDeepModel(),]) # treats WideDeepModel as a single layer
model = WideDeepModel()
model.build(input_shape=(None, 8))  # 8 features per sample (California housing)
#***************************************************

model.summary()
model.compile(loss="mean_squared_error",
              optimizer="sgd"
              )
# Early stopping: stop once the monitored loss improves by less than 1e-2
# for 5 consecutive epochs.
callbacks = [keras.callbacks.EarlyStopping(patience=5, min_delta=1e-2)]
# No validation frequency set, so validation runs once per epoch by default.
history = model.fit(x_train_scaled, y_train,
                    validation_data = (x_valid_scaled, y_valid),
                    epochs = 100,
                    callbacks = callbacks
                    )

# Plot the learning curves recorded during training.
def plot_learning_curves(history):
    """Plot every metric in ``history.history`` on one figure, y-axis fixed to [0, 1]."""
    curves = pd.DataFrame(history.history)
    axes = curves.plot(figsize=(8, 5))
    axes.set_ylim(0, 1)
    axes.grid(True)
    plt.show()

plot_learning_curves(history)

# Final evaluation on the held-out test set.
model.evaluate(x_test_scaled, y_test)

你可能感兴趣的:(tensorflow)