A Linear Regression Example with TensorFlow (9)

from __future__ import absolute_import, division, print_function, unicode_literals

import numpy as np

import pandas as pd

import seaborn as sb

import tensorflow as tf

from tensorflow import keras as ks

from tensorflow.estimator import LinearRegressor

from sklearn import datasets

from sklearn.model_selection import train_test_split

from sklearn.metrics import mean_squared_error, r2_score

print(tf.__version__)

"""## Load and configure the Boston Housing Dataset"""

boston_load = datasets.load_boston()

feature_columns = boston_load.feature_names

target_column = boston_load.target

boston_data = pd.DataFrame(boston_load.data, columns=feature_columns).astype(np.float32)

boston_data['MEDV'] = target_column.astype(np.float32)

boston_data.head()
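"""Note: `load_boston` was deprecated in scikit-learn 1.0 and removed in 1.2, so the call above fails on recent versions. One possible workaround (an assumption, not part of the original walkthrough) is to fetch the same dataset from OpenML instead:"""

# Hypothetical fallback for scikit-learn >= 1.2, where sklearn.datasets.load_boston no longer exists:
# from sklearn.datasets import fetch_openml
# boston_openml = fetch_openml(name='boston', version=1, as_frame=True)
# boston_data = boston_openml.data.apply(pd.to_numeric).astype(np.float32)
# boston_data['MEDV'] = boston_openml.target.astype(np.float32)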

"""## Checking the relation between the variables using Pairplot and Correlation Graph"""

sb.pairplot(boston_data, diag_kind="kde", height=3, aspect=0.6)

correlation_data = boston_data.corr()

correlation_data.style.background_gradient(cmap='coolwarm', axis=None)

"""## Descriptive Statistics - Central Tendency and Dispersion"""

stats = boston_data.describe()

boston_stats = stats.transpose()

boston_stats

"""## Select the required columns"""

X_data = boston_data[[i for i in boston_data.columns if i not in ['MEDV']]]

Y_data = boston_data[['MEDV']]

"""## Train Test Split"""

training_features, test_features, training_labels, test_labels = train_test_split(X_data, Y_data, test_size=0.2)
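"""Note: `train_test_split` shuffles at random, so the row counts printed below stay the same but the split itself changes between runs. Passing a seed (an optional tweak, not in the original code) makes it reproducible, e.g.:"""

# train_test_split(X_data, Y_data, test_size=0.2, random_state=42)  # 42 is an arbitrary example seed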

print('No. of rows in Training Features: ', training_features.shape[0])

print('No. of rows in Test Features: ', test_features.shape[0])

print('No. of columns in Training Features: ', training_features.shape[1])

print('No. of columns in Test Features: ', test_features.shape[1])

print('No. of rows in Training Label: ', training_labels.shape[0])

print('No. of rows in Test Label: ', test_labels.shape[0])

print('No. of columns in Training Label: ', training_labels.shape[1])

print('No. of columns in Test Label: ', test_labels.shape[1])

stats = training_features.describe()

stats = stats.transpose()

stats

stats = test_features.describe()

stats = stats.transpose()

stats

"""## Normalize Data"""

def norm(x):
    # Standardize each column using its own mean and standard deviation.
    stats = x.describe()
    stats = stats.transpose()
    return (x - stats['mean']) / stats['std']

normed_train_features = norm(training_features)

normed_test_features = norm(test_features)
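"""A variation worth noting (not in the original example): `norm` above scales each split with its *own* statistics. A common alternative is to reuse the training-set statistics for the test set, so both splits share one scale and no test information enters preprocessing. A minimal sketch, assuming the same dataframes as above:"""

train_stats = training_features.describe().transpose()

def norm_with_train_stats(x):
    # Scale any dataframe with the mean/std computed on the training features only.
    return (x - train_stats['mean']) / train_stats['std']

# normed_train_features = norm_with_train_stats(training_features)
# normed_test_features = norm_with_train_stats(test_features)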

"""## Build the Input Pipeline for TensorFlow model"""

def feed_input(features_dataframe, target_dataframe, num_of_epochs=10, shuffle=True, batch_size=32):
    # Return an input function that builds a tf.data pipeline from the given dataframes.
    def input_feed_function():
        dataset = tf.data.Dataset.from_tensor_slices((dict(features_dataframe), target_dataframe))
        if shuffle:
            dataset = dataset.shuffle(2000)
        dataset = dataset.batch(batch_size).repeat(num_of_epochs)
        return dataset
    return input_feed_function

train_feed_input = feed_input(normed_train_features, training_labels)

train_feed_input_testing = feed_input(normed_train_features, training_labels, num_of_epochs=1, shuffle=False)

test_feed_input = feed_input(normed_test_features, test_labels, num_of_epochs=1, shuffle=False)
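"""A quick sanity check (an addition, not in the original walkthrough): each input function returns a `tf.data.Dataset`, so one batch can be pulled out directly to confirm that the features arrive as a dict of tensors and the labels as a (batch_size, 1) tensor."""

for feature_batch, label_batch in train_feed_input().take(1):
    print('Feature keys:', list(feature_batch.keys()))
    print('Label batch shape:', label_batch.shape)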

"""## Model Training"""

feature_columns_numeric = [tf.feature_column.numeric_column(m) for m in training_features.columns]

linear_model = LinearRegressor(feature_columns=feature_columns_numeric, optimizer='RMSProp')

linear_model.train(train_feed_input)
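"""Note: the `tf.estimator` API is deprecated in recent TensorFlow releases and removed in the newest ones. A rough Keras equivalent of the same linear model, sketched here as an assumption rather than a drop-in replacement for the estimator above, could look like:"""

# keras_linear = ks.Sequential([ks.layers.Dense(1, input_shape=(normed_train_features.shape[1],))])
# keras_linear.compile(optimizer=ks.optimizers.RMSprop(), loss='mse')
# keras_linear.fit(normed_train_features.values, training_labels.values, epochs=10, batch_size=32)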

"""## Predictions"""

train_predictions = linear_model.predict(train_feed_input_testing)

test_predictions = linear_model.predict(test_feed_input)

train_predictions_series = pd.Series([p['predictions'][0] for p in train_predictions])

test_predictions_series = pd.Series([p['predictions'][0] for p in test_predictions])

train_predictions_df = pd.DataFrame(train_predictions_series, columns=['predictions'])

test_predictions_df = pd.DataFrame(test_predictions_series, columns=['predictions'])

training_labels.reset_index(drop=True, inplace=True)

train_predictions_df.reset_index(drop=True, inplace=True)

test_labels.reset_index(drop=True, inplace=True)

test_predictions_df.reset_index(drop=True, inplace=True)

train_labels_with_predictions_df = pd.concat([training_labels, train_predictions_df], axis=1)

test_labels_with_predictions_df = pd.concat([test_labels, test_predictions_df], axis=1)
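"""An optional visual check (not part of the original walkthrough): plotting predicted against actual MEDV for the test split with the already-imported seaborn; points near the diagonal indicate a good fit."""

sb.scatterplot(x='MEDV', y='predictions', data=test_labels_with_predictions_df)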

"""## Validation"""

def calculate_errors_and_r2(y_true, y_pred):
    # Compute MSE, RMSE, and the R2 score expressed as a rounded percentage.
    mean_squared_err = mean_squared_error(y_true, y_pred)
    root_mean_squared_err = np.sqrt(mean_squared_err)
    r2 = round(r2_score(y_true, y_pred) * 100, 0)
    return mean_squared_err, root_mean_squared_err, r2

train_mean_squared_error, train_root_mean_squared_error, train_r2_score_percentage = calculate_errors_and_r2(training_labels, train_predictions_series)

test_mean_squared_error, test_root_mean_squared_error, test_r2_score_percentage = calculate_errors_and_r2(test_labels, test_predictions_series)

print('Training Data Mean Squared Error = ', train_mean_squared_error)

print('Training Data Root Mean Squared Error = ', train_root_mean_squared_error)

print('Training Data R2 = ', train_r2_score_percentage)

print('Test Data Mean Squared Error = ', test_mean_squared_error)

print('Test Data Root Mean Squared Error = ', test_root_mean_squared_error)

print('Test Data R2 = ', test_r2_score_percentage)
