LSTM Prediction
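
The script below reads the (x, y) columns from output.csv, scales them to [0, 1], converts the series into sliding windows of 10 time steps, trains a stacked LSTM to predict the next (x, y) point, and plots the predicted trajectory (red) over the original one.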

import numpy as np
import pandas as pd
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
from tensorflow.keras.layers import LSTM, Activation, Dense


# Convert a time series into supervised-learning samples:
# each input is a window of `step` consecutive rows, the target is the row that follows.
def creatXY(dataset, step):
    dataX, dataY = [], []
    for i in range(len(dataset) - step):
        dataX.append(dataset[i:(i + step), :])
        dataY.append(dataset[i + step, :])
    return np.array(dataX), np.array(dataY)
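
# Example: if the input array has n rows and 2 columns, creatXY(data, 10) returns
#   dataX with shape (n - 10, 10, 2) and dataY with shape (n - 10, 2),
# i.e. each sample is a 10-step window and its target is the next row.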

df = pd.read_csv("output.csv")

# Use the first two columns (x, y) as the two features.
dataset = np.array(df.iloc[:, 0:2])
train_size = int(len(dataset) * 0.8)

dataset_train = dataset[:train_size, :]
dataset_test = dataset[train_size:, :]
# print(dataset_train.shape)  # (n, 2)
# print(dataset_test.shape)   # (n, 2)

# Fit the scaler on the training set only, then apply the same scaling to the test set.
scaler = MinMaxScaler(feature_range=(0, 1))
dataset_train_scaled = scaler.fit_transform(dataset_train)
dataset_test_scaled = scaler.transform(dataset_test)

step = 10
trainX, trainY = creatXY(dataset_train_scaled, step)
testX, testY = creatXY(dataset_test_scaled, step)

# LSTM input shape: [samples, time steps, features]
trainX_input = np.reshape(trainX, (len(trainX), step, 2))
testX_input = np.reshape(testX, (len(testX), step, 2))
model = tf.keras.models.Sequential()
# Only the first layer needs input_shape; the stacked LSTMs infer theirs.
model.add(LSTM(48, return_sequences=True, input_shape=(step, 2)))
model.add(LSTM(96, return_sequences=True))
model.add(LSTM(48))
model.add(Activation('relu'))
model.add(Dense(2))  # fully connected output layer: predicts the next (x, y)
model.compile(loss='mse', optimizer='adam')
model.fit(trainX_input, trainY, batch_size=200, epochs=500)


# Predict on the train and test windows, then map the outputs back to the original scale.
predict_train = model.predict(trainX_input)
predict_test = model.predict(testX_input)

result0 = scaler.inverse_transform(predict_train)
result1 = scaler.inverse_transform(predict_test)

# Predicted (x, y) trajectory in red, original trajectory in the default colour.
x0, y0 = result0[:, 0], result0[:, 1]
x1, y1 = result1[:, 0], result1[:, 1]
plt.plot(x0, y0, c='r')
plt.plot(x1, y1, c='r')
x = dataset[:, 0]
y = dataset[:, 1]
plt.plot(x, y)
plt.show()
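
A quick sanity check (not part of the original script) is to compare the inverse-transformed predictions with the ground-truth targets via RMSE. A minimal sketch, assuming it runs right after the script above and reuses its variables:

from sklearn.metrics import mean_squared_error

# The targets are still in the scaled [0, 1] range; map them back to the original units.
trainY_true = scaler.inverse_transform(trainY)
testY_true = scaler.inverse_transform(testY)

train_rmse = np.sqrt(mean_squared_error(trainY_true, result0))
test_rmse = np.sqrt(mean_squared_error(testY_true, result1))
print("train RMSE:", train_rmse, "test RMSE:", test_rmse)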

output.csv:

x,y,theta
0.0,0.0,0.0
0.5,9.5,0.5
1.0,13.4,1.0
1.5,16.4,1.5
2.0,18.9,2.0
2.5,21.1,2.5
3.0,23.0,3.0
3.5,24.9,3.5
4.0,26.5,4.0
4.5,28.1,4.5
5.0,29.6,5.0
5.5,31.0,5.5
6.0,32.3,6.0
6.5,33.6,6.5
7.0,34.8,7.0
7.5,36.0,7.5
8.0,37.1,8.0
8.5,38.2,8.5
9.0,39.2,9.0
9.5,40.2,9.5
10.0,41.2,10.0
10.5,42.2,10.5
11.0,43.1,11.0
11.5,44.0,11.5
12.0,44.9,12.0
12.5,45.8,12.5
13.0,46.6,13.0
13.5,47.4,13.5
14.0,48.2,14.0
14.5,49.0,14.5
15.0,49.7,15.0
15.5,50.5,15.5
16.0,51.2,16.0
16.5,51.9,16.5
17.0,52.6,17.0
17.5,53.3,17.5
18.0,54.0,18.0
18.5,54.7,18.5
19.0,55.3,19.0
19.5,55.9,19.5
20.0,56.6,20.0
20.5,57.2,20.5
21.0,57.8,21.0
21.5,58.4,21.5
22.0,59.0,22.0
22.5,59.5,22.5
23.0,60.1,23.0
23.5,60.6,23.5
24.0,61.2,24.0
24.5,61.7,24.5
25.0,62.2,25.0
25.5,62.8,25.5
26.0,63.3,26.0
26.5,63.8,26.5
27.0,64.3,27.0
27.5,64.8,27.5
28.0,65.2,28.0
28.5,65.7,28.5
29.0,66.2,29.0
29.5,66.6,29.5
30.0,67.1,30.0
30.5,67.5,30.5
31.0,68.0,31.0
31.5,68.4,31.5
32.0,68.8,32.0
32.5,69.2,32.5
33.0,69.6,33.0
33.5,70.1,33.5
34.0,70.5,34.0
34.5,70.9,34.5
35.0,71.2,35.0
35.5,71.6,35.5
36.0,72.0,36.0
36.5,72.4,36.5
37.0,72.7,37.0
37.5,73.1,37.5
38.0,73.5,38.0
38.5,73.8,38.5
39.0,74.2,39.0
39.5,74.5,39.5
40.0,74.8,40.0
40.5,75.2,40.5
41.0,75.5,41.0
41.5,75.8,41.5
42.0,76.1,42.0
42.5,76.4,42.5
43.0,76.8,43.0
43.5,77.1,43.5
44.0,77.4,44.0
44.5,77.7,44.5
45.0,77.9,45.0
45.5,78.2,45.5
46.0,78.5,46.0
46.5,78.8,46.5
47.0,79.1,47.0
47.5,79.3,47.5
48.0,79.6,48.0
48.5,79.9,48.5
49.0,80.1,49.0
49.5,80.4,49.5
50.0,80.6,50.0
50.5,80.9,50.5
51.0,81.1,51.0
51.5,81.3,51.5
52.0,81.6,52.0
52.5,81.8,52.5
53.0,82.0,53.0
53.5,82.3,53.5
54.0,82.5,54.0
54.5,82.7,54.5
55.0,82.9,55.0
55.5,83.1,55.5
56.0,83.3,56.0
56.5,83.5,56.5
57.0,83.7,57.0
57.5,83.9,57.5
58.0,84.1,58.0
58.5,84.3,58.5
59.0,84.5,59.0
59.5,84.7,59.5
60.0,84.9,60.0
60.5,85.0,60.5
61.0,85.2,61.0
61.5,85.4,61.5
62.0,85.5,62.0
62.5,85.7,62.5
63.0,85.9,63.0
63.5,86.0,63.5
64.0,86.2,64.0
64.5,86.3,64.5
65.0,86.5,65.0
65.5,86.6,65.5
66.0,86.7,66.0
66.5,86.9,66.5
67.0,87.0,67.0
67.5,87.1,67.5
68.0,87.3,68.0
68.5,87.4,68.5
69.0,87.5,69.0
69.5,87.6,69.5
70.0,87.7,70.0
70.5,87.9,70.5
71.0,88.0,71.0
71.5,88.1,71.5
72.0,88.2,72.0
72.5,88.3,72.5
73.0,88.4,73.0
73.5,88.5,73.5
74.0,88.6,74.0
74.5,88.7,74.5
75.0,88.7,75.0
75.5,88.8,75.5
76.0,88.9,76.0
76.5,89.0,76.5
77.0,89.1,77.0
77.5,89.1,77.5
78.0,89.2,78.0
78.5,89.3,78.5
79.0,89.3,79.0
79.5,89.4,79.5
80.0,89.4,80.0
80.5,89.5,80.5
81.0,89.5,81.0
81.5,89.6,81.5
82.0,89.6,82.0
82.5,89.7,82.5
83.0,89.7,83.0
83.5,89.8,83.5
84.0,89.8,84.0
84.5,89.8,84.5
85.0,89.9,85.0
85.5,89.9,85.5
86.0,89.9,86.0
86.5,89.9,86.5
87.0,89.9,87.0
87.5,90.0,87.5
88.0,90.0,88.0
88.5,90.0,88.5
89.0,90.0,89.0
89.5,90.0,89.5
90.0,90.0,90.0
90.5,90.0,90.5
91.0,90.0,91.0
91.5,90.0,91.5
92.0,90.0,92.0
92.5,90.0,92.5
93.0,89.9,93.0
93.5,89.9,93.5
94.0,89.9,94.0
94.5,89.9,94.5
95.0,89.9,95.0
95.5,89.8,95.5
96.0,89.8,96.0
96.5,89.8,96.5
97.0,89.7,97.0
97.5,89.7,97.5
98.0,89.6,98.0
98.5,89.6,98.5
99.0,89.5,99.0
99.5,89.5,99.5
100.0,89.4,100.0
100.5,89.4,100.5
101.0,89.3,101.0
101.5,89.3,101.5
102.0,89.2,102.0
102.5,89.1,102.5
103.0,89.1,103.0
103.5,89.0,103.5
104.0,88.9,104.0
104.5,88.8,104.5
105.0,88.7,105.0
105.5,88.7,105.5
106.0,88.6,106.0
106.5,88.5,106.5
107.0,88.4,107.0
107.5,88.3,107.5
108.0,88.2,108.0
108.5,88.1,108.5
109.0,88.0,109.0
109.5,87.9,109.5
110.0,87.7,110.0
110.5,87.6,110.5
111.0,87.5,111.0
111.5,87.4,111.5
112.0,87.3,112.0
112.5,87.1,112.5
113.0,87.0,113.0
113.5,86.9,113.5
114.0,86.7,114.0
114.5,86.6,114.5
115.0,86.5,115.0
115.5,86.3,115.5
116.0,86.2,116.0
116.5,86.0,116.5
117.0,85.9,117.0
117.5,85.7,117.5
118.0,85.5,118.0
118.5,85.4,118.5
119.0,85.2,119.0
119.5,85.0,119.5
120.0,84.9,120.0
120.5,84.7,120.5
121.0,84.5,121.0
121.5,84.3,121.5
122.0,84.1,122.0
122.5,83.9,122.5
123.0,83.7,123.0
123.5,83.5,123.5
124.0,83.3,124.0
124.5,83.1,124.5
125.0,82.9,125.0
125.5,82.7,125.5
126.0,82.5,126.0
126.5,82.3,126.5
127.0,82.0,127.0
127.5,81.8,127.5
128.0,81.6,128.0
128.5,81.3,128.5
129.0,81.1,129.0
129.5,80.9,129.5
130.0,80.6,130.0
130.5,80.4,130.5
131.0,80.1,131.0
131.5,79.9,131.5
132.0,79.6,132.0
132.5,79.3,132.5
133.0,79.1,133.0
133.5,78.8,133.5
134.0,78.5,134.0
134.5,78.2,134.5
135.0,77.9,135.0
135.5,77.7,135.5
136.0,77.4,136.0
136.5,77.1,136.5
137.0,76.8,137.0
137.5,76.4,137.5
138.0,76.1,138.0
138.5,75.8,138.5
139.0,75.5,139.0
139.5,75.2,139.5
140.0,74.8,140.0
140.5,74.5,140.5
141.0,74.2,141.0
141.5,73.8,141.5
142.0,73.5,142.0
142.5,73.1,142.5
143.0,72.7,143.0
143.5,72.4,143.5
144.0,72.0,144.0
144.5,71.6,144.5
145.0,71.2,145.0
145.5,70.9,145.5
146.0,70.5,146.0
146.5,70.1,146.5
147.0,69.6,147.0
147.5,69.2,147.5
148.0,68.8,148.0
148.5,68.4,148.5
149.0,68.0,149.0
149.5,67.5,149.5
150.0,67.1,150.0
150.5,66.6,150.5
151.0,66.2,151.0
151.5,65.7,151.5
152.0,65.2,152.0
152.5,64.8,152.5
153.0,64.3,153.0
153.5,63.8,153.5
154.0,63.3,154.0
154.5,62.8,154.5
155.0,62.2,155.0
155.5,61.7,155.5
156.0,61.2,156.0
156.5,60.6,156.5
157.0,60.1,157.0
157.5,59.5,157.5
158.0,59.0,158.0
158.5,58.4,158.5
159.0,57.8,159.0
159.5,57.2,159.5
160.0,56.6,160.0
160.5,55.9,160.5
161.0,55.3,161.0
161.5,54.7,161.5
162.0,54.0,162.0
162.5,53.3,162.5
163.0,52.6,163.0
163.5,51.9,163.5
164.0,51.2,164.0
164.5,50.5,164.5
165.0,49.7,165.0
165.5,49.0,165.5
166.0,48.2,166.0
166.5,47.4,166.5
167.0,46.6,167.0
167.5,45.8,167.5
168.0,44.9,168.0
168.5,44.0,168.5
169.0,43.1,169.0
169.5,42.2,169.5
170.0,41.2,170.0
170.5,40.2,170.5
171.0,39.2,171.0
171.5,38.2,171.5
172.0,37.1,172.0
172.5,36.0,172.5
173.0,34.8,173.0
173.5,33.6,173.5
174.0,32.3,174.0
174.5,31.0,174.5
175.0,29.6,175.0
175.5,28.1,175.5
176.0,26.5,176.0
176.5,24.9,176.5
177.0,23.0,177.0
177.5,21.1,177.5
178.0,18.9,178.0
178.5,16.4,178.5
179.0,13.4,179.0
179.5,9.5,179.5
180.0,0.0,180.0
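
For reference, the (x, y) pairs above trace a symmetric arch: y rises from 0 at x = 0 to 90 around x = 90 and falls back to 0 at x = 180, while the third column (theta) simply mirrors x. This is the curve the predicted trajectory is plotted against.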