今天,我写了一段 LSTM 时序预测的代码,但没有达到应有的效果。
import torch
import torch.nn as nn
import numpy as np

# Fix the random seed so results are reproducible.
torch.manual_seed(42)


class ComplexLSTMModel(nn.Module):
    """Two-layer LSTM regressor: maps a 1-D sequence to one prediction per step.

    Args:
        input_size: number of features per time step (1 for a scalar series).
        hidden_size: width of the LSTM hidden state.
        output_size: number of features per predicted step (1 here).
    """

    def __init__(self, input_size, hidden_size, output_size):
        super().__init__()
        self.hidden_size = hidden_size
        # Two stacked LSTM layers, sequence-first layout (seq_len, batch, feat).
        self.lstm = nn.LSTM(input_size, hidden_size, num_layers=2)
        self.fc = nn.Linear(hidden_size, output_size)

    def forward(self, seq):
        """Return one prediction per input step, shape (seq_len,).

        BUG FIX: the original returned only ``output[-1]`` (shape ``(1,)``).
        During training that single value was broadcast against ALL labels in
        ``MSELoss``, so the network was never fit to the sequence — the root
        cause of the poor results.
        """
        seq_len = len(seq)
        lstm_out, _ = self.lstm(seq.view(seq_len, 1, -1))
        return self.fc(lstm_out.view(seq_len, -1)).squeeze(-1)


def main():
    """Train on a sine wave, forecast 100 steps ahead, and plot the result."""
    input_size, hidden_size, output_size = 1, 8, 1
    lr = 0.01
    num_epochs = 100

    # Toy data: a sine wave sampled at 0.1 intervals (1000 points).
    data = np.sin(np.arange(0, 100, 0.1))

    model = ComplexLSTMModel(input_size, hidden_size, output_size)
    criterion = nn.MSELoss()
    optimizer = torch.optim.Adam(model.parameters(), lr=lr)

    # Teacher-forced training: predict data[t + 1] from data[t] for every t,
    # so outputs and labels have the same shape (no silent broadcasting).
    inputs = torch.tensor(data[:-1], dtype=torch.float32)
    labels = torch.tensor(data[1:], dtype=torch.float32)
    for epoch in range(num_epochs):
        optimizer.zero_grad()
        outputs = model(inputs)  # shape (len(data) - 1,), matches labels
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()
        if (epoch + 1) % 10 == 0:
            print(f'Epoch [{epoch+1}/{num_epochs}], Loss: {loss.item():.4f}')

    # Autoregressive forecast. Keep a rolling context window so the LSTM has
    # history: the original fed one point per call, which resets the hidden
    # state every step and destroys the prediction.
    future = 100
    window = 50
    context = list(data[-window:])
    pred_data = [data[-1]]
    with torch.no_grad():
        for _ in range(future):
            seq = torch.tensor(context[-window:], dtype=torch.float32)
            next_val = model(seq)[-1].item()  # last step = next-value forecast
            pred_data.append(next_val)
            context.append(next_val)

    # Plot only when run as a script (matplotlib is not needed at import time).
    import matplotlib.pyplot as plt
    plt.plot(data, label='Original data')
    plt.plot(np.arange(len(data) - 1, len(data) + future), pred_data,
             label='Predictions')
    plt.legend()
    plt.show()


if __name__ == '__main__':
    main()
结果如图:
没有达到想象中的效果。
标签:hidden,每日,torch,笔记,input,LSTM,data,self,size From: https://www.cnblogs.com/kingkaixuan/p/18024004