When trying to create a neural network and optimize it with PyTorch, I get:
ValueError: optimizer got an empty parameter list
Here is the code.
import torch.nn as nn
import torch.nn.functional as F
from os.path import dirname
from os import getcwd
from os.path import realpath
from sys import argv


class NetActor(nn.Module):
    def __init__(self, args, state_vector_size, action_vector_size, hidden_layer_size_list):
        super(NetActor, self).__init__()
        self.args = args
        self.state_vector_size = state_vector_size
        self.action_vector_size = action_vector_size
        self.layer_sizes = hidden_layer_size_list
        self.layer_sizes.append(action_vector_size)
        self.nn_layers = []
        self._create_net()

    def _create_net(self):
        prev_layer_size = self.state_vector_size
        for next_layer_size in self.layer_sizes:
            next_layer = nn.Linear(prev_layer_size, next_layer_size)
            prev_layer_size = next_layer_size
            self.nn_layers.append(next_layer)

    def forward(self, torch_state):
        activations = torch_state
        for …
Tags: python, machine-learning, reinforcement-learning, backpropagation, pytorch
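The error occurs because the layers are stored in a plain Python list, which nn.Module does not track, so model.parameters() has nothing to hand to the optimizer. Below is a minimal sketch of the usual fix, registering the layers with nn.ModuleList; the sizes in the usage lines are illustrative and the original args argument is omitted.

import torch
import torch.nn as nn


class NetActor(nn.Module):
    def __init__(self, state_vector_size, action_vector_size, hidden_layer_size_list):
        super().__init__()
        sizes = [state_vector_size] + list(hidden_layer_size_list) + [action_vector_size]
        # nn.ModuleList registers each layer, so model.parameters() sees their weights
        self.nn_layers = nn.ModuleList(
            nn.Linear(in_size, out_size)
            for in_size, out_size in zip(sizes[:-1], sizes[1:])
        )

    def forward(self, torch_state):
        activations = torch_state
        for i, layer in enumerate(self.nn_layers):
            activations = layer(activations)
            if i < len(self.nn_layers) - 1:  # no activation on the output layer
                activations = torch.relu(activations)
        return activations


# Illustrative sizes; the parameter list is no longer empty
model = NetActor(state_vector_size=4, action_vector_size=2, hidden_layer_size_list=[16, 16])
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)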
Below is example code that uses PyTorch to build a DNN for two regression tasks. The forward
function returns two outputs (x1, x2). What about a network for a large number of regression/classification tasks, for example 100 or 1000 outputs? Hard-coding all the outputs (e.g., x1, x2, ..., x100) is definitely not a good idea. Is there a simple way to do this? Thanks.
import torch
from torch import nn
import torch.nn.functional as F


class mynet(nn.Module):
    def __init__(self):
        super(mynet, self).__init__()
        self.lin1 = nn.Linear(5, 10)
        self.lin2 = nn.Linear(10, 3)
        self.lin3 = nn.Linear(10, 4)

    def forward(self, x):
        x = self.lin1(x)
        x1 = self.lin2(x)
        x2 = self.lin3(x)
        return x1, x2


if __name__ == '__main__':
    x = torch.randn(1000, 5)
    y1 = torch.randn(1000, 3)
    y2 = torch.randn(1000, 4)
    model = mynet()
    optimizer = torch.optim.Adam(model.parameters(), lr=0.001, weight_decay=1e-4)
    for …
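A common way to avoid hard-coding x1, x2, ..., x100 is to build one output head per task in a loop and register them with nn.ModuleList, then return the outputs as a list. Below is a minimal sketch of that idea; MultiHeadNet and out_sizes are illustrative names, with the shared 5-to-10 trunk taken from the code above.

import torch
from torch import nn
import torch.nn.functional as F


class MultiHeadNet(nn.Module):
    """Shared trunk with one linear head per task, built in a loop."""

    def __init__(self, in_size=5, hidden_size=10, out_sizes=(3, 4)):
        super().__init__()
        self.lin1 = nn.Linear(in_size, hidden_size)
        # nn.ModuleList registers every head, however many there are
        self.heads = nn.ModuleList(nn.Linear(hidden_size, s) for s in out_sizes)

    def forward(self, x):
        x = self.lin1(x)
        # One output tensor per head instead of hard-coded x1, x2, ...
        return [head(x) for head in self.heads]


if __name__ == '__main__':
    model = MultiHeadNet(out_sizes=[3] * 100)  # e.g. 100 regression heads of size 3
    optimizer = torch.optim.Adam(model.parameters(), lr=0.001, weight_decay=1e-4)
    x = torch.randn(1000, 5)
    targets = [torch.randn(1000, 3) for _ in range(100)]
    outputs = model(x)
    # Sum the per-task losses into a single scalar before backprop
    loss = sum(F.mse_loss(out, tgt) for out, tgt in zip(outputs, targets))
    loss.backward()
    optimizer.step()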