PyTorch error: module 'torch.nn' has no attribute 'ReLu'


I am working in Google Colab, so I assume it is the current version of PyTorch. I tried this:

class Fc(nn.Module):
    def __init__(self):
        super(Fc, self).__init__()
        self.flatt = nn.Flatten()
        self.seq = nn.Sequential(nn.Linear(28*28, 512),
                                 nn.ReLU(),
                                 nn.Linear(512, 512),
                                 nn.ReLu(),
                                 nn.Linear(512, 10), nn.ReLu())


    def forward(x):
        p = self.flatt(x)
        p = self.seq(p)
        return p
m1 = Fc()

and got:

<ipython-input-85-142a1e77b6b6> in <module>()
----> 1 m1 = Fc()

<ipython-input-84-09df3be0b613> in __init__(self)
      4         self.flatt = nn.Flatten()
      5         self.relu = torch.nn.modules.activation.ReLU()
----> 6         self.seq = nn.Sequential(nn.Linear(28*28, 1012), nn.ReLU(),
                nn.Linear(1012, 512), nn.ReLu(), nn.Linear(512, 10), nn.ReLu())

AttributeError: module 'torch.nn' has no attribute 'ReLu'

What am I doing wrong here?


1 Answer

Answered by KarelZe

You have a typo in the casing: the module is called nn.ReLU, not nn.ReLu.

import torch.nn as nn

class Fc(nn.Module):
    def __init__(self):
        super(Fc, self).__init__()
        self.flatt = nn.Flatten()
        self.seq = nn.Sequential(nn.Linear(28*28, 512),
                                 nn.ReLU(),
                                 nn.Linear(512, 512),
                                 nn.ReLU(),    # fixed: ReLU, not ReLu
                                 nn.Linear(512, 10),
                                 nn.ReLU())    # fixed: ReLU, not ReLu


    def forward(self, x):  # forward also needs self as its first argument
        p = self.flatt(x)
        p = self.seq(p)
        return p
m1 = Fc()
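
As a quick sanity check (a minimal sketch; the batch size and random input below are placeholders, not part of the original answer), you can pass a dummy batch through the corrected model:

import torch

x = torch.randn(4, 28, 28)   # dummy batch of four 28x28 inputs
out = m1(x)                  # nn.Flatten() collapses each 28x28 input to a 784-vector
print(out.shape)             # expected: torch.Size([4, 10])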