Ray Tune: dynamic layer sizes for a dynamic number of layers

I am trying to specify a dynamic number of neurons for each layer in a dynamic number of layers. My code is:

    def __init__(self, numberoflayers=2, layer_size=[20, 50], num_classes=2):
        # call constructor from superclass
        super().__init__()
        # numberoflayers is implied by len(layer_size); kept for the Tune config
        self.layers = nn.ModuleList()
        # define network layers: the input has 3 features,
        # then one Linear + ReLU block per entry in layer_size
        input_size = 3
        for size in layer_size:
            self.layers.append(nn.Linear(input_size, size))
            self.layers.append(nn.ReLU())
            input_size = size
        # output layer (num_classes replaces the old len(y.unique()))
        self.layers.append(nn.Linear(layer_size[-1], num_classes))
        self.layers.append(nn.Sigmoid())

    def forward(self, input_data):
        # define forward pass: apply each module in order
        for layer in self.layers:
            input_data = layer(input_data)
        return input_data
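
For context, this is roughly how I build the model inside the Tune trainable; the class name `Net`, the trainable name `train_model`, and the config key `layer_size` are just placeholders for my actual code:

    def train_model(config):
        # build the model from the sampled hyperparameters;
        # "layer_size" should be a list with one width per hidden layer
        model = Net(
            numberoflayers=config["numberoflayers"],
            layer_size=config["layer_size"],
        )
        # ... training loop using config["lr"] and config["batch_size"] ...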

and I defined my variables:

    config = {
        "numberoflayers": tune.choice([2, 3, 5, 5]),
        # what I want is one "sizeof" entry per layer, roughly:
        #   for i in range(spec.config.numberoflayers + 1):
        #       "sizeof_{i}": tune.choice([2 ** i for i in range(9)])
        # but a loop like this is not valid inside a dict literal
        "sizeof": tune.choice([2 ** i for i in range(9)]),
        "lr": tune.loguniform(1e-4, 1e-1),
        "batch_size": tune.choice([2, 4, 8, 16])
    }
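
One way this might be expressed, if I understand `tune.sample_from` correctly, is to sample the whole list of layer widths in one lambda that reads the already-resolved `numberoflayers`. The key name `layer_size` (matching the model argument) is my own choice, and this is an untested sketch:

    import random
    from ray import tune

    config = {
        # number of hidden layers, sampled first
        "numberoflayers": tune.sample_from(
            lambda spec: random.choice([2, 3, 5, 5])),
        # one width per hidden layer, length taken from the resolved numberoflayers
        "layer_size": tune.sample_from(
            lambda spec: [random.choice([2 ** i for i in range(9)])
                          for _ in range(spec.config.numberoflayers)]),
        "lr": tune.loguniform(1e-4, 1e-1),
        "batch_size": tune.choice([2, 4, 8, 16]),
    }

The trainable above would then read `config["layer_size"]` directly, so the model gets both the number of layers and each layer's width from a single sampled list.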

I'm expecting to get a different number of neurons for each layer.
