From b7f4ea6505a0d907c606a794c79a52dc95349ccd Mon Sep 17 00:00:00 2001 From: Eren Kotar <79402527+erenkotar@users.noreply.github.com> Date: Tue, 18 Jun 2024 22:04:54 +0300 Subject: [PATCH] readable text representation --- micrograd/nn.py | 23 +++++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/micrograd/nn.py b/micrograd/nn.py index 30d5d777..e90b0eb3 100644 --- a/micrograd/nn.py +++ b/micrograd/nn.py @@ -26,6 +26,13 @@ def parameters(self): def __repr__(self): return f"{'ReLU' if self.nonlin else 'Linear'}Neuron({len(self.w)})" + + def __str__(self): + fstr = f"{'ReLU' if self.nonlin else 'Linear'}Neuron({len(self.w)}) -> " + for idx, wi in enumerate(self.w): + fstr += f"w{idx} ={wi.data:7.4f}, " + fstr += f"b ={self.b.data:7.4f}" + return fstr class Layer(Module): @@ -40,7 +47,13 @@ def parameters(self): return [p for n in self.neurons for p in n.parameters()] def __repr__(self): - return f"Layer of [{', '.join(str(n) for n in self.neurons)}]" + return f"Layer of [{', '.join(str(n.__repr__()) for n in self.neurons)}]" + + def __str__(self): + fstr = f"Shape of the layer is: {len(self.neurons[0].w)} X {len(self.neurons)} (nin X nout)\n" + for idx, neuron in enumerate(self.neurons): + fstr += f" Neuron {idx+1}: {neuron.__str__()} \n" + return fstr class MLP(Module): @@ -57,4 +70,10 @@ def parameters(self): return [p for layer in self.layers for p in layer.parameters()] def __repr__(self): - return f"MLP of [{', '.join(str(layer) for layer in self.layers)}]" + return f"MLP of [{', '.join(str(layer.__repr__()) for layer in self.layers)}]" + + def __str__(self): + fstr = "Multi-Layer Perceptron Structure:\n" + for idx, layer in enumerate(self.layers): + fstr += f" Layer {idx+1}/{len(self.layers)} - {layer}\n" + return fstr