Adding neurons to layers is now more robust
parent 67d94efc39
commit cf75abf75c
2 changed files with 10 additions and 5 deletions
Network.java

@@ -27,12 +27,17 @@ public class Network {
      */
     public void addNeuron(int layer, int n) {
         if (!(this.layers.get(layer) instanceof FCLayer)) {
-            System.out.println("This layer is not a de.lluni.javann.layers.BlankLayer");
-        } else if (!(this.layers.get(layer + 2) instanceof FCLayer)) {
-            System.out.println("The next layer is not a de.lluni.javann.layers.BlankLayer");
+            System.out.println("This layer is not a FCLayer");
         }
+
         ((FCLayer) this.layers.get(layer)).addNeuron(n);
-        ((FCLayer) this.layers.get(layer + 2)).updateInputSize(n);
+
+        for (int i = layer + 1; i < this.layers.size(); i++) {
+            if (this.getLayers().get(i) instanceof FCLayer) {
+                ((FCLayer) this.layers.get(i)).updateInputSize(n);
+                break;
+            }
+        }
     }
 
     /**
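The Network.java change drops the hard-coded assumption that the next fully connected layer sits exactly at index layer + 2 and instead scans forward for the next FCLayer before updating its input size. Below is a minimal, self-contained sketch of why that matters; the Layer, ActivationLayer and FCLayer stand-ins and their fields are simplified assumptions for illustration, not the library's actual classes.

// Illustrative sketch only (not part of the commit): minimal stand-ins for the
// library's layer classes, used to show why scanning forward for the next
// FCLayer is more robust than indexing layer + 2.
import java.util.ArrayList;
import java.util.List;

class Layer {}

class ActivationLayer extends Layer {}

class FCLayer extends Layer {
    int inputSize;
    int neurons;
    FCLayer(int inputSize, int neurons) { this.inputSize = inputSize; this.neurons = neurons; }
    void addNeuron(int n)       { neurons += n; }
    void updateInputSize(int n) { inputSize += n; }
}

public class AddNeuronSketch {
    public static void main(String[] args) {
        List<Layer> layers = new ArrayList<>();
        layers.add(new FCLayer(4, 8));      // index 0: layer that receives the new neurons
        layers.add(new ActivationLayer());  // index 1
        layers.add(new ActivationLayer());  // index 2: breaks the "layer + 2" assumption
        layers.add(new FCLayer(8, 3));      // index 3: must grow its input size

        int layer = 0;
        int n = 2;
        ((FCLayer) layers.get(layer)).addNeuron(n);

        // New behaviour from the commit: walk forward until the next FCLayer is found.
        for (int i = layer + 1; i < layers.size(); i++) {
            if (layers.get(i) instanceof FCLayer) {
                ((FCLayer) layers.get(i)).updateInputSize(n);
                break;
            }
        }

        // Prints 10: the downstream FCLayer now expects 8 + 2 inputs.
        System.out.println(((FCLayer) layers.get(3)).inputSize);
    }
}

With the old layer + 2 lookup, this example would have cast the second ActivationLayer to FCLayer (or, at best, only printed the warning), so the downstream layer's input size would never have grown to match the added neurons; the forward scan finds the correct layer no matter how many non-FC layers sit in between.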
FCLayer.java

@@ -46,7 +46,7 @@ public class FCLayer extends Layer {
         }
         for (int i = 0; i < newWeights.getNumElements(); i++) {
             if (newWeights.get(i) == 0) {
-                newWeights.set(i, random.nextDouble(-1, 1));
+                newWeights.set(i, 0.1d * random.nextGaussian(0, 1));
             }
         }
         this.weights = newWeights;
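In FCLayer.java, the zero entries of the grown weight matrix are now filled with 0.1d * nextGaussian(0, 1) instead of nextDouble(-1, 1): small, zero-centred Gaussian values rather than uniform values across (-1, 1), presumably so the freshly added connections start near zero and perturb the already trained weights as little as possible. The sketch below shows both variants; it uses a plain double[] in place of the library's matrix type so it runs standalone, and both range-taking Random methods require Java 17 or newer.

// Illustrative sketch only (not part of the commit): fills the zero entries of a
// freshly grown weight array, mirroring the 0.1 scale factor from the diff.
import java.util.Arrays;
import java.util.Random;

public class WeightInitSketch {
    public static void main(String[] args) {
        Random random = new Random(42);
        double[] newWeights = new double[6]; // entries created by the resize start at 0.0

        for (int i = 0; i < newWeights.length; i++) {
            if (newWeights[i] == 0) {
                // Old initialisation: uniform in (-1, 1).
                // newWeights[i] = random.nextDouble(-1, 1);
                // New initialisation: standard Gaussian scaled down to std dev 0.1.
                newWeights[i] = 0.1d * random.nextGaussian(0, 1);
            }
        }

        System.out.println(Arrays.toString(newWeights)); // values clustered near 0
    }
}

Scaling a standard Gaussian by 0.1 is the same as drawing from a normal distribution with standard deviation 0.1, so almost all new weights land within roughly ±0.3 rather than anywhere in (-1, 1).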