# HG changeset patch
# User Jordi Gutiérrez Hermoso
# Date 1321039829 18000
# Node ID 8e8089d5a55b38f7dd43191499c8f7253d0a8fdb
# Parent  55430128adcdf2936e686b0f0e584085592f4b27
Implement randInitializeWeights

diff --git a/randInitializeWeights.m b/randInitializeWeights.m
--- a/randInitializeWeights.m
+++ b/randInitializeWeights.m
@@ -1,32 +1,16 @@
 function W = randInitializeWeights(L_in, L_out)
-%RANDINITIALIZEWEIGHTS Randomly initialize the weights of a layer with L_in
-%incoming connections and L_out outgoing connections
-% W = RANDINITIALIZEWEIGHTS(L_in, L_out) randomly initializes the weights
-% of a layer with L_in incoming connections and L_out outgoing
-% connections.
-%
-% Note that W should be set to a matrix of size(L_out, 1 + L_in) as
-% the first row of W handles the "bias" terms
-%
-
-% You need to return the following variables correctly
-W = zeros(L_out, 1 + L_in);
+  ##RANDINITIALIZEWEIGHTS Randomly initialize the weights of a layer with L_in
+  ##incoming connections and L_out outgoing connections
+  ## W = RANDINITIALIZEWEIGHTS(L_in, L_out) randomly initializes the weights
+  ## of a layer with L_in incoming connections and L_out outgoing
+  ## connections.
+  ##
+  ## Note that W should be set to a matrix of size(L_out, 1 + L_in) as
+  ## the first row of W handles the "bias" terms
+  ##
 
-% ====================== YOUR CODE HERE ======================
-% Instructions: Initialize W randomly so that we break the symmetry while
-%               training the neural network.
-%
-% Note: The first row of W corresponds to the parameters for the bias units
-%
-
-
+  ## Randomly initialize the weights to small values
+  epsilon_init = 0.12;
+  W = rand(L_out, 1 + L_in) * 2 * epsilon_init - epsilon_init;
 
-
-
-
-
-
-
-% =========================================================================
-
-end
+endfunction
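
A minimal usage sketch (not part of the changeset): the layer sizes below are hypothetical, chosen only to show the call and the shape of the returned matrix. Each entry of W is drawn uniformly from [-epsilon_init, epsilon_init], which breaks the symmetry between units before training.

    % Hypothetical layer sizes, for illustration only
    input_layer_size  = 400;
    hidden_layer_size = 25;

    % W has size (L_out, 1 + L_in); the extra column holds the bias weights,
    % and every entry lies in [-0.12, 0.12] given epsilon_init = 0.12.
    Theta1 = randInitializeWeights(input_layer_size, hidden_layer_size);
    disp(size(Theta1))   % prints: 25   401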