# HG changeset patch # User Jordi Gutiérrez Hermoso # Date 1321051832 18000 # Node ID a1d62b2fd48d2c706889cf37b1f70f2e4076835f # Parent 8e8089d5a55b38f7dd43191499c8f7253d0a8fdb Comment out the pauses diff --git a/ex4.m b/ex4.m --- a/ex4.m +++ b/ex4.m @@ -41,8 +41,8 @@ displayData(X(sel, :)); -fprintf('Program paused. Press enter to continue.\n'); -pause; +% fprintf('Program paused. Press enter to continue.\n'); +% pause; %% ================ Part 2: Loading Pameters ================ @@ -80,8 +80,8 @@ fprintf(['Cost at parameters (loaded from ex4weights): %f '... '\n(this value should be about 0.287629)\n'], J); -fprintf('\nProgram paused. Press enter to continue.\n'); -pause; +% fprintf('\nProgram paused. Press enter to continue.\n'); +% pause; %% =============== Part 4: Implement Regularization =============== % Once your cost function implementation is correct, you should now @@ -99,8 +99,8 @@ fprintf(['Cost at parameters (loaded from ex4weights): %f '... '\n(this value should be about 0.383770)\n'], J); -fprintf('Program paused. Press enter to continue.\n'); -pause; +% fprintf('Program paused. Press enter to continue.\n'); +% pause; %% ================ Part 5: Sigmoid Gradient ================ @@ -116,8 +116,8 @@ fprintf('%f ', g); fprintf('\n\n'); -fprintf('Program paused. Press enter to continue.\n'); -pause; +% fprintf('Program paused. Press enter to continue.\n'); +% pause; %% ================ Part 6: Initializing Pameters ================ @@ -146,8 +146,8 @@ % Check gradients by running checkNNGradients checkNNGradients; -fprintf('\nProgram paused. Press enter to continue.\n'); -pause; +% fprintf('\nProgram paused. Press enter to continue.\n'); +% pause; %% =============== Part 8: Implement Regularization =============== @@ -168,8 +168,8 @@ fprintf(['\n\nCost at (fixed) debugging parameters (w/ lambda = 10): %f ' ... '\n(this value should be about 0.576051)\n\n'], debug_J); -fprintf('Program paused. 
Press enter to continue.\n'); -pause; +% fprintf('Program paused. Press enter to continue.\n'); +% pause; %% =================== Part 8: Training NN =================== @@ -205,8 +205,8 @@ Theta2 = reshape(nn_params((1 + (hidden_layer_size * (input_layer_size + 1))):end), ... num_labels, (hidden_layer_size + 1)); -fprintf('Program paused. Press enter to continue.\n'); -pause; +% fprintf('Program paused. Press enter to continue.\n'); +% pause; %% ================= Part 9: Visualize Weights ================= @@ -218,8 +218,8 @@ displayData(Theta1(:, 2:end)); -fprintf('\nProgram paused. Press enter to continue.\n'); -pause; +% fprintf('\nProgram paused. Press enter to continue.\n'); +% pause; %% ================= Part 10: Implement Predict ================= % After training the neural network, we would like to use it to predict