Skip to content

Commit

Permalink
Version 0.9.2. Fixes to stl code tags
Browse files Browse the repository at this point in the history
  • Loading branch information
amaas committed Sep 25, 2013
1 parent a3816ee commit ada8026
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 27 deletions.
13 changes: 3 additions & 10 deletions stl/feedfowardRICA.m
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,7 @@
filter = zeros(8,8); % You should replace this
% Form W, obtain the feature (filterDim x filterDim) needed during the
% convolution
% ---- YOUR CODE HERE ----
filter = squeeze(W(:,:,filterNum));
% ------------------------
%%% YOUR CODE HERE %%%

% Flip the feature matrix because of the definition of convolution, as explained later
filter = rot90(squeeze(filter),2);
Expand All @@ -56,16 +54,11 @@
resp = zeros(convDim, convDim); % You should replace this
% Convolve "filter" with "im" to find "resp"
% be sure to do a 'valid' convolution
% ---- YOUR CODE HERE ----
resp = conv2(im,filter,'valid');
% ------------------------
%%% YOUR CODE HERE %%%
% Then, apply square-square-root pooling on "resp" to get the hidden
% activation "act"
act = zeros(convDim / poolDim, convDim / poolDim); % You should replace this
% ---- YOUR CODE HERE ---%
act=conv2(resp.^2,poolMat,'valid');
act = sqrt(act(1:poolDim:end,1:poolDim:end)+params.epsilon);
% ------------------------
%%% YOUR CODE HERE %%%
features(:, :, filterNum, imageNum) = act;
end
end
Expand Down
21 changes: 4 additions & 17 deletions stl/stlExercise.m
Original file line number Diff line number Diff line change
Expand Up @@ -85,11 +85,8 @@
% Find opttheta by running the RICA on all the training patches.
% You will need to whiten the patches with the zca2 function
% then call minFunc with the softICACost function as seen in the RICA exercise.
%% ----------------- YOUR CODE HERE ----------------------
[patches,V,E,D] = zca2(patches);
% optimize
[opttheta, cost, exitflag] = minFunc( @(theta) softICACost(theta, patches, params), randTheta, options);
%% -------------------------------------------------------
%%% YOUR CODE HERE %%%

% reshape visualize weights
W = reshape(opttheta, params.numFeatures, params.n);
display_network(W');
Expand Down Expand Up @@ -136,24 +133,14 @@
options.MaxIter = 300;

% optimize
%% ----------------- YOUR CODE HERE ----------------------
[opttheta_softmax, cost, exitflag] = minFunc( @(theta) softmax_regression_vec(theta, trainFeatures, trainLabels), randTheta2, options);

%% -----------------------------------------------------
%%% YOUR CODE HERE %%%


%%======================================================================
%% STEP 5: Testing
% Compute Predictions on train and test sets using softmaxPredict
% and softmaxModel
%% ----------------- YOUR CODE HERE ----------------------
W_softmax = reshape(opttheta_softmax, featureSize, numClasses);
train_score = trainFeatures'*W_softmax;
[~,train_pred] = max(train_score, [], 2);
score = testFeatures'*W_softmax;
[~,pred] = max(score, [], 2);
%% -----------------------------------------------------

%%% YOUR CODE HERE %%%
% Classification Score
fprintf('Train Accuracy: %f%%\n', 100*mean(train_pred(:) == trainLabels(:)));
fprintf('Test Accuracy: %f%%\n', 100*mean(pred(:) == testLabels(:)));
Expand Down

0 comments on commit ada8026

Please sign in to comment.