import scipy.io as sio
import numpy as np
import math
import matplotlib.pyplot as plt


def displayData(X):
    # Display a stack of small images on a single padded grid.
    m, npix = X.shape
    example_width = int(round(math.sqrt(npix)))
    example_height = int(npix / example_width)
    display_rows = int(math.floor(math.sqrt(m)))
    display_cols = int(math.ceil(m / display_rows))
    pad = 1
    display_array = np.ones((pad + display_rows * (example_height + pad),
                             pad + display_cols * (example_width + pad)))
    curr_ex = 0
    for j in range(display_rows):
        for i in range(display_cols):
            if curr_ex >= m:
                break
            # Scale each example by its maximum absolute value so the
            # grayscale range is comparable across examples.
            max_val = np.max(np.abs(X[curr_ex, :]))
            row_start = pad + j * (example_height + pad)
            col_start = pad + i * (example_width + pad)
            patch = X[curr_ex, :].reshape((example_height, example_width)).T / max_val
            display_array[row_start:row_start + example_height,
                          col_start:col_start + example_width] = patch
            curr_ex += 1
        if curr_ex >= m:
            break
    plt.imshow(display_array, cmap='gray')
    plt.show()


def sigmoid_func(z):
    # Return the value of the sigmoid function, element-wise.
    return 1.0 / (1.0 + np.exp(-z))


def nnetpredict(Theta1, Theta2, X):
    # Compute the predictions of a 2-layer network parameterized by Theta1 and Theta2.
    # Use sigmoid activations and a one-vs-all classifier for each class.
    # Fill in (a hedged sketch is appended at the end of this file).
    return predictedlabels


def nnCostFunction(Theta1, Theta2, input_layer_size, hidden_layer_size,
                   num_labels, X, y, lval):
    # Compute the (optionally regularized) cost of the network.
    # Fill in (a hedged sketch is appended at the end of this file).
    return J


# Set up the number of nodes.
input_layer_size = 400
hidden_layer_size = 25
num_labels = 10

# Load and visualize the data.
# This file contains the MNIST images of handwritten digits.
datastruct = sio.loadmat('week6data1.mat')
X = datastruct['X']
y = datastruct['y']
nsamp, nfeat = X.shape
#print(np.mean(X[14, :]))
#print(X.shape)

# Visualize 100 random images.
sel = np.random.permutation(nsamp)
sel = sel[:100]
#displayData(X[sel, :])

# Load a set of pretrained weights.
#print(sio.whosmat('week6weights.mat'))
wstruct = sio.loadmat('week6weights.mat')
Theta1 = wstruct['Theta1']
Theta2 = wstruct['Theta2']
#print(Theta1.shape, Theta2.shape)

# Predict the class labels and display the accuracy (it should be around 0.9752).
# When it works, select n images at random, classify them one at a time, and
# display the image, true class, and estimated class.

# Compute the cost function without regularization; your answer should be around 0.2876.

# Include regularization with lambda=1 and compute the cost. It should be around 0.3837.
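
# ---------------------------------------------------------------------------
# Hedged sketch of nnetpredict (not the assignment's official solution): a
# standard feedforward pass that prepends a bias column at each layer, applies
# sigmoid activations, and takes an argmax over the one-vs-all output units.
# The helper name nnetpredict_sketch is introduced here for illustration; it
# assumes the pretrained weights map output unit k to class label k+1 (so
# label 10 stands for the digit 0), as in the original dataset.
# ---------------------------------------------------------------------------
def nnetpredict_sketch(Theta1, Theta2, X):
    m = X.shape[0]
    a1 = np.hstack([np.ones((m, 1)), X])       # add bias unit to the inputs
    a2 = sigmoid_func(a1 @ Theta1.T)           # hidden-layer activations
    a2 = np.hstack([np.ones((m, 1)), a2])      # add bias unit to the hidden layer
    a3 = sigmoid_func(a2 @ Theta2.T)           # output-layer activations, one per class
    return np.argmax(a3, axis=1) + 1           # one-vs-all: pick the largest output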
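
# ---------------------------------------------------------------------------
# Hedged sketch of nnCostFunction (one possible implementation, assuming the
# usual cross-entropy cost with one-hot targets and L2 regularization that
# skips the bias columns). The helper name nnCostFunction_sketch is introduced
# for illustration; labels in y are assumed to run 1..num_labels.
# ---------------------------------------------------------------------------
def nnCostFunction_sketch(Theta1, Theta2, input_layer_size, hidden_layer_size,
                          num_labels, X, y, lval):
    m = X.shape[0]
    # Forward pass (same as nnetpredict_sketch, but keep the output probabilities).
    a1 = np.hstack([np.ones((m, 1)), X])
    a2 = np.hstack([np.ones((m, 1)), sigmoid_func(a1 @ Theta1.T)])
    h = sigmoid_func(a2 @ Theta2.T)            # m x num_labels hypothesis

    # One-hot encode the labels (label k -> column k-1).
    Y = np.zeros((m, num_labels))
    Y[np.arange(m), y.flatten() - 1] = 1

    # Cross-entropy cost, averaged over the examples.
    J = -np.sum(Y * np.log(h) + (1 - Y) * np.log(1 - h)) / m

    # L2 regularization over all weights except the bias columns.
    reg = (lval / (2 * m)) * (np.sum(Theta1[:, 1:] ** 2) + np.sum(Theta2[:, 1:] ** 2))
    return J + reg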
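
# ---------------------------------------------------------------------------
# Hedged usage sketch for the steps described in the comments above, using the
# _sketch helpers defined here (swap in your own nnetpredict / nnCostFunction
# once they are filled in). The expected numbers come from the assignment
# text: accuracy around 0.9752, unregularized cost around 0.2876, and cost
# around 0.3837 with lambda = 1.
# ---------------------------------------------------------------------------
pred = nnetpredict_sketch(Theta1, Theta2, X)
print('Training accuracy:', np.mean(pred == y.flatten()))

# Classify a few random images one at a time and report image, true class,
# and estimated class (uncomment displayData to show each image).
for idx in np.random.permutation(nsamp)[:5]:
    p = nnetpredict_sketch(Theta1, Theta2, X[idx:idx + 1, :])
    print('true class:', y[idx, 0], 'estimated class:', p[0])
    #displayData(X[idx:idx + 1, :])

print('Cost, no regularization:',
      nnCostFunction_sketch(Theta1, Theta2, input_layer_size, hidden_layer_size,
                            num_labels, X, y, 0.0))
print('Cost, lambda = 1:',
      nnCostFunction_sketch(Theta1, Theta2, input_layer_size, hidden_layer_size,
                            num_labels, X, y, 1.0))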