from __future__ import division
from sklearn import datasets
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import numpy as np
import math

data = datasets.load_iris()
X = data.data[:100, :2]
y = data.target[:100]
X_full = data.data[:100, :]
# Remember to add a column of ones to X, as the first column

setosa = plt.scatter(X[:50, 0], X[:50, 1], c='b')
versicolor = plt.scatter(X[50:, 0], X[50:, 1], c='r')
plt.xlabel("Sepal Length")
plt.ylabel("Sepal Width")
plt.legend((setosa, versicolor), ("Setosa", "Versicolor"))
plt.show()

def logistic_func(theta, x):
    # Fill in the correct value for the sigmoid (logistic) function, not the logarithm
    # If the input x is a vector of samples, the output should be a vector
    return sigmoidvalue

def log_gradient(theta, x, y):
    # Compute the gradient with respect to theta to use in gradient descent updates,
    # without the learning rate. All nfeat elements of theta should be updated.
    return theta_gradient

def cost_func(theta, x, y):
    # Compute the cost function for logistic regression
    return costval

def grad_desc(theta_values, X, y, lr=.01, converge_change=.001):
    # Do gradient descent with learning rate lr and stop when the change in cost is below the limit,
    # i.e. stop if abs(cost(it) - cost(it+1)) < converge_change.
    # Return the resulting theta values, and an array with the cost value for each iteration.
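
# --- A minimal reference sketch of how the stubs above could be completed ---
# This is one possible implementation, not necessarily the intended solution.
# It assumes the standard logistic regression formulation: x already has a
# column of ones prepended (per the note above), theta has nfeat elements
# including the intercept, and y holds 0/1 labels. The *_sketch names are
# hypothetical and used only so they do not clash with the stubs above.

def logistic_func_sketch(theta, x):
    # Sigmoid of the linear combination x @ theta; works for a single sample
    # or a matrix of samples (returns a vector in the latter case).
    return 1.0 / (1.0 + np.exp(-np.dot(x, theta)))

def log_gradient_sketch(theta, x, y):
    # Gradient of the (unscaled) log-loss: X^T (sigmoid(X theta) - y).
    errors = logistic_func_sketch(theta, x) - y
    return np.dot(x.T, errors)

def cost_func_sketch(theta, x, y):
    # Mean cross-entropy cost: -mean(y*log(h) + (1-y)*log(1-h)).
    h = logistic_func_sketch(theta, x)
    return np.mean(-y * np.log(h) - (1 - y) * np.log(1 - h))

def grad_desc_sketch(theta_values, X, y, lr=.01, converge_change=.001):
    # Plain batch gradient descent; stops when the absolute change in cost
    # between two consecutive iterations drops below converge_change.
    cost = cost_func_sketch(theta_values, X, y)
    cost_iter = [cost]
    change = converge_change + 1
    while change > converge_change:
        theta_values = theta_values - lr * log_gradient_sketch(theta_values, X, y)
        new_cost = cost_func_sketch(theta_values, X, y)
        change = abs(cost - new_cost)
        cost = new_cost
        cost_iter.append(cost)
    return theta_values, np.array(cost_iter)

# Example usage of the sketch (assumes a prepended column of ones, as noted above):
# X1 = np.hstack([np.ones((X.shape[0], 1)), X])
# theta0 = np.zeros(X1.shape[1])
# theta_hat, costs = grad_desc_sketch(theta0, X1, y)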