Introduction to Computational Analysis

Pay Notebook Creator: Roy Hyunjin Han
Set Container: Numerical CPU with TINY Memory for 10 Minutes
In [1]:
# Third-party imports: pylab (matplotlib's MATLAB-style interface) and scikit-learn.
import itertools
import pylab as pl
import random
from sklearn import datasets, feature_selection, linear_model, neighbors, svm
# Load scikit-learn's built-in handwritten-digit dataset; `digits` exposes
# `.images` (2-D pixel arrays), `.data` (flattened), and `.target` (labels).
digits = datasets.load_digits()

Train a classifier to recognize handwritten digits

Examine dataset.

In [2]:
# Look at the first image as an array
In [3]:
In [4]:
# Look at five random images and their corresponding labels
def draw_samples(images, labels):
    for index, (image, label) in enumerate(itertools.izip(images, labels)):
        pl.subplot(1, len(images), index + 1)
        pl.imshow(image,, interpolation='nearest')
        pl.title('%s' % label)

indices = random.sample(xrange(len(digits.images)), 5)
In [5]:
# Flatten every 2-D image into a 1-D vector, treating each pixel as a feature.
# Equivalent to: data = [image.ravel() for image in digits.images],
# but reshaping the whole stack at once is more efficient; -1 tells
# reshape() to infer the flattened length automatically.
data = digits.images.reshape(len(digits.images), -1)

Train a supervised learning model and test its performance on an image it hasn't seen before.

In [6]:
sampleCount = len(data)
imageShape = digits.images[0].shape
# Train on the first half of the data
trainingData = data[:sampleCount / 2]
trainingLabels =[:sampleCount / 2]
# Test on five random images from the second half of the data
testData = random.sample(data, 5)

def train_and_test(model):, trainingLabels)
    predictedLabels = model.predict(testData)
    draw_samples([x.reshape(imageShape) for x in testData], [int(x) for x in predictedLabels])
In [7]:

Try different supervised learning models.

In [8]:
In [9]:

Discover informative features

Pick a supervised learning model.

In [10]:
# Support-vector classifier used as the estimator for feature ranking below.
# NOTE(review): `gamma` is a kernel coefficient for rbf/poly/sigmoid kernels
# and is ignored when kernel='linear' -- likely a leftover; confirm intent.
model = svm.SVC(kernel='linear', gamma=0.001)

Pick a feature selection algorithm.

In [11]:
# Rank every pixel by recursive feature elimination: repeatedly fit the
# estimator and drop the least informative feature (step=1) until one remains.
featureSelector = feature_selection.RFE(
    estimator=model, n_features_to_select=1, step=1)
# NOTE(review): the original line ended in a stray comma (binding a tuple)
# and never fitted the selector, so `.ranking_` would not exist; the fit
# call has been restored.,
# Reshape the flat per-pixel ranking back into the image's 2-D layout.
featureRanking = featureSelector.ranking_.reshape(digits.images[0].shape)

Color the pixels that are most informative.

In [12]:
# NOTE(review): the statement that renders the ranking (presumably
# pl.matshow(featureRanking) plus a colorbar, as in the scikit-learn RFE
# example) appears to have been lost in export; only the title call remains.
pl.title('Pixel ranking by\nrecursive feature elimination')