One-Layer Softmax Classifier: Handwriting Recognition
Import the needed libraries¶
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import re
from glob import glob
Begin: load the data¶
def load_data(train_path='train/', test_path='test/'):
    # file names are assumed to look like 'num3.png'; the digit is the label
    pattern = re.compile(r'num(\d)\.png')
    train_list = glob(train_path + '*.png')
    train_id = np.array([float(pattern.search(img_name).groups()[0]) for img_name in train_list])
    train_data = np.concatenate([np.array(Image.open(img_name)).reshape(1, 784) for img_name in train_list], axis=0).astype(np.float64)
    test_list = glob(test_path + '*.png')
    test_id = np.array([float(pattern.search(img_name).groups()[0]) for img_name in test_list])
    test_data = np.concatenate([np.array(Image.open(img_name)).reshape(1, 784) for img_name in test_list], axis=0).astype(np.float64)
    return train_id, train_data, test_id, test_data
Load the data and print its shape¶
train_id,train_data,test_id,test_data=load_data()
train_id.shape,train_data.shape,test_id.shape,test_data.shape
# one-hot encode the labels: row i has a 1 in column train_id[i]
train_val = np.zeros((train_id.shape[0], 10))
for i in range(train_id.shape[0]):
    train_val[i, train_id[i].astype('int')] = 1
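As an aside, the same one-hot matrix can be built in one vectorized step; a minimal sketch (train_val_vec is a hypothetical name, not part of the original code):¶
# equivalent vectorized construction: row k of np.eye(10) is the one-hot vector for class k
train_val_vec = np.eye(10)[train_id.astype(int)]
assert np.array_equal(train_val_vec, train_val)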
Split the data into minibatches¶
mini_batch_num=100
mini_batch_size=600
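These sizes assume the standard 60,000-image MNIST training set (100 batches of 600 images each); a quick sanity check under that assumption:¶
# holds only if train/ really contains 60,000 images
assert mini_batch_num * mini_batch_size == train_data.shape[0]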
Define the functions needed: softmax, propagation, back-propagation¶
def softmax(x):
    x = x - np.max(x)    # shift for numerical stability, using softmax(x) = softmax(x + c)
    exp_x = np.exp(x)
    softmax_x = exp_x / np.sum(exp_x)
    return softmax_x
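A quick sanity check (a sketch): the outputs sum to 1, and a constant shift of the logits leaves them unchanged.¶
z = np.array([1.0, 2.0, 3.0])
print(softmax(z).sum())                         # 1.0
print(np.allclose(softmax(z), softmax(z + 5)))  # True: shift invariance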
Use cross-entropy to compute the loss; this is part of the forward propagation¶
def propa(train_x, train_y, W, b):    # forward propagation for one sample
    yt = softmax(np.dot(train_x, W) + b)    # predicted class probabilities
    loss = -np.sum(train_y * np.log(yt))    # cross-entropy loss
    dy = yt - train_y                       # gradient of the loss w.r.t. the logits
    return dy, loss
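The gradient above is the standard softmax-with-cross-entropy result. With logits $z = xW + b$ and $\hat{y} = \mathrm{softmax}(z)$:¶
$$L = -\sum_{k} y_k \log \hat{y}_k, \qquad \frac{\partial L}{\partial z} = \hat{y} - y, \qquad \frac{\partial L}{\partial W} = x^{\top}(\hat{y} - y).$$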
Update W and b¶
def back_propa(train_data, train_id, W, b, alpha, data_size):    # one SGD pass over the batch
    for i in range(data_size):
        dy, loss = propa(train_data[i, :], train_id[i, :], W, b)
        dy = dy.reshape(1, 10)
        p = train_data[i, :].reshape(784, 1)
        W -= alpha * np.dot(p, dy)    # gradient step for the weights
        b -= alpha * dy.ravel()       # gradient step for the bias
    return W, b, loss                 # note: loss is from the last sample only
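For reference, the same update can be done for a whole minibatch in one matrix step; a minimal sketch (back_propa_batch is a hypothetical helper, not part of the original code):¶
def back_propa_batch(X, Y, W, b, alpha):
    Z = X.dot(W) + b                          # (n, 10) logits
    Z = Z - Z.max(axis=1, keepdims=True)      # row-wise shift for stability
    P = np.exp(Z)
    P = P / P.sum(axis=1, keepdims=True)      # row-wise softmax
    dZ = P - Y                                # gradient w.r.t. the logits
    W = W - alpha * X.T.dot(dZ) / X.shape[0]  # averaged gradient step
    b = b - alpha * dZ.mean(axis=0)
    loss = -np.sum(Y * np.log(P + 1e-12)) / X.shape[0]
    return W, b, loss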
Initialize W and b¶
W = np.zeros((784, 10))
b = np.zeros(10)    # one bias per class; a single scalar bias would cancel inside softmax
Loop and update; also print the accuracy on the training set¶
for i in range(mini_batch_num):
    lb = mini_batch_size * i          # lower bound of this minibatch
    ub = mini_batch_size * (i + 1)    # upper bound
    mini_batch_data = train_data[lb:ub, :]
    mini_batch_id = train_val[lb:ub, :]
    for iteration in range(20):       # 20 SGD passes over the same minibatch
        W, b, loss = back_propa(mini_batch_data, mini_batch_id, W, b, 0.01, mini_batch_size)
    count = 0
    for j in range(600):              # quick proxy: accuracy on the first 600 training samples
        if np.argmax(softmax(train_data[j, :].dot(W) + b)) == train_id[j].astype('int'):
            count += 1
    acc = count / 600
    if i % 10 == 0:
        print('batch={}, acc={}'.format(i + 1, acc))
Predict on the test dataset¶
count = 0    # reset the counter; the training loop above left it nonzero
for j in range(test_id.shape[0]):
    if np.argmax(softmax(test_data[j, :].dot(W) + b)) == test_id[j].astype('int'):
        count += 1
acc = count / test_id.shape[0]
print(acc)
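Since softmax is monotone, the argmax of the raw logits already gives the prediction, so the whole evaluation can also be done in one vectorized step; a sketch equivalent to the loop above:¶
# vectorized test accuracy
pred = np.argmax(test_data.dot(W) + b, axis=1)
print(np.mean(pred == test_id.astype(int)))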