# ss16:predict — character prediction with a k-NN classifier
# (DokuWiki navigation chrome removed; original page: ss16:predict)
from sklearn import neighbors
from scipy.ndimage.filters import gaussian_filter as gausfilter
from scipy.ndimage.morphology import  grey_dilation
import numpy as np
import center_image as ci
import random
import cPickle as pickle
from scipy import misc
# Load the fitted k-NN classifier from the pickle produced by the training step.
# NOTE: pickle files must be opened in BINARY mode ('rb'); text mode ('r')
# corrupts the stream on platforms that translate line endings.
with open("/[…]/[…]/scikit_learn_data/mldata/datensaetze-cn-knn-fit-bereiningt.p", 'rb') as f:
	u = pickle.Unpickler(f)
	clf = u.load()
# labels is a 2-row lookup table: labels[0] holds the class ids used by the
# classifier, labels[1] the corresponding characters — presumably aligned
# column-wise; verify against the training script.
labels = np.load("/[…]/[…]/scikit_learn_data/mldata/datensaetze-zsm-center-norm-labels-bereinigt.p.npy")
 
def predict(bild):
	"""Classify a character image with the k-NN model `clf`.

	The class probabilities are averaged over the original image and four
	perturbed variants (two Gaussian blurs, two grey dilations) to make the
	prediction more robust against small rendering differences.

	Parameters: bild -- 2-D greyscale image array; assumed 28x28, since the
	feature vectors are flattened to length 784 -- TODO confirm.
	Returns: the predicted character (an entry of labels[1]).
	"""
	# Random blur widths in [0.3, 1.3): mild, randomized smoothing.
	eps1 = random.random() + 0.3
	eps2 = random.random() + 0.3
	# NOTE(review): an all-zero input image gives norm 0 and NaNs here — the
	# caller is assumed to pass a non-blank image.
	bild = bild / np.linalg.norm(bild)
	bild = ci.center_image(bild)
	# ----- generate perturbed variants -----
	bildgaus1 = gausfilter(bild, eps1)
	bildgaus2 = gausfilter(bildgaus1, eps2)
	bilddil1 = grey_dilation(bild, mode='constant', size=(2, 2))
	bilddil2 = grey_dilation(bilddil1, mode='constant', size=(2, 2))
	tests = [bild, bildgaus1, bildgaus2, bilddil1, bilddil2]
	# ----- center, flatten and L2-normalize each variant in one pass -----
	center_norm_tests = []
	for t in tests:
		v = ci.center_image(t).reshape((784,))
		center_norm_tests.append(v / np.linalg.norm(v))
	# ----- average class probabilities over all variants -----
	# builtin sum() replaces the former reduce(lambda x,y: x+y, ...) idiom;
	# predict_proba returns a (1, n_classes) array, so `average` keeps that shape.
	average = sum(clf.predict_proba(t) for t in center_norm_tests) / len(tests)
	print("---------------------------")
	print(average)
	print("---------------------------")
	# ----- map the winning class id back to its character -----
	maximum = np.argmax(average)
	index1 = clf.classes_[maximum]
	index2 = list(labels[0]).index(index1)
	zeichen = labels[1][index2]
	return zeichen
 
 
if __name__ == "__main__":
	# Quick manual check: classify a sample character image from disk.
	testbild = misc.imread("test6.png")
	print(predict(testbild))
# ss16/predict.txt · last modified: 2016/09/15 15:36 by henrikfrenzel