Implementing handwritten digit recognition with a neural network in Python
本文實例為大家分享了python實現(xiàn)手寫數(shù)字識別的具體代碼,供大家參考,具體內(nèi)容如下
import numpy
import scipy.special
#import matplotlib.pyplot

class neuralNetwork:
    def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes
        self.lr = learningrate
        # weight matrices, drawn from a normal distribution scaled by 1/sqrt(fan-in)
        self.wih = numpy.random.normal(0.0, pow(self.hnodes, -0.5), (self.hnodes, self.inodes))
        self.who = numpy.random.normal(0.0, pow(self.onodes, -0.5), (self.onodes, self.hnodes))
        # sigmoid activation
        self.activation_function = lambda x: scipy.special.expit(x)

    def train(self, inputs_list, targets_list):
        inputs = numpy.array(inputs_list, ndmin=2).T
        targets = numpy.array(targets_list, ndmin=2).T
        # forward pass
        hidden_inputs = numpy.dot(self.wih, inputs)
        hidden_outputs = self.activation_function(hidden_inputs)
        final_inputs = numpy.dot(self.who, hidden_outputs)
        final_outputs = self.activation_function(final_inputs)
        # backpropagate the errors and update both weight matrices
        output_errors = targets - final_outputs
        hidden_errors = numpy.dot(self.who.T, output_errors)
        self.who += self.lr * numpy.dot((output_errors * final_outputs * (1.0 - final_outputs)), numpy.transpose(hidden_outputs))
        self.wih += self.lr * numpy.dot((hidden_errors * hidden_outputs * (1.0 - hidden_outputs)), numpy.transpose(inputs))

    def query(self, inputs_list):
        inputs = numpy.array(inputs_list, ndmin=2).T
        hidden_inputs = numpy.dot(self.wih, inputs)
        hidden_outputs = self.activation_function(hidden_inputs)
        final_inputs = numpy.dot(self.who, hidden_outputs)
        final_outputs = self.activation_function(final_inputs)
        return final_outputs

input_nodes = 784
hidden_nodes = 100
output_nodes = 10
learning_rate = 0.1
n = neuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate)

# load the MNIST training set (CSV format)
training_data_file = open(r"C:\Users\lsy\Desktop\nn\mnist_train.csv", "r")
training_data_list = training_data_file.readlines()
training_data_file.close()
#print(n.wih)
#print("")

epochs = 2
for e in range(epochs):
    for record in training_data_list:
        all_values = record.split(",")
        # scale pixel values from 0..255 into 0.01..1.00
        inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01
        targets = numpy.zeros(output_nodes) + 0.01
        targets[int(all_values[0])] = 0.99
        n.train(inputs, targets)
#print(n.wih)
#print(len(training_data_list))
#for i in training_data_list:
#    print(i)

# load the MNIST test set and score the network
test_data_file = open(r"C:\Users\lsy\Desktop\nn\mnist_test.csv", "r")
test_data_list = test_data_file.readlines()
test_data_file.close()

scorecard = []
for record in test_data_list:
    all_values = record.split(",")
    correct_label = int(all_values[0])
    inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01
    outputs = n.query(inputs)
    label = numpy.argmax(outputs)
    if label == correct_label:
        scorecard.append(1)
    else:
        scorecard.append(0)

scorecard_array = numpy.asarray(scorecard)
print(scorecard_array)
print("")
# overall accuracy on the test set
print(scorecard_array.sum() / scorecard_array.size)

#all_value=test_data_list[0].split(",")
#input=(numpy.asfarray(all_value[1:])/255.0*0.99)+0.01
#print(all_value[0])
#image_array=numpy.asfarray(all_value[1:]).reshape((28,28))
#matplotlib.pyplot.imshow(image_array,cmap="Greys",interpolation="None")
#matplotlib.pyplot.show()
#nn=n.query((numpy.asfarray(all_value[1:])/255.0*0.99)+0.01)
#for i in nn :
#    print(i)
This is the code from 《python神经网络编程》, recorded here only so it can be reused later.
image_file_name = r"*.JPG"
# note: scipy.misc.imread was removed from newer SciPy releases and needs an
# older SciPy together with Pillow; flatten=True loads the image as greyscale
img_array = scipy.misc.imread(image_file_name, flatten=True)
# the training data uses inverted grey values, so subtract each pixel from 255
img_data = 255.0 - img_array.reshape(784)
image_data = (img_data / 255.0 * 0.99) + 0.01
This reads the image into its pixel values. Because the grey values in the training set are the inverse of a real image, each pixel is subtracted from 255 to invert it.
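Since scipy.misc.imread is no longer available in current SciPy releases, the same loading step can be reproduced with Pillow. The following is only a minimal sketch under that assumption; the file name my_digit.png and the resize to 28x28 are my own illustrative additions, not part of the original code, and n is the trained network from the listing above.

# A minimal sketch of the same preprocessing with Pillow instead of scipy.misc.imread.
# Assumes Pillow is installed; "my_digit.png" is a hypothetical file name.
import numpy
from PIL import Image

image_file_name = "my_digit.png"                 # hypothetical input image
img = Image.open(image_file_name).convert("L")   # greyscale, similar to flatten=True
img = img.resize((28, 28))                       # the network expects 28x28 = 784 pixels
img_array = numpy.array(img, dtype=float)

img_data = 255.0 - img_array.reshape(784)        # invert: the training data uses inverted grey values
image_data = (img_data / 255.0 * 0.99) + 0.01    # scale into 0.01..1.00

outputs = n.query(image_data)                    # n is the trained network from above
print(numpy.argmax(outputs))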
import numpy
import scipy.special
#import matplotlib.pyplot
import scipy.misc
from PIL import Image

class neuralNetwork:
    def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes
        self.lr = learningrate
        self.wih = numpy.random.normal(0.0, pow(self.hnodes, -0.5), (self.hnodes, self.inodes))
        self.who = numpy.random.normal(0.0, pow(self.onodes, -0.5), (self.onodes, self.hnodes))
        self.activation_function = lambda x: scipy.special.expit(x)

    def train(self, inputs_list, targets_list):
        inputs = numpy.array(inputs_list, ndmin=2).T
        targets = numpy.array(targets_list, ndmin=2).T
        hidden_inputs = numpy.dot(self.wih, inputs)
        hidden_outputs = self.activation_function(hidden_inputs)
        final_inputs = numpy.dot(self.who, hidden_outputs)
        final_outputs = self.activation_function(final_inputs)
        output_errors = targets - final_outputs
        hidden_errors = numpy.dot(self.who.T, output_errors)
        self.who += self.lr * numpy.dot((output_errors * final_outputs * (1.0 - final_outputs)), numpy.transpose(hidden_outputs))
        self.wih += self.lr * numpy.dot((hidden_errors * hidden_outputs * (1.0 - hidden_outputs)), numpy.transpose(inputs))

    def query(self, inputs_list):
        inputs = numpy.array(inputs_list, ndmin=2).T
        hidden_inputs = numpy.dot(self.wih, inputs)
        hidden_outputs = self.activation_function(hidden_inputs)
        final_inputs = numpy.dot(self.who, hidden_outputs)
        final_outputs = self.activation_function(final_inputs)
        return final_outputs

input_nodes = 784
hidden_nodes = 100
output_nodes = 10
learning_rate = 0.1
n = neuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate)

training_data_file = open(r"C:\Users\lsy\Desktop\nn\mnist_train.csv", "r")
training_data_list = training_data_file.readlines()
training_data_file.close()
#print(n.wih)
#print("")

# train with a single pass over the data (the two-epoch loop is commented out here)
#epochs=2
#for e in range(epochs):
for record in training_data_list:
    all_values = record.split(",")
    inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01
    targets = numpy.zeros(output_nodes) + 0.01
    targets[int(all_values[0])] = 0.99
    n.train(inputs, targets)

#image_file_name=r"C:\Users\lsy\Desktop\nn\1000-1.JPG"
'''
img_array=scipy.misc.imread(image_file_name,flatten=True)
img_data=255.0-img_array.reshape(784)
image_data=(img_data/255.0*0.99)+0.01
#inputs=(numpy.asfarray(image_data)/255.0*0.99)+0.01
outputs=n.query(image_data)
label=numpy.argmax(outputs)
print(label)
'''
#print(n.wih)
#print(len(training_data_list))
#for i in training_data_list:
#    print(i)

test_data_file = open(r"C:\Users\lsy\Desktop\nn\mnist_test.csv", "r")
test_data_list = test_data_file.readlines()
test_data_file.close()

scorecard = []
total = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]      # number of test samples for each digit 0-9
rightsum = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]   # number of correct answers for each digit 0-9
for record in test_data_list:
    all_values = record.split(",")
    correct_label = int(all_values[0])
    inputs = (numpy.asfarray(all_values[1:]) / 255.0 * 0.99) + 0.01
    outputs = n.query(inputs)
    label = numpy.argmax(outputs)
    total[correct_label] += 1
    if label == correct_label:
        scorecard.append(1)
        rightsum[correct_label] += 1
    else:
        scorecard.append(0)

scorecard_array = numpy.asarray(scorecard)
print(scorecard_array)
print("")
# overall accuracy
print(scorecard_array.sum() / scorecard_array.size)
print("")
print(total)
print(rightsum)
# per-digit accuracy
for i in range(10):
    print((rightsum[i] * 1.0) / total[i])

#all_value=test_data_list[0].split(",")
#input=(numpy.asfarray(all_value[1:])/255.0*0.99)+0.01
#print(all_value[0])
#image_array=numpy.asfarray(all_value[1:]).reshape((28,28))
#matplotlib.pyplot.imshow(image_array,cmap="Greys",interpolation="None")
#matplotlib.pyplot.show()
#nn=n.query((numpy.asfarray(all_value[1:])/255.0*0.99)+0.01)
#for i in nn :
#    print(i)
I also tried to count, for each digit, the number of test samples and the recognition accuracy.
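The same per-digit statistics could also be computed more compactly with numpy.bincount once the true and predicted labels have been collected during the test loop. This is only a sketch; the example arrays below are made up for illustration.

# Hypothetical sketch: per-digit totals and accuracy from two label arrays
# gathered during the test loop (true_labels, predicted_labels).
import numpy

true_labels = numpy.array([3, 3, 9, 9, 9])       # made-up example data
predicted_labels = numpy.array([3, 5, 9, 9, 4])  # made-up example data

total = numpy.bincount(true_labels, minlength=10)
correct = numpy.bincount(true_labels[true_labels == predicted_labels], minlength=10)
# avoid dividing by zero for digits that never appear (their accuracy shows as 0)
per_digit_accuracy = correct / numpy.maximum(total, 1)
print(total)
print(correct)
print(per_digit_accuracy)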
I originally wanted to verify whether the blurred result the book gets for the digit '9' in its backward query is caused by too few training samples for that digit or by a higher error rate on it, but the final statistics do not support this guess.
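For reference, the backward query the book describes runs the network in reverse: the sigmoid is inverted with the logit function and the signal is pushed back through the transposed weight matrices, rescaling at each layer. The sketch below is my own rough reconstruction of that idea for the neuralNetwork class above, not the book's exact code.

# Rough sketch of a backward query for the neuralNetwork class above (not the book's exact code).
import numpy
import scipy.special

def backquery(self, targets_list):
    final_outputs = numpy.array(targets_list, ndmin=2).T
    final_inputs = scipy.special.logit(final_outputs)      # invert the output sigmoid

    hidden_outputs = numpy.dot(self.who.T, final_inputs)    # back through output->hidden weights
    # rescale into the valid sigmoid output range 0.01..0.99
    hidden_outputs -= numpy.min(hidden_outputs)
    hidden_outputs /= numpy.max(hidden_outputs)
    hidden_outputs = hidden_outputs * 0.98 + 0.01

    hidden_inputs = scipy.special.logit(hidden_outputs)     # invert the hidden sigmoid
    inputs = numpy.dot(self.wih.T, hidden_inputs)            # back through hidden->input weights
    inputs -= numpy.min(inputs)
    inputs /= numpy.max(inputs)
    inputs = inputs * 0.98 + 0.01
    return inputs                                            # 784 values, an "idealised" input image

# hypothetical usage: ask what the trained network n "imagines" for the digit 9
targets = numpy.zeros(10) + 0.01
targets[9] = 0.99
image = backquery(n, targets).reshape((28, 28))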
Also, the code in the book can only learn from images in this specific pixel format (28x28 greyscale); genuinely handwritten photos do not meet the training conditions, so its practical usefulness still needs to be improved when I have time later.
That is all for this article. I hope it helps with your study, and I hope you will continue to support 脚本之家.