This article walks through a logistic regression algorithm implemented in Python. It is shared for reference; the details are as follows:

Using Python to Implement the Logistic Regression Algorithm
A beginner's logistic regression, written up to record the learning process.
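The training step in the code below is plain batch gradient descent on the logistic loss. Restated as a formula (this is implicit in the code rather than stated in the original), each of the num iterations updates the weight vector β as

$$\beta \leftarrow \beta - \frac{\alpha}{m}\,X^{\top}\bigl(\sigma(X\beta) - y\bigr),\qquad \sigma(z)=\frac{1}{1+e^{-z}},$$

where X is the m×n training matrix, α is the learning rate, and σ is the sigmoid function defined first in the code.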
Code:
#encoding:utf-8
"""
Author: njulpy
Version: 1.0
Date: 2018/04/10
Project: Using Python to Implement LogisticRegression Algorithm
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
# Define the sigmoid function
def sigmoid(x):
    x = x.astype(float)
    return 1. / (1 + np.exp(-x))
# Train the model with batch gradient descent
def train(x_train, y_train, num, alpha, m, n):
    beta = np.ones(n)
    for i in range(num):
        h = sigmoid(np.dot(x_train, beta))            # predicted values
        error = h - y_train.T                         # residual between predictions and labels
        delt = alpha * (np.dot(error, x_train)) / m   # gradient step for the parameters
        beta = beta - delt
        # print('error', error)
    return beta
def predict(x_test, beta):
    # Scores in the middle band keep the default label 0.5
    y_predict = np.zeros(len(x_test)) + 0.5
    s = sigmoid(np.dot(beta, x_test.T))
    y_predict[s < 0.34] = 0
    y_predict[s > 0.67] = 1
    return y_predict
def accuracy(y_predict, y_test):
    # One minus the mean absolute error between predicted and true labels
    acc = 1 - np.sum(np.absolute(y_predict - y_test)) / len(y_test)
    return acc
if __name__ == "__main__":
    data = pd.read_csv('iris.csv')
    x = data.iloc[:, 1:5]        # columns 1-4 are the four features
    y = data.iloc[:, 5].copy()   # column 5 is the species label
    # Encode the three species as evenly spaced values in [0, 1]
    y.loc[y == 'setosa'] = 0
    y.loc[y == 'versicolor'] = 0.5
    y.loc[y == 'virginica'] = 1
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=15)
    m, n = np.shape(x_train)
    alpha = 0.01
    beta = train(x_train, y_train, 1000, alpha, m, n)
    pre = predict(x_test, beta)
    # Plot predicted vs. true labels over the test samples
    t = np.arange(len(x_test))
    plt.figure()
    p1 = plt.plot(t, pre)
    p2 = plt.plot(t, y_test, label='test')
    label = ['prediction', 'true']
    plt.legend(label, loc=1)
    plt.show()
    acc = accuracy(pre, y_test)
    print('The predicted value is ', pre)
    print('The true value is ', np.array(y_test))
    print('The accuracy rate is ', acc)
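Note that mapping the three iris species onto 0, 0.5, and 1 turns the three-class problem into a single regression with two hand-picked thresholds (0.34 and 0.67) rather than a true multi-class classifier. As a sanity check (not part of the original article), a minimal sketch comparing against scikit-learn's built-in LogisticRegression might look like this, assuming scikit-learn is installed and using its bundled iris dataset in place of iris.csv:

# Hypothetical cross-check: fit sklearn's multi-class LogisticRegression
# on the same 70/30 split and report its test accuracy for comparison.
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

iris = load_iris()
x_tr, x_te, y_tr, y_te = train_test_split(
    iris.data, iris.target, test_size=0.3, random_state=15)

clf = LogisticRegression(max_iter=1000)  # handles the 3 classes directly
clf.fit(x_tr, y_tr)
print('sklearn test accuracy:', clf.score(x_te, y_te))

Because it models the three classes directly instead of thresholding a single score, its accuracy gives a useful upper reference for the hand-rolled version above.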