Rosenblatt感知器原始算法,激活函数为hardlims.
import numpy as np


def Perceptron_algorithm():
    """Train a Rosenblatt perceptron (original form) with a hardlims activation.

    Training set T = {(x1, 1), (x2, 1), (x3, -1)} with y in {1, -1}.
    Model: f(x) = hardlims(w·x + b), where hardlims(v) = 1 if v >= 0 else -1.
    Misclassified points trigger the classic stochastic-gradient update
    w <- w + lr*y*x, b <- b + lr*y, until a full pass has no errors.

    Returns:
        tuple: (w, b, updates) — the learned weight vector (np.ndarray),
        the bias (int), and the total number of weight updates performed.
        The result is also printed, preserving the original behavior.
    """
    # Training samples and their labels (y in {1, -1}).
    # NOTE: the original applied .T to these 1-D arrays — a no-op, dropped here.
    samples = [np.array([4, 4]), np.array([4, 5]), np.array([1, 1])]
    labels = [1, 1, -1]

    # Initial weights, bias, learning rate, and update counter.
    w = np.array([0, 0])
    b = 1
    lr = 1
    updates = 0

    while True:
        num_errors = 0
        for x, y in zip(samples, labels):
            # hardlims activation: 1 when w·x + b >= 0, otherwise -1.
            prediction = 1 if np.dot(w, x) + b >= 0 else -1
            # Misclassified point: update w and b toward the correct side.
            if prediction != y:
                # Out-of-place add keeps this correct even for a float lr
                # (in-place += on an int array would raise in that case).
                w = w + lr * y * x
                b += lr * y
                num_errors += 1
                updates += 1
        # Converged: no misclassified points in a complete pass.
        if num_errors == 0:
            print(w, b, updates)
            return w, b, updates


if __name__ == '__main__':
    Perceptron_algorithm()
欢迎分享,转载请注明来源:内存溢出
评论列表(0条)