BP Neural Network Implementation

A complete Python implementation of iris data processing with a BP (backpropagation) network.

#coding:utf-8
from __future__ import division
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from sklearn.metrics import roc_auc_score
from sklearn.metrics import confusion_matrix

################ Define the activation function ################
def sigmoid_activation(x, theta):
    x = np.asarray(x)
    theta = np.asarray(theta)
    return 1 / (1 + np.exp(-np.dot(theta.T, x)))

# Bundling the model's functions into a class is good programming practice
class NNet3:
    # Initialize the necessary parameters
    def __init__(self, learning_rate=0.5, maxepochs=1e4, convergence_thres=1e-5, hidden_layer=4):
        self.learning_rate = learning_rate
        self.maxepochs = int(maxepochs)
        # bug fix: the original hard-coded 1e-5 here, ignoring the argument
        self.convergence_thres = convergence_thres
        self.hidden_layer = int(hidden_layer)

    # Compute the overall error (cross-entropy cost)
    def _multiplecost(self, X, y):
        # l1 is the hidden-layer output, l2 the output-layer result
        l1, l2 = self._feedforward(X)
        # compute the error; l2 plays the role of the hypothesis h
        inner = y * np.log(l2) + (1 - y) * np.log(1 - l2)
        # negate so the cost is positive
        return -np.mean(inner)

    # Forward pass: compute each layer's output
    def _feedforward(self, X):
        # l1 is the hidden-layer output
        l1 = sigmoid_activation(X.T, self.theta0).T
        # add a constant (bias) column to the hidden layer
        l1 = np.column_stack([np.ones(l1.shape[0]), l1])
        # the hidden-layer output feeds the output layer, producing l2
        l2 = sigmoid_activation(l1.T, self.theta1)
        return l1, l2

    # Given a sample with unknown label, return its probability of belonging to class 1
    def predict(self, X):
        _, y = self._feedforward(X)
        return y

    # Learn the parameters: iterate until they converge and the error is minimized
    def learn(self, X, y):
        nobs, ncols = X.shape
        self.theta0 = np.random.normal(0, 0.01, size=(ncols, self.hidden_layer))
        self.theta1 = np.random.normal(0, 0.01, size=(self.hidden_layer + 1, 1))
        self.costs = []
        cost = self._multiplecost(X, y)
        self.costs.append(cost)
        costprev = cost + self.convergence_thres + 1
        for counter in range(self.maxepochs):
            # compute the hidden- and output-layer activations
            l1, l2 = self._feedforward(X)
            # first compute the output-layer gradient, then backpropagate it to
            # the hidden layer (the original listing is cut off at this point;
            # the rest of the loop is the standard batch gradient update)
            l2_delta = (y - l2) * l2 * (1 - l2)
            l1_delta = l2_delta.T.dot(self.theta1.T) * l1 * (1 - l1)
            # gradient step, averaged over all observations
            # (the bias column of l1_delta is dropped for the theta0 update)
            self.theta1 += l1.T.dot(l2_delta.T) / nobs * self.learning_rate
            self.theta0 += X.T.dot(l1_delta)[:, 1:] / nobs * self.learning_rate
            # record the cost and stop once the improvement falls below the threshold
            costprev = cost
            cost = self._multiplecost(X, y)
            self.costs.append(cost)
            if np.abs(costprev - cost) < self.convergence_thres:
                break
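The page is cut off before the driver code that actually loads the iris data and uses the imports above (pandas, matplotlib, roc_auc_score, confusion_matrix). Below is a minimal usage sketch, not from the original document, continuing from the listing above: it assumes a binary problem built from the versicolor and virginica classes via sklearn's load_iris (the original more likely read a CSV with pandas), an illustrative 70/30 split, and a bias column prepended to the inputs.

# Usage sketch (assumptions labeled; not part of the original listing)
from sklearn.datasets import load_iris

iris = load_iris()
# keep classes 1 and 2 (versicolor / virginica) to get a binary problem
mask = iris.target > 0
X = iris.data[mask]
y = (iris.target[mask] == 2).astype(float)  # virginica -> 1, versicolor -> 0

# prepend a bias column, matching the constant column added in _feedforward
X = np.column_stack([np.ones(X.shape[0]), X])

# shuffle and split 70/30 (illustrative, not from the original)
rng = np.random.RandomState(0)
idx = rng.permutation(X.shape[0])
train, test = idx[:70], idx[70:]

model = NNet3(learning_rate=0.5, maxepochs=10000, convergence_thres=1e-5, hidden_layer=4)
model.learn(X[train], y[train])

# predicted probability of class 1 for each test sample
probs = model.predict(X[test])[0]
print("AUC:", roc_auc_score(y[test], probs))
print(confusion_matrix(y[test], (probs >= 0.5).astype(int)))

# the recorded costs visualize how training converges
plt.plot(model.costs)
plt.xlabel("epoch")
plt.ylabel("cost")
plt.show()

With the 0.5 learning rate and small random initialization this setup typically converges within a few thousand epochs; scaling the features first would speed it up further.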
