- from functools import reduce
def relu(x):
    """ReLU activation function.

    :param x: input value
    :return: ``x`` if ``x`` is positive, otherwise ``0``
    """
    # NOTE(review): the original body returned `1 if x > 0 else 0` (a step /
    # Heaviside function) and its docstring called it a step function, which
    # contradicts the name `relu`. Fixed to the standard ReLU; confirm no
    # caller depended on the step behavior.
    return x if x > 0 else 0
def gradient_check(network, x, y):
    """Numerically verify the analytic gradients of a network.

    For every connection, compares the gradient reported by the network
    (via back-propagation) against a central-difference estimate obtained
    by perturbing the connection weight by +/- epsilon.

    :param network: network object exposing ``get_gradient(x, y)``,
        ``predict(x)`` and an iterable ``connections.connections``
    :param x: feature vector of one sample
    :param y: label vector of one sample
    :return: None; the two gradients are printed for manual comparison
    """

    def network_error(vec1, vec2):
        # 0.5 * sum of squared element-wise differences (quadratic loss).
        return 0.5 * sum((a - b) * (a - b) for a, b in zip(vec1, vec2))

    # Populate each connection's analytic gradient for this sample.
    network.get_gradient(x, y)

    epsilon = 0.0001  # perturbation size for the finite difference
    for conn in network.connections.connections:
        actual_gradient = conn.get_gradient()

        # Central difference: evaluate the loss at weight +/- epsilon.
        conn.weight += epsilon
        error1 = network_error(network.predict(x), y)

        conn.weight -= 2 * epsilon
        error2 = network_error(network.predict(x), y)

        # BUGFIX: restore the original weight. The original code left every
        # checked connection displaced by -epsilon, corrupting the network
        # and all subsequent per-connection checks.
        conn.weight += epsilon

        # NOTE(review): (error2 - error1) keeps the original sign convention,
        # which presumably matches what conn.get_gradient() stores (gradient
        # as the weight-update direction) — confirm against the network code.
        expected_gradient = (error2 - error1) / (2 * epsilon)

        print('expected gradient: \t%f\nactual gradient: \t%f' % (expected_gradient, actual_gradient))