# -*- coding: utf-8 -*-
"""
Neural network
"""

# Import modules
from typing import List

import numpy


class NeuralNetwork:
    """
    Neural network
    """

    # Activation functions and their derivative functions
    FUNCTIONS = {
        "relu": {
            "activate": lambda x: numpy.maximum(0, x),
            "derivative": lambda x: numpy.where(x > 0, 1, 0),
        },
        "linear": {
            "activate": lambda x: x,
            "derivative": lambda x: numpy.ones_like(x),
        },  # suitable as the output layer for regression tasks
        "softmax": {
            # keepdims=True keeps the per-row sum broadcastable against a 2-D batch
            "activate": lambda x: numpy.exp(x)
            / numpy.sum(numpy.exp(x), axis=1, keepdims=True),
            # element-wise (Jacobian diagonal) form, applied to the activated outputs
            "derivative": lambda x: x * (1 - x),
        },  # suitable as the output layer for classification tasks
    }

    def __init__(
        self,
        hidden_layer_neurons: List[int] = [10],
        hidden_layer_function: str = "relu",
        output_layer_function: str = "softmax",
    ):
        """
        Initialize the network

        :param hidden_layer_neurons: number of neurons in each hidden layer
        :param hidden_layer_function: hidden-layer activation function
        :param output_layer_function: output-layer activation function
        """
        # Check that the requested functions are defined
        if not (
            hidden_layer_function in self.FUNCTIONS
            and output_layer_function in self.FUNCTIONS
        ):
            raise RuntimeError(
                "The specified hidden-layer or output-layer function is not defined"
            )

        # Bind the hidden layer's activation function and its derivative
        self.hidden_layer_activate, self.hidden_layer_derivative = (
            self.FUNCTIONS[hidden_layer_function]["activate"],
            self.FUNCTIONS[hidden_layer_function]["derivative"],
        )
        # Bind the output layer's activation function and its derivative
        self.output_layer_activate, self.output_layer_derivative = (
            self.FUNCTIONS[output_layer_function]["activate"],
            self.FUNCTIONS[output_layer_function]["derivative"],
        )