栏目分类:
子分类:
返回
名师互学网用户登录
快速导航关闭
当前搜索
当前分类
子分类
实用工具
热门搜索
名师互学网 > IT > 软件开发 > 后端开发 > Python

9.24作业

Python 更新时间: 发布时间: IT归档 最新发布 模块sitemap 名妆网 法律咨询 聚返吧 英语巴士网 伯小乐 网商动力

9.24作业

import math

class node:
    """A single network unit.

    Holds the unit's activation (``value``, e.g. the X1/Y1 values of the
    worked example) and the two weights ``W`` leading to the next layer.
    Output-layer units are created with no outgoing weights.
    """

    def __init__(self, w1=None, w2=None):
        # Activation is always reset by forward propagation; start at 0.
        self.value = 0
        # Outgoing weights toward the two units of the next layer.
        self.W = [w1, w2]

class net:
    """A fixed 2-2-2 feed-forward network trained by backpropagation.

    Weights and biases are initialized to the well-known worked-example
    values (inputs 0.05/0.1, targets 0.01/0.99), so the training trace is
    directly comparable with the reference article.

    Bug fixes versus the original listing:
    * ``forwardPropagation`` declared its loop variable as ``onNum`` but the
      body referenced ``oNNum`` -> NameError at runtime.
    * The hidden-layer update in ``backPropagation`` ended a line with a bare
      trailing ``*`` (no open parenthesis / backslash) -> SyntaxError; the
      expression is now wrapped in parentheses.
    All arithmetic keeps the original left-to-right evaluation order so the
    numeric results are unchanged.
    """

    def __init__(self):
        # Initialize all weights/biases to the reference example's values.
        self.inlayer = [node(0.15, 0.25), node(0.20, 0.30)]   # input layer
        self.hidlayer = [node(0.40, 0.50), node(0.45, 0.55)]  # hidden layer
        self.outlayer = [node(), node()]                      # output layer

        self.yita = 0.5   # learning rate (eta)
        self.k1 = 0.35    # bias feeding the hidden layer
        self.k2 = 0.60    # bias feeding the output layer
        self.Tg = [0, 0]  # training targets, set by backPropagation
        self.O = [0, 0]   # actual network outputs, set by forwardPropagation

    def sigmoid(self, z):
        """Logistic activation function."""
        return 1 / (1 + math.exp(-z))

    def getLoss(self):
        """Return half the sum of squared errors over both outputs."""
        return ((self.O[0] - self.Tg[0]) ** 2 + (self.O[1] - self.Tg[1]) ** 2) / 2

    def forwardPropagation(self, input1, input2):
        """Run a forward pass, storing activations and final outputs in self.O."""
        self.inlayer[0].value = input1
        self.inlayer[1].value = input2

        # Hidden-layer activations.
        for hNNum in range(2):
            z = 0
            for iNNum in range(2):
                z += self.inlayer[iNNum].value * self.inlayer[iNNum].W[hNNum]
            z += self.k1  # hidden-layer bias
            self.hidlayer[hNNum].value = self.sigmoid(z)

        # Output-layer activations (fixed: loop variable was onNum, body used oNNum).
        for oNNum in range(2):
            z = 0
            for hNNum in range(2):
                z += self.hidlayer[hNNum].value * self.hidlayer[hNNum].W[oNNum]
            z += self.k2  # output-layer bias
            self.outlayer[oNNum].value = self.sigmoid(z)
            self.O[oNNum] = self.sigmoid(z)

    def backPropagation(self, T1, T2):
        """One gradient-descent step toward targets (T1, T2).

        Input->hidden weights are updated first, while the hidden->output
        weights still hold their pre-update values, matching the reference
        derivation. Call forwardPropagation first so self.O is current.
        """
        self.Tg[0] = T1
        self.Tg[1] = T2

        # Update input->hidden weights using the chain rule through both outputs.
        for iNNum in range(2):
            for wnum in range(2):
                y = self.hidlayer[wnum].value
                self.inlayer[iNNum].W[wnum] -= self.yita * (
                    (self.O[0] - self.Tg[0]) * self.O[0] * (1 - self.O[0]) *
                    self.hidlayer[wnum].W[0] +
                    (self.O[1] - self.Tg[1]) * self.O[1] * (1 - self.O[1]) *
                    self.hidlayer[wnum].W[1]
                ) * y * (1 - y) * self.inlayer[iNNum].value

        # Update hidden->output weights (fixed: original line ended with a bare
        # trailing '*' and no continuation -> SyntaxError).
        for hNNum in range(2):
            for wnum in range(2):
                self.hidlayer[hNNum].W[wnum] -= (self.yita *
                    (self.O[wnum] - self.Tg[wnum]) * self.O[wnum] *
                    (1 - self.O[wnum]) * self.hidlayer[hNNum].value)

    def printresual(self):
        """Print the current loss and both network outputs."""
        loss = self.getLoss()
        print("loss", loss)
        print("输出1", self.O[0])
        print("输出2", self.O[1])

# Train on the single fixed sample (0.05, 0.1) -> (0.01, 0.99) for 20000
# iterations, reporting loss and outputs every 1000 steps.
mnet = net()
for step in range(20000):
    mnet.forwardPropagation(0.05, 0.1)
    mnet.backPropagation(0.01, 0.99)
    if step % 1000 == 0:
        mnet.printresual()
loss 0.2983711087600027
输出1 0.7513650695523157
输出2 0.7729284653214625
loss 0.001114349453733746
输出1 0.04405288511514458
输出2 0.9573029065083415
loss 0.00044486770391326887
输出1 0.03128488237823188
输出2 0.9691028903919376
loss 0.00025152360241099405
输出1 0.025941904529987098
输出2 0.974223343675583
loss 0.00016427727363225536
输出1 0.02285619446850588
输出2 0.9772221750265364
loss 0.00011622569943793622
输出1 0.020798621232395936
输出2 0.9792370460209513
loss 8.653979465934606e-05
输出1 0.019308574394168124
输出2 0.9807032246414722
loss 6.676957838285888e-05
输出1 0.018169936884899422
输出2 0.981827406283041
loss 5.288555201099971e-05
输出1 0.017266299924310786
输出2 0.9827218141386771
loss 4.2742284255545085e-05
输出1 0.016528758992967827
输出2 0.9834532546618314
loss 3.510187782978859e-05
输出1 0.01591362044355068
输出2 0.9840642735146238
loss 2.920486860247276e-05
输出1 0.0153916803305579
输出2 0.9845834032439159
loss 2.4562178659193972e-05
输出1 0.014942590687018614
输出2 0.985030578039753
loss 2.0846236182453837e-05
输出1 0.014551687923710659
输出2 0.9854201954834235
loss 1.7830396487788443e-05
输出1 0.014208105883921584
输出2 0.9857629446728557
loss 1.5353500839028535e-05
输出1 0.013903601106673661
输出2 0.9860669477402362
loss 1.3298252530083327e-05
输出1 0.013631793197913696
输出2 0.9863384998664817
loss 1.1577544311191025e-05
输出1 0.013387657534692458
输出2 0.9865825645507159
loss 1.0125544006057146e-05
输出1 0.013167176954570812
输出2 0.9868031143044285
loss 8.891725240699747e-06
输出1 0.01296709676445429
输出2 0.9870033706816217

进程已结束,退出代码0

转载请注明:文章转载自 www.mshxw.com
本文地址:https://www.mshxw.com/it/268895.html
我们一直用心在做
关于我们 文章归档 网站地图 联系我们

版权所有 (c)2021-2022 MSHXW.COM

ICP备案号:晋ICP备2021003244-6号