神经网络和深度学习理论基础

举报
今天吃什么 发表于 2022/10/10 19:33:03 2022/10/10
【摘要】 1.激活函数非线性 2.深层网络比浅层网络拥有更强的表达能力 3.三层神经网络可以表示任意函数(足够隐藏单元)
import numpy as np
import math
import matplotlib.pyplot as plt
def act(x, deriv=False):
    """Sigmoid activation 1/(1+e^-x).

    When deriv=True, `x` must already be a sigmoid OUTPUT a = act(z);
    the returned value is then the derivative a*(1-a).
    """
    if deriv:
        return x * (1 - x)
    return 1 / (1 + np.exp(-x))


# Training set: 200 samples of y = sin(x) on [0, pi], as column vectors.
x = np.linspace(0, math.pi, 200).reshape(200, 1)
y = np.sin(x)

# Layer widths 1 -> 100 -> 60 -> 1; weights drawn uniformly from [-1, 1).
num = 100
w1 = 2 * np.random.random((1, num)) - 1
w2 = 2 * np.random.random((num, 60)) - 1
w3 = 2 * np.random.random((60, 1)) - 1

def feedfoward(x):
    """Forward pass of the 1-100-60-1 sigmoid network (reads globals w1..w3).

    Returns every layer's activation, input included: (a0, a1, a2, a3),
    where a3 is the network output.
    """
    a0 = x
    a1 = act(a0 @ w1)
    a2 = act(a1 @ w2)
    a3 = act(a2 @ w3)
    return a0, a1, a2, a3
# Full-batch gradient descent on the 1-100-60-1 sigmoid network.
n_epochs = 1000000
for i in range(n_epochs):
    a0,a1,a2,a3= feedfoward(x)
    # Output-layer error term; act(..., deriv=True) expects activated values.
    l3_delta = (a3 - y)*act(a3, deriv=True)
    # Backpropagate through w3 then w2 (computed before the weights change).
    l2_delta = l3_delta.dot(w3.T) * act(a2,deriv=True)
    l1_delta = l2_delta.dot(w2.T) * act(a1,deriv=True)
    # Gradient-descent updates. NOTE(review): w3 uses learning rate 0.01
    # while w2/w1 use 0.1 — looks unintentional; confirm.
    w3 = w3 - a2.T.dot(l3_delta) * 0.01
    w2 = w2 - a1.T.dot(l2_delta)*0.1
    w1 = w1 - a0.T.dot(l1_delta)*0.1
    if(i % 10000) ==0:
        # Mean absolute error, logged every 10k iterations.
        loss =np.mean(np.abs(y - a3))
        print("epochs %d/%d loss = %f" % (i/1e4+1, n_epochs/1e4, loss))

# Final forward pass and comparison plot against the true sin curve.
a0,a1,a2 ,a3= feedfoward(x)
plt.plot(x,y,'b',label='y=sin(x)')
plt.plot(x,a3,'r',label='sin(x) curve fitted by neural network')
plt.legend()
plt.show()
import numpy as np
import math
import matplotlib.pyplot as plt
def act(x, deriv=False):
    """Sigmoid; with deriv=True, `x` is an activated value a and a*(1-a) is returned."""
    return x * (1 - x) if deriv else 1 / (1 + np.exp(-x))

# Training set: 100 samples of y = sin(x) on [0, pi], as column vectors.
x = np.linspace(0, math.pi, 100).reshape(100, 1)
y = np.sin(x)

# Layer widths 1 -> 100 -> 60 -> 1; weights drawn uniformly from [-1, 1).
num = 100
w1 = 2 * np.random.random((1, num)) - 1
w2 = 2 * np.random.random((num, 60)) - 1
w3 = 2 * np.random.random((60, 1)) - 1

def feedfoward(x):
    """Forward pass of the 3-weight sigmoid network (reads globals w1..w3).

    Returns (a0, a1, a2, a3): input plus each layer's activation; a3 is the output.
    """
    activations = [x]
    for w in (w1, w2, w3):
        activations.append(act(np.dot(activations[-1], w)))
    return tuple(activations)
# Full-batch gradient descent on the 1-100-60-1 sigmoid network (100 samples).
n_epochs = 1000000
for i in range(n_epochs):
    a0,a1,a2,a3= feedfoward(x)
    # Output-layer error term; act(..., deriv=True) expects activated values.
    l3_delta = (a3 - y)*act(a3, deriv=True)
    # Backpropagate through w3 then w2 (computed before the weights change).
    l2_delta = l3_delta.dot(w3.T) * act(a2,deriv=True)
    l1_delta = l2_delta.dot(w2.T) * act(a1,deriv=True)
    # NOTE(review): w3 uses learning rate 0.01 while w2/w1 use 0.1 — confirm intent.
    w3 = w3 - a2.T.dot(l3_delta) * 0.01
    w2 = w2 - a1.T.dot(l2_delta)*0.1
    w1 = w1 - a0.T.dot(l1_delta)*0.1
    if(i % 10000) ==0:
        # Mean absolute error, logged every 10k iterations.
        loss =np.mean(np.abs(y - a3))
        print("epochs %d/%d loss = %f" % (i/1e4+1, n_epochs/1e4, loss))

# Final forward pass and comparison plot against the true sin curve.
a0,a1,a2 ,a3= feedfoward(x)
plt.plot(x,y,'b',label='y=sin(x)')
plt.plot(x,a3,'r',label='sin(x) curve fitted by neural network')
plt.legend()
plt.show()
import numpy as np
import math
import matplotlib.pyplot as plt
def act(x, deriv=False):
    """Sigmoid activation; deriv=True computes the derivative from an activation."""
    if not deriv:
        return 1 / (1 + np.exp(-x))
    # Here x is a = sigmoid(z), so da/dz = a * (1 - a).
    return x * (1 - x)


# Training set: 50 samples of y = sin(x) on [0, pi], as column vectors.
x = np.linspace(0, math.pi, 50).reshape(50, 1)
y = np.sin(x)

# Layer widths 1 -> 100 -> 60 -> 1; weights drawn uniformly from [-1, 1).
num = 100
w1 = 2 * np.random.random((1, num)) - 1
w2 = 2 * np.random.random((num, 60)) - 1
w3 = 2 * np.random.random((60, 1)) - 1

def feedfoward(x):
    """Forward pass of the 3-weight sigmoid network (reads globals w1..w3).

    Returns (a0, a1, a2, a3); a3 is the network output.
    """
    a0 = x
    a1 = act(a0 @ w1)
    a2 = act(a1 @ w2)
    a3 = act(a2 @ w3)
    return a0, a1, a2, a3
# Full-batch gradient descent on the 1-100-60-1 sigmoid network (50 samples).
n_epochs = 1000000
for i in range(n_epochs):
    a0,a1,a2,a3= feedfoward(x)
    # Output-layer error term; act(..., deriv=True) expects activated values.
    l3_delta = (a3 - y)*act(a3, deriv=True)
    # Backpropagate through w3 then w2 (computed before the weights change).
    l2_delta = l3_delta.dot(w3.T) * act(a2,deriv=True)
    l1_delta = l2_delta.dot(w2.T) * act(a1,deriv=True)
    # NOTE(review): w3 uses learning rate 0.01 while w2/w1 use 0.1 — confirm intent.
    w3 = w3 - a2.T.dot(l3_delta) * 0.01
    w2 = w2 - a1.T.dot(l2_delta)*0.1
    w1 = w1 - a0.T.dot(l1_delta)*0.1
    if(i % 10000) ==0:
        # Mean absolute error, logged every 10k iterations.
        loss =np.mean(np.abs(y - a3))
        print("epochs %d/%d loss = %f" % (i/1e4+1, n_epochs/1e4, loss))

# Final forward pass and comparison plot against the true sin curve.
a0,a1,a2 ,a3= feedfoward(x)
plt.plot(x,y,'b',label='y=sin(x)')
plt.plot(x,a3,'r',label='sin(x) curve fitted by neural network')
plt.legend()
plt.show()
import numpy as np
import math
import matplotlib.pyplot as plt
def act(x, deriv=False):
    """Logistic sigmoid.

    deriv=True treats `x` as an already-activated value a and returns a*(1-a).
    """
    if deriv:
        return x * (1 - x)
    return 1 / (1 + np.exp(-x))


# Training set: 50 samples of y = sin(x) on [0, pi], as column vectors.
x = np.linspace(0, math.pi, 50).reshape(50, 1)
y = np.sin(x)

# Wider network: layer widths 1 -> 1000 -> 100 -> 1; weights uniform in [-1, 1).
num1 = 1000
num2 = 100
w1 = 2 * np.random.random((1, num1)) - 1
w2 = 2 * np.random.random((num1, num2)) - 1
w3 = 2 * np.random.random((num2, 1)) - 1

def feedfoward(x):
    """Forward pass of the 1-1000-100-1 sigmoid network (reads globals w1..w3).

    Returns (a0, a1, a2, a3); a3 is the network output.
    """
    hidden1 = act(np.dot(x, w1))
    hidden2 = act(np.dot(hidden1, w2))
    out = act(np.dot(hidden2, w3))
    return x, hidden1, hidden2, out
# Full-batch gradient descent on the 1-1000-100-1 sigmoid network.
n_epochs = 1000000
for i in range(n_epochs):
    a0,a1,a2,a3= feedfoward(x)
    # Output-layer error term; act(..., deriv=True) expects activated values.
    l3_delta = (a3 - y)*act(a3, deriv=True)
    # Backpropagate through w3 then w2 (computed before the weights change).
    l2_delta = l3_delta.dot(w3.T) * act(a2,deriv=True)
    l1_delta = l2_delta.dot(w2.T) * act(a1,deriv=True)
    # NOTE(review): w3 uses learning rate 0.01 while w2/w1 use 0.1 — confirm intent.
    w3 = w3 - a2.T.dot(l3_delta) * 0.01
    w2 = w2 - a1.T.dot(l2_delta)*0.1
    w1 = w1 - a0.T.dot(l1_delta)*0.1
    if(i % 10000) ==0:
        # Mean absolute error, logged every 10k iterations.
        loss =np.mean(np.abs(y - a3))
        print("epochs %d/%d loss = %f" % (i/1e4+1, n_epochs/1e4, loss))

# Final forward pass and comparison plot against the true sin curve.
a0,a1,a2 ,a3= feedfoward(x)
plt.plot(x,y,'b',label='y=sin(x)')
plt.plot(x,a3,'r',label='sin(x) curve fitted by neural network')
plt.legend()
plt.show()
import numpy as np
import math
import matplotlib.pyplot as plt
def act(x, deriv=False):
    """Sigmoid activation; with deriv=True, `x` is a = act(z) and a*(1-a) is returned."""
    if deriv:
        return x * (1 - x)
    return np.exp(-x).__radd__(1).__rtruediv__(1) if False else 1 / (1 + np.exp(-x))


# Training set: 50 samples of y = sin(x) on [0, pi], as column vectors.
x = np.linspace(0, math.pi, 50).reshape(50, 1)
y = np.sin(x)

# Deeper network: layer widths 1 -> 100 -> 100 -> 100 -> 1; weights uniform in [-1, 1).
num1 = 100
num2 = 100
num3 = 100
w1 = 2 * np.random.random((1, num1)) - 1
w2 = 2 * np.random.random((num1, num2)) - 1
w3 = 2 * np.random.random((num2, num3)) - 1
w4 = 2 * np.random.random((num3, 1)) - 1

def feedfoward(x):
    """Forward pass of the 4-weight sigmoid network (reads globals w1..w4).

    Returns (a0, a1, a2, a3, a4); a4 is the network output.
    """
    a0 = x
    a1 = act(a0 @ w1)
    a2 = act(a1 @ w2)
    a3 = act(a2 @ w3)
    a4 = act(a3 @ w4)
    return a0, a1, a2, a3, a4
# Full-batch gradient descent on the 1-100-100-100-1 sigmoid network.
n_epochs = 1000000
for i in range(n_epochs):
    a0,a1,a2,a3,a4= feedfoward(x)
    # Output-layer error term; act(..., deriv=True) expects activated values.
    l4_delta = (a4 - y)*act(a4, deriv=True)
    # Backpropagate through w4, w3, w2 (computed before the weights change).
    l3_delta = l4_delta.dot(w4.T) * act(a3,deriv=True)
    l2_delta = l3_delta.dot(w3.T) * act(a2,deriv=True)
    l1_delta = l2_delta.dot(w2.T) * act(a1,deriv=True)
    # NOTE(review): w3 uses learning rate 0.01 while w4/w2/w1 use 0.1 — confirm intent.
    w4 = w4 - a3.T.dot(l4_delta) * 0.1
    w3 = w3 - a2.T.dot(l3_delta) * 0.01
    w2 = w2 - a1.T.dot(l2_delta)*0.1
    w1 = w1 - a0.T.dot(l1_delta)*0.1
    if(i % 10000) ==0:
        # Mean absolute error, logged every 10k iterations.
        loss =np.mean(np.abs(y - a4))
        print("epochs %d/%d loss = %f" % (i/1e4+1, n_epochs/1e4, loss))

# Final forward pass and comparison plot against the true sin curve.
a0,a1,a2 ,a3,a4= feedfoward(x)
plt.plot(x,y,'b',label='y=sin(x)')
plt.plot(x,a4,'r',label='sin(x) curve fitted by neural network')
plt.legend()
plt.show()
import numpy as np
import math
import matplotlib.pyplot as plt
def act(x, deriv=False):
    """Sigmoid 1/(1+e^-x); deriv=True returns a*(1-a) for an activated input a."""
    if deriv:
        # `x` must already be a sigmoid output here.
        return x * (1 - x)
    return 1 / (1 + np.exp(-x))


# Training set: 50 samples of y = sin(x) on [0, pi], as column vectors.
x = np.linspace(0, math.pi, 50).reshape(50, 1)
y = np.sin(x)

# Deep + wide network: layer widths 1 -> 500 -> 400 -> 100 -> 1; weights uniform in [-1, 1).
num1 = 500
num2 = 400
num3 = 100
w1 = 2 * np.random.random((1, num1)) - 1
w2 = 2 * np.random.random((num1, num2)) - 1
w3 = 2 * np.random.random((num2, num3)) - 1
w4 = 2 * np.random.random((num3, 1)) - 1

def feedfoward(x):
    """Forward pass of the 4-weight sigmoid network (reads globals w1..w4).

    Returns (a0, a1, a2, a3, a4): input plus each layer's activation;
    a4 is the network output.
    """
    activations = [x]
    for w in (w1, w2, w3, w4):
        activations.append(act(np.dot(activations[-1], w)))
    return tuple(activations)
# Full-batch gradient descent on the 1-500-400-100-1 sigmoid network.
n_epochs = 1000000
for i in range(n_epochs):
    a0,a1,a2,a3,a4= feedfoward(x)
    # Output-layer error term; act(..., deriv=True) expects activated values.
    l4_delta = (a4 - y)*act(a4, deriv=True)
    # Backpropagate through w4, w3, w2 (computed before the weights change).
    l3_delta = l4_delta.dot(w4.T) * act(a3,deriv=True)
    l2_delta = l3_delta.dot(w3.T) * act(a2,deriv=True)
    l1_delta = l2_delta.dot(w2.T) * act(a1,deriv=True)
    # NOTE(review): w3 uses learning rate 0.01 while w4/w2/w1 use 0.1 — confirm intent.
    w4 = w4 - a3.T.dot(l4_delta) * 0.1
    w3 = w3 - a2.T.dot(l3_delta) * 0.01
    w2 = w2 - a1.T.dot(l2_delta)*0.1
    w1 = w1 - a0.T.dot(l1_delta)*0.1
    if(i % 10000) ==0:
        # Mean absolute error, logged every 10k iterations.
        loss =np.mean(np.abs(y - a4))
        print("epochs %d/%d loss = %f" % (i/1e4+1, n_epochs/1e4, loss))

# Final forward pass and comparison plot against the true sin curve.
a0,a1,a2 ,a3,a4= feedfoward(x)
plt.plot(x,y,'b',label='y=sin(x)')
plt.plot(x,a4,'r',label='sin(x) curve fitted by neural network')
plt.legend()
plt.show()
【版权声明】本文为华为云社区用户原创内容,转载时必须标注文章的来源(华为云社区)、文章链接、文章作者等基本信息, 否则作者和本社区有权追究责任。如果您发现本社区中有涉嫌抄袭的内容,欢迎发送邮件进行举报,并提供相关证据,一经查实,本社区将立刻删除涉嫌侵权内容,举报邮箱: cloudbbs@huaweicloud.com
  • 点赞
  • 收藏
  • 关注作者

评论(0

0/1000
抱歉,系统识别当前为高风险访问,暂不支持该操作

全部回复

上滑加载中

设置昵称

在此一键设置昵称,即可参与社区互动!

*长度不超过10个汉字或20个英文字符,设置后3个月内不可修改。

*长度不超过10个汉字或20个英文字符,设置后3个月内不可修改。