# coding=utf-8
# Linear regression from scratch with MXNet NDArray and autograd.
from mxnet import ndarray as nd
from mxnet import autograd
import matplotlib.pyplot as plt  # used only by the optional plotting code below
import random


def data_iter(X, y, num_examples, batch_size):
    # Generate a shuffled index so examples are visited in random order.
    idx = list(range(num_examples))
    random.shuffle(idx)
    for i in range(0, num_examples, batch_size):
        # The last batch may be smaller than batch_size.
        j = nd.array(idx[i:min(i + batch_size, num_examples)])
        yield nd.take(X, j), nd.take(y, j)
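

# A quick sanity check of data_iter (illustrative only -- the demo arrays and
# shapes below are made up; the real call happens in main()):
#   X_demo = nd.arange(20).reshape((10, 2))
#   y_demo = nd.arange(10)
#   for data, label in data_iter(X_demo, y_demo, 10, 4):
#       print(data.shape, label.shape)  # (4, 2) (4,), last batch (2, 2) (2,)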


def net(X, w, b):
    # Linear model: yhat = Xw + b (b broadcasts over the batch).
    return nd.dot(X, w) + b


def square_loss(yhat, y):
    # Elementwise squared error; reshape aligns y with yhat's (batch, 1) shape.
    return (yhat - y.reshape(yhat.shape)) ** 2


def SGD(params, lr):
    # Vanilla mini-batch SGD step: param <- param - lr * grad.
    # The in-place assignment param[:] keeps the gradient buffer attached;
    # rebinding with `param = ...` would silently break training.
    for param in params:
        param[:] = param - lr * param.grad


def main():
    num_inputs = 2
    num_examples = 1000
    # Ground truth for the synthetic dataset: y = 2*x1 - 3.4*x2 + 4.2 + noise.
    true_w = [2, -3.4]
    true_b = 4.2
    X = nd.random_normal(shape=(num_examples, num_inputs))
    y = true_w[0] * X[:, 0] + true_w[1] * X[:, 1] + true_b
    y += 0.01 * nd.random_normal(shape=y.shape)
    batch_size = 10
    # Optional: visualize the data.
    # plt.scatter(X[:, 1].asnumpy(), y.asnumpy())
    # plt.show()
    # Optional: preview one mini-batch.
    # for data, label in data_iter(X, y, num_examples, batch_size):
    #     print(data, label)
    #     break
    # Initialize the parameters randomly and attach gradient buffers.
    w = nd.random_normal(shape=(num_inputs, 1))
    b = nd.zeros((1,))
    params = [w, b]
    for param in params:
        param.attach_grad()
    epochs = 5
    learning_rate = 0.01
    for e in range(epochs):
        total_loss = 0
        for data, label in data_iter(X, y, num_examples, batch_size):
            with autograd.record():
                output = net(data, w, b)
                loss = square_loss(output, label)
            # loss has shape (batch_size, 1); MXNet implicitly sums a
            # non-scalar head before backpropagating.
            loss.backward()
            SGD(params, learning_rate)
            total_loss += nd.sum(loss).asscalar()
        print("Epoch %d, average loss: %f" % (e, total_loss / num_examples))
    # Compare the learned parameters with the ground truth.
    print(true_w, w)
    print(true_b, b)
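

# For reference, the same model trained with gluon's high-level API. This is
# an illustrative sketch, not part of the original script; it assumes MXNet
# 1.x, where gluon.data.DataLoader, gluon.nn.Dense, gluon.loss.L2Loss and
# gluon.Trainer are available. It is defined but never called.
def main_gluon():
    from mxnet import gluon
    num_examples, batch_size = 1000, 10
    X = nd.random_normal(shape=(num_examples, 2))
    y = 2 * X[:, 0] - 3.4 * X[:, 1] + 4.2
    y += 0.01 * nd.random_normal(shape=y.shape)
    data_loader = gluon.data.DataLoader(
        gluon.data.ArrayDataset(X, y), batch_size, shuffle=True)
    net = gluon.nn.Sequential()
    net.add(gluon.nn.Dense(1))  # a single output unit = linear regression
    net.initialize()
    loss_fn = gluon.loss.L2Loss()  # note: L2Loss computes (yhat - y)^2 / 2
    trainer = gluon.Trainer(net.collect_params(), 'sgd',
                            {'learning_rate': 0.01})
    for e in range(5):
        total_loss = 0
        for data, label in data_loader:
            with autograd.record():
                loss = loss_fn(net(data), label)
            loss.backward()
            trainer.step(batch_size)  # rescales gradients by 1/batch_size
            total_loss += nd.sum(loss).asscalar()
        print("Epoch %d, average loss: %f" % (e, total_loss / num_examples))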


if __name__ == '__main__':
    main()