This repository has been archived by the owner on Mar 28, 2022. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmain.py
97 lines (82 loc) · 2.94 KB
/
main.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
#!/usr/bin/env python3
from sklearn.model_selection import train_test_split
import tensorflow as tf
import numpy as np
import os
import shutil
MODEL = 'build/0'  # SavedModel export directory (serving version 0)
INPUTS = 10 # 100  -- number of (mass, x, y) point triples per sample
RATE = 0.1  # gradient-descent learning rate
EPOCHS = 200  # number of full-batch training iterations
def com(row):
    """Return the center of mass of a flat (m, x, y, m, x, y, ...) row.

    row: flat sequence of point triples (mass, x-coord, y-coord).
    Returns np.array([cx, cy]); returns np.array([-1.0, -1.0]) when the
    total mass is zero (sentinel value kept from the original code).
    """
    m, mx, my = 0.0, 0.0, 0.0
    for i in range(0, len(row), 3):
        m += row[i]
        mx += row[i] * row[i + 1]
        my += row[i] * row[i + 2]
    if m == 0:
        # Bug fix: the original returned a plain tuple here while the normal
        # path returns an ndarray; the mixed return types would break any
        # caller that reads `.shape` on a label (as the script below does).
        return np.array([-1.0, -1.0])
    return np.array([mx / m, my / m])
def mse(y, y_):
    """Sum of squared differences between prediction y_ and target y.

    NOTE: despite the name this is the *sum* of squared errors for one
    sample, not the mean; the caller divides by the sample count itself.
    """
    return sum((y_[i] - y[i]) ** 2 for i in range(len(y)))
def data_generate(count=10000, test=0.2):
    """Build `count` random mass/position rows labeled with their center
    of mass, then split them into train/test sets.

    Returns (train_x, test_x, train_y, test_y) as produced by sklearn's
    train_test_split with test_size=test.
    """
    samples = [np.random.rand(INPUTS * 3) for _ in range(count)]
    labels = [com(row) for row in samples]
    return train_test_split(samples, labels, test_size=test)
def ann_layer(x, size, name=None):
    """Fully-connected layer computing x @ W + b.

    size: [in_dim, out_dim]; weights and bias are initialized from a
    truncated normal distribution (bias shape is size[-1:]).
    name: optional name for the resulting output op.
    """
    weights = tf.Variable(tf.truncated_normal(size))
    bias = tf.Variable(tf.truncated_normal(size[-1:]))
    product = tf.matmul(x, weights)
    return tf.add(product, bias, name)
def ann_network(x):
    """Build the regression net: relu -> sigmoid -> sigmoid -> linear(2)."""
    hidden = tf.nn.relu(ann_layer(x, [INPUTS * 3, INPUTS]))
    hidden = tf.nn.sigmoid(ann_layer(hidden, [INPUTS, INPUTS]))
    hidden = tf.nn.sigmoid(ann_layer(hidden, [INPUTS, 30]))
    return ann_layer(hidden, [30, 2])
def regress_signature(x, y, sign):
    """Wrap input tensor x and output tensor y into a SavedModel SignatureDef.

    sign: the TF Serving method name, e.g. 'tensorflow/serving/regress'.
    """
    input_info = tf.saved_model.build_tensor_info(x)
    output_info = tf.saved_model.build_tensor_info(y)
    return tf.saved_model.build_signature_def(
        {'inputs': input_info}, {'outputs': output_info}, sign)
# Generate the synthetic dataset: random rows of (mass, x, y) triples,
# each labeled with its 2-D center of mass.
print('generating dataset:')
train_x, test_x, train_y, test_y = data_generate()
print('%d train rows, %d test rows' % (len(train_x), len(test_x)))
print('test_x[0]:', test_x[0].shape)
print('test_y[0]:', test_y[0].shape)
# Define the TF1 graph: input/target placeholders, the network, a
# squared-error cost, and a plain gradient-descent training op.
# The commented-out lines below are an unfinished tf.Example parsing path.
print('\ndefining api:')
#serialized = tf.placeholder(tf.string, name='tf_example')
#features = {'mxy': tf.FixedLenFeature(shape=INPUTS*3, dtype=tf.string)}
#example = tf.parse_example(serialized, features)
#example_mxy = tf.to_float(example['mxy'])
print('\ndefining ann:')
# x = tf.identity(example_mxy, name='x')
x = tf.placeholder(tf.float32, [None, INPUTS*3], name='x')  # flat (m, x, y) triples
y_ = tf.placeholder(tf.float32, [None, 2], name='y_')  # target center of mass
y = ann_network(x)
# Half the mean squared error over the full training set (classic 1/2m scaling).
cost = tf.reduce_sum(tf.pow(y-y_, 2)) / (2 * len(train_x))
train = tf.train.GradientDescentOptimizer(RATE).minimize(cost)
# cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y, labels=y_))
# Train full-batch for EPOCHS iterations, then export a SavedModel with
# TF Serving regress signatures.
print('\nstarting training:')
if os.path.exists(MODEL):
    # SavedModelBuilder refuses to write into an existing directory.
    shutil.rmtree(MODEL)
sess = tf.Session()
builder = tf.saved_model.Builder(MODEL)
sess.run(tf.global_variables_initializer())
for epoch in range(EPOCHS):
    # Full-batch gradient step, then a second pass just to report the cost.
    sess.run(train, {x: train_x, y_: train_y})
    err = sess.run(cost, {x: train_x, y_: train_y})
    print('Epoch %d: %f mse' % (epoch, err))
# The same signature is registered under both keys so generic clients
# ('serving_default') and regress-specific clients both work.
signatures = {
    'serving_default': regress_signature(x, y, 'tensorflow/serving/regress'),
    'regress': regress_signature(x, y, 'tensorflow/serving/regress')
}
builder.add_meta_graph_and_variables(sess, ['serve'], signatures, main_op=tf.tables_initializer(), strip_default_attrs=True)
builder.save()
# Evaluate: run the trained net on the held-out test set and report the
# average per-sample sum of squared errors (see mse() above).
print('\ntesting:')
mse_total = 0
yv = sess.run(y, {x: test_x, y_: test_y})  # y_ feed is not needed to compute y, but harmless
for i in range(len(yv)):
    mse_total += mse(yv[i], test_y[i])
print('mse avg:', mse_total/len(yv))