-
Notifications
You must be signed in to change notification settings - Fork 9
/
Copy pathoptimizer.py
64 lines (54 loc) · 2.32 KB
/
optimizer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
from core import optimizer2 as optimizer
from util import common
import tensorflow as tf
import os
from core.model_two_policy import Model
from util.dataset2 import Dataset
import glob
import time
from util.common import log
# --- Flag and directory setup ------------------------------------------------
FLAGS = tf.app.flags.FLAGS
common.set_flags()
# When True, the loop below waits until enough self-play games have arrived.
tf.app.flags.DEFINE_boolean('pending_dataset', False, "pending dataset")
# dataset_ready: where self-play drops CSVs; dataset_bak: archive of consumed CSVs.
common.make_dirs(os.path.join(FLAGS.save_dir, "dataset_ready"))
common.make_dirs(os.path.join(FLAGS.save_dir, "dataset_bak"))
# Grow GPU memory on demand instead of pre-allocating the whole device.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
# Build the model graph from the current FLAGS values.
model = Model(sess, weight_decay=FLAGS.weight_decay, momentum=FLAGS.momentum, num_layers=FLAGS.num_model_layers,
              use_cache=FLAGS.use_cache, conf=FLAGS)
# Summary writer for TensorBoard; logs go under <save_dir>/summary.
writer = tf.summary.FileWriter(FLAGS.save_dir + '/summary', sess.graph)
sess.run(tf.global_variables_initializer())
saver = tf.train.Saver()
# NOTE(review): the three assignments below override any command-line values
# and look like debug leftovers. In particular, num_model_layers is set AFTER
# the Model was already constructed above, so it has no effect on the graph;
# dataset_dir="./" makes the dataset_ready fallback in the loop unreachable.
# Confirm whether these hard-coded overrides are intentional.
FLAGS.dataset_dir = "./"
FLAGS.num_model_layers = 20
FLAGS.batch_size = 31
ds = Dataset(sess)
# Main optimization loop: pick up self-play CSVs, train for FLAGS.epoch
# epochs, checkpoint periodically, then optionally archive the consumed data.
while True:
    # Resolve where the dataset CSVs live; an explicit flag wins over the
    # default <save_dir>/dataset_ready drop directory.
    dataset_dir = FLAGS.dataset_dir or os.path.join(FLAGS.save_dir, "dataset_ready")
    files = glob.glob(os.path.join(dataset_dir, "dataset*.csv"))
    # In pending mode, poll every 10s until enough self-play games arrived.
    if FLAGS.pending_dataset and len(files) < common.num_opt_games / common.num_selfplay_games:
        log("waiting for dataset... now %d games" % (len(files) * common.num_selfplay_games))
        time.sleep(10)
        continue
    log("load dataset %d files" % len(files))
    if FLAGS.restore_model_path:
        # Reload weights from the given checkpoint before (re)training.
        common.restore_model(FLAGS.restore_model_path, None, saver, sess, restore_pending=False)
    for epoch in range(FLAGS.epoch):
        print("epoch %d" % epoch)
        ds.make_dataset(files, FLAGS.batch_size, shuffle_buffer_size=FLAGS.shuffle_buffer_size)
        optimizer.train_model_epoch(model, ds, FLAGS.batch_size, writer)
        ds.close_dataset()
        # Checkpoint every num_checkpoint_epochs epochs; the epoch-0 case only
        # fires when the interval is exactly 1.
        should_save = (epoch == 0 and common.num_checkpoint_epochs == 1) or (
            epoch > 0 and epoch % common.num_checkpoint_epochs == 0)
        if should_save:
            now = common.now_date_str_nums()
            saver.save(sess, os.path.join(FLAGS.save_dir, "new_model_%s.ckpt" % now))
            log("save model")
    if FLAGS.backup_dataset:
        # Move consumed CSVs aside so the next pass does not re-train on them.
        for csv_path in files:
            os.rename(csv_path, os.path.join(FLAGS.save_dir, "dataset_bak", os.path.basename(csv_path)))
        # os.remove(file)