
TensorFlow Study Notes -- a CIFAR-10 CNN Classifier

This note builds and trains a CNN classifier on the CIFAR-10 dataset.

1) Download: git clone https://github.com/tensorflow/models.git -- the repository contains the cifar10 and cifar10_input Python modules used below.

2) Layer structure: conv1, pool1, norm1, conv2, norm2, pool2, local3, local4, logits (a rough shape trace is sketched after this list).

3) The input pipeline runs on multiple threads (queue runners). Using with tf.Session() as sess raised errors here, so the script uses sess = tf.InteractiveSession() instead; an alternative with-block pattern is sketched after the code.

4) After 3000 training steps, the final test-set accuracy is about 71%.

5) References: the official TensorFlow website and the book 《TensorFlow實戰》.
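
For reference, the tensor shapes implied by note 2 work out roughly as follows, derived from the 24x24x3 cropped inputs and the kernel, stride, and padding settings in the code below:

# input                                     -> [batch, 24, 24, 3]
# conv1 (5x5, 64 filters, stride 1, SAME)   -> [batch, 24, 24, 64]
# pool1 (3x3 max pool, stride 2, SAME)      -> [batch, 12, 12, 64]
# norm1 (LRN, shape unchanged)              -> [batch, 12, 12, 64]
# conv2 (5x5, 64 filters, stride 1) + norm2 -> [batch, 12, 12, 64]
# pool2 (3x3 max pool, stride 2, SAME)      -> [batch, 6, 6, 64]
# reshape (flatten)                         -> [batch, 2304]
# local3 (fully connected)                  -> [batch, 384]
# local4 (fully connected)                  -> [batch, 192]
# logits (fully connected)                  -> [batch, 10]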

Python code:

import cifar10,cifar10_input
import tensorflow as tf
import numpy as np
import time
cifar10.maybe_download_and_extract()
max_steps = 3000
batch_size = 128
data_dir = '/home/edward/TensorFlow/models/tutorials/cifar-10-batches-bin'

# Helper: create a weight variable; when w1 is given, its L2 loss (scaled by w1)
# is added to the 'losses' collection so it can be folded into the total loss.
def variable_with_weight_loss(shape,stddev,w1):
    var = tf.Variable(tf.truncated_normal(shape,stddev=stddev))
    if w1 is not None:
        weight_loss = tf.multiply(tf.nn.l2_loss(var),w1,name='weight_loss')
        tf.add_to_collection('losses',weight_loss)
    return var

images_train,labels_train = cifar10_input.distorted_inputs(data_dir=data_dir, batch_size=batch_size)

images_test, labels_test = cifar10_input.inputs(eval_data = True,
                                                data_dir = data_dir,
                                                batch_size = batch_size)

image_holder = tf.placeholder(tf.float32,[batch_size,24,24,3])
label_holder = tf.placeholder(tf.float32,[batch_size])

weight1 = variable_with_weight_loss(shape=[5,5,3,64],stddev=0.05,w1=0)
kernel1 = tf.nn.conv2d(image_holder,weight1,[1,1,1,1],padding='SAME')
bias1 = tf.Variable(tf.constant(0.0,shape=[64]))
# conv1 = tf.nn.relu(tf.nn.bias_add(kernel1,bias1))
conv1 = tf.nn.relu(kernel1+bias1)
pool1 = tf.nn.max_pool(conv1,ksize=[1,3,3,1],strides=[1,2,2,1],padding='SAME')
norm1 = tf.nn.lrn(pool1,4,bias = 1.0 , alpha = 0.001/9.0 , beta = 0.75)

weight2 = variable_with_weight_loss(shape=[5,5,64,64],stddev=0.05,w1=0.0)
kernel2 = tf.nn.conv2d(norm1,weight2,[1,1,1,1],padding='SAME')
bias2 = tf.Variable(tf.constant(0.1,shape=[64]))
conv2 = tf.nn.relu(kernel2+bias2)
norm2 = tf.nn.lrn(conv2,4,bias = 1.0 , alpha = 0.001/9.0 , beta = 0.75)
pool2 = tf.nn.max_pool(norm2, ksize=[1,3,3,1],strides=[1,2,2,1],padding='SAME')

reshape = tf.reshape(pool2,[batch_size,-1])
dim = reshape.get_shape()[1].value
weight3 = variable_with_weight_loss([dim,384],stddev=0.04,w1=0.004)
bias3 = tf.Variable(tf.constant(0.1,shape=[384]))
local3 = tf.nn.relu(tf.matmul(reshape,weight3)+bias3)

weight4 = variable_with_weight_loss([384,192],stddev=0.04,w1=0.004)
bias4 = tf.Variable(tf.constant(0.1,shape=[192]))
local4 = tf.nn.relu(tf.matmul(local3,weight4)+bias4)

weight5 = variable_with_weight_loss([192,10],stddev=1/192.0,w1=0.0)
bias5 = tf.Variable(tf.constant(0.0,shape=[10]))
logits = tf.matmul(local4,weight5)+bias5

def loss(logits,labels):
    labels = tf.cast(labels,tf.int64)
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=labels, 
                                                                   name='cross_entropy_per_example')
    cross_entropy_mean = tf.reduce_mean(cross_entropy,name='cross_entropy')
    tf.add_to_collection('losses',cross_entropy_mean)
    return tf.add_n(tf.get_collection('losses'),name='total_loss')
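
# The total loss returned above is the mean cross-entropy plus every term in the
# 'losses' collection, i.e. the L2 weight-decay losses registered by
# variable_with_weight_loss (only weight3 and weight4 use a nonzero w1 here).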

loss=loss(logits,label_holder)

train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)
top_k_op = tf.nn.in_top_k(logits,tf.cast(label_holder,tf.int64),1)

# num_examples = 10000
# import math
# num_iter = int(math.ceil(num_examples/ batch_size))
# true_count = 0
# total_sample_count = num_iter * batch_size
# step = 0
# with tf.Session() as sess:
#     sess.run(tf.global_variables_initializer())
#     tf.train.start_queue_runners()
#     for step in range(max_steps):
#         start_time = time.time()
#         image_batch,label_batch = sess.run([images_train,labels_train])
#         _,loss_value = sess.run([train_op,loss],feed_dict={image_holder: image_batch,label_holder:label_batch})
#         duration = time.time()-start_time
#         if step % 10 == 0:
#             examples_per_sec = batch_size /duration
#             sec_per_batch = float(duration)
            
#             format_str = ('step %d,loss=%.2f (%.1f examples/sec;%.3f sec/batch)')
#             print(format_str % (step,loss_value,examples_per_sec,sec_per_batch))

#     while step< num_iter:
#         image_batch,label_batch = sess.run([images_test,labels_test])
#         predictions = sess.run([top_k_op],feed_dict = {image_holder:image_batch,
#                                                        label_holder:label_batch})
#         true_count += np.sum(predictions)
#         step+=1
#         if step % 10 ==0:
#             print true_count
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
tf.train.start_queue_runners()
for step in range(max_steps):
    start_time = time.time()
    image_batch,label_batch = sess.run([images_train,labels_train])
    _,loss_value = sess.run([train_op,loss],feed_dict={image_holder: image_batch,label_holder:label_batch})
    duration = time.time()-start_time
    if step % 10 == 0:
        examples_per_sec = batch_size /duration
        sec_per_batch = float(duration)
        
        format_str = ('step %d,loss=%.2f (%.1f examples/sec;%.3f sec/batch)')
        print(format_str % (step,loss_value,examples_per_sec,sec_per_batch))

num_examples = 10000
import math
num_iter = int(math.ceil(num_examples / float(batch_size)))
true_count = 0
total_sample_count = num_iter * batch_size
step = 0
# with tf.Session() as sess:
while step< num_iter:
    image_batch,label_batch = sess.run([images_test,labels_test])
    predictions = sess.run([top_k_op],feed_dict = {image_holder:image_batch,
                                                   label_holder:label_batch})
    true_count += np.sum(predictions)
    step+=1
    if step % 10 ==0:
        print(true_count)

precision = float(true_count) / total_sample_count
print('precision @ 1 =%.3f' % precision)
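
Regarding note 3: the error seen with the commented-out with-block version is most likely related to the queue-runner threads still touching the session when the block closes, while tf.InteractiveSession installs itself as the default session and stays open. If a with-block is preferred, a common pattern is to pass the session and a Coordinator to start_queue_runners explicitly and stop/join the threads before leaving the block. This is only a sketch of the session/thread handling (not run here); the training and evaluation loops would stay exactly as above:

import tensorflow as tf

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Hand the session and a coordinator to the queue runners explicitly
    # instead of relying on a default session.
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    try:
        pass  # ... same training loop and evaluation loop as above ...
    finally:
        # Ask the reader threads to stop and wait for them before the
        # with-block closes the session.
        coord.request_stop()
        coord.join(threads)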

Result:
step 0,loss=4.68 (13.0 examples/sec;9.867 sec/batch)
step 10,loss=3.66 (2056.2 examples/sec;0.062 sec/batch)
step 20,loss=3.10 (2092.1 examples/sec;0.061 sec/batch)
step 30,loss=2.76 (2099.5 examples/sec;0.061 sec/batch)
step 40,loss=2.51 (2044.6 examples/sec;0.063 sec/batch)
step 50,loss=2.34 (1918.1 examples/sec;0.067 sec/batch)
step 60,loss=2.26 (2404.8 examples/sec;0.053 sec/batch)
step 70,loss=2.27 (1877.0 examples/sec;0.068 sec/batch)
step 80,loss=2.19 (2146.7 examples/sec;0.060 sec/batch)
step 90,loss=1.98 (2225.1 examples/sec;0.058 sec/batch)
step 100,loss=1.90 (1884.5 examples/sec;0.068 sec/batch)
step 110,loss=2.01 (2037.7 examples/sec;0.063 sec/batch)
step 120,loss=1.94 (2028.8 examples/sec;0.063 sec/batch)
step 130,loss=1.83 (2071.4 examples/sec;0.062 sec/batch)
step 140,loss=1.88 (1998.5 examples/sec;0.064 sec/batch)
step 150,loss=1.84 (1979.2 examples/sec;0.065 sec/batch)
step 160,loss=1.74 (2297.7 examples/sec;0.056 sec/batch)
step 170,loss=1.83 (1945.0 examples/sec;0.066 sec/batch)
step 180,loss=1.75 (2110.6 examples/sec;0.061 sec/batch)
step 190,loss=1.79 (2609.5 examples/sec;0.049 sec/batch)
step 200,loss=1.52 (2124.4 examples/sec;0.060 sec/batch)
step 210,loss=1.66 (2197.6 examples/sec;0.058 sec/batch)
step 220,loss=1.68 (1974.4 examples/sec;0.065 sec/batch)
step 230,loss=1.81 (2094.4 examples/sec;0.061 sec/batch)
step 240,loss=1.69 (1956.8 examples/sec;0.065 sec/batch)
step 250,loss=1.75 (2065.6 examples/sec;0.062 sec/batch)
step 260,loss=1.65 (2019.1 examples/sec;0.063 sec/batch)
step 270,loss=1.73 (2119.6 examples/sec;0.060 sec/batch)
step 280,loss=1.78 (2004.4 examples/sec;0.064 sec/batch)
step 290,loss=1.67 (2235.9 examples/sec;0.057 sec/batch)
step 300,loss=1.71 (1920.9 examples/sec;0.067 sec/batch)
step 310,loss=1.70 (2421.0 examples/sec;0.053 sec/batch)
step 320,loss=1.79 (2025.6 examples/sec;0.063 sec/batch)
step 330,loss=1.48 (2202.9 examples/sec;0.058 sec/batch)
step 340,loss=1.55 (2089.6 examples/sec;0.061 sec/batch)
step 350,loss=1.48 (2240.9 examples/sec;0.057 sec/batch)
step 360,loss=1.66 (2042.5 examples/sec;0.063 sec/batch)
step 370,loss=1.58 (2012.1 examples/sec;0.064 sec/batch)
step 380,loss=1.53 (2147.9 examples/sec;0.060 sec/batch)
step 390,loss=1.51 (2063.0 examples/sec;0.062 sec/batch)
step 400,loss=1.49 (2561.1 examples/sec;0.050 sec/batch)
step 410,loss=1.53 (2019.8 examples/sec;0.063 sec/batch)
step 420,loss=1.43 (2116.1 examples/sec;0.060 sec/batch)
step 430,loss=1.50 (1866.1 examples/sec;0.069 sec/batch)
step 440,loss=1.48 (2139.0 examples/sec;0.060 sec/batch)
step 450,loss=1.48 (2084.3 examples/sec;0.061 sec/batch)
step 460,loss=1.42 (2197.7 examples/sec;0.058 sec/batch)
step 470,loss=1.57 (2281.9 examples/sec;0.056 sec/batch)
step 480,loss=1.45 (2011.3 examples/sec;0.064 sec/batch)
step 490,loss=1.57 (2067.5 examples/sec;0.062 sec/batch)
step 500,loss=1.48 (2199.9 examples/sec;0.058 sec/batch)
step 510,loss=1.68 (2215.9 examples/sec;0.058 sec/batch)
step 520,loss=1.47 (1969.8 examples/sec;0.065 sec/batch)
step 530,loss=1.38 (2243.6 examples/sec;0.057 sec/batch)
step 540,loss=1.50 (2209.7 examples/sec;0.058 sec/batch)
step 550,loss=1.28 (1896.2 examples/sec;0.068 sec/batch)
step 560,loss=1.36 (2088.0 examples/sec;0.061 sec/batch)
step 570,loss=1.28 (1973.4 examples/sec;0.065 sec/batch)
step 580,loss=1.46 (2032.7 examples/sec;0.063 sec/batch)
step 590,loss=1.30 (2135.9 examples/sec;0.060 sec/batch)
step 600,loss=1.42 (2648.2 examples/sec;0.048 sec/batch)
step 610,loss=1.32 (2074.1 examples/sec;0.062 sec/batch)
step 620,loss=1.33 (2141.3 examples/sec;0.060 sec/batch)
step 630,loss=1.51 (2114.3 examples/sec;0.061 sec/batch)
step 640,loss=1.56 (2164.3 examples/sec;0.059 sec/batch)
step 650,loss=1.61 (1928.6 examples/sec;0.066 sec/batch)
step 660,loss=1.37 (2012.7 examples/sec;0.064 sec/batch)
step 670,loss=1.47 (2170.8 examples/sec;0.059 sec/batch)
step 680,loss=1.32 (2322.4 examples/sec;0.055 sec/batch)
step 690,loss=1.55 (2035.7 examples/sec;0.063 sec/batch)
step 700,loss=1.43 (2472.6 examples/sec;0.052 sec/batch)
step 710,loss=1.32 (2053.6 examples/sec;0.062 sec/batch)
step 720,loss=1.40 (2612.4 examples/sec;0.049 sec/batch)
step 730,loss=1.31 (1935.6 examples/sec;0.066 sec/batch)
step 740,loss=1.22 (2243.3 examples/sec;0.057 sec/batch)
step 750,loss=1.50 (2279.6 examples/sec;0.056 sec/batch)
step 760,loss=1.59 (2090.1 examples/sec;0.061 sec/batch)
step 770,loss=1.38 (2052.4 examples/sec;0.062 sec/batch)
step 780,loss=1.37 (2216.3 examples/sec;0.058 sec/batch)
step 790,loss=1.38 (2003.8 examples/sec;0.064 sec/batch)
step 800,loss=1.33 (2185.4 examples/sec;0.059 sec/batch)
step 810,loss=1.28 (2180.2 examples/sec;0.059 sec/batch)
step 820,loss=1.47 (1997.7 examples/sec;0.064 sec/batch)
step 830,loss=1.29 (2085.5 examples/sec;0.061 sec/batch)
step 840,loss=1.27 (1900.6 examples/sec;0.067 sec/batch)
step 850,loss=1.25 (1973.7 examples/sec;0.065 sec/batch)
step 860,loss=1.19 (1945.3 examples/sec;0.066 sec/batch)
step 870,loss=1.24 (2403.8 examples/sec;0.053 sec/batch)
step 880,loss=1.36 (2097.9 examples/sec;0.061 sec/batch)
step 890,loss=1.25 (2006.7 examples/sec;0.064 sec/batch)
step 900,loss=1.19 (1993.0 examples/sec;0.064 sec/batch)
step 910,loss=1.15 (2287.3 examples/sec;0.056 sec/batch)
step 920,loss=1.25 (2060.5 examples/sec;0.062 sec/batch)
step 930,loss=1.72 (2446.6 examples/sec;0.052 sec/batch)
step 940,loss=1.33 (2011.1 examples/sec;0.064 sec/batch)
step 950,loss=1.06 (2062.4 examples/sec;0.062 sec/batch)
step 960,loss=1.29 (2286.2 examples/sec;0.056 sec/batch)
step 970,loss=1.12 (1947.4 examples/sec;0.066 sec/batch)
step 980,loss=1.41 (2031.9 examples/sec;0.063 sec/batch)
step 990,loss=1.29 (1982.9 examples/sec;0.065 sec/batch)
step 1000,loss=1.28 (2102.3 examples/sec;0.061 sec/batch)
step 1010,loss=1.26 (1986.9 examples/sec;0.064 sec/batch)
step 1020,loss=1.26 (2163.7 examples/sec;0.059 sec/batch)
step 1030,loss=1.38 (2018.7 examples/sec;0.063 sec/batch)
step 1040,loss=1.50 (2104.5 examples/sec;0.061 sec/batch)
step 1050,loss=1.23 (2472.1 examples/sec;0.052 sec/batch)
step 1060,loss=1.12 (1968.7 examples/sec;0.065 sec/batch)
step 1070,loss=1.56 (2105.0 examples/sec;0.061 sec/batch)
step 1080,loss=1.38 (2268.9 examples/sec;0.056 sec/batch)
step 1090,loss=1.42 (1912.6 examples/sec;0.067 sec/batch)
step 1100,loss=1.16 (1931.4 examples/sec;0.066 sec/batch)
step 1110,loss=1.26 (1932.6 examples/sec;0.066 sec/batch)
step 1120,loss=1.10 (2039.4 examples/sec;0.063 sec/batch)
step 1130,loss=1.40 (2034.2 examples/sec;0.063 sec/batch)
step 1140,loss=1.29 (2095.4 examples/sec;0.061 sec/batch)
step 1150,loss=1.33 (2045.4 examples/sec;0.063 sec/batch)
step 1160,loss=1.11 (2025.8 examples/sec;0.063 sec/batch)
step 1170,loss=1.27 (1934.3 examples/sec;0.066 sec/batch)
step 1180,loss=1.34 (1951.4 examples/sec;0.066 sec/batch)
step 1190,loss=1.14 (2515.3 examples/sec;0.051 sec/batch)
step 1200,loss=1.30 (1825.7 examples/sec;0.070 sec/batch)
step 1210,loss=1.48 (2087.1 examples/sec;0.061 sec/batch)
step 1220,loss=1.19 (1863.2 examples/sec;0.069 sec/batch)
step 1230,loss=1.45 (2235.5 examples/sec;0.057 sec/batch)
step 1240,loss=1.34 (2069.0 examples/sec;0.062 sec/batch)
step 1250,loss=1.09 (2289.8 examples/sec;0.056 sec/batch)
step 1260,loss=1.31 (2020.6 examples/sec;0.063 sec/batch)
step 1270,loss=1.45 (1912.6 examples/sec;0.067 sec/batch)
step 1280,loss=1.26 (2149.8 examples/sec;0.060 sec/batch)
step 1290,loss=1.19 (2111.7 examples/sec;0.061 sec/batch)
step 1300,loss=1.28 (2135.2 examples/sec;0.060 sec/batch)
step 1310,loss=1.16 (2002.9 examples/sec;0.064 sec/batch)
step 1320,loss=1.18 (2659.5 examples/sec;0.048 sec/batch)
step 1330,loss=1.46 (1889.1 examples/sec;0.068 sec/batch)
step 1340,loss=1.26 (2211.1 examples/sec;0.058 sec/batch)
step 1350,loss=1.30 (2120.4 examples/sec;0.060 sec/batch)
step 1360,loss=1.20 (2052.6 examples/sec;0.062 sec/batch)
step 1370,loss=1.24 (2143.9 examples/sec;0.060 sec/batch)
step 1380,loss=1.18 (2334.5 examples/sec;0.055 sec/batch)
step 1390,loss=1.19 (2041.0 examples/sec;0.063 sec/batch)
step 1400,loss=1.37 (2004.5 examples/sec;0.064 sec/batch)
step 1410,loss=1.12 (2089.1 examples/sec;0.061 sec/batch)
step 1420,loss=1.21 (2430.7 examples/sec;0.053 sec/batch)
step 1430,loss=1.08 (2119.2 examples/sec;0.060 sec/batch)
step 1440,loss=1.09 (2613.8 examples/sec;0.049 sec/batch)
step 1450,loss=1.20 (2057.9 examples/sec;0.062 sec/batch)
step 1460,loss=1.19 (1945.2 examples/sec;0.066 sec/batch)
step 1470,loss=1.10 (1981.3 examples/sec;0.065 sec/batch)
step 1480,loss=1.07 (2094.4 examples/sec;0.061 sec/batch)
step 1490,loss=1.21 (1870.0 examples/sec;0.068 sec/batch)
step 1500,loss=1.14 (2286.1 examples/sec;0.056 sec/batch)
step 1510,loss=1.30 (2388.0 examples/sec;0.054 sec/batch)
step 1520,loss=0.99 (2613.7 examples/sec;0.049 sec/batch)
step 1530,loss=1.20 (2121.1 examples/sec;0.060 sec/batch)
step 1540,loss=1.18 (1938.2 examples/sec;0.066 sec/batch)
step 1550,loss=1.08 (2211.4 examples/sec;0.058 sec/batch)
step 1560,loss=1.19 (2130.7 examples/sec;0.060 sec/batch)
step 1570,loss=1.07 (2157.0 examples/sec;0.059 sec/batch)
step 1580,loss=1.24 (2137.4 examples/sec;0.060 sec/batch)
step 1590,loss=1.14 (1971.2 examples/sec;0.065 sec/batch)
step 1600,loss=1.04 (2113.4 examples/sec;0.061 sec/batch)
step 1610,loss=1.18 (2330.2 examples/sec;0.055 sec/batch)
step 1620,loss=1.15 (2199.2 examples/sec;0.058 sec/batch)
step 1630,loss=1.29 (2277.3 examples/sec;0.056 sec/batch)
step 1640,loss=1.25 (2077.4 examples/sec;0.062 sec/batch)
step 1650,loss=1.26 (2088.8 examples/sec;0.061 sec/batch)
step 1660,loss=1.14 (2262.4 examples/sec;0.057 sec/batch)
step 1670,loss=1.01 (2186.7 examples/sec;0.059 sec/batch)
step 1680,loss=1.13 (2198.6 examples/sec;0.058 sec/batch)
step 1690,loss=1.37 (1924.4 examples/sec;0.067 sec/batch)
step 1700,loss=1.22 (2150.9 examples/sec;0.060 sec/batch)
step 1710,loss=1.18 (1990.6 examples/sec;0.064 sec/batch)
step 1720,loss=1.18 (2101.6 examples/sec;0.061 sec/batch)
step 1730,loss=1.16 (2018.2 examples/sec;0.063 sec/batch)
step 1740,loss=1.12 (2276.1 examples/sec;0.056 sec/batch)
step 1750,loss=1.11 (2262.9 examples/sec;0.057 sec/batch)
step 1760,loss=1.28 (2058.3 examples/sec;0.062 sec/batch)
step 1770,loss=1.09 (1963.6 examples/sec;0.065 sec/batch)
step 1780,loss=1.09 (2119.2 examples/sec;0.060 sec/batch)
step 1790,loss=1.29 (2060.0 examples/sec;0.062 sec/batch)
step 1800,loss=1.21 (2236.5 examples/sec;0.057 sec/batch)
step 1810,loss=1.27 (2656.1 examples/sec;0.048 sec/batch)
step 1820,loss=1.11 (2102.5 examples/sec;0.061 sec/batch)
step 1830,loss=1.16 (2113.5 examples/sec;0.061 sec/batch)
step 1840,loss=1.28 (2052.3 examples/sec;0.062 sec/batch)
step 1850,loss=1.10 (2241.3 examples/sec;0.057 sec/batch)
step 1860,loss=1.14 (2123.6 examples/sec;0.060 sec/batch)
step 1870,loss=1.14 (2127.8 examples/sec;0.060 sec/batch)
step 1880,loss=1.20 (2103.9 examples/sec;0.061 sec/batch)
step 1890,loss=1.13 (2490.6 examples/sec;0.051 sec/batch)
step 1900,loss=1.02 (2042.8 examples/sec;0.063 sec/batch)
step 1910,loss=1.08 (2153.0 examples/sec;0.059 sec/batch)
step 1920,loss=1.15 (2095.7 examples/sec;0.061 sec/batch)
step 1930,loss=1.08 (2403.8 examples/sec;0.053 sec/batch)
step 1940,loss=1.02 (2528.2 examples/sec;0.051 sec/batch)
step 1950,loss=1.08 (1917.7 examples/sec;0.067 sec/batch)
step 1960,loss=1.20 (2038.6 examples/sec;0.063 sec/batch)
step 1970,loss=1.21 (1843.0 examples/sec;0.069 sec/batch)
step 1980,loss=1.15 (2391.4 examples/sec;0.054 sec/batch)
step 1990,loss=1.14 (1956.8 examples/sec;0.065 sec/batch)
step 2000,loss=1.37 (2146.8 examples/sec;0.060 sec/batch)
step 2010,loss=1.18 (2120.6 examples/sec;0.060 sec/batch)
step 2020,loss=1.24 (2002.6 examples/sec;0.064 sec/batch)
step 2030,loss=1.14 (2161.2 examples/sec;0.059 sec/batch)
step 2040,loss=1.15 (2055.7 examples/sec;0.062 sec/batch)
step 2050,loss=1.05 (2187.5 examples/sec;0.059 sec/batch)
step 2060,loss=1.17 (2297.3 examples/sec;0.056 sec/batch)
step 2070,loss=1.03 (2052.5 examples/sec;0.062 sec/batch)
step 2080,loss=1.44 (2065.2 examples/sec;0.062 sec/batch)
step 2090,loss=1.20 (2112.0 examples/sec;0.061 sec/batch)
step 2100,loss=1.08 (1922.0 examples/sec;0.067 sec/batch)
step 2110,loss=1.04 (2001.5 examples/sec;0.064 sec/batch)
step 2120,loss=1.26 (1987.5 examples/sec;0.064 sec/batch)
step 2130,loss=1.09 (2150.5 examples/sec;0.060 sec/batch)
step 2140,loss=1.06 (2118.7 examples/sec;0.060 sec/batch)
step 2150,loss=1.07 (1989.6 examples/sec;0.064 sec/batch)
step 2160,loss=0.96 (2563.6 examples/sec;0.050 sec/batch)
step 2170,loss=1.08 (2181.8 examples/sec;0.059 sec/batch)
step 2180,loss=0.95 (2136.1 examples/sec;0.060 sec/batch)
step 2190,loss=1.16 (2130.0 examples/sec;0.060 sec/batch)
step 2200,loss=1.09 (2095.2 examples/sec;0.061 sec/batch)
step 2210,loss=1.00 (2264.8 examples/sec;0.057 sec/batch)
step 2220,loss=1.24 (2557.3 examples/sec;0.050 sec/batch)
step 2230,loss=1.21 (2118.9 examples/sec;0.060 sec/batch)
step 2240,loss=1.05 (2104.3 examples/sec;0.061 sec/batch)
step 2250,loss=1.21 (2011.3 examples/sec;0.064 sec/batch)
step 2260,loss=1.08 (2187.4 examples/sec;0.059 sec/batch)
step 2270,loss=1.04 (1962.4 examples/sec;0.065 sec/batch)
step 2280,loss=1.30 (2629.2 examples/sec;0.049 sec/batch)
step 2290,loss=1.09 (2012.2 examples/sec;0.064 sec/batch)
step 2300,loss=1.34 (2269.2 examples/sec;0.056 sec/batch)
step 2310,loss=1.09 (2696.2 examples/sec;0.047 sec/batch)
step 2320,loss=1.11 (2310.0 examples/sec;0.055 sec/batch)
step 2330,loss=1.04 (2120.9 examples/sec;0.060 sec/batch)
step 2340,loss=1.10 (2052.5 examples/sec;0.062 sec/batch)
step 2350,loss=0.91 (1990.1 examples/sec;0.064 sec/batch)
step 2360,loss=1.00 (2338.7 examples/sec;0.055 sec/batch)
step 2370,loss=1.17 (2199.6 examples/sec;0.058 sec/batch)
step 2380,loss=1.03 (2192.5 examples/sec;0.058 sec/batch)
step 2390,loss=1.09 (2046.2 examples/sec;0.063 sec/batch)
step 2400,loss=1.13 (2308.2 examples/sec;0.055 sec/batch)
step 2410,loss=1.06 (2087.1 examples/sec;0.061 sec/batch)
step 2420,loss=1.10 (2253.6 examples/sec;0.057 sec/batch)
step 2430,loss=1.01 (2014.1 examples/sec;0.064 sec/batch)
step 2440,loss=1.05 (2025.8 examples/sec;0.063 sec/batch)
step 2450,loss=1.15 (1995.2 examples/sec;0.064 sec/batch)
step 2460,loss=1.15 (2057.0 examples/sec;0.062 sec/batch)
step 2470,loss=1.05 (1928.0 examples/sec;0.066 sec/batch)
step 2480,loss=1.08 (1889.6 examples/sec;0.068 sec/batch)
step 2490,loss=0.98 (2085.1 examples/sec;0.061 sec/batch)
step 2500,loss=0.89 (2428.0 examples/sec;0.053 sec/batch)
step 2510,loss=0.88 (2297.4 examples/sec;0.056 sec/batch)
step 2520,loss=1.12 (1794.2 examples/sec;0.071 sec/batch)
step 2530,loss=1.03 (2090.1 examples/sec;0.061 sec/batch)
step 2540,loss=1.30 (2170.9 examples/sec;0.059 sec/batch)
step 2550,loss=1.14 (2184.6 examples/sec;0.059 sec/batch)
step 2560,loss=1.11 (2095.0 examples/sec;0.061 sec/batch)
step 2570,loss=1.19 (2561.0 examples/sec;0.050 sec/batch)
step 2580,loss=1.38 (2041.3 examples/sec;0.063 sec/batch)
step 2590,loss=1.07 (2202.0 examples/sec;0.058 sec/batch)
step 2600,loss=1.06 (2274.1 examples/sec;0.056 sec/batch)
step 2610,loss=1.34 (1913.6 examples/sec;0.067 sec/batch)
step 2620,loss=1.19 (2135.1 examples/sec;0.060 sec/batch)
step 2630,loss=1.24 (1980.0 examples/sec;0.065 sec/batch)
step 2640,loss=0.85 (1976.0 examples/sec;0.065 sec/batch)
step 2650,loss=1.07 (2016.8 examples/sec;0.063 sec/batch)
step 2660,loss=1.09 (1983.2 examples/sec;0.065 sec/batch)
step 2670,loss=1.03 (2092.1 examples/sec;0.061 sec/batch)
step 2680,loss=1.00 (1834.6 examples/sec;0.070 sec/batch)
step 2690,loss=1.16 (2150.2 examples/sec;0.060 sec/batch)
step 2700,loss=1.13 (2011.9 examples/sec;0.064 sec/batch)
step 2710,loss=0.95 (2169.8 examples/sec;0.059 sec/batch)
step 2720,loss=1.18 (1954.5 examples/sec;0.065 sec/batch)
step 2730,loss=1.06 (2228.8 examples/sec;0.057 sec/batch)
step 2740,loss=1.13 (2212.3 examples/sec;0.058 sec/batch)
step 2750,loss=1.01 (2078.9 examples/sec;0.062 sec/batch)
step 2760,loss=1.05 (1933.5 examples/sec;0.066 sec/batch)
step 2770,loss=1.11 (2083.0 examples/sec;0.061 sec/batch)
step 2780,loss=1.12 (1904.6 examples/sec;0.067 sec/batch)
step 2790,loss=1.12 (2192.2 examples/sec;0.058 sec/batch)
step 2800,loss=1.22 (2391.4 examples/sec;0.054 sec/batch)
step 2810,loss=1.03 (2024.6 examples/sec;0.063 sec/batch)
step 2820,loss=1.20 (2089.3 examples/sec;0.061 sec/batch)
step 2830,loss=0.88 (2105.0 examples/sec;0.061 sec/batch)
step 2840,loss=1.05 (1986.3 examples/sec;0.064 sec/batch)
step 2850,loss=1.08 (2079.6 examples/sec;0.062 sec/batch)
step 2860,loss=0.90 (2164.1 examples/sec;0.059 sec/batch)
step 2870,loss=1.27 (2095.6 examples/sec;0.061 sec/batch)
step 2880,loss=0.94 (2130.7 examples/sec;0.060 sec/batch)
step 2890,loss=0.88 (2058.5 examples/sec;0.062 sec/batch)
step 2900,loss=1.10 (2304.7 examples/sec;0.056 sec/batch)
step 2910,loss=1.09 (2163.3 examples/sec;0.059 sec/batch)
step 2920,loss=0.98 (2059.3 examples/sec;0.062 sec/batch)
step 2930,loss=0.98 (2036.9 examples/sec;0.063 sec/batch)
step 2940,loss=1.19 (2116.8 examples/sec;0.060 sec/batch)
step 2950,loss=1.12 (1966.9 examples/sec;0.065 sec/batch)
step 2960,loss=0.97 (2055.3 examples/sec;0.062 sec/batch)
step 2970,loss=0.99 (1952.7 examples/sec;0.066 sec/batch)
step 2980,loss=0.96 (1929.5 examples/sec;0.066 sec/batch)
step 2990,loss=1.19 (2186.4 examples/sec;0.059 sec/batch)

precision @ 1 =0.710