第一步在 Spyder(TensorFlow)中输入以下代码:
# View more python learning tutorial on my Youtube and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial
"""
Please note, this code is only for python 3+. If you are using python 2+, please modify the code accordingly.
"""
from __future__ import print_function
import tensorflow as tf
def add_layer(inputs, in_size, out_size, activation_function=None):
    """Add one fully-connected layer and return its output tensor.

    Args:
        inputs: 2-D input tensor of shape [batch, in_size].
        in_size: number of input features.
        out_size: number of output units.
        activation_function: optional callable applied to W*x + b;
            when None the layer is purely linear.

    Returns:
        Output tensor of shape [batch, out_size].
    """
    # name_scope groups the ops so the graph is readable in TensorBoard.
    with tf.name_scope('layer'):
        with tf.name_scope('weights'):
            Weights = tf.Variable(tf.random_normal([in_size, out_size]), name='W')
        with tf.name_scope('biases'):
            # Small positive initial bias (0.1) helps avoid dead ReLU units.
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
        with tf.name_scope('Wx_plus_b'):
            Wx_plus_b = tf.add(tf.matmul(inputs, Weights), biases)
        if activation_function is None:
            outputs = Wx_plus_b
        else:
            outputs = activation_function(Wx_plus_b)
    return outputs
# Define placeholders for the inputs to the network; the first dimension
# is None so any batch size can be fed at run time.
with tf.name_scope('inputs'):
    xs = tf.placeholder(tf.float32, [None, 1], name='x_input')
    ys = tf.placeholder(tf.float32, [None, 1], name='y_input')

# Hidden layer: 1 input feature -> 10 ReLU units.
l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)
# Output layer: 10 hidden units -> 1 linear output.
prediction = add_layer(l1, 10, 1, activation_function=None)

# Loss: mean (over the batch) of the per-example squared error between
# prediction and the real data.
with tf.name_scope('loss'):
    loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction),
                                        reduction_indices=[1]))
# Minimize the loss with plain gradient descent (learning rate 0.1).
with tf.name_scope('train'):
    train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
sess = tf.Session()

# The old APIs (tf.train.SummaryWriter, tf.initialize_all_variables) only
# exist in tensorflow < 0.12; compute the version test once and reuse it.
_is_pre_012 = (int(tf.__version__.split('.')[0]) < 1
               and int(tf.__version__.split('.')[1]) < 12)

# tf.train.SummaryWriter was deprecated in 0.12; use tf.summary.FileWriter.
if _is_pre_012:
    writer = tf.train.SummaryWriter('logs/', sess.graph)
else:
    writer = tf.summary.FileWriter("logs/", sess.graph)

# tf.initialize_all_variables() is no longer valid (from 2017-03-02)
# when using tensorflow >= 0.12.
if _is_pre_012:
    init = tf.initialize_all_variables()
else:
    init = tf.global_variables_initializer()
sess.run(init)

# Change into the local dir and run this in a terminal:
# $ tensorboard --logdir=logs
第二步运行上面这段代码,一般在这里都能成功,不是问题。运行后会生成logs文件夹,里面有一个以events开头的文件,示意图如下:
第三步安装一个Chrome浏览器,国内浏览器好像都不太行(也许是我能力不行)。不会安装Chrome的直接用360助手安装就可以了。
第四步用管理员身份打开anaconda prompt,在终端输入activate TensorFlow示意图如下:
在这里输入你logs文件夹目录
cd /d E:\deep_learing_python_code\tensorflow_mofan\logs
跳入指定文件目录示意图如下:
第五步接下来就是关键的一步了,单独说出来
在终端中输入tensorboard.exe --logdir=E:\deep_learing_python_code\tensorflow_mofan\logs
或者tensorboard --logdir=E:/deep_learing_python_code/tensorflow_mofan/logs
你可以将tensorboard.exe改成tensorboard,我只是进去了没有看到文件内容。
你也可以不用绝对路径,但那样可能进入不了网站。
终端示意图如下
Ctrl+C不一定能复制到,可能还要自己手动输入到Chrome浏览器中。
复制完后示意图
说明你前面都成功了,恭喜你了进入最后一关了
将http://WIN-20180527QMD:6006
每个人的网址可能不一样,将上一步复制的网址输入到Chrome浏览器中
就可以看到如下:
可能会遇到防火墙,多试几次就可以了。
成功后的路径是这样的
http://win-20180527qmd:6006/#graphs&run=.