TensorFlow example and execution process

import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt

train_x = np.linspace(-1, 1, 100)  # 100 evenly spaced x values in [-1, 1]
print(len(train_x), 'len(train_x)') # 100 len(train_x)
print(type(train_x), 'type(train_x)') # <class 'numpy.ndarray'> type(train_x)
print(train_x, 'train_x')
'''
[-1.         -0.97979798 -0.95959596 -0.93939394 -0.91919192 -0.8989899
 -0.87878788 -0.85858586 -0.83838384 -0.81818182 -0.7979798  -0.77777778
 -0.75757576 -0.73737374 -0.71717172 -0.6969697  -0.67676768 -0.65656566
 -0.63636364 -0.61616162 -0.5959596  -0.57575758 -0.55555556 -0.53535354
 -0.51515152 -0.49494949 -0.47474747 -0.45454545 -0.43434343 -0.41414141
 -0.39393939 -0.37373737 -0.35353535 -0.33333333 -0.31313131 -0.29292929
 -0.27272727 -0.25252525 -0.23232323 -0.21212121 -0.19191919 -0.17171717
 -0.15151515 -0.13131313 -0.11111111 -0.09090909 -0.07070707 -0.05050505
 -0.03030303 -0.01010101  0.01010101  0.03030303  0.05050505  0.07070707
  0.09090909  0.11111111  0.13131313  0.15151515  0.17171717  0.19191919
  0.21212121  0.23232323  0.25252525  0.27272727  0.29292929  0.31313131
  0.33333333  0.35353535  0.37373737  0.39393939  0.41414141  0.43434343
  0.45454545  0.47474747  0.49494949  0.51515152  0.53535354  0.55555556
  0.57575758  0.5959596   0.61616162  0.63636364  0.65656566  0.67676768
  0.6969697   0.71717172  0.73737374  0.75757576  0.77777778  0.7979798
  0.81818182  0.83838384  0.85858586  0.87878788  0.8989899   0.91919192
  0.93939394  0.95959596  0.97979798  1.        ] train_x
'''

train_y = 2 * train_x + np.random.randn(*train_x.shape) * 0.3  # y = 2x plus Gaussian noise (std 0.3); no seed is set, so the values below differ on each run
print(train_y, 'train_y')
'''
[-1.97537371 -2.0707564  -2.50659017 -1.74005574 -1.7182739  -1.69568339
 -2.17685749 -1.65533878 -1.81077207 -1.24474538 -1.47408266 -1.78770071
 -2.55034481 -1.09649091 -0.83456965 -1.12748184 -1.60790972 -1.44289216
 -1.11855699 -0.87314642 -1.01221576 -1.42516523 -0.88050722 -1.490482
 -1.72149092 -0.70313579 -0.71962395 -0.98758657 -0.38254856 -0.93820301
 -0.74938492 -0.58497603 -0.87109708 -0.4070504   0.21718465  0.06411439
 -0.10974623 -0.5450655  -0.7198069  -0.36340493 -0.55079501 -0.37930734
 -0.33364772  0.01452697 -0.20080607 -0.1647803  -0.36632996 -0.52541821
 -0.0417619  -0.10118491 -0.26923789  0.23949143  0.15637785  0.03223219
  0.24792483 -0.06720146  0.88348099  0.17352677  0.20654782  0.32752297
 -0.0221242   1.11004173  0.543269    0.77831015  1.12086169  0.22127075
  0.32665351  0.4745935   1.68278184  0.68084667  0.92837247  1.00389364
  0.8130549   0.58524667  1.0982213   1.04425041  0.9826977   1.22104511
  1.74847155  0.93826344  1.4680691   1.68145267  1.52103978  1.9941887
  1.48961483  0.84978128  1.18871248  1.92147679  1.38878515  1.94890224
  1.62084605  1.77141107  1.88397044  1.8813066   1.95174276  1.43820094
  1.89797614  1.85152474  2.05707149  2.17824911] train_y
'''
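As a quick sanity check (not part of the original post), the residuals around the line y = 2x should be roughly zero-mean with a standard deviation close to the 0.3 used above:

noise = train_y - 2 * train_x       # recover the injected noise
print(np.mean(noise), 'noise mean (should be near 0)')
print(np.std(noise), 'noise std (should be near 0.3)')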

plt.plot(train_x, train_y, 'ro', label='Original data')  # scatter the noisy samples as red dots
plt.legend()
plt.show()
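The code above only generates and plots the training data; tensorflow is imported but not used yet. Below is a minimal sketch, not part of the original post, of how a linear model could be defined and executed against this data, assuming the TensorFlow 1.x session API implied by the plain import above (the names X, Y, W, b, cost, train_op are illustrative):

# Sketch only: fit y ≈ W*x + b with gradient descent (TensorFlow 1.x assumed)
X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)
W = tf.Variable(tf.random_normal([1]), name='weight')  # slope, expected to converge near 2
b = tf.Variable(tf.zeros([1]), name='bias')            # intercept, expected to converge near 0

z = tf.multiply(X, W) + b                   # model prediction
cost = tf.reduce_mean(tf.square(Y - z))     # mean squared error
train_op = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(cost)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for epoch in range(20):
        for (x, y) in zip(train_x, train_y):            # one sample per update
            sess.run(train_op, feed_dict={X: x, Y: y})
    print('W:', sess.run(W), 'b:', sess.run(b))

With per-sample updates over 20 epochs, W typically ends up close to the true slope 2 and b close to 0 for data generated as above.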


Reposted from blog.csdn.net/wyx100/article/details/80492918