#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File : 自实现一个线性回归.py
# @Author: 赵路仓
# @Date : 2020/4/12
# @Desc :
# @Contact : 398333404@qq.com

import os

import tensorflow as tf


def linear_regression():
    """
    Implement a linear regression from scratch
    :return:
    """
    # Name scopes
    with tf.variable_scope("prepared_data"):
        # Prepare the data
        x = tf.random_normal(shape=[100, 1], name="Feature")
        y_true = tf.matmul(x, [[0.08]]) + 0.7
        # x = tf.constant([[1.0], [2.0], [3.0]])
        # y_true = tf.constant([[0.78], [0.86], [0.94]])

    with tf.variable_scope("create_model"):
        # 2. Build the model
        # Define the model's variable parameters
        weights = tf.Variable(initial_value=tf.random_normal(shape=[1, 1], name="Weights"))
        bias = tf.Variable(initial_value=tf.random_normal(shape=[1, 1], name="Bias"))
        y_predict = tf.matmul(x, weights) + bias

    with tf.variable_scope("loss_function"):
        # 3. Build the loss function
        error = tf.reduce_mean(tf.square(y_predict - y_true))

    with tf.variable_scope("optimizer"):
        # 4. Optimize the loss
        optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(error)

    # Collect variables
    tf.summary.scalar("error", error)
    tf.summary.histogram("weights", weights)
    tf.summary.histogram("bias", bias)
    # Merge the summaries
    merged = tf.summary.merge_all()

    # Create a Saver object
    saver = tf.train.Saver()

    # Explicitly initialize the variables
    init = tf.global_variables_initializer()

    # Open a session
    with tf.Session() as sess:
        # Initialize the variables
        sess.run(init)

        # Create the event file
        file_writer = tf.summary.FileWriter("E:/tmp/linear", graph=sess.graph)

        # print(x.eval())
        # print(y_true.eval())

        # Inspect the model parameters right after initialization
        print("Model parameters before training: weight %f, bias %f" % (weights.eval(), bias.eval()))

        # Start training
        for i in range(1000):
            sess.run(optimizer)
            print("Step %d: weight %f, bias %f, loss %f" % (i + 1, weights.eval(), bias.eval(), error.eval()))
            # Run the merged-summary op
            summary = sess.run(merged)
            # Write the variables of each iteration to the event file
            file_writer.add_summary(summary, i)
            # Save the model on the last step
            if i == 999:
                saver.save(sess, "./tmp/model/my_linear.ckpt")

        # # Load the model
        # if os.path.exists("./tmp/model/checkpoint"):
        #     saver.restore(sess, "./tmp/model/my_linear.ckpt")

        print("Final parameters: weight %f, bias %f, loss %f" % (weights.eval(), bias.eval(), error.eval()))

        # Predict for a new input
        pre = [[0.5]]
        prediction = tf.matmul(pre, weights) + bias
        print(prediction.eval())

    return None


if __name__ == "__main__":
    linear_regression()
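The script writes the merged summaries and the graph to the event-file directory E:/tmp/linear (the path used above). To view the error curve and the weight/bias histograms, launch TensorBoard with tensorboard --logdir=E:/tmp/linear and open the address it prints (typically http://localhost:6006) in a browser.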
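The code above targets the TensorFlow 1.x graph-and-session API (tf.variable_scope, tf.Session, and so on). If only TensorFlow 2.x is installed, the same style of code can usually still be run through the tf.compat.v1 shim. The condensed sketch below illustrates that idea; it assumes a TensorFlow 2.x environment and is not part of the original script.

import tensorflow.compat.v1 as tf  # assumption: TensorFlow 2.x installed; exposes the 1.x-style API

tf.disable_eager_execution()  # restore graph mode so Session-based code works

# Same model as above, condensed: y = 0.08 * x + 0.7 with randomly generated features
x = tf.random_normal(shape=[100, 1], name="Feature")
y_true = tf.matmul(x, [[0.08]]) + 0.7
weights = tf.Variable(tf.random_normal(shape=[1, 1]), name="Weights")
bias = tf.Variable(tf.random_normal(shape=[1, 1]), name="Bias")
error = tf.reduce_mean(tf.square(tf.matmul(x, weights) + bias - y_true))
train_op = tf.train.GradientDescentOptimizer(learning_rate=0.01).minimize(error)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(1000):
        sess.run(train_op)
    print("weight %f, bias %f, loss %f" % (weights.eval(), bias.eval(), error.eval()))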
That concludes this walkthrough of implementing a simple linear regression example in Python. For more material on implementing linear regression in Python, please follow the other related articles on 服务器之家!
Original article: https://www.cnblogs.com/zlc364624/p/12686695.html