# Linear-regression demo: fit y = x1 + x2 with a single 2x1 weight matrix,
# trained by plain gradient descent on a mean-squared-error loss.
import tensorflow as tf
import numpy as np

SEED = 23455

rdm = np.random.RandomState(seed=SEED)
# Features: 32 samples, 2 inputs each, uniform in [0, 1).
x = rdm.rand(32, 2)
# Labels: x1 + x2 plus uniform noise in [-0.05, 0.05)
# ([0,1)/10 = [0,0.1); [0,0.1) - 0.05 = [-0.05,0.05)).
y_ = [[x1 + x2 + (rdm.rand() / 10.0 - 0.05)] for (x1, x2) in x]
x = tf.cast(x, dtype=tf.float32)

# Trainable weights (2 inputs -> 1 output); fixed seed for reproducibility.
w1 = tf.Variable(tf.random.normal([2, 1], stddev=1, seed=1))

epochs = 15000  # total training steps (renamed from `epoch`, which the loop variable shadowed)
lr = 0.002      # learning rate

for epoch in range(epochs):
    with tf.GradientTape() as tape:
        y = tf.matmul(x, w1)
        loss_mse = tf.reduce_mean(tf.square(y_ - y))
    grads = tape.gradient(loss_mse, w1)
    w1.assign_sub(lr * grads)  # w1 -= lr * dL/dw1

    if epoch % 500 == 0:
        print("After %d training steps,w1 is " % (epoch))
        print(w1.numpy(), "\n")

print("Final w1 is: ", w1.numpy())
# Linear-regression demo with an asymmetric business loss: under-predicting
# is penalized at PROFIT per unit while over-predicting costs only COST per
# unit, so gradient descent drives the model to systematically over-predict
# (w1 converges above [1, 1]).
import tensorflow as tf
import numpy as np

SEED = 23455
COST = 1     # penalty per unit of over-prediction (y > y_)
PROFIT = 99  # penalty per unit of under-prediction (y < y_)

rdm = np.random.RandomState(SEED)
# Features: 32 samples, 2 inputs each, uniform in [0, 1).
x = rdm.rand(32, 2)
# Labels: x1 + x2 plus uniform noise in [-0.05, 0.05)
# ([0,1)/10 = [0,0.1); [0,0.1) - 0.05 = [-0.05,0.05)).
y_ = [[x1 + x2 + (rdm.rand() / 10.0 - 0.05)] for (x1, x2) in x]
x = tf.cast(x, dtype=tf.float32)

# Trainable weights (2 inputs -> 1 output); fixed seed for reproducibility.
w1 = tf.Variable(tf.random.normal([2, 1], stddev=1, seed=1))

epochs = 10000  # total training steps (renamed from `epoch`, which the loop variable shadowed)
lr = 0.002      # learning rate

for epoch in range(epochs):
    with tf.GradientTape() as tape:
        y = tf.matmul(x, w1)
        # Piecewise-linear loss: COST*(y - y_) where the model over-predicts,
        # PROFIT*(y_ - y) where it under-predicts.
        loss = tf.reduce_sum(
            tf.where(tf.greater(y, y_), (y - y_) * COST, (y_ - y) * PROFIT)
        )
    grads = tape.gradient(loss, w1)
    w1.assign_sub(lr * grads)  # w1 -= lr * dL/dw1

    if epoch % 500 == 0:
        print("After %d training steps,w1 is " % (epoch))
        print(w1.numpy(), "\n")

print("Final w1 is: ", w1.numpy())