@aowal
Created July 3, 2017 17:39
Neural network for small datasets with derivative info supplied.
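The script below fits a one-hidden-layer network to sin(x) from five random sample points. When derinfo is True, the targets also include the analytic derivative cos(x), and the network's input gradient (obtained with tf.gradients) is trained to match it alongside the function values. The code uses the TensorFlow 1.x graph API.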
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
# Whether or not to include derivative information in the targets
derinfo = False
learning_rate = 0.001
training_epochs = 15000
display_step = 100
np.random.seed(3)
xmin = -10.0
xmax = 10.0
X = np.random.random((5, 1)) * (xmax - xmin) + xmin   # 5 random training inputs in [xmin, xmax]
x_test = np.linspace(xmin, xmax, 101).reshape(-1, 1)  # dense evaluation grid
# Target function and its analytic derivative
def f(x):
    return np.sin(x)

def dfdx(x):
    return np.cos(x)

if derinfo:
    # Targets hold [f(x), f'(x)]; column 1 is the derivative
    Y = np.zeros((len(X), 2))
    y_test = np.zeros((len(x_test), 2))
    Y[:, 1] = dfdx(X[:, 0])
    y_test[:, 1] = dfdx(x_test[:, 0])
else:
    Y = np.zeros((len(X), 1))
    y_test = np.zeros((len(x_test), 1))
# Column 0 always holds the function values
Y[:, 0] = f(X[:, 0])
y_test[:, 0] = f(x_test[:, 0])
x_train = X
y_train = Y
batch_size = len(X)  # full-batch training on the 5 points
# tf Graph input
x = tf.placeholder("float", [None, 1], name='x')
if derinfo:
    y = tf.placeholder("float", [None, 2])
else:
    y = tf.placeholder("float", [None, 1])
# One hidden layer with n_1 sigmoid units
n_1 = 200
W_0 = tf.Variable(0.01 * tf.random_normal([1, n_1]))
W_2 = tf.Variable(0.01 * tf.random_normal([n_1, 1]))
b_0 = tf.Variable(0.01 * tf.random_normal([n_1]))
b_2 = tf.Variable(0.01 * tf.random_normal([1]))
act = tf.nn.sigmoid
def neuralnet(x):
    h1 = act(tf.matmul(x, W_0) + b_0)
    pred = tf.matmul(h1, W_2) + b_2
    return pred

pred = neuralnet(x)
pred2 = tf.gradients(pred, x)[0]  # derivative of the network output w.r.t. its input
if derinfo:
    # Predict [f(x), f'(x)] so the loss also penalizes derivative mismatch
    pred = tf.concat([pred, pred2], 1)
# Mean squared error plus a small L2 penalty on the weights
cost = tf.reduce_mean((pred - y)**2) + 1e-6 * (tf.reduce_mean(W_0**2) + tf.reduce_mean(W_2**2))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
# Initializing the variables
init = tf.global_variables_initializer()
# Launch the graph
with tf.Session() as sess:
    sess.run(init)
    # Training cycle
    for epoch in range(training_epochs):
        avg_cost = 0.
        total_batch = int(len(x_train) / batch_size)  # = 1 here (full batch)
        # Loop over all batches
        for i in range(total_batch):
            batch_x = x_train[i * batch_size:(i + 1) * batch_size]
            batch_y = y_train[i * batch_size:(i + 1) * batch_size]
            # Run optimization op (backprop) and cost op (to get loss value)
            _, c = sess.run([optimizer, cost], feed_dict={x: batch_x, y: batch_y})
            # Compute average loss
            avg_cost += c / total_batch
        # Display logs per epoch step
        if epoch % display_step == 0:
            print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
    print("Optimization Finished!")

    # Evaluate on the dense grid and plot prediction vs. truth
    y_t = sess.run(pred, feed_dict={x: x_test})
    plt.plot(x_test[:, 0], y_t[:, 0])
    plt.plot(x_test[:, 0], y_test[:, 0])
    plt.legend(["prediction", "true function"])
    plt.plot(x_train[:, 0], y_train[:, 0], '.')
    plt.show()
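
For readers on TensorFlow 2.x, where tf.placeholder and tf.Session no longer exist, a minimal sketch of the same derivative-supervision idea using tf.GradientTape might look like the following (the model architecture and the train_step name are illustrative, not part of the original gist):

import tensorflow as tf  # TF 2.x

model = tf.keras.Sequential([
    tf.keras.layers.Dense(200, activation="sigmoid"),
    tf.keras.layers.Dense(1),
])
opt = tf.keras.optimizers.Adam(learning_rate=0.001)

@tf.function
def train_step(x, y, dy):
    with tf.GradientTape() as outer:
        with tf.GradientTape() as inner:
            inner.watch(x)               # x is a plain tensor, so watch it explicitly
            pred = model(x)
        dpred = inner.gradient(pred, x)  # network derivative d(pred)/dx
        # Match both the function values and the supplied derivatives
        loss = tf.reduce_mean((pred - y) ** 2) + tf.reduce_mean((dpred - dy) ** 2)
    grads = outer.gradient(loss, model.trainable_variables)
    opt.apply_gradients(zip(grads, model.trainable_variables))
    return loss

Feeding the five training points with y = sin(X) and dy = cos(X) would reproduce the derinfo=True setup above.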