AttributeError: module 'tensorflow_core.compat.v1' has no attribute 'contrib'

# Create Placeholders for the input images (batches of 28x28) and the
# integer class labels.
x = tf.placeholder(dtype=tf.float32, shape=[None, 28, 28])
y = tf.placeholder(dtype=tf.int32, shape=[None])

# Flatten the input data to [batch, 784] so it can feed a dense layer
images_flat = tf.reshape(x, [-1, 28*28])

# Fully connected layer 
# NOTE(review): tf.contrib was removed in TensorFlow 2.x — this is the line
# that raises the AttributeError; use tf.layers.dense / keras layers instead.
logits = tf.contrib.layers.fully_connected(images_flat, 62, tf.nn.relu)

# Define loss function (mean sparse cross-entropy over the batch)
loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=logits))

# Define optimizer
train_op = tf.train.AdamOptimizer(learning_rate=0.001).minimize(loss)

# Convert logits to label indexes
correct_pred = tf.argmax(logits, 1)

# Define an accuracy metric
# NOTE(review): this averages the predicted class indices themselves, not a
# comparison against y — it is not a true accuracy. Should be
# tf.reduce_mean(tf.cast(tf.equal(correct_pred, tf.cast(y, tf.int64)), tf.float32)).
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))

# Print the accuracy
# NOTE(review): in TF1 graph mode this prints the Tensor object, not a value;
# a Session.run with a feed_dict is needed to evaluate it.
print("Accuracy: ", accuracy)

I am getting an AttributeError when running the code in a Jupyter Notebook. The error states that the tensorflow_core.compat.v1 module has no attribute contrib. How can I fix this issue?

Replace `tf.contrib.layers.fully_connected` with `tf.layers.dense`, which provides the same functionality (`tf.contrib` was removed in TensorFlow 2.x; under TF 2.x use `tf.compat.v1.layers.dense`, since the v1 `tf.layers` API is also deprecated there). The updated code would look like this:

# Create Placeholders for the input images (batches of 28x28) and the
# integer class labels.
x = tf.placeholder(dtype=tf.float32, shape=[None, 28, 28])
y = tf.placeholder(dtype=tf.int32, shape=[None])

# Flatten the input data to [batch, 784] so it can feed a dense layer
images_flat = tf.reshape(x, [-1, 28*28])

# Fully connected layer producing one logit per class (62 classes).
# tf.layers.dense replaces the removed tf.contrib.layers.fully_connected.
logits = tf.layers.dense(images_flat, 62, activation=tf.nn.relu)

# Define loss function (mean sparse cross-entropy over the batch)
loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=logits))

# Define optimizer
train_op = tf.train.AdamOptimizer(learning_rate=0.001).minimize(loss)

# Convert logits to predicted label indexes (tf.argmax returns int64)
correct_pred = tf.argmax(logits, 1)

# Define an accuracy metric: the fraction of predictions that match the
# labels. (Averaging the raw argmax indices, as before, is not an accuracy —
# the indices must be compared against y first.)
accuracy = tf.reduce_mean(
    tf.cast(tf.equal(correct_pred, tf.cast(y, tf.int64)), tf.float32))

# Print the accuracy tensor. In TF1 graph mode this shows the Tensor object;
# evaluate it with Session.run(accuracy, feed_dict={...}) to get a value.
print("Accuracy: ", accuracy)