Hi @german.mesa,
There is no change in the grader’s output when I implemented that change either. it still reads out “Can’t compile the student’s code. load_data() got an unexpected keyword argument ‘path’”.
This is the code I have been submitting repeatedly trying to get a different grader output:
GRADED FUNCTION: train_mnist_conv
def train_mnist_conv():
# Please write your code only where you are indicated.
# please do not remove model fitting inline comments.
# YOUR CODE STARTS HERE
class myCallback(tf.keras.callbacks.Callback):
def on_epoch_end(self, epoch, logs={}):
if(logs.get('accuracy') is not None and logs.get('accuracy')>0.998):
print("good enough accuracy")
self.model.stop_training =True
# YOUR CODE ENDS HERE
mnist = tf.keras.datasets.mnist
(training_images, training_labels), (test_images, test_labels) = mnist.load_data(path=path)
# YOUR CODE STARTS HERE
mnist = tf.keras.datasets.fashion_mnist
(x_train, y_train),(x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 28, 28, 1)
callbacks = myCallback()
x_train = x_train/255.0
x_test = x_test/255.0
# YOUR CODE ENDS HERE
model = tf.keras.models.Sequential([
# YOUR CODE STARTS HERE
tf.keras.layers.Conv2D(64, (3,3), activation ='relu', input_shape=(28, 28, 1)),
tf.keras.layers.MaxPooling2D(2,2),
tf.keras.layers.Flatten(input_shape=(28,28)),
#tf.keras.layers.Dense(512, activation=tf.nn.relu),
#tf.keras.layers.Dense(256, activation=tf.nn.relu),
tf.keras.layers.Dense(64, activation=tf.nn.relu),
tf.keras.layers.Dense(10, activation=tf.nn.softmax)
# YOUR CODE ENDS HERE
])
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
# model fitting
history = model.fit(x_train, y_train, epochs=20, callbacks=[callbacks]
# YOUR CODE STARTS HERE
# YOUR CODE ENDS HERE
)
# model fitting
return history.epoch, history.history['acc'][-1]
Thank you for your continued support!
Ian Kuhl