Week 2 Assignment 1: ValueError: ('Input has undefined rank:', TensorShape(None))

I am running into an error in the test case for my ‘Identity Block’ code.

This is my code for the Identity Block:

# UNQ_C1
# GRADED FUNCTION: identity_block

def identity_block(X, f, filters, training=True, initializer=random_uniform):
    """
    Implementation of the identity block as defined in Figure 4
    
    Arguments:
    X -- input tensor of shape (m, n_H_prev, n_W_prev, n_C_prev)
    f -- integer, specifying the shape of the middle CONV's window for the main path
    filters -- python list of integers, defining the number of filters in the CONV layers of the main path
    training -- True: Behave in training mode
                False: Behave in inference mode
    initializer -- to set up the initial weights of a layer. Equals to random uniform initializer
    
    Returns:
    X -- output of the identity block, tensor of shape (m, n_H, n_W, n_C)
    """
    
    # Retrieve Filters
    F1, F2, F3 = filters
    
    # Save the input value. You'll need this later to add back to the main path. 
    X_shortcut = X
    
    # First component of main path
    X = Conv2D(filters = F1, kernel_size = 1, strides = (1,1), padding = 'valid', kernel_initializer = initializer(seed=0))(X)
    X = BatchNormalization(axis = 3)(X, training = training) # Default axis
    X = Activation('relu')(X)
    
    ### START CODE HERE
    ## Second component of main path (≈3 lines)
    X = Conv2D(filters= F2, kernel_size= (f,f), strides = (1,1), padding = 'same', kernel_initializer = initializer(seed=0))(X)
    X = BatchNormalization(axis = 3)(X, training = training)
    X = Activation('relu')(X)

    ## Third component of main path (≈2 lines)
    X = Conv2D(filters= F3, kernel_size= 1, strides= (1,1), padding ='valid', kernel_initializer = initializer(seed=0))
    X = BatchNormalization(axis=3)(X, training=training)
    
    ## Final step: Add shortcut value to main path, and pass it through a RELU activation (≈2 lines)
    X = Add()([X, X_shortcut])
    X = Activation('relu')(X)
    ### END CODE HERE

    return X

And this is the test code:

np.random.seed(1)
X1 = np.ones((1, 4, 4, 3)) * -1
X2 = np.ones((1, 4, 4, 3)) * 1
X3 = np.ones((1, 4, 4, 3)) * 3

X = np.concatenate((X1, X2, X3), axis = 0).astype(np.float32)

A3 = identity_block(X, f=2, filters=[4, 4, 3],
                   initializer=lambda seed=0:constant(value=1),
                   training=False)

print('\033[1mWith training=False\033[0m\n')
A3np = A3.numpy()


print(np.around(A3.numpy()[:,(0,-1),:,:].mean(axis = 3), 5))


resume = A3np[:,(0,-1),:,:].mean(axis = 3)
print(resume[1, 1, 0])

print('\n\033[1mWith training=True\033[0m\n')
np.random.seed(1)
A4 = identity_block(X, f=2, filters=[3, 3, 3],
                   initializer=lambda seed=0:constant(value=1),
                   training=True)
print(np.around(A4.numpy()[:,(0,-1),:,:].mean(axis = 3), 5))

public_tests.identity_block_test(identity_block)

The error traceback I receive is:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-33-0f36a8c31713> in <module>
      8 A3 = identity_block(X, f=2, filters=[4, 4, 3],
      9                    initializer=lambda seed=0:constant(value=1),
---> 10                    training=False)
     11 
     12 # print('\033[1mWith training=False\033[0m\n')

<ipython-input-15-d442657c0e4d> in identity_block(X, f, filters, training, initializer)
     37     ## Third component of main path (≈2 lines)
     38     X = Conv2D(filters= F3, kernel_size= 1, strides= (1,1), padding ='valid', kernel_initializer = initializer(seed=0))
---> 39     X = BatchNormalization(axis=3)(X, training=training)
     40 
     41     ## Final step: Add shortcut value to main path, and pass it through a RELU activation (≈2 lines)

/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer.py in __call__(self, *args, **kwargs)
    980       with ops.name_scope_v2(name_scope):
    981         if not self.built:
--> 982           self._maybe_build(inputs)
    983 
    984         with ops.enable_auto_cast_variables(self._compute_dtype_object):

/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer.py in _maybe_build(self, inputs)
   2641         # operations.
   2642         with tf_utils.maybe_init_scope(self):
-> 2643           self.build(input_shapes)  # pylint:disable=not-callable
   2644       # We must set also ensure that the layer is marked as built, and the build
   2645       # shape is stored since user defined build functions may not be calling

/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/layers/normalization.py in build(self, input_shape)
    285     input_shape = tensor_shape.TensorShape(input_shape)
    286     if not input_shape.ndims:
--> 287       raise ValueError('Input has undefined rank:', input_shape)
    288     ndims = len(input_shape)
    289 

ValueError: ('Input has undefined rank:', TensorShape(None))


Please tell me where I am going wrong.

In your batch normalization layers, remove the axis argument.
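In other words, something like this (equivalent behaviour here, since the default axis=-1 already points at the channels axis for this NHWC input):

X = BatchNormalization()(X, training = training)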

You never pass anything to this layer. Look at line 38 in your traceback: the Conv2D in the third component of the main path is created but never called on X, so X becomes a Keras layer object instead of a tensor, and the BatchNormalization that follows sees an input with no defined shape — hence "Input has undefined rank".
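For reference, a minimal sketch of that third component with the Conv2D actually applied to X (everything else in the block unchanged):

    ## Third component of main path (≈2 lines)
    X = Conv2D(filters = F3, kernel_size = 1, strides = (1,1), padding = 'valid', kernel_initializer = initializer(seed=0))(X)  # note the trailing (X): the layer is called on the tensor
    X = BatchNormalization(axis = 3)(X, training = training)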