import tensorflow as tf
from tensorflow.keras import layers, models
model = models.Sequential()  # use a distinct name so the imported `models` module is not shadowed
model.add(layers.Conv2D(32, kernel_size=(3, 3), activation='relu', input_shape=(32, 32, 3)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.Flatten())
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(10, activation='softmax'))
model.summary()  # print each layer's output shape and parameter count
a = tf.random.normal([1, 32, 32, 3])
print(a.shape)
out = model(a)
print(out.shape)
Output:
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
conv2d (Conv2D)              (None, 30, 30, 32)        896
_________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 15, 15, 32)        0
_________________________________________________________________
conv2d_1 (Conv2D)            (None, 13, 13, 64)        18496
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 6, 6, 64)          0
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 4, 4, 64)          36928
_________________________________________________________________
flatten (Flatten)            (None, 1024)               0
_________________________________________________________________
dense (Dense)                (None, 64)                65600
_________________________________________________________________
dense_1 (Dense)              (None, 10)                650
=================================================================
Total params: 122,570
Trainable params: 122,570
Non-trainable params: 0
_________________________________________________________________
(1, 32, 32, 3)
(1, 10)
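As a sanity check on the Param # column: a Conv2D layer holds kernel_height * kernel_width * input_channels * filters weights plus one bias per filter, and a Dense layer holds inputs * units weights plus one bias per unit. A small standalone sketch that recomputes the counts printed above:

# Recomputing the parameter counts from the summary by hand.
conv2d   = (3 * 3 * 3)  * 32 + 32   # 896
conv2d_1 = (3 * 3 * 32) * 64 + 64   # 18496
conv2d_2 = (3 * 3 * 64) * 64 + 64   # 36928
dense    = 4 * 4 * 64 * 64 + 64     # Flatten output is 4*4*64 = 1024 features -> 65600
dense_1  = 64 * 10 + 10             # 650
print(conv2d + conv2d_1 + conv2d_2 + dense + dense_1)  # 122570 = Total params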
Single-input, multi-output model: a Sequential model can only describe a linear stack with a single output, so the functional API (models.Model) is used here to expose both the flattened feature map and the softmax classification as outputs.
import tensorflow as tf
from tensorflow.keras import layers, models, Input
input_tensor = Input(shape=(32, 32, 3))
x = layers.Conv2D(32, (3, 3), activation='relu')(input_tensor)
x = layers.MaxPooling2D((2, 2))(x)
x = layers.Conv2D(64, (3, 3), activation='relu')(x)
x = layers.MaxPooling2D((2, 2))(x)
x = layers.Conv2D(64, (3, 3), activation='relu')(x)
output_tensor1 = layers.Flatten()(x)
x = layers.Dense(64, activation='relu')(output_tensor1)
output_tensor2 = layers.Dense(10, activation='softmax')(x)
model = models.Model(inputs=input_tensor, outputs=[output_tensor1, output_tensor2])
model.summary()
a = tf.random.uniform([1, 32, 32, 3])
y1, y2 = model(a)
print(y1.shape)
print(y2.shape)
Output:
Model: "model"
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
input_1 (InputLayer)         [(None, 32, 32, 3)]       0
_________________________________________________________________
conv2d (Conv2D)              (None, 30, 30, 32)        896
_________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 15, 15, 32)        0
_________________________________________________________________
conv2d_1 (Conv2D)            (None, 13, 13, 64)        18496
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 6, 6, 64)          0
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 4, 4, 64)          36928
_________________________________________________________________
flatten (Flatten)            (None, 1024)               0
_________________________________________________________________
dense (Dense)                (None, 64)                65600
_________________________________________________________________
dense_1 (Dense)              (None, 10)                650
=================================================================
Total params: 122,570
Trainable params: 122,570
Non-trainable params: 0
_________________________________________________________________
(1, 1024)
(1, 10)
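Because this model has two outputs, training it requires one loss and one target per output; tf.keras accepts these as a list (or a dict keyed by output layer name), optionally weighted via loss_weights. A minimal sketch of the call shape, where the loss choices, loss weights, and random targets are illustrative assumptions rather than part of the example above:

import numpy as np
# Hypothetical training call for the two-output model; losses, loss_weights,
# and the random targets below are illustrative assumptions only.
model.compile(optimizer='adam',
              loss=['mse', 'sparse_categorical_crossentropy'],
              loss_weights=[0.1, 1.0])
x_train = np.random.uniform(size=(8, 32, 32, 3)).astype('float32')
y_flat  = np.random.uniform(size=(8, 1024)).astype('float32')  # target for output_tensor1 (Flatten)
y_cls   = np.random.randint(0, 10, size=(8,))                  # target for output_tensor2 (softmax)
model.fit(x_train, [y_flat, y_cls], epochs=1, batch_size=4)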