Binary video classification with ResNet50 + LSTM in Keras

#python #keras #classification #lstm #resnet

Question:

I have a video dataset in which each video has dimensions 10 (frames) × 224 (width) × 224 (height) × 3 (channels). I wrote a generator function that yields 10 frames per video together with one label per set of 10 frames, and groups the data into batches of 32. I am trying to perform binary video classification. Below is my model -

import tensorflow as tf
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.layers import (Input, GlobalAveragePooling2D, TimeDistributed,
                                     LSTM, LeakyReLU, Dropout, Dense)
from tensorflow.keras.models import Model

# Input: batches of 10 RGB frames of size 224x224
inputs = Input((10, 224, 224, 3))

# Frozen ResNet50 backbone used as a per-frame feature extractor
resnet = ResNet50(include_top=False, input_shape=(224, 224, 3), weights='imagenet')
for layer in resnet.layers:
    layer.trainable = False

output = GlobalAveragePooling2D()(resnet.output)
cnn = Model(inputs=resnet.input, outputs=output)

# Apply the CNN to every frame, then aggregate the sequence with an LSTM
encoded_frames = TimeDistributed(cnn)(inputs)
lstm = LSTM(2048)(encoded_frames)
out_leaky = LeakyReLU()(lstm)
out_drop = Dropout(0.4)(out_leaky)
out_dense = Dense(2048, activation='relu')(out_drop)  # input_dim is not needed in the functional API
out_1 = Dense(1, activation='sigmoid')(out_dense)

model = Model(inputs=[inputs], outputs=out_1)
model.compile(loss='binary_crossentropy', optimizer='adam',
              metrics=['accuracy', tf.keras.metrics.AUC()])
model.summary()
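
For reference, with the default Keras floatx the model above expects float32 input of shape (None, 10, 224, 224, 3) and produces a single sigmoid output per clip. A quick sanity check on the compiled model (a minimal sketch using only the objects defined above):

# Print the symbolic input/output tensors of the compiled model.
# Their shapes and dtypes are what Keras will try to match against
# whatever the data generator yields during model.fit.
print(model.inputs)   # [<tf.Tensor ... shape=(None, 10, 224, 224, 3) dtype=float32>]
print(model.outputs)  # [<tf.Tensor ... shape=(None, 1) dtype=float32>]
print(model.input_shape, model.output_shape)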

 

When I try to train the model, I get the following error -

 Epoch 1/6
      2/Unknown - 1s 714ms/step - loss: 1.7860 - accuracy: 0.8750 - auc_2: 0.5000
---------------------------------------------------------------------------
InvalidArgumentError                      Traceback (most recent call last)
<ipython-input-30-b53f019f79ae> in <module>
     10 history = 0
     11 with tf.device('/GPU:0'):
---> 12     history = model.fit(data_gen(train.loc[:, 'video_path'], id_label_map, batch_size, seq_len), epochs=6, validation_data = data_gen(valid.loc[:, 'video_path'], id_label_map, batch_size, seq_len=10), callbacks=[checkpoint])

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, max_queue_size, workers, use_multiprocessing, **kwargs)
    791         max_queue_size=max_queue_size,
    792         workers=workers,
--> 793         use_multiprocessing=use_multiprocessing)
    794 
    795   def evaluate(self,

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/keras/engine/training_v2.py in fit(self, model, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, max_queue_size, workers, use_multiprocessing, **kwargs)
    340                 mode=ModeKeys.TRAIN,
    341                 training_context=training_context,
--> 342                 total_epochs=epochs)
    343             cbks.make_logs(model, epoch_logs, training_result, ModeKeys.TRAIN)
    344 

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/keras/engine/training_v2.py in run_one_epoch(model, iterator, execution_function, dataset_size, batch_size, strategy, steps_per_epoch, num_samples, mode, training_context, total_epochs)
    126         step=step, mode=mode, size=current_batch_size) as batch_logs:
    127       try:
--> 128         batch_outs = execution_function(iterator)
    129       except (StopIteration, errors.OutOfRangeError):
    130         # TODO(kaftan): File bug about tf function and errors.OutOfRangeError?

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/keras/engine/training_v2_utils.py in execution_function(input_fn)
     96     # `numpy` translates Tensors to values in Eager mode.
     97     return nest.map_structure(_non_none_constant_value,
---> 98                               distributed_function(input_fn))
     99 
    100   return execution_function

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py in __call__(self, *args, **kwds)
    566         xla_context.Exit()
    567     else:
--> 568       result = self._call(*args, **kwds)
    569 
    570     if tracing_count == self._get_tracing_count():

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py in _call(self, *args, **kwds)
    597       # In this case we have created variables on the first call, so we run the
    598       # defunned version which is guaranteed to never create variables.
--> 599       return self._stateless_fn(*args, **kwds)  # pylint: disable=not-callable
    600     elif self._stateful_fn is not None:
    601       # Release the lock early so that multiple threads can perform the call

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in __call__(self, *args, **kwargs)
   2361     with self._lock:
   2362       graph_function, args, kwargs = self._maybe_define_function(args, kwargs)
-> 2363     return graph_function._filtered_call(args, kwargs)  # pylint: disable=protected-access
   2364 
   2365   @property

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in _filtered_call(self, args, kwargs)
   1609          if isinstance(t, (ops.Tensor,
   1610                            resource_variable_ops.BaseResourceVariable))),
-> 1611         self.captured_inputs)
   1612 
   1613   def _call_flat(self, args, captured_inputs, cancellation_manager=None):

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in _call_flat(self, args, captured_inputs, cancellation_manager)
   1690       # No tape is watching; skip to running the function.
   1691       return self._build_call_outputs(self._inference_function.call(
-> 1692           ctx, args, cancellation_manager=cancellation_manager))
   1693     forward_backward = self._select_forward_and_backward_functions(
   1694         args,

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in call(self, ctx, args, cancellation_manager)
    543               inputs=args,
    544               attrs=("executor_type", executor_type, "config_proto", config),
--> 545               ctx=ctx)
    546         else:
    547           outputs = execute.execute_with_cancellation(

/opt/conda/lib/python3.6/site-packages/tensorflow_core/python/eager/execute.py in quick_execute(op_name, num_outputs, inputs, attrs, ctx, name)
     65     else:
     66       message = e.message
---> 67     six.raise_from(core._status_to_exception(e.code, message), None)
     68   except TypeError as e:
     69     keras_symbolic_tensors = [

/opt/conda/lib/python3.6/site-packages/six.py in raise_from(value, from_value)

InvalidArgumentError: 2 root error(s) found.
  (0) Invalid argument:  TypeError: `generator` yielded an element that could not be converted to the expected type. The expected type was float64, but the yielded element was [array([[[[0.55294118, 0.57647059, 0.7254902 ],
         [0.55294118, 0.57647059, 0.7254902 ],
         [0.54901961, 0.57254902, 0.72156863],
         ...,
         [0.6       , 0.63137255, 0.67843137],
         [0.59215686, 0.62352941, 0.67058824],
         [0.58823529, 0.61960784, 0.66666667]],

        [[0.55294118, 0.57647059, 0.7254902 ],
         [0.55294118, 0.57647059, 0.7254902 ],
         [0.54901961, 0.57254902, 0.72156863],
         ...,
         [0.59607843, 0.62745098, 0.6745098 ],
         [0.58823529, 0.61960784, 0.66666667],
         [0.58039216, 0.61568627, 0.6627451 ]],

        [[0.55294118, 0.57647059, 0.7254902 ],
         [0.55294118, 0.57647059, 0.7254902 ],
         [0.54901961, 0.57254902, 0.72156863],
         ...,
         [0.59215686, 0.62352941, 0.67058824],
         [0.57647059, 0.61176471, 0.65882353],
         [0.57254902, 0.60784314, 0.65490196]],

        ...,

        [[0.2627451 , 0.23137255, 0.27058824],
         [0.2627451 , 0.23137255, 0.27058824],
         [0.26666667, 0.23529412, 0.2745098 ],
         ...,
         [0.57254902, 0.51764706, 0.29411765],
         [0.57254902, 0.51764706, 0.29411765],
         [0.57254902, 0.51764706, 0.29411765]],

        [[0.25882353, 0.22745098, 0.26666667],
         [0.2627451 , 0.23137255, 0.27058824],
         [0.26666667, 0.23529412, 0.2745098 ],
         ...,
         [0.57647059, 0.52156863, 0.29803922],
         [0.57647059, 0.52156863, 0.29803922],
         [0.57647059, 0.52156863, 0.29803922]],

        [[0.25882353, 0.22745098, 0.26666667],
         [0.25882353, 0.22745098, 0.26666667],
         [0.2627451 , 0.23137255, 0.27058824],
         ...,
         [0.57647059, 0.52156863, 0.29803922],
         [0.57647059, 0.52156863, 0.29803922],
         [0.57647059, 0.52156863, 0.29803922]]],


       [[[0.56862745, 0.59607843, 0.73333333],
         [0.56862745, 0.59607843, 0.73333333],
         [0.56470588, 0.59215686, 0.72941176],
         ...,
         [0.62352941, 0.65882353, 0.70588235],
         [0.60392157, 0.63529412, 0.68235294],
         [0.59607843, 0.62745098, 0.6745098 ]],

        [[0.56862745, 0.59607843, 0.73333333],
         [0.56862745, 0.59607843, 0.73333333],
         [0.56470588, 0.59215686, 0.72941176],
         ...,
         [0.62352941, 0.65882353, 0.70588235],
         [0.60392157, 0.63529412, 0.68235294],
         [0.59215686, 0.62352941, 0.67058824]],

        [[0.56862745, 0.59607843, 0.73333333],
         [0.56862745, 0.59607843, 0.73333333],
         [0.56470588, 0.59215686, 0.72941176],
         ...,
         [0.61568627, 0.65098039, 0.69803922],
         [0.59607843, 0.62745098, 0.6745098 ],
         [0.58823529, 0.61960784, 0.66666667]],

        ...,

        [[0.27843137, 0.24705882, 0.30980392],
         [0.2745098 , 0.24313725, 0.30196078],
         [0.26666667, 0.23529412, 0.29411765],
         ...,
         [0.57254902, 0.54901961, 0.29019608],
         [0.57647059, 0.55294118, 0.29411765],
         [0.57647059, 0.55294118, 0.29411765]],

        [[0.2745098 , 0.24313725, 0.30196078],
         [0.27058824, 0.23921569, 0.29803922],
         [0.26666667, 0.23529412, 0.29411765],
         ...,
         [0.57254902, 0.54901961, 0.29019608],
         [0.57647059, 0.55294118, 0.29411765],
         [0.57647059, 0.55294118, 0.29411765]],

        [[0.2745098 , 0.24313725, 0.30196078],
         [0.27058824, 0.23921569, 0.29803922],
         [0.26666667, 0.23529412, 0.29411765],
         ...,
         [0.57254902, 0.54901961, 0.29019608],
         [0.57647059, 0.55294118, 0.29411765],
         [0.57647059, 0.55294118, 0.29411765]]],


       [[[0.57647059, 0.59607843, 0.74509804],
         [0.57254902, 0.59215686, 0.74117647],
         [0.56862745, 0.58823529, 0.7372549 ],
         ...,
         [0.65098039, 0.67058824, 0.72156863],
         [0.61568627, 0.63529412, 0.68627451],
         [0.6       , 0.61960784, 0.67058824]],

        [[0.57647059, 0.59607843, 0.74509804],
         [0.57647059, 0.59607843, 0.74509804],
         [0.57254902, 0.59215686, 0.74117647],
         ...,
         [0.65098039, 0.67058824, 0.72156863],
         [0.61568627, 0.63529412, 0.68627451],
         [0.6       , 0.61960784, 0.67058824]],

        [[0.58039216, 0.6       , 0.75294118],
         [0.57647059, 0.59607843, 0.74509804],
         [0.57254902, 0.59215686, 0.74117647],
         ...,
         [0.64705882, 0.66666667, 0.71764706],
         [0.61568627, 0.63529412, 0.68627451],
         [0.6       , 0.61960784, 0.67058824]],

        ...,

        [[0.28235294, 0.22352941, 0.31372549],
         [0.2745098 , 0.21568627, 0.30196078],
         [0.26666667, 0.20784314, 0.29411765],
         ...,
         [0.56862745, 0.5372549 , 0.28627451],
         [0.56470588, 0.53333333, 0.28235294],
         [0.56470588, 0.53333333, 0.28235294]],

        [[0.27843137, 0.21960784, 0.30980392],
         [0.2745098 , 0.21568627, 0.30196078],
         [0.2627451 , 0.20392157, 0.29019608],
         ...,
         [0.56862745, 0.5372549 , 0.28627451],
         [0.56470588, 0.53333333, 0.28235294],
         [0.56078431, 0.5254902 , 0.27843137]],

        [[0.27843137, 0.21960784, 0.30980392],
         [0.2745098 , 0.21568627, 0.30196078],
         [0.2627451 , 0.20392157, 0.29019608],
         ...,
         [0.56862745, 0.5372549 , 0.28627451],
         [0.56470588, 0.53333333, 0.28235294],
         [0.56078431, 0.5254902 , 0.27843137]]],


       ...,


       [[[0.68235294, 0.69019608, 0.78431373],
         [0.6745098 , 0.68235294, 0.77254902],
         [0.65490196, 0.66666667, 0.75686275],
         ...,
         [0.50196078, 0.52941176, 0.64705882],
         [0.56470588, 0.59607843, 0.71372549],
         [0.6       , 0.63137255, 0.74901961]],

        [[0.68235294, 0.69019608, 0.78431373],
         [0.6745098 , 0.68235294, 0.77254902],
         [0.65490196, 0.66666667, 0.75686275],
         ...,
         [0.50196078, 0.52941176, 0.64705882],
         [0.56470588, 0.59607843, 0.71372549],
         [0.59607843, 0.62745098, 0.74509804]],

        [[0.68235294, 0.69019608, 0.78431373],
         [0.6745098 , 0.68235294, 0.77254902],
         [0.6627451 , 0.67058824, 0.76078431],
         ...,
         [0.50196078, 0.52941176, 0.64705882],
         [0.56078431, 0.59215686, 0.70980392],
         [0.59215686, 0.62352941, 0.7372549 ]],

        ...,

        [[0.25098039, 0.17647059, 0.37254902],
         [0.25098039, 0.17647059, 0.37254902],
         [0.24705882, 0.17254902, 0.36862745],
         ...,
         [0.55686275, 0.5254902 , 0.25490196],
         [0.55686275, 0.5254902 , 0.25490196],
         [0.55686275, 0.5254902 , 0.25490196]],

        [[0.25098039, 0.17647059, 0.37254902],
         [0.25098039, 0.17647059, 0.37254902],
         [0.24705882, 0.17254902, 0.36862745],
         ...,
         [0.55686275, 0.5254902 , 0.25490196],
         [0.55686275, 0.5254902 , 0.25490196],
         [0.55686275, 0.5254902 , 0.25490196]],

        [[0.25098039, 0.17647059, 0.37254902],
         [0.25098039, 0.17647059, 0.37254902],
         [0.24705882, 0.17254902, 0.36862745],
         ...,
         [0.55686275, 0.5254902 , 0.25490196],
         [0.55686275, 0.5254902 , 0.25490196],
         [0.55686275, 0.5254902 , 0.25490196]]],


       [[[0.70980392, 0.72941176, 0.78431373],
         [0.70588235, 0.7254902 , 0.78039216],
         [0.69803922, 0.71372549, 0.77254902],
         ...,
         [0.43529412, 0.4627451 , 0.59215686],
         [0.43921569, 0.46666667, 0.59607843],
         [0.43921569, 0.46666667, 0.59607843]],

        [[0.70980392, 0.72941176, 0.78431373],
         [0.70588235, 0.7254902 , 0.78039216],
         [0.69803922, 0.71372549, 0.77254902],
         ...,
         [0.43529412, 0.4627451 , 0.59215686],
         [0.43921569, 0.46666667, 0.59607843],
         [0.43921569, 0.46666667, 0.59607843]],

        [[0.70588235, 0.7254902 , 0.78039216],
         [0.70196078, 0.72156863, 0.77647059],
         [0.6941 [Op:__inference_distributed_function_65285]

Function call stack:
distributed_function
 

Here is the code for my data_gen function, for reference. The function returns an array X of shape (32, 10, 224, 224, 3) and Y of shape (32,) -

def data_gen(label_data, id_label_map, batch_size=32, seq_len=10):
    # chunker, frame_extract and train_transform are helper functions defined elsewhere
    while True:
        # shuffle(label_data)
        for batch in chunker(label_data, batch_size):
            X = []
            Y = []
            for x in batch:
                label = id_label_map[os.path.basename(x)]
                frames = []
                for i, frame in enumerate(frame_extract(x)):
                    frames.append(train_transform(frame))
                    if len(frames) == seq_len:
                        break
                frames = np.stack(frames)
                frames = frames[:seq_len]

                X.append(frames)
                Y.append(label)

            X = np.array(X)
            Y = np.array(Y)

            yield X, Y
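
A quick way to inspect what the generator actually yields (a minimal sketch; train, id_label_map, batch_size and seq_len are the same objects used in the model.fit call from the traceback above):

# Pull a single batch from the generator and inspect its types.
# Keras infers the expected output types from the first yielded element,
# so every later batch has to be convertible to the same dtypes.
gen = data_gen(train.loc[:, 'video_path'], id_label_map, batch_size, seq_len)
X, Y = next(gen)
print(type(X), X.dtype, X.shape)  # expected: numpy.ndarray, a float dtype, (32, 10, 224, 224, 3)
print(type(Y), Y.dtype, Y.shape)  # expected: numpy.ndarray, an int/float dtype, (32,)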
 

How can I resolve this error?