I defined a generator (a Sequence subclass) for my training data in Keras, but on_epoch_end() is not called at the end of each epoch. What should I do? Thank you in advance.

from pathlib import Path
import math

import numpy as np
from sklearn.utils import shuffle

from tensorflow.keras.utils import Sequence
from keras.utils import np_utils

class ImageSequence(Sequence):
    def __init__(self, x, batch_size=512):
        # x is a pair: [positive samples, negative samples]
        self.x_positive = x[0]
        self.x_negative = x[1]
        self.batch_size = batch_size

    def __getitem__(self, idx):
        hbs = self.batch_size // 2
        # draw half the batch at random from the positive samples
        idx_p = np.random.randint(0, self.x_positive.shape[0], hbs)
        batch_x_positive = self.x_positive[idx_p]
        # draw the other half at random from the negative samples
        idx_n = np.random.randint(0, self.x_negative.shape[0], hbs)
        batch_x_negative = self.x_negative[idx_n]
        #batch_x_negative = self.x_negative[idx*hbs : (idx+1)*hbs]
        batch_x = np.r_[batch_x_positive, batch_x_negative]
        batch_y = np.r_[np.ones(len(batch_x_positive)), np.zeros(len(batch_x_negative))]
        return batch_x, batch_y

    def __len__(self):
        return math.ceil(2 * len(self.x_negative) / self.batch_size)

    def _shuffle(self):
        # reshuffle the negative samples; expected to run at the end of every epoch
        self.x_negative = shuffle(self.x_negative)

    def on_epoch_end(self):
        self._shuffle()


data_gen = ImageSequence([train_positive, train_negative], batch_size=BATCH_SIZE)

history = model.fit_generator(
    generator=data_gen,
    use_multiprocessing=True,
    validation_data=(x_valid, y_valid),
    steps_per_epoch=2 * len(train_positive) / BATCH_SIZE, 
    epochs=30,
    verbose=2,
    callbacks=[])
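
A minimal way to check whether the epoch-end hooks fire at all might look like the sketch below. The DebugImageSequence subclass, the epoch_end_logger name, and the print messages are only illustrative debug additions I am describing here, not part of the actual training code:

from tensorflow.keras.callbacks import LambdaCallback

# Debug-only subclass: if the Sequence hook is invoked, its print should appear once per epoch
class DebugImageSequence(ImageSequence):
    def on_epoch_end(self):
        print("ImageSequence.on_epoch_end() called")
        super().on_epoch_end()

# Model-level callback for comparison: this one is driven by the training loop itself
epoch_end_logger = LambdaCallback(
    on_epoch_end=lambda epoch, logs: print("model-level on_epoch_end, epoch", epoch))

debug_gen = DebugImageSequence([train_positive, train_negative], batch_size=BATCH_SIZE)
model.fit_generator(
    generator=debug_gen,
    validation_data=(x_valid, y_valid),
    steps_per_epoch=int(2 * len(train_positive) / BATCH_SIZE),
    epochs=2,
    verbose=2,
    callbacks=[epoch_end_logger])

Comparing whether both prints, only the callback print, or neither appears at the end of each epoch should narrow down where the hook is being skipped.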

Here is my development environment (I am using Google Colab):

import tensorflow.keras
print(tensorflow.keras.__version__)

2.1.6-tf