
NotImplementedError: Layers with arguments in __init__ must override get_config

๋ฌธ์ œ ์›์ธ

The error occurred while trying to save the model through a callback (ModelCheckpoint) during cross-validation training.

import os
from pathlib import Path

from sklearn.model_selection import train_test_split
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import to_categorical

def training(self, fold_var, dataset, epochs, batch_size,
             train_index, test_index, X_data, Y_data, x_data, y_data):
    self.epochs = epochs
    self.batch_size = batch_size
    self.dataset = dataset

    self.X_data = X_data
    self.Y_data = Y_data
    self.x_data = x_data
    self.y_data = y_data

    ### Cross Validation (CV)
    es = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=100)
    self.fold_var = fold_var

    save_folder = ("./models/" + self.dataset + "_models(s=" + str(self.MAX_SENTENCES)
                   + "w=" + str(self.MAX_SENTENCE_LENGTH) + ")")
    # parents=True / exist_ok=True already covers the old os.mkdir fallback
    Path(save_folder).mkdir(parents=True, exist_ok=True)
    self.model_path = os.path.join(save_folder, "model.h5")

    train_val_X, test_X = self.X_data[train_index], self.X_data[test_index]
    train_val_Y, test_Y = self.y_data[train_index], self.y_data[test_index]

    self.nb_classes = len(set(train_val_Y))
    train_val_Y = to_categorical(train_val_Y, self.nb_classes)

    self.train_X, self.val_X, self.train_Y, self.val_Y = train_test_split(
        train_val_X, train_val_Y, test_size=0.1111, random_state=42)

    # self.embedding_matrix = self.load_embedding('word2vec')  # added
    self.model, self.attention_extractor = self.HAN_layer(
        attention_dim=100,
        rnn_dim=50,
        include_dense_batch_normalization=False,
        include_dense_dropout=True,
        nb_dense=1,
        dense_dim=300,
        dense_dropout=0.2)

    # Saving the best full model here is what raises the NotImplementedError:
    # serializing the model calls get_config() on every custom layer.
    # os.path.join also fixes the '/' missing between folder and file name.
    checkpointer = ModelCheckpoint(
        filepath=os.path.join(save_folder, self.get_model_name(self.fold_var)),
        monitor='val_loss',
        verbose=True,
        save_best_only=True,
        mode='min')

    # ======== where the callbacks are used ============
    self.history = self.model.fit(x=[self.train_X],
                                  y=[self.train_Y],
                                  batch_size=self.batch_size,
                                  epochs=self.epochs,
                                  verbose=True,
                                  validation_data=(self.val_X, self.val_Y),
                                  callbacks=[es, checkpointer])

    self.model.load_weights(os.path.join(save_folder, "model_" + str(self.fold_var) + ".h5"))
    length = len(test_Y)
    y_true = test_Y
    y_pred = []
    y_predict = self.model.predict(test_X)
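
For context, the exception comes from Keras itself rather than from the training code above. With the default save_weights_only=False, ModelCheckpoint serializes the entire model, which calls get_config() on every layer; the base Layer.get_config() raises NotImplementedError for any custom layer whose __init__ takes extra arguments. A minimal sketch that reproduces it (the SimpleAttention layer below is a hypothetical stand-in for the custom attention layer inside HAN_layer, not code from this project):

import tensorflow as tf
from tensorflow.keras import layers

class SimpleAttention(layers.Layer):  # hypothetical custom layer
    def __init__(self, attention_dim, **kwargs):  # extra __init__ argument
        super().__init__(**kwargs)
        self.attention_dim = attention_dim
        self.score = layers.Dense(attention_dim, activation='tanh')

    def call(self, inputs):
        return self.score(inputs)
    # no get_config() override -> full-model saving will fail

model = tf.keras.Sequential([
    layers.InputLayer(input_shape=(10,)),
    SimpleAttention(attention_dim=100),
    layers.Dense(2, activation='softmax'),
])
model.save('model.h5')  # NotImplementedError: Layers with arguments
                        # in `__init__` must override `get_config`

Passing save_weights_only=True to ModelCheckpoint sidesteps the error, because saving only the weights never needs get_config; the proper fix, below, is to make the layer serializable.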

Solution

The fix is to add a get_config(self) method and put the parameters that need to be restored into the returned config.
get_config tells Keras how to re-create the layer, which is what makes it (and any model containing it) savable and reloadable.

from tensorflow import keras

class Hierarchical_attention_networks():

    def __init__(self, lr, op, MAX_SENTENCES, MAX_SENTENCE_LENGTH,
                 embedding_dim, max_nb_words, tokenizer, number_of_class):
        self.MAX_SENTENCES = MAX_SENTENCES
        self.MAX_SENTENCE_LENGTH = MAX_SENTENCE_LENGTH
        self.tokenizer = tokenizer
        self.embedding_dim = embedding_dim
        self.max_nb_words = max_nb_words
        self.embedding_matrix = self.load_embedding('word2vec')
        self.nb_classes = number_of_class
        self.lr = lr
        if op == 'Adagrad':
            self.optimizer = keras.optimizers.Adagrad(lr=self.lr, epsilon=1e-6)
        elif op == 'Adadelta':
            self.optimizer = keras.optimizers.Adadelta(lr=self.lr, epsilon=1e-6)
        elif op == 'Adam':
            self.optimizer = keras.optimizers.Adam(lr=self.lr)
        elif op == 'RMSprop':
            # was mistakenly keras.optimizers.Adadelta in the original
            self.optimizer = keras.optimizers.RMSprop(lr=self.lr, rho=0.9, epsilon=1e-6)

    # ======== this part is the fix ============
    def get_config(self):
        # super().get_config() returns the base config (name, trainable, ...);
        # this assumes the class ultimately derives from tf.keras.layers.Layer
        config = super().get_config()
        config.update({
            'MAX_SENTENCES': self.MAX_SENTENCES,
            'MAX_SENTENCE_LENGTH': self.MAX_SENTENCE_LENGTH,
            'embedding_dim': self.embedding_dim,
            'max_nb_words': self.max_nb_words,
            'tokenizer': self.tokenizer,
        })
        return config
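
To see the whole pattern end to end, here is a minimal, self-contained sketch (the AttentionPool layer and its attention_dim argument are illustrative, not from the post) of a custom layer whose get_config makes a full save/load round trip work:

import tensorflow as tf
from tensorflow.keras import layers

class AttentionPool(layers.Layer):  # hypothetical example layer
    def __init__(self, attention_dim, **kwargs):
        super().__init__(**kwargs)
        self.attention_dim = attention_dim
        self.score = layers.Dense(attention_dim, activation='tanh')

    def call(self, inputs):
        return self.score(inputs)

    def get_config(self):
        # start from the base config (name, dtype, trainable, ...)
        config = super().get_config()
        # record every extra __init__ argument so Keras can rebuild the layer
        config.update({'attention_dim': self.attention_dim})
        return config

model = tf.keras.Sequential([
    layers.InputLayer(input_shape=(10,)),
    AttentionPool(attention_dim=8),
    layers.Dense(2, activation='softmax'),
])
model.save('attention_pool.h5')  # no NotImplementedError anymore

# custom classes still have to be registered when loading
restored = tf.keras.models.load_model(
    'attention_pool.h5', custom_objects={'AttentionPool': AttentionPool})

One caveat: everything returned by get_config is serialized to JSON when the model is saved, so non-serializable objects such as a fitted tokenizer (stored in the config above) can make saving fail again; it is usually safer to keep only plain values in the config and reattach heavy objects after loading.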

References

https://stackoverflow.com/questions/58678836/notimplementederror-layers-with-arguments-in-init-must-override-get-conf

https://lsjsj92.tistory.com/598

https://vhrehfdl.tistory.com/60