1

I have been working on a multivariate time series problem. The dataset has at least 40 different factors. I tried to select only the appropriate features before training the model. I came across a paper called "A Multiattention-Based Supervised Feature Selection Method for Multivariate Time Series". The link to the paper: https://www.hindawi.com/journals/cin/2021/6911192/

The paper looks promising; however, I could not find the implementation of it. I would like to know if anyone has come across a similar paper and knows how to implement the architecture in the paper.

If not, I want to know alternate methods to find only the appropriate features for my multivariate time series before training the model.

1 Answer

0

I believe I implemented this article correctly. The code for it is below:

import numpy as np
import tensorflow as tf
from keras import backend as K

from keras import backend as K

class MTSAttention(tf.keras.layers.Layer):
    """Multi-attention feature-weighting layer for multivariate time series.

    Implements the attention-over-time (AoT) and attention-over-variables
    (AoV) branches described in "A Multiattention-Based Supervised Feature
    Selection Method for Multivariate Time Series" (Comput. Intell.
    Neurosci., 2021). Expects input of shape (batch, timesteps, variables)
    and returns the re-weighted input together with the global weight map.
    """

    def __init__(self, units=128, **kwargs):
        super(MTSAttention, self).__init__(**kwargs)
        # Sub-layers are created lazily in build() once the input shape is known.
        self.attention_T = None
        self.attention_V = None
        self.dense_E_AoT = None
        self.dense_U_AoT = None
        self.dense_E_AoV = None
        self.dense_U_AoV = None
        self.units = units

    def build(self, input_shape):
        # Bottleneck sizes for the two attention branches.
        # NOTE(review): input_shape[1] (timesteps) must be a static integer;
        # a variable-length sequence axis (None) would make int(None * .6)
        # raise here — confirm inputs have a fixed sequence length.
        reduction_size_shape_AoT = int(input_shape[-1] * .4)
        reduction_size_shape_AoV = int(input_shape[1] * .6)

        # AoT branch: compress features, then project back to n_variables.
        self.dense_E_AoT = tf.keras.layers.Dense(
            reduction_size_shape_AoT, trainable=True, activation='tanh',
            use_bias=True, name='dense_E_AoT')
        self.dense_U_AoT = tf.keras.layers.Dense(
            input_shape[-1], trainable=True, use_bias=True, name='dense_U_AoT')

        # AoV branch: operates on the transposed input, projects back to
        # the number of timesteps.
        self.dense_E_AoV = tf.keras.layers.Dense(
            reduction_size_shape_AoV, trainable=True, activation='tanh',
            use_bias=True, name='dense_E_AoV')
        self.dense_U_AoV = tf.keras.layers.Dense(
            input_shape[1], trainable=True, use_bias=True, name='dense_U_AoV')

        super(MTSAttention, self).build(input_shape)

    def AoT_AoV(self, data):
        """Compute attention weights and apply them to ``data``.

        :param data: tensor of shape (batch, timesteps, variables)
        :return: tuple (weighted_data, global_weight_generation)
        """
        # Temporal attention: softmax normalizes over the second-to-last axis.
        AoT = data
        p_AoT = self.dense_E_AoT(AoT)
        n_AoT = self.dense_U_AoT(p_AoT)
        temporal_attention = K.softmax(n_AoT, axis=-2)

        # Variable attention: transpose to (batch, variables, timesteps) first.
        AoV = tf.transpose(data, [0, 2, 1])
        p_AoV = self.dense_E_AoV(AoV)
        n_AoV = self.dense_U_AoV(p_AoV)
        variable_attention = K.softmax(n_AoV, axis=-2)

        # Fuse both attentions into one global weight map, then re-weight
        # the original input with it.
        global_weight_generation = tf.matmul(temporal_attention, variable_attention)
        weighted_data = tf.matmul(global_weight_generation, data)

        return weighted_data, global_weight_generation

    def get_config(self):
        """Return the layer config used for model saving/loading.

        Only serializable constructor arguments belong here. A previous
        duplicate definition of this method stored the Dense sub-layer
        objects in the config, which are not JSON-serializable and would
        have broken model saving; the sub-layers are rebuilt in build()
        and must not be serialized.

        :return: python dictionary with specs to rebuild the layer
        """
        config = super(MTSAttention, self).get_config()
        config.update({'units': self.units})
        return config

    def call(self, x):
        return self.AoT_AoV(x)
Hugofac
  • 53
  • 8