微信公众号搜"智元新知"关注
微信扫一扫可直接关注哦!

创建模型时,TypeError:“ NoneType”对象不可迭代,并且AttributeError:“ NoneType”对象不具有属性“ _inbound_nodes”

如何解决创建模型时,TypeError:“ NoneType”对象不可迭代,并且AttributeError:“ NoneType”对象不具有属性“ _inbound_nodes”

出于研究目的,我正在尝试撰写一篇论文。为了创建模型的一部分,我创建了一个类。该代码在Google colab上运行良好,但是当我尝试在Windows的spyder上运行相同的代码时,这些错误就会出现。我不确定代码或环境中是否有错误

Keras版本:2.3.1 Python版本:3.6(原文写作 2.3.6,应为笔误;Keras 2.3.1 需要 Python 3.x)

我原本的目标是:在(外)乘积之后再与初始特征图 X 相乘,这时就出现了标题中提到的 TypeError。

备注:代码在Colab上运行良好 这是我的代码

    import keras
    # NOTE: fixed import name — keras.layers exports "BatchNormalization"
    # (capital N); the original "Batchnormalization" raises ImportError
    # before any model code runs.
    from keras.layers import Input,Conv2D,GlobalAveragePooling2D,BatchNormalization,Reshape,Dot,Multiply,Concatenate,Add,Conv1D,Activation
    from keras.models import Model
    import keras.backend as K
    from keras.layers import Layer

    # Symbolic input feature map: (batch, 28, 28, 512).
    X = Input(shape = (28,28,512))


#Adding attention maps

class MyLayer(Layer):
    """Trainable mixing layer: contracts its input against a learned (1, 28)
    weight vector via a Dot product.

    NOTE(review): the weight shape (1, 28) is hard-coded; presumably it should
    be derived from ``input_shape`` (the decomposition rank) — confirm before
    reuse with other feature-map sizes.
    """

    def __init__(self,**kwargs):
        super(MyLayer,self).__init__(**kwargs)

    def build(self,input_shape):
        # Create a trainable weight variable for this layer.
        self._x = self.add_weight(name='Lambda',shape=(1,28),initializer='uniform',trainable=True)
        super(MyLayer,self).build(input_shape)  # Be sure to call this at the end

    def call(self,x):
        # NOTE(review): instantiating a Dot *layer* inside call() creates a
        # fresh, untracked layer on every invocation; in Keras 2.3 the usual
        # choice inside a custom layer is the backend op (e.g. K.batch_dot).
        # Verify graph tracing and serialization before relying on this.
        A = x
        result = Dot(axes = (-1,-1))([self._x,A])
        return result

    def compute_output_shape(self,input_shape):
        # NOTE(review): for a single (non-list) input, ``input_shape`` is one
        # shape tuple, so ``input_shape[0]`` is just the batch dimension —
        # this looks wrong; confirm whether the full output shape of the Dot
        # contraction should be returned instead.
        return input_shape[0]

class Low_Rank_Tensor_Module(Layer):
    """Builds a low-rank tensor attention block around a 4-D feature map X.

    NOTE(review): although this subclasses ``Layer``, it is used as a
    functional-graph *builder*: ``low_rank_cons()`` wires stock Keras layers
    together and returns a symbolic tensor.  Several raw backend calls below
    (``K.expand_dims``) and raw tensor slicing produce tensors that are NOT
    layer outputs; when ``Model(...)`` later walks the graph it hits such a
    tensor, whose producing "layer" is None, and fails with
    "'NoneType' object has no attribute '_inbound_nodes'" — the exact error
    reported in this question.  Wrapping each backend op / slice in a
    ``keras.layers.Lambda`` is the standard fix.
    """

    def __init__(self,X,**kwargs):
        # X is expected to be a symbolic (batch, height, width, channels)
        # tensor — e.g. the (None, 28, 28, 512) Input defined above.
        super(Low_Rank_Tensor_Module,self).__init__(**kwargs)
        self.X = X
        self.batch_size,self.height,self.width,self.channels = X.shape
        # Decomposition rank is tied to the spatial height (28 here).
        self.rank = self.height
        # Saved for the residual connection in low_rank_cons().
        self.X_shortcut = X

    def pool(self,pool_axis ):
        """Pool self.X into a descriptor for one axis (1=height, 2=width,
        3=channels).

        NOTE(review): the ``pool_axis == 2`` branch reads the local ``X``
        before it is ever assigned (UnboundLocalError) — it presumably should
        start from ``self.X`` and mirror the axis-1 branch.  The ``else``
        branch only prints and then falls through to ``return X`` with ``X``
        still unbound; it should raise ValueError instead.  Also, the
        Reshape targets do not preserve the element count of a
        (height, width, channels) map — confirm the intended reshapes
        against the paper.
        """
        if pool_axis == 1:
            X = Reshape((self.channels,self.height))(self.X)
            X = GlobalAveragePooling2D()(X)
            X = Reshape((1,1,self.height))(X)
        elif pool_axis == 2:
            # NOTE(review): ``X`` is undefined here — should be ``self.X``.
            X = Reshape((self.channels,self.width))(X)
        elif pool_axis ==3:
            X = GlobalAveragePooling2D()(self.X)
            X = Reshape((1,self.channels))(X)
        else:
            # NOTE(review): should raise; printing leaves X unbound below.
            print('Pool_Axis Value should be 1,2 or 3')

        return X

    def axis_tgm(self,axis ):
        """Apply a 1x1 conv + sigmoid gate to the pooled axis descriptor."""
        # batch_size,height,width,channels = X.shape
        # rank = 28    
        X = self.pool(axis)
        if axis == 1:
            X = Conv2D(self.height,kernel_size=(1,1),padding='valid')(X)
        if axis == 2:
            # NOTE(review): Conv2D requires kernel_size — this call raises
            # TypeError; it should match the axis-1 branch: kernel_size=(1,1).
            X = Conv2D(self.width,padding='valid')(X)    
        if axis == 3:
            # NOTE(review): same missing kernel_size as above.
            X = Conv2D(self.channels,padding='valid')(X)
        
        X = Activation('sigmoid')(X)

        return X

    def tgm(self):
        """Tensor-generation module: stacks ``rank`` gated axis descriptors."""
        rank = int(self.rank)

        height_feature,width_feature,channel_feature = [],[],[]
        # NOTE(review): K.expand_dims is a raw backend op, not a Layer, so
        # its result carries no _inbound_nodes — this is the direct cause of
        # the AttributeError when Model() traces the graph.  Each call should
        # be wrapped, e.g. Lambda(lambda t: K.expand_dims(t))(...).
        H_mod = K.expand_dims(self.axis_tgm(1))
        W_mod = K.expand_dims(self.axis_tgm(2))
        C_mod = K.expand_dims(self.axis_tgm(3))

        height_feature = H_mod
        width_feature = W_mod
        channel_feature = C_mod

        for i in range(rank-1):
            H = K.expand_dims(self.axis_tgm(1))
            height_feature = Concatenate()([height_feature,H])
            
            W = K.expand_dims(self.axis_tgm(2))
            width_feature = Concatenate()([width_feature,W])
            
            C = K.expand_dims(self.axis_tgm(3))
            channel_feature = Concatenate()([channel_feature,C])

        # NOTE(review): width_feature is built above but not returned, yet
        # low_rank_cons() reads a (then-undefined) ``width_feature`` —
        # presumably all three features should be returned here.
        return height_feature,channel_feature 

#TENSOR Reconstruction Module Starting from here
    
    def low_rank_cons(self):
        """Tensor reconstruction: rebuild rank-1 attention maps and apply
        them residually to the saved shortcut feature map."""
        height_feature,channel_feature = self.tgm()
        Attn_Maps = []

        for i in range(self.rank):
            h_f = height_feature[:,:,i]
            # NOTE(review): ``width_feature`` is undefined in this scope
            # (NameError) — tgm() would need to return it.  Raw slicing of
            # Keras tensors here also bypasses the layer graph (same
            # _inbound_nodes failure mode); wrap the slices in Lambda layers.
            w_f = width_feature[:,i]
            two_d = Dot(axes=1)([h_f,w_f])
            attention_maps = Dot(axes = 1)([two_d,channel_feature[:,i]])
            attention_maps = Reshape((self.height,self.channels))(attention_maps)
            Attn_Maps.append(K.expand_dims(attention_maps))
            
        Attn_Maps = Concatenate()(Attn_Maps)
        # NOTE(review): add_weight during graph construction (outside
        # build()) creates a weight no layer owns; a small custom layer
        # (cf. MyLayer above, which appears to be an attempt at exactly
        # this) is the usual home for this learned mixing vector.
        add_lambda = self.add_weight(name = 'lambda',shape = (1,self.rank),initializer= 'uniform')
        attention_map = Dot(axes = -1)([add_lambda,Attn_Maps])
        print(attention_map.shape)
        print(self.X_shortcut.shape)
        # attention_map = MyLayer()(Attn_Maps)
        modified_features = Multiply()([attention_map,self.X_shortcut])
        modified_features = Add()([self.X_shortcut,modified_features])
        # NOTE(review): Conv2D is missing required kernel_size here too.
        modified_features = Conv2D(512,padding= 'same')(modified_features)

        
        print(modified_features.shape)
        
        return modified_features


# Build the attention-augmented feature map from the Input X defined above.
# NOTE(review): this is the call chain behind the reported traceback —
# Model(X, modified_features) fails with "'NoneType' object has no attribute
# '_inbound_nodes'" because the graph between X and modified_features
# contains raw backend ops (K.expand_dims, tensor slicing) that are not
# Keras layers; wrap those upstream ops in Lambda layers to fix it.
modified_features = Low_Rank_Tensor_Module(X).low_rank_cons()
# modified_features = Reshape((28,512))(modified_features)
print(modified_features.shape)
# modified_features = Concatenate()([ modified_features,X])
# modified_features = Concatenate(axis = -1)([modified_features,self.X])
# attention_map = MyLayer()(modified_features)    
    
    
model = Model(X,modified_features)

错误

runfile('C:/Users/akash/Thesis_Project/low_rank_tensor_mod.py',wdir='C:/Users/akash/Thesis_Project')
(None,512)
(None,512)
Traceback (most recent call last):

  File "C:\Users\akash\Thesis_Project\low_rank_tensor_mod.py",line 145,in <module>
    model = Model(X,modified_features)

  File "C:\Users\akash\anaconda3\envs\keras-gpu\lib\site-packages\keras\legacy\interfaces.py",line 91,in wrapper
    return func(*args,**kwargs)

  File "C:\Users\akash\anaconda3\envs\keras-gpu\lib\site-packages\keras\engine\network.py",line 94,in __init__
    self._init_graph_network(*args,**kwargs)

  File "C:\Users\akash\anaconda3\envs\keras-gpu\lib\site-packages\keras\engine\network.py",line 241,in _init_graph_network
    self.inputs,self.outputs)

  File "C:\Users\akash\anaconda3\envs\keras-gpu\lib\site-packages\keras\engine\network.py",line 1434,in _map_graph_network
    tensor_index=tensor_index)

  File "C:\Users\akash\anaconda3\envs\keras-gpu\lib\site-packages\keras\engine\network.py",line 1421,in build_map
    node_index,tensor_index)

  File "C:\Users\akash\anaconda3\envs\keras-gpu\lib\site-packages\keras\engine\network.py",line 1393,in build_map
    node = layer._inbound_nodes[node_index]

AttributeError: 'NoneType' object has no attribute '_inbound_nodes'

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。