栏目分类:
子分类:
返回
名师互学网用户登录
快速导航关闭
当前搜索
当前分类
子分类
实用工具
热门搜索
名师互学网 > IT > 软件开发 > 后端开发 > Python

Keras实现SEnet实验

Python 更新时间: 发布时间: IT归档 最新发布 模块sitemap 名妆网 法律咨询 聚返吧 英语巴士网 伯小乐 网商动力

Keras实现SEnet实验

import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split, KFold
from sklearn.utils import shuffle
from tensorflow.keras.layers import Dense, Conv2D, BatchNormalization, Activation, add, multiply
from tensorflow.keras.layers import AveragePooling2D, Input, Flatten, GlobalAvgPool2D, Reshape
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.models import Model
from tensorflow.keras.regularizers import l2
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping
from scipy.stats import pearsonr
def se_block(input_feature, ration=4):
    """Squeeze-and-Excitation block: channel-wise feature recalibration.

    Args:
        input_feature: 4-D feature map tensor, shape (batch, H, W, C).
        ration: reduction ratio for the bottleneck Dense layer.

    Returns:
        A tensor of the same shape as ``input_feature`` where every channel
        has been scaled by a learned attention weight in (0, 1).
    """
    n_channels = input_feature.shape[-1]
    # Squeeze: global average pooling collapses the H*W spatial grid to a
    # single 1*1*C descriptor per sample, giving each channel global context.
    squeezed = GlobalAvgPool2D()(input_feature)
    squeezed = Reshape((1, 1, n_channels))(squeezed)
    # Excitation: a bottleneck MLP (reduce -> restore) learns a nonlinear
    # per-channel importance score, squashed to (0, 1) by the sigmoid.
    excited = Dense(n_channels // ration, activation='relu',
                    kernel_initializer='he_normal',
                    use_bias=True, bias_initializer='zeros')(squeezed)
    excited = Dense(n_channels, activation='sigmoid',
                    kernel_initializer='he_normal',
                    use_bias=True, bias_initializer='zeros',
                    name='weight')(excited)
    # Reweight: broadcast-multiply the scores back onto the input features.
    return multiply([input_feature, excited])

# build net
def SimpleNet(input_shape, filter_len):
    """Build a small regression CNN with one SE-augmented convolution block.

    Args:
        input_shape: shape of one input sample, e.g. (100, 4, 1).
        filter_len: height of the convolution kernel; the kernel spans the
            full width of 4 (one-hot nucleotide channels).

    Returns:
        An uncompiled ``Model`` mapping the input to a single linear output.
    """
    X_input = Input(input_shape)
    # Conv: 16 filters of size (filter_len, 4), stride 1, L2 weight decay.
    X = Conv2D(16, kernel_size=(filter_len, 4), strides=1, padding='valid',
               kernel_initializer='glorot_normal', kernel_regularizer=l2(1e-4))(X_input)
    # Normalize the feature maps (channels axis) before the nonlinearity.
    X = BatchNormalization(axis=3, name='bn_conv1')(X)
    X = Activation('relu')(X)
    # SE attention branch merged back with a residual-style add.
    y = se_block(X)
    X = add([X, y])
    X = AveragePooling2D(pool_size=(2, 1))(X)
    y = Flatten()(X)
    # Single linear unit: this network performs regression.
    outputs = Dense(1, activation='linear', kernel_initializer='glorot_normal')(y)
    model = Model(inputs=X_input, outputs=outputs)
    # BUG FIX: model.summary() prints the summary itself and returns None, so
    # print(model.summary()) emitted a spurious "None" line. Call it directly.
    model.summary()
    return model

# load data
def load_data(data_path, label_path):
    """Load features from a .npy file and labels from a header-less CSV,
    then split them into train and independent-test sets.

    Args:
        data_path: path to a .npy array of samples, shape (n, H, W).
        label_path: path to a CSV of n label values, no header row.

    Returns:
        Tuple (X, X_test, Y, Y_test) — an 80/20 train/test split with a
        fixed random seed for reproducibility.
    """
    x = np.load(data_path)
    # Append a trailing channel axis so each sample is (H, W, 1) for Conv2D.
    x = np.expand_dims(x, 3)
    # .values is already an ndarray (the extra np.array() wrapper was
    # redundant); reshape to an (n_samples, 1) column vector.
    y = pd.read_csv(label_path, header=None).values.reshape([x.shape[0], 1])
    X, X_test, Y, Y_test = train_test_split(x, y, test_size=0.2, random_state=123456)
    print(X.shape, Y.shape)  # e.g. (784, 100, 4, 1) (784, 1)
    return X, X_test, Y, Y_test

if __name__ == '__main__':
    data_path = 'sgRNA_980_100bp.npy'
    label_path = 'label.csv'
    X, X_test, Y, Y_test = load_data(data_path, label_path)
    # 10-fold cross-validation on the training split.
    kfold = KFold(n_splits=10, shuffle=True)
    # BUG FIX: pd.Dataframe does not exist (AttributeError at runtime); the
    # correct class name is pd.DataFrame.
    train_cv_result = pd.DataFrame(columns=['filter_len', 'train_pcc'])

    # Sweep candidate filter lengths: kernel sizes 5x4, 7x4, ..., 25x4.
    for index, filter_len in enumerate(range(5, 26, 2)):
        y_test_all = []
        y_pred_all = []
        for train_index, test_index in kfold.split(X, Y):
            x_train, y_train = X[train_index], Y[train_index]  # fold train
            x_test, y_test = X[test_index], Y[test_index]      # fold holdout
            model = SimpleNet(input_shape=(100, 4, 1), filter_len=filter_len)
            model.compile(loss='mse', optimizer=Adam())
            early_stopping = EarlyStopping(monitor='val_loss', patience=10, verbose=0)
            model.fit(x_train, y_train, epochs=100, shuffle=True, batch_size=64,
                      validation_split=0.1, verbose=1, callbacks=[early_stopping])
            y_pred = model.predict(x_test)
            # Pool out-of-fold truths and predictions across all 10 folds.
            y_test_all.extend(y_test)
            y_pred_all.extend(y_pred)
        # Pearson correlation over the pooled out-of-fold predictions.
        pcc_train, _ = pearsonr(np.array(y_test_all).flatten(),
                                np.array(y_pred_all).flatten())
        train_cv_result.loc[index] = [filter_len, pcc_train]
        print('When filter length is %d, the pcc in trainSet is %.3f' % (filter_len, pcc_train))

    # BUG FIX: everything below was indented inside the filter-length loop,
    # so the "best" model was re-selected and retrained once per candidate,
    # using only the partial CV results accumulated so far. It must run
    # exactly once, after the whole sweep.
    # (Series.idxmax takes no useful axis argument here; dropped axis=0.)
    best_length = int(train_cv_result['filter_len'][train_cv_result['train_pcc'].idxmax()])

    # Retrain on the full training split with the winning filter length,
    # then evaluate once on the independent test set.
    model = SimpleNet(input_shape=(100, 4, 1), filter_len=best_length)
    model.compile(loss='mse', optimizer=Adam())
    early_stopping = EarlyStopping(monitor='val_loss', patience=10, verbose=0)
    model.fit(X, Y, epochs=100, shuffle=True, batch_size=64,
              validation_split=0.1, verbose=1, callbacks=[early_stopping])
    Y_pred = model.predict(X_test)
    pcc_test, _ = pearsonr(Y_test.ravel(), Y_pred.ravel())
    print('The pcc value in independent testSet is %.3f' % pcc_test)
    # Inspect the learned SE channel weights (layer named 'weight' in
    # se_block) for the first test sample.
    se_layer_model = Model(inputs=model.input,
                           outputs=model.get_layer('weight').output)
    se_output = se_layer_model.predict(X_test)
    print("The weights of 16 filters are:")
    print(se_output[0])

        
转载请注明:文章转载自 www.mshxw.com
本文地址:https://www.mshxw.com/it/769934.html
我们一直用心在做
关于我们 文章归档 网站地图 联系我们

版权所有 (c)2021-2022 MSHXW.COM

ICP备案号:晋ICP备2021003244-6号