XLNet sentence vectors (embedding) and sentence similarity computation

1. XLNet overview

       XLNet is another major advance in NLP after the BERT pretrained model. It absorbs the ingredients that made BERT successful: the bidirectional language model (the autoencoding Masked-LM objective), the pretrain + fine-tune paradigm (with a Transformer feature extractor), training on large-scale corpora, and sentence-level representations. On top of that it introduces the Permutation Language Model (PLM, realized with two-stream self-attention and attention masks; you can think of it as shuffling the factorization order of the sentence on top of BERT-style masking), and it handles long-range dependencies with Transformer-XL (segment-level recurrence with relative positional encoding) plus relative segment encodings. Backed by an even larger training corpus, it achieves stronger results than BERT.
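
       To make the PLM idea a bit more concrete, here is a toy sketch (my own illustration, not code from the XLNet paper or any of its implementations) of how a factorization order turns into an attention mask: for a chosen permutation of the positions, each position is only allowed to attend to positions that occur no later than itself in that permutation, so the input tokens never have to be physically shuffled.

import numpy as np

def permutation_attention_mask(perm):
    """perm[k] is the position predicted at factorization step k.
    Returns mask[i, j] = 1 if position i may attend to position j."""
    n = len(perm)
    step_of = np.empty(n, dtype=int)
    for step, pos in enumerate(perm):
        step_of[pos] = step
    # a position may see every position whose factorization step is not later than its own
    return (step_of[None, :] <= step_of[:, None]).astype(int)

# positions 0..3 predicted in the order 2 -> 0 -> 3 -> 1
print(permutation_attention_mask([2, 0, 3, 1]))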

        Of course, as of today (2019-08-28) Google has not yet released a Chinese XLNet, but the HIT (Harbin Institute of Technology) version came out about seven or eight days ago, which has us commoners cheering: usable, simple, and free is good enough. Address: HIT Chinese XLNet pretrained models

2. XLNet embedding

        First, naturally, let's play with it a bit: XLNet embeddings and sentence similarity. I use keras-xlnet to load the model and read out its outputs; big thanks to CyberZHG for the excellent library.

        chinese_xlnet_mid_L-24_H-768_A-12 is a Chinese XLNet model with 24 transformer blocks (246 Keras layers in total). Getting embeddings from XLNet works much the same as with BERT. I am not sure everything here is correct; if it is not, please point it out so we can improve it together, thanks!
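
        For reference, the script below reads its configuration from a FeatureProject/xlnet/args.py module. What follows is only a sketch of the fields that module has to provide, reconstructed from how args is used in the script; the paths are placeholders for wherever you unpacked chinese_xlnet_mid_L-24_H-768_A-12, and the real args.py in the repo may differ:

# Hypothetical minimal args.py; field names follow their usage in the script below,
# the paths are placeholders for a local copy of chinese_xlnet_mid_L-24_H-768_A-12.
import os

model_dir = "chinese_xlnet_mid_L-24_H-768_A-12"
config_name = os.path.join(model_dir, "xlnet_config.json")
ckpt_name = os.path.join(model_dir, "xlnet_model.ckpt")
spiece_model = os.path.join(model_dir, "spiece.model")

attention_type = ["bi"]      # the script checks attention_type[0] == 'bi'
memory_len = 0               # no memory is needed for single-sentence encoding
target_len = 32              # sentences are padded/truncated to this length
batch_size = 1
layer_indexes = [24]         # e.g. the last transformer block; [] means the raw model output
gpu_memory_fraction = 0.5    # fraction of GPU memory TensorFlow may allocate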

        All of the code is on GitHub: https://github.com/yongzhuo/nlp_xiaojiang/tree/master/FeatureProject/xlnet

        The code is as follows:

# -*- coding: UTF-8 -*-
# !/usr/bin/python
# @time     :2019/8/27 22:27
# @author   :Mo
# @function :



from keras_xlnet import Tokenizer, ATTENTION_TYPE_BI, ATTENTION_TYPE_UNI
from keras_xlnet import load_trained_model_from_checkpoint

from FeatureProject.bert.layers_keras import NonMaskingLayer
import keras.backend.tensorflow_backend as ktf_keras
from keras.models import Model
from keras.layers import Add
import tensorflow as tf
import numpy as np
import codecs
import os

from FeatureProject.xlnet import args


# global graph/model so the encoder can be called from django, flask, tornado, etc.
graph = None
model = None
# GPU selection and memory-usage configuration
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = args.gpu_memory_fraction
sess = tf.Session(config=config)
ktf_keras.set_session(sess)


class KerasXlnetVector():
    def __init__(self):
        self.attention_type = ATTENTION_TYPE_BI if args.attention_type[0] == 'bi' else ATTENTION_TYPE_UNI
        self.memory_len, self.target_len, self.batch_size = args.memory_len, args.target_len, args.batch_size
        self.checkpoint_path, self.config_path = args.ckpt_name, args.config_name
        self.layer_indexes, self.in_train_phase = args.layer_indexes, False

        print("load KerasXlnetEmbedding start! ")
        # keep graph/model global so they can be used from django, flask, tornado, etc.
        global graph
        graph = tf.get_default_graph()
        global model
        # load the pretrained model from the checkpoint
        model = load_trained_model_from_checkpoint(checkpoint_path=self.checkpoint_path,
                                                   attention_type=self.attention_type,
                                                   in_train_phase=self.in_train_phase,
                                                   config_path=self.config_path,
                                                   memory_len=self.memory_len,
                                                   target_len=self.target_len,
                                                   batch_size=self.batch_size,
                                                   mask_index=0)
        # load the SentencePiece vocabulary
        self.tokenizer = Tokenizer(args.spiece_model)
        # inspect the layers when debugging
        self.model_layers = model.layers
        len_layers = self.model_layers.__len__()
        print(len_layers)
        len_couche = int((len_layers - 6) / 10)
        # 246 Keras layers in total: the input and embedding layers come first,
        # followed by 24 transformer blocks of 10 Keras layers each
        layer_dict = [5]
        layer_0 = 6
        for i in range(len_couche):
            layer_0 = layer_0 + 10
            layer_dict.append(layer_0-2)
        # no layer index given: use the model output itself
        if len(self.layer_indexes) == 0:
            encoder_layer = model.output
        # a single layer index was given; if it is invalid, fall back to the second-to-last block
        elif len(self.layer_indexes) == 1:
            if self.layer_indexes[0] in [i + 1 for i in range(len_couche + 1)]:
                encoder_layer = model.get_layer(index=layer_dict[self.layer_indexes[0]]).output
            else:
                encoder_layer = model.get_layer(index=layer_dict[-2]).output
        # otherwise iterate over the requested layers and sum their outputs element-wise (the result stays 768-dimensional)
        else:
            # layer_indexes must be within [0, 1, 2, ..., 24]
            all_layers = [model.get_layer(index=layer_dict[lay]).output
                          if lay in [i + 1 for i in range(len_couche + 1)]
                          else model.get_layer(index=layer_dict[-2]).output  # invalid index: default to the second-to-last block
                          for lay in self.layer_indexes]
            print(self.layer_indexes)
            print(all_layers)
            # element-wise sum of the outputs of the selected layers
            encoder_layer = Add()(all_layers)
            print(encoder_layer.shape)
        output_layer = NonMaskingLayer()(encoder_layer)
        model = Model(model.inputs, output_layer)
        print("load KerasXlnetEmbedding end")
        model.summary(132)


    def xlnet_encode(self, texts):

        # masked mean pooling (serves as the pooler), following https://github.com/terrifyzhao/bert-utils/blob/master/graph.py
        mul_mask = lambda x, m: x * np.expand_dims(m, axis=-1)
        masked_reduce_mean = lambda x, m: np.sum(mul_mask(x, m), axis=1) / (np.sum(m, axis=1, keepdims=True) + 1e-9)

        # text preprocessing: tokenize, then pad or truncate to target_len
        predicts = []
        for text in texts:
            # print(text)
            tokens = self.tokenizer.encode(text)
            tokens = tokens + [0]*(self.target_len-len(tokens)) if len(tokens) < self.target_len else tokens[0:self.target_len]
            token_input = np.expand_dims(np.array(tokens), axis=0)
            mask_input = np.array([0 if ids == 0 else 1 for ids in tokens])
            segment_input = np.zeros_like(token_input)
            memory_length_input = np.zeros((1, 1))
            # use the global graph so this works from django, flask, tornado, etc.
            with graph.as_default():
                predict = model.predict([token_input, segment_input, memory_length_input], batch_size=1)
                # print(predict)
                prob = predict[0]
                pooled = masked_reduce_mean(prob, [mask_input])
                pooled = pooled.tolist()
                predicts.append(pooled[0])
        return predicts


if __name__ == "__main__":
    xlnet_vector = KerasXlnetVector()
    pooled = xlnet_vector.xlnet_encode(['你是誰呀', '小老弟'])
    print(pooled)
    while True:
        print("input:")
        ques = input()
        print(ques)
        print(xlnet_vector.xlnet_encode([ques]))
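
The title also promises sentence similarity, but the snippet above stops at the pooled vectors. Comparing two of them is then just ordinary cosine similarity; the helper below is my own addition on top of xlnet_encode, not something from the repo:

import numpy as np

def cosine_similarity(v1, v2):
    """Cosine similarity between two pooled sentence vectors."""
    v1, v2 = np.asarray(v1), np.asarray(v2)
    return float(np.dot(v1, v2) / (np.linalg.norm(v1) * np.linalg.norm(v2) + 1e-9))

# e.g. with the two example sentences from the __main__ block above
vec_a, vec_b = xlnet_vector.xlnet_encode(['你是誰呀', '小老弟'])
print(cosine_similarity(vec_a, vec_b))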

 

Hope this helps!
