
I am having a problem with reusing variables across different name scopes in TensorFlow. The code below keeps the source-side embedding and the target-side embedding in two separate spaces; what I want instead is to put source and target in the same space and reuse the variables of a single lookup table.

''' Applying bidirectional encoding for source-side inputs and first-word decoding.
'''
def decode_first_word(self, source_vocab_id_tensor, source_mask_tensor, scope, reuse):
    with tf.name_scope('Word_Embedding_Layer'):
        with tf.variable_scope('Source_Side'):
            source_embedding_tensor = self._src_lookup_table(source_vocab_id_tensor)
    with tf.name_scope('Encoding_Layer'):
        source_concated_hidden_tensor = self._encoder.get_biencoded_tensor(
            source_embedding_tensor, source_mask_tensor)
    with tf.name_scope('Decoding_Layer_First'):
        rvals = self.decode_next_word(source_concated_hidden_tensor, source_mask_tensor,
                                      None, None, None, scope, reuse)
    return rvals + [source_concated_hidden_tensor]


''' Applying one-step decoding.
'''
def decode_next_word(self, enc_concat_hidden, src_mask, cur_dec_hidden,
                     cur_trg_wid, trg_mask=None, scope=None, reuse=False,
                     src_side_pre_act=None):
    with tf.name_scope('Word_Embedding_Layer'):
        with tf.variable_scope('Target_Side'):
            cur_trg_wemb = None
            if cur_trg_wid is not None:
                cur_trg_wemb = self._trg_lookup_table(cur_trg_wid)

I want to change them as follows, so that only one embedding node appears in the whole graph:

def decode_first_word_shared_embedding(self, source_vocab_id_tensor, source_mask_tensor, scope, reuse):
    with tf.name_scope('Word_Embedding_Layer'):
        with tf.variable_scope('Bi_Side'):
            source_embedding_tensor = self._bi_lookup_table(source_vocab_id_tensor)
    with tf.name_scope('Encoding_Layer'):
        source_concated_hidden_tensor = self._encoder.get_biencoded_tensor(
            source_embedding_tensor, source_mask_tensor)
    with tf.name_scope('Decoding_Layer_First'):
        rvals = self.decode_next_word_shared_embedding(source_concated_hidden_tensor, source_mask_tensor,
                                                       None, None, None, scope, reuse)
    return rvals + [source_concated_hidden_tensor]

def decode_next_word_shared_embedding(self, enc_concat_hidden, src_mask, cur_dec_hidden,
                                      cur_trg_wid, trg_mask=None, scope=None, reuse=False,
                                      src_side_pre_act=None):
    with tf.name_scope('Word_Embedding_Layer'):
        cur_trg_wemb = None
        if cur_trg_wid is not None:
            with tf.variable_scope('Bi_Side'):
                cur_trg_wemb = self._bi_lookup_table(cur_trg_wid)

How can I achieve this?
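For context, a common way to get this behaviour in TensorFlow 1.x is to create the table with tf.get_variable inside a tf.variable_scope and let the reuse mechanism handle sharing: tf.name_scope only affects op names, not variable names, so the same variable is picked up no matter which name scope the lookup happens under. Below is a minimal, self-contained sketch of that idea, not the asker's actual code; the lookup helper, the bi_embedding variable name, and the VOCAB_SIZE/EMB_DIM constants are made up for illustration and stand in for self._bi_lookup_table.

import tensorflow as tf

VOCAB_SIZE = 1000  # illustrative vocabulary size
EMB_DIM = 64       # illustrative embedding dimension

def lookup(word_ids):
    # tf.get_variable ignores the surrounding tf.name_scope, so every call
    # resolves to the single variable 'Bi_Side/bi_embedding' as long as the
    # variable scope permits reuse (tf.AUTO_REUSE: create once, then reuse).
    with tf.variable_scope('Bi_Side', reuse=tf.AUTO_REUSE):
        table = tf.get_variable('bi_embedding', [VOCAB_SIZE, EMB_DIM])
    return tf.nn.embedding_lookup(table, word_ids)

src_ids = tf.placeholder(tf.int32, [None, None], name='src_ids')
trg_ids = tf.placeholder(tf.int32, [None, None], name='trg_ids')

with tf.name_scope('Word_Embedding_Layer'):
    src_emb = lookup(src_ids)   # creates Bi_Side/bi_embedding
with tf.name_scope('Word_Embedding_Layer_1'):
    trg_emb = lookup(trg_ids)   # reuses the same variable

print([v.name for v in tf.global_variables()])
# ['Bi_Side/bi_embedding:0']  -- only one embedding variable in the graph

If the table has to stay a member such as self._bi_lookup_table, the same effect can be had by building it once with tf.get_variable (for example in __init__) and calling it from both the source and target paths; the key point is that sharing is controlled by tf.variable_scope and tf.get_variable, not by tf.name_scope.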

Answer