Ayushi Dalmia (ayushidalmia)

from onmt.translate.Translator import Translator
from onmt.translate.Translation import TranslationBuilder
import onmt
import onmt.ModelConstructor
from utils import *
from parameters import *
import opts
import argparse
import torch
from onmt.io import IO
def decoder(self, encoder_outputs, dropout, mode):
    # Assumes TensorFlow 1.x ("import tensorflow as tf") for the tf.contrib.seq2seq API.
    # Build the decoder cell and its initial state from the encoder outputs.
    cell, decoder_initial_state = self.decoder_cell(encoder_outputs, dropout, mode)
    if mode != "INFER":
        tgt_text = self.tgt_text
        if self.time_major:
            # TrainingHelper expects [max_time, batch] inputs when time_major is set.
            tgt_text = tf.transpose(self.tgt_text)
        inputs = tf.nn.embedding_lookup(self.tgt_embedding, tgt_text)
        # Pass the same time_major flag used for the transpose above (the original
        # snippet hard-coded time_major=True here).
        helper = tf.contrib.seq2seq.TrainingHelper(
            inputs, self.tgt_sequence_length, time_major=self.time_major)
        basic_decoder = tf.contrib.seq2seq.BasicDecoder(cell, helper, decoder_initial_state)
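    # --- Hedged continuation, not part of the original gist ---
    # A minimal sketch of the matching INFER branch and the decode call that
    # typically follows a BasicDecoder. self.sos_id, self.eos_id, self.batch_size
    # and self.max_decode_length are assumed attribute names used only for
    # illustration.
    else:
        helper = tf.contrib.seq2seq.GreedyEmbeddingHelper(
            self.tgt_embedding,
            start_tokens=tf.fill([self.batch_size], self.sos_id),
            end_token=self.eos_id)
        basic_decoder = tf.contrib.seq2seq.BasicDecoder(cell, helper, decoder_initial_state)

    # dynamic_decode unrolls the decoder; outputs.rnn_output holds the per-step logits.
    outputs, final_state, final_seq_len = tf.contrib.seq2seq.dynamic_decode(
        basic_decoder,
        output_time_major=self.time_major,
        maximum_iterations=self.max_decode_length)
    return outputs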
def load_initial_weights(self, session):
    """
    The pretrained weights from http://www.cs.toronto.edu/~guerzhoy/tf_alexnet/
    come as a dict of lists (e.g. weights['conv1'] is a list) rather than a dict
    of dicts (e.g. weights['conv1'] being a dict with the keys 'weights' and
    'biases'), so they need a dedicated load function.
    """
    # Load the weights into memory; allow_pickle is required on NumPy >= 1.16.3
    # (assumes "import numpy as np").
    weights_dict = np.load(self.WEIGHTS_PATH, encoding='bytes', allow_pickle=True).item()
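    # --- Hedged continuation, not part of the original gist ---
    # A sketch of how the loaded dict is typically copied into the graph variables,
    # assuming each entry is a [weights, biases] list per layer name and that the
    # variables were created under matching variable scopes. self.SKIP_LAYER is an
    # assumed attribute listing layers to leave untouched.
    for op_name in weights_dict:
        if op_name in getattr(self, 'SKIP_LAYER', []):
            continue
        with tf.variable_scope(op_name, reuse=True):
            for data in weights_dict[op_name]:
                if len(data.shape) == 1:
                    # 1-D arrays are biases; everything else is a weight kernel.
                    var = tf.get_variable('biases', trainable=False)
                else:
                    var = tf.get_variable('weights', trainable=False)
                session.run(var.assign(data))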
bad argument #2 to '?' (index out of bound)
stack traceback:
[C]: at 0x2b4f0352a7e0
[C]: in function '__index'
./onmt/translate/Beam.lua:97: in function 'func'
./onmt/utils/Tensor.lua:12: in function 'recursiveApply'
./onmt/utils/Tensor.lua:7: in function '_indexState'
./onmt/translate/BeamSearcher.lua:171: in function '_retrieveHypothesis'
./onmt/translate/BeamSearcher.lua:230: in function '_completeHypotheses'
./onmt/translate/BeamSearcher.lua:74: in function 'search'