Gist by @Lyken17, created November 29, 2016 05:07
import math
import random
import sys

import numpy as np

# PRO-style reranker training: sample hypothesis pairs from each n-best list,
# keep the pairs whose sentence-level BLEU scores differ the most, and apply a
# perceptron-style update (with momentum) whenever the model ranks the worse
# hypothesis at least as high as the better one.
# Assumes epochs, tau, alpha, xi, eta, np_theta, np_theta2, and nbests are
# defined earlier in the script.
for i in range(epochs):
    mistakes = 0
    np_mistakes = 0
    np_mistakes2 = 0
    sys.stderr.write("Epoch %s...\n" % i)
    # sys.stderr.write("length of nbests is %s and first is %s\n" % (len(nbests), len(nbests[0])))
    for nbest in nbests:
        def get_sample():
            # Draw tau random hypothesis pairs from this n-best list; keep a
            # pair only if its BLEU difference exceeds alpha, ordered as
            # (better, worse).
            sample = []
            for _ in range(tau):
                s1 = random.choice(nbest)
                s2 = random.choice(nbest)
                if math.fabs(s1.bleu - s2.bleu) > alpha:
                    if s1.bleu > s2.bleu:
                        sample.append((s1, s2))
                    else:
                        sample.append((s2, s1))
            return sample

        # Keep the xi pairs with the largest BLEU gap, then shuffle them.
        samples = sorted(get_sample(), key=lambda h: h[0].bleu - h[1].bleu, reverse=True)[:xi]
        random.shuffle(samples)

        pre_grad = np.zeros_like(np_theta)
        for s1, s2 in samples:
            np_feat1 = np.array(s1.features, dtype=float)
            np_feat2 = np.array(s2.features, dtype=float)
            np_score1 = np.dot(np_theta2, np_feat1)
            np_score2 = np.dot(np_theta2, np_feat2)
            # Ranking mistake: the worse hypothesis scores at least as high as
            # the better one, so push the weights toward the better
            # hypothesis's features with a momentum-smoothed step.
            if np_score1 <= np_score2:
                np_mistakes2 += 1
                grad = eta * (np_feat1 - np_feat2)
                pre_grad = pre_grad * 0.5 + grad
                np_theta2 = np_theta2 + pre_grad
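
The loop above expects each entry of `nbests` to be a list of hypothesis objects carrying a `features` vector and a sentence-level `bleu` score, plus a few hyperparameters (`epochs`, `tau`, `alpha`, `xi`, `eta`) and initial weight vectors. A minimal setup sketch follows; the gist does not show these definitions, so every name, shape, and value here is an illustrative assumption rather than the original script's configuration.

```python
from collections import namedtuple

import numpy as np

# Hypothetical hypothesis record mirroring the attributes the loop reads.
Hypothesis = namedtuple("Hypothesis", ["features", "bleu"])

# Illustrative hyperparameter values, not the original script's settings.
epochs = 5     # passes over the n-best lists
tau = 5000     # candidate pairs sampled per n-best list
alpha = 0.05   # minimum BLEU difference for a pair to be kept
xi = 100       # highest-gap pairs kept per n-best list
eta = 0.1      # learning-rate scale for the update

num_features = 6
np_theta = np.zeros(num_features)   # reference/baseline weight vector
np_theta2 = np.zeros(num_features)  # weight vector updated by the loop

# Toy n-best lists: two sentences, three hypotheses each, with random
# feature vectors and BLEU scores standing in for real decoder output.
rng = np.random.default_rng(0)
nbests = [
    [Hypothesis(rng.normal(size=num_features).tolist(), float(rng.random()))
     for _ in range(3)]
    for _ in range(2)
]
```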