forked from autosimtrans/SimulTransBaseline
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlatency.py
More file actions
63 lines (53 loc) · 1.95 KB
/
latency.py
File metadata and controls
63 lines (53 loc) · 1.95 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
# Compute Average Lagging (AL) latency for a streaming simultaneous-translation
# decode, and merge the partial hypotheses into full per-talk outputs.
#
# Usage: python latency.py PRED_FILE SRC_FILE
#   PRED_FILE: streaming decoder output, one partial-hypothesis extension per
#              source segment, in order.
#   SRC_FILE:  JSON; a list of talks, each talk a list of sentences, each
#              sentence a list of source segments (one READ per segment).
#              -- TODO confirm this nesting against the data producer.
#
# Side effects: writes merged hypotheses (one line per talk) to
# PRED_FILE + '.merge' and prints the corpus-average AL to stdout.
import al  # project-local module providing al.delay(rw_sequence) -> (AP, CW, AL)
from IPython import embed  # kept for interactive debugging
import json
import sys

pred_file_name, src_file_name = sys.argv[1], sys.argv[2]

with open(pred_file_name, 'r', encoding='UTF-8') as f:
    # rstrip('\n') instead of line[:-1]: does not clip the last character
    # when the final line has no trailing newline.
    tgt_lines = [line.rstrip('\n') for line in f]

with open(src_file_name, 'r', encoding='UTF-8') as f:
    src_lines = json.load(f)

rws = []   # read/write action sequences, one per sentence (0 = READ, 1 = WRITE)
als = []   # per-sentence AL scores
idx = 0    # cursor into tgt_lines; advances once per source segment

with open(pred_file_name + '.merge', 'w', encoding='UTF-8') as f:
    for talk in src_lines:
        sent = ''  # accumulated hypothesis text for the whole talk
        for sentence in talk:
            _rw = []
            for part_sent in sentence:
                crt_sent = tgt_lines[idx]
                # Apply a READ for this source segment.
                if (len(crt_sent) > 0 and crt_sent[0] != ' ') and (len(sent) > 0 and sent[-1] != ' '):
                    # The new chunk glues directly onto the previous word
                    # (BPE / detokenization continuation): instead of adding a
                    # fresh READ, convert the most recent WRITE back into a
                    # READ so the word count stays consistent.
                    for i in reversed(range(len(_rw))):
                        if _rw[i] == 1:
                            _rw[i] = 0
                            break
                else:
                    _rw += [0]
                sent += crt_sent
                # Apply one WRITE per emitted target word.
                for w in crt_sent.split():
                    _rw += [1]
                idx += 1
            rws.append(_rw)
            _ap, _cw, _al = al.delay(rws[-1])
            als.append(_al)
        f.write(sent + '\n')

# Every prediction line must have been consumed exactly once.
assert idx == len(tgt_lines)
print('AL:', sum(als) / len(als))