ZhangYunchenY committed on
Commit 39d48f0
1 Parent(s): 40ed032

[Model] roberta-base-squad2

config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "_name_or_path": "/mnt/lustre/zhangyunchen/transformers/roberta-base",
+ "architectures": [
+ "RobertaForQuestionAnswering"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "bos_token_id": 0,
+ "classifier_dropout": null,
+ "eos_token_id": 2,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 514,
+ "model_type": "roberta",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 1,
+ "position_embedding_type": "absolute",
+ "torch_dtype": "float32",
+ "transformers_version": "4.16.2",
+ "type_vocab_size": 1,
+ "use_cache": true,
+ "vocab_size": 50265
+ }
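
The config above describes a standard 12-layer RoBERTa-base encoder with a question-answering head (RobertaForQuestionAnswering). A minimal usage sketch, assuming the checkpoint from this commit has been downloaded into a local directory named ./roberta-base-squad2 (the path is illustrative, not part of the commit):

    # Sketch: load the fine-tuned checkpoint for extractive QA.
    # "./roberta-base-squad2" is an assumed local path, used only for illustration.
    from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline

    model = AutoModelForQuestionAnswering.from_pretrained("./roberta-base-squad2")
    tokenizer = AutoTokenizer.from_pretrained("./roberta-base-squad2")

    qa = pipeline("question-answering", model=model, tokenizer=tokenizer)
    print(qa(question="What was the model fine-tuned on?",
             context="The roberta-base checkpoint was fine-tuned on SQuAD 2.0."))
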
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:add7dd77d93e5413cca793bad0173c339d7ae4c3e887dc152daeecb23119dc0c
+ size 992578299
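
The three lines above are a Git LFS pointer, not the file itself: the roughly 993 MB optimizer state lives in LFS storage and is identified by its SHA-256 oid. The same applies to pytorch_model.bin, rng_state.pth, scheduler.pt, and training_args.bin below. A small verification sketch, assuming the real file has already been fetched into the working tree with git lfs pull:

    # Sketch: check a pulled LFS object against the oid recorded in its pointer.
    import hashlib

    def sha256_of(path, chunk_size=1 << 20):
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                h.update(chunk)
        return h.hexdigest()

    expected = "add7dd77d93e5413cca793bad0173c339d7ae4c3e887dc152daeecb23119dc0c"
    assert sha256_of("optimizer.pt") == expected
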
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a857a7484193480d1601ded205d76f2c7fa7ba2537b9e28f7ca2564f2fa2f0c
+ size 496316479
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d41a719410a3620845bd75578fd5c88c17aa5e36b48e00acc041abe02cf883c6
+ size 14659
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:659db696246edcfdbfbee2fdd32cc1f234a9057f1a631472ad587cfe4eefd542
+ size 623
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "trim_offsets": true, "use_fase": true, "special_tokens_map_file": null, "name_or_path": "/mnt/lustre/zhangyunchen/transformers/roberta-base", "tokenizer_class": "RobertaTokenizer"}
trainer_state.json ADDED
@@ -0,0 +1,591 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 3.034348828741352,
+ "global_step": 25000,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.12,
+ "learning_rate": 4.045307443365696e-06,
+ "loss": 3.818,
+ "step": 1000
+ },
+ {
+ "epoch": 0.12,
+ "eval_HasAns_exact": 44.97300944669366,
+ "eval_HasAns_f1": 52.01439218879365,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 50.647603027754414,
+ "eval_NoAns_f1": 50.647603027754414,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 50.09685841825992,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 51.885203441071,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 47.81436873578708,
+ "eval_f1": 51.33001911018023,
+ "eval_total": 11873,
+ "step": 1000
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 8.090614886731393e-06,
+ "loss": 1.5859,
+ "step": 2000
+ },
+ {
+ "epoch": 0.24,
+ "eval_HasAns_exact": 69.04520917678812,
+ "eval_HasAns_f1": 77.70845080310447,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 45.853658536585364,
+ "eval_NoAns_f1": 45.853658536585364,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 57.48336561947276,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 61.802768280079185,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 57.43283079255453,
+ "eval_f1": 61.758249503984274,
+ "eval_total": 11873,
+ "step": 2000
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 9.863646928182218e-06,
+ "loss": 1.3077,
+ "step": 3000
+ },
+ {
+ "epoch": 0.36,
+ "eval_HasAns_exact": 70.58029689608637,
+ "eval_HasAns_f1": 78.40109850512528,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 64.8780487804878,
+ "eval_NoAns_f1": 64.8780487804878,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 67.73351301271794,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 71.63831482678222,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 67.7250905415649,
+ "eval_f1": 71.62989235562912,
+ "eval_total": 11873,
+ "step": 3000
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 9.605402473981872e-06,
+ "loss": 1.1543,
+ "step": 4000
+ },
+ {
+ "epoch": 0.49,
+ "eval_HasAns_exact": 65.24966261808368,
+ "eval_HasAns_f1": 70.75967293392448,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 85.26492851135409,
+ "eval_NoAns_f1": 85.26492851135409,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 75.27162469468541,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 78.02268518085604,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 75.27162469468541,
+ "eval_f1": 78.02268518085617,
+ "eval_total": 11873,
+ "step": 4000
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 9.347158019781525e-06,
+ "loss": 1.083,
+ "step": 5000
+ },
+ {
+ "epoch": 0.61,
+ "eval_HasAns_exact": 72.62145748987854,
+ "eval_HasAns_f1": 78.57932116933335,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 75.47518923465097,
+ "eval_NoAns_f1": 75.47518923465097,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 74.05036637749515,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 77.02503292274977,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 74.05036637749515,
+ "eval_f1": 77.02503292274983,
+ "eval_total": 11873,
+ "step": 5000
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 9.08891356558118e-06,
+ "loss": 1.0232,
+ "step": 6000
+ },
+ {
+ "epoch": 0.73,
+ "eval_HasAns_exact": 75.87719298245614,
+ "eval_HasAns_f1": 82.4856147328558,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 75.1892346509672,
+ "eval_NoAns_f1": 75.1892346509672,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 75.53272130042954,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 78.83220114009663,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 75.53272130042954,
+ "eval_f1": 78.83220114009681,
+ "eval_total": 11873,
+ "step": 6000
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 8.830669111380835e-06,
+ "loss": 0.9925,
+ "step": 7000
+ },
+ {
+ "epoch": 0.85,
+ "eval_HasAns_exact": 76.16396761133603,
+ "eval_HasAns_f1": 82.92856292587923,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 76.53490328006728,
+ "eval_NoAns_f1": 76.53490328006728,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 76.34970100227407,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 79.72715581778914,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 76.34970100227407,
+ "eval_f1": 79.72715581778931,
+ "eval_total": 11873,
+ "step": 7000
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 8.572424657180489e-06,
+ "loss": 0.9409,
+ "step": 8000
+ },
+ {
+ "epoch": 0.97,
+ "eval_HasAns_exact": 76.87246963562752,
+ "eval_HasAns_f1": 83.28420126895146,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 77.32548359966358,
+ "eval_NoAns_f1": 77.32548359966358,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 77.0993009348943,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 80.3005765284547,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 77.0993009348943,
+ "eval_f1": 80.30057652845481,
+ "eval_total": 11873,
+ "step": 8000
+ },
+ {
+ "epoch": 1.09,
+ "learning_rate": 8.314180202980142e-06,
+ "loss": 0.8459,
+ "step": 9000
+ },
+ {
+ "epoch": 1.09,
+ "eval_HasAns_exact": 79.47031039136303,
+ "eval_HasAns_f1": 86.27241463084697,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 70.64760302775441,
+ "eval_NoAns_f1": 70.64760302775441,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 75.05264044470648,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 78.44882286967555,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 75.05264044470648,
+ "eval_f1": 78.44882286967575,
+ "eval_total": 11873,
+ "step": 9000
+ },
+ {
+ "epoch": 1.21,
+ "learning_rate": 8.055935748779795e-06,
+ "loss": 0.7936,
+ "step": 10000
+ },
+ {
+ "epoch": 1.21,
+ "eval_HasAns_exact": 73.66734143049932,
+ "eval_HasAns_f1": 79.21121792689594,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 83.28006728343145,
+ "eval_NoAns_f1": 83.28006728343145,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 78.48058620399225,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 81.24855553530176,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 78.48058620399225,
+ "eval_f1": 81.24855553530186,
+ "eval_total": 11873,
+ "step": 10000
+ },
+ {
+ "epoch": 1.34,
+ "learning_rate": 7.797691294579448e-06,
+ "loss": 0.801,
+ "step": 11000
+ },
+ {
+ "epoch": 1.34,
+ "eval_HasAns_exact": 74.07219973009447,
+ "eval_HasAns_f1": 79.92037997936747,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 84.52481076534903,
+ "eval_NoAns_f1": 84.52481076534903,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 79.3059883769898,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 82.22589173062302,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 79.3059883769898,
+ "eval_f1": 82.2258917306232,
+ "eval_total": 11873,
+ "step": 11000
+ },
+ {
+ "epoch": 1.46,
+ "learning_rate": 7.539446840379103e-06,
+ "loss": 0.8088,
+ "step": 12000
+ },
+ {
+ "epoch": 1.46,
+ "eval_HasAns_exact": 76.06275303643724,
+ "eval_HasAns_f1": 82.31177335930619,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 80.40370058873002,
+ "eval_NoAns_f1": 80.40370058873002,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 78.2363345405542,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 81.35637096554916,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 78.2363345405542,
+ "eval_f1": 81.3563709655493,
+ "eval_total": 11873,
+ "step": 12000
+ },
+ {
+ "epoch": 1.58,
+ "learning_rate": 7.281202386178758e-06,
+ "loss": 0.8092,
+ "step": 13000
+ },
+ {
+ "epoch": 1.58,
+ "eval_HasAns_exact": 78.7280701754386,
+ "eval_HasAns_f1": 85.24409087886156,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 76.31623212783852,
+ "eval_NoAns_f1": 76.31623212783852,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 77.52042449254611,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 80.77376995956291,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 77.52042449254611,
+ "eval_f1": 80.77376995956293,
+ "eval_total": 11873,
+ "step": 13000
+ },
+ {
+ "epoch": 1.7,
+ "learning_rate": 7.0229579319784115e-06,
+ "loss": 0.7684,
+ "step": 14000
+ },
+ {
+ "epoch": 1.7,
+ "eval_HasAns_exact": 78.0195681511471,
+ "eval_HasAns_f1": 84.35251514019012,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 79.76450798990749,
+ "eval_NoAns_f1": 79.76450798990749,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 78.89328729049103,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 82.05522696462938,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 78.89328729049103,
+ "eval_f1": 82.05522696462947,
+ "eval_total": 11873,
+ "step": 14000
+ },
+ {
+ "epoch": 1.82,
+ "learning_rate": 6.764713477778065e-06,
+ "loss": 0.7751,
+ "step": 15000
+ },
+ {
+ "epoch": 1.82,
+ "eval_HasAns_exact": 78.69433198380567,
+ "eval_HasAns_f1": 84.83678017806832,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 78.16652649285113,
+ "eval_NoAns_f1": 78.16652649285113,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 78.43005137707404,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 81.49687803382365,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 78.43005137707404,
+ "eval_f1": 81.49687803382375,
+ "eval_total": 11873,
+ "step": 15000
+ },
+ {
+ "epoch": 1.94,
+ "learning_rate": 6.506469023577719e-06,
+ "loss": 0.7746,
+ "step": 16000
+ },
+ {
+ "epoch": 1.94,
+ "eval_HasAns_exact": 78.40755735492577,
+ "eval_HasAns_f1": 84.57986037447542,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 78.67115222876367,
+ "eval_NoAns_f1": 78.67115222876367,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 78.53954350206351,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 81.62127619808709,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 78.53954350206351,
+ "eval_f1": 81.62127619808716,
+ "eval_total": 11873,
+ "step": 16000
+ },
+ {
+ "epoch": 2.06,
+ "learning_rate": 6.248224569377374e-06,
+ "loss": 0.6995,
+ "step": 17000
+ },
+ {
+ "epoch": 2.06,
+ "eval_HasAns_exact": 79.25101214574899,
+ "eval_HasAns_f1": 85.85300684093066,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 76.93860386879732,
+ "eval_NoAns_f1": 76.93860386879732,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 78.09315253095258,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 81.38942344420408,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 78.09315253095258,
+ "eval_f1": 81.38942344420423,
+ "eval_total": 11873,
+ "step": 17000
+ },
+ {
+ "epoch": 2.18,
+ "learning_rate": 5.9899801151770276e-06,
+ "loss": 0.6657,
+ "step": 18000
+ },
+ {
+ "epoch": 2.18,
+ "eval_HasAns_exact": 79.1497975708502,
+ "eval_HasAns_f1": 85.40764464218985,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 78.01513877207738,
+ "eval_NoAns_f1": 78.01513877207738,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 78.5816558578287,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 81.70609933790104,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 78.5816558578287,
+ "eval_f1": 81.70609933790118,
+ "eval_total": 11873,
+ "step": 18000
+ },
+ {
+ "epoch": 2.31,
+ "learning_rate": 5.7317356609766805e-06,
+ "loss": 0.643,
+ "step": 19000
+ },
+ {
+ "epoch": 2.31,
+ "eval_HasAns_exact": 73.6842105263158,
+ "eval_HasAns_f1": 79.63974257623968,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 84.60891505466779,
+ "eval_NoAns_f1": 84.60891505466779,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 79.15438389623516,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 82.12788629596118,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 79.15438389623516,
+ "eval_f1": 82.12788629596123,
+ "eval_total": 11873,
+ "step": 19000
+ },
+ {
+ "epoch": 2.43,
+ "learning_rate": 5.473491206776334e-06,
+ "loss": 0.6494,
+ "step": 20000
+ },
+ {
+ "epoch": 2.43,
+ "eval_HasAns_exact": 77.10863697705803,
+ "eval_HasAns_f1": 83.39536168287856,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 80.89150546677881,
+ "eval_NoAns_f1": 80.89150546677881,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 79.0027794154805,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 82.14164103900457,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 79.0027794154805,
+ "eval_f1": 82.14164103900465,
+ "eval_total": 11873,
+ "step": 20000
+ },
+ {
+ "epoch": 2.55,
+ "learning_rate": 5.215246752575989e-06,
+ "loss": 0.6326,
+ "step": 21000
+ },
+ {
+ "epoch": 2.55,
+ "eval_HasAns_exact": 79.28475033738192,
+ "eval_HasAns_f1": 85.68401857002343,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 78.33473507148865,
+ "eval_NoAns_f1": 78.33473507148865,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 78.80906257896066,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 82.00411539485366,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 78.80906257896066,
+ "eval_f1": 82.00411539485363,
+ "eval_total": 11873,
+ "step": 21000
+ },
+ {
+ "epoch": 2.67,
+ "learning_rate": 4.957002298375643e-06,
+ "loss": 0.6236,
+ "step": 22000
+ },
+ {
+ "epoch": 2.67,
+ "eval_HasAns_exact": 79.77395411605939,
+ "eval_HasAns_f1": 85.76130612988482,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 78.45248107653491,
+ "eval_NoAns_f1": 78.45248107653491,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 79.11227154046998,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 82.10166114191487,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 79.11227154046998,
+ "eval_f1": 82.10166114191493,
+ "eval_total": 11873,
+ "step": 22000
+ },
+ {
+ "epoch": 2.79,
+ "learning_rate": 4.698757844175297e-06,
+ "loss": 0.6177,
+ "step": 23000
+ },
+ {
+ "epoch": 2.79,
+ "eval_HasAns_exact": 80.17881241565452,
+ "eval_HasAns_f1": 86.16732437952784,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 78.33473507148865,
+ "eval_NoAns_f1": 78.33473507148865,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 79.25545355007159,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 82.24542229612054,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 79.25545355007159,
+ "eval_f1": 82.24542229612064,
+ "eval_total": 11873,
+ "step": 23000
+ },
+ {
+ "epoch": 2.91,
+ "learning_rate": 4.4405133899749504e-06,
+ "loss": 0.6171,
+ "step": 24000
+ },
+ {
+ "epoch": 2.91,
+ "eval_HasAns_exact": 79.25101214574899,
+ "eval_HasAns_f1": 85.3060014254297,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 79.3103448275862,
+ "eval_NoAns_f1": 79.3103448275862,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 79.2807209635307,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 82.30388077570497,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 79.2807209635307,
+ "eval_f1": 82.303880775705,
+ "eval_total": 11873,
+ "step": 24000
+ },
+ {
+ "epoch": 3.03,
+ "learning_rate": 4.182268935774604e-06,
+ "loss": 0.5992,
+ "step": 25000
+ },
+ {
+ "epoch": 3.03,
+ "eval_HasAns_exact": 76.51821862348179,
+ "eval_HasAns_f1": 82.59682578958576,
+ "eval_HasAns_total": 5928,
+ "eval_NoAns_exact": 84.00336417157276,
+ "eval_NoAns_f1": 84.00336417157276,
+ "eval_NoAns_total": 5945,
+ "eval_best_exact": 80.26615008843595,
+ "eval_best_exact_thresh": 0.0,
+ "eval_best_f1": 83.30110193553955,
+ "eval_best_f1_thresh": 0.0,
+ "eval_exact": 80.26615008843595,
+ "eval_f1": 83.30110193553969,
+ "eval_total": 11873,
+ "step": 25000
+ }
+ ],
+ "max_steps": 41195,
+ "num_train_epochs": 5,
+ "total_flos": 7.838843910309734e+16,
+ "trial_name": null,
+ "trial_params": null
+ }
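
trainer_state.json records the Trainer's log_history: a training-loss entry and a SQuAD 2.0 evaluation entry every 1000 steps, up to step 25000 (epoch ~3.03 of the 5 planned, max_steps 41195). A small sketch for extracting the best evaluation from this log, assuming the file is available locally:

    # Sketch: pick the evaluation step with the highest overall F1 from log_history.
    import json

    with open("trainer_state.json") as f:
        state = json.load(f)

    evals = [e for e in state["log_history"] if "eval_f1" in e]
    best = max(evals, key=lambda e: e["eval_f1"])
    print(best["step"], round(best["eval_exact"], 2), round(best["eval_f1"], 2))
    # For the log above this is step 25000: exact ~80.27, F1 ~83.30.
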
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd8d7b4ce323852ab6b9664c788d60fa5524afdbe03909c8adb221b45e54199e
+ size 3055
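
training_args.bin is the pickled transformers TrainingArguments object (another LFS pointer, about 3 kB). A sketch for inspecting it, assuming transformers is importable so the pickle can resolve its class:

    # Sketch: inspect the serialized TrainingArguments.
    import torch

    args = torch.load("training_args.bin", weights_only=False)  # on older torch, omit weights_only
    print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
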
vocab.json ADDED
The diff for this file is too large to render. See raw diff