
Commit 45a3321

Merge branch 'tatoeba' of https://github.com/Muennighoff/promptsource into tatoeba

2 parents be399eb + 0815263

File tree: 7 files changed, +592 -60 lines changed


promptsource/templates.py

Lines changed: 1 addition & 0 deletions
@@ -38,6 +38,7 @@
     "BigScienceBiasEval",
     "gsarti",
     "Helsinki-NLP",
+    "Muennighoff",
 }

 # These are the metrics with which templates can be tagged
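The set extended here lists Hugging Face Hub user/org names; in promptsource it acts as the allow-list deciding which community namespaces are scanned for templates, which is what makes the Muennighoff/xwinograd files below discoverable. A minimal sketch of that kind of check, assuming a set named INCLUDED_USERS and the templates/<user>/<dataset>/<subset>/templates.yaml layout (both are repo conventions, not shown in this diff):

from pathlib import Path

# Assumed allow-list of Hub namespaces; this commit adds "Muennighoff" to it.
INCLUDED_USERS = {"BigScienceBiasEval", "gsarti", "Helsinki-NLP", "Muennighoff"}

def find_community_templates(templates_root):
    """Yield templates.yaml paths whose top-level folder is an allow-listed user."""
    for user_dir in Path(templates_root).iterdir():
        if user_dir.is_dir() and user_dir.name in INCLUDED_USERS:
            yield from user_dir.rglob("templates.yaml")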
Lines changed: 110 additions & 0 deletions
@@ -0,0 +1,110 @@
+dataset: Muennighoff/xwinograd
+subset: en
+templates:
+  28d31908-4aee-4545-aff2-7528cbf39197: !Template
+    answer_choices: '{{option1}} ||| {{option2}}'
+    id: 28d31908-4aee-4545-aff2-7528cbf39197
+    jinja: "{{sentence}}\nReplace the _ in the above sentence with the correct option:\
+      \ \n- {{option1}}\n- {{option2}}\n|||\n{% if answer == '1' %} {{option1}} {%\
+      \ else %} {{ option2 }} {% endif %}"
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: Replace
+    reference: ''
+  50ce5113-882f-4a9d-b21d-8d98b4644295: !Template
+    answer_choices: '{{option1}} ||| {{option2}}'
+    id: 50ce5113-882f-4a9d-b21d-8d98b4644295
+    jinja: 'Fill in the _ in the below sentence:
+
+      {{sentence}}
+
+
+      Choices:
+
+      - {{ option1 }}
+
+      - {{ option2 }}
+
+
+      Answer: ||| {% if answer == ''1'' %} {{option1}} {% else %} {{ option2 }} {%
+      endif %}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: fill in the blank
+    reference: ''
+  7f0f6d33-25e2-4394-b1f0-49a2a54767aa: !Template
+    answer_choices: True ||| False
+    id: 7f0f6d33-25e2-4394-b1f0-49a2a54767aa
+    jinja: 'The _ in the sentence below refers to {{option1}}. True or False?
+
+      {{sentence}}|||
+
+      {{answer_choices[answer|int - 1]}}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: false
+    name: True or False
+    reference: ''
+  80f9679e-7b6c-4ee7-a348-e905ed9aaf9e: !Template
+    answer_choices: '{{ option1 }} ||| {{ option2 }}'
+    id: 80f9679e-7b6c-4ee7-a348-e905ed9aaf9e
+    jinja: '{{ sentence }} In the previous sentence, does _ refer to {{ option1 }}
+      or {{ option2 }}? ||| {% if answer == ''1'' %} {{option1}} {% else %} {{ option2
+      }} {% endif %}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: does underscore refer to
+    reference: ''
+  bd40cf1f-bda2-4757-b1b5-f1a20a3f7202: !Template
+    answer_choices: '{{option1}} ||| {{option2}}'
+    id: bd40cf1f-bda2-4757-b1b5-f1a20a3f7202
+    jinja: '{{sentence}}
+
+      What does the _ in the above sentence refer to? {{ option1 }} or {{ option2
+      }}? ||| {% if answer == ''1'' %} {{option1}} {% else %} {{ option2 }} {% endif
+      %}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: underscore refer to
+    reference: ''
+  ec365d5d-bb5c-488c-93a0-4f90e6011c5d: !Template
+    answer_choices: '{{option1}} ||| {{option2}}'
+    id: ec365d5d-bb5c-488c-93a0-4f90e6011c5d
+    jinja: 'In the sentence below, does the _ stand for {{answer_choices[0]}} or {{answer_choices[1]}}?
+
+      {{sentence}}|||
+
+      {{answer_choices[answer | int - 1]}}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: stand for
+    reference: ''
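As a sanity check, the "Replace" template above can be rendered with plain Jinja2 on a made-up record shaped like an xwinograd row (a sentence containing a _ blank, two options, and answer given as the string '1' or '2'). The record below is hypothetical, and promptsource itself would route this through Template.apply rather than raw Jinja:

from jinja2 import Template

JINJA = (
    "{{sentence}}\nReplace the _ in the above sentence with the correct option: \n"
    "- {{option1}}\n- {{option2}}\n|||\n"
    "{% if answer == '1' %} {{option1}} {% else %} {{ option2 }} {% endif %}"
)

# Hypothetical row in the schema these templates assume.
example = {
    "sentence": "The trophy does not fit in the suitcase because _ is too big.",
    "option1": "the trophy",
    "option2": "the suitcase",
    "answer": "1",
}

# promptsource separates prompt and target with '|||'.
prompt, target = Template(JINJA).render(**example).split("|||")
print(prompt.strip())
print(target.strip())  # -> the trophy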
Lines changed: 110 additions & 0 deletions
@@ -0,0 +1,110 @@
+dataset: Muennighoff/xwinograd
+subset: en
+templates:
+  38d31908-4aee-4545-aff2-7528cbf39197: !Template
+    answer_choices: '{{option1}} ||| {{option2}}'
+    id: 38d31908-4aee-4545-aff2-7528cbf39197
+    jinja: "{{sentence}}\nReplace the _ in the above sentence with the correct option:\
+      \ \n- {{option1}}\n- {{option2}}\n|||\n{% if answer == '1' %} {{option1}} {%\
+      \ else %} {{ option2 }} {% endif %}"
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: Replace
+    reference: ''
+  60ce5113-882f-4a9d-b21d-8d98b4644295: !Template
+    answer_choices: '{{option1}} ||| {{option2}}'
+    id: 60ce5113-882f-4a9d-b21d-8d98b4644295
+    jinja: 'Fill in the _ in the below sentence:
+
+      {{sentence}}
+
+
+      Choices:
+
+      - {{ option1 }}
+
+      - {{ option2 }}
+
+
+      Answer: ||| {% if answer == ''1'' %} {{option1}} {% else %} {{ option2 }} {%
+      endif %}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: fill in the blank
+    reference: ''
+  8f0f6d33-25e2-4394-b1f0-49a2a54767aa: !Template
+    answer_choices: True ||| False
+    id: 8f0f6d33-25e2-4394-b1f0-49a2a54767aa
+    jinja: 'The _ in the sentence below refers to {{option1}}. True or False?
+
+      {{sentence}}|||
+
+      {{answer_choices[answer|int - 1]}}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: false
+    name: True or False
+    reference: ''
+  90f9679e-7b6c-4ee7-a348-e905ed9aaf9e: !Template
+    answer_choices: '{{ option1 }} ||| {{ option2 }}'
+    id: 90f9679e-7b6c-4ee7-a348-e905ed9aaf9e
+    jinja: '{{ sentence }} In the previous sentence, does _ refer to {{ option1 }}
+      or {{ option2 }}? ||| {% if answer == ''1'' %} {{option1}} {% else %} {{ option2
+      }} {% endif %}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: does underscore refer to
+    reference: ''
+  cd40cf1f-bda2-4757-b1b5-f1a20a3f7202: !Template
+    answer_choices: '{{option1}} ||| {{option2}}'
+    id: cd40cf1f-bda2-4757-b1b5-f1a20a3f7202
+    jinja: '{{sentence}}
+
+      What does the _ in the above sentence refer to? {{ option1 }} or {{ option2
+      }}? ||| {% if answer == ''1'' %} {{option1}} {% else %} {{ option2 }} {% endif
+      %}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: underscore refer to
+    reference: ''
+  fc365d5d-bb5c-488c-93a0-4f90e6011c5d: !Template
+    answer_choices: '{{option1}} ||| {{option2}}'
+    id: fc365d5d-bb5c-488c-93a0-4f90e6011c5d
+    jinja: 'In the sentence below, does the _ stand for {{answer_choices[0]}} or {{answer_choices[1]}}?
+
+      {{sentence}}|||
+
+      {{answer_choices[answer | int - 1]}}'
+    metadata: !TemplateMetadata
+      choices_in_prompt: true
+      languages:
+      - en
+      metrics:
+      - Accuracy
+      original_task: true
+    name: stand for
+    reference: ''
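One idiom worth noting in the "True or False" and "stand for" templates: promptsource splits the answer_choices field on '|||' and exposes the resulting list to Jinja as answer_choices, so answer_choices[answer | int - 1] converts the string label '1'/'2' into a 0-based lookup. A tiny illustration (the choice strings are stand-ins for the rendered options):

from jinja2 import Template

# Stand-ins for the rendered '{{option1}} ||| {{option2}}' choices.
answer_choices = ["the trophy", "the suitcase"]

# The int filter binds before the subtraction, so '2' becomes index 1.
picked = Template("{{ answer_choices[answer | int - 1] }}").render(
    answer_choices=answer_choices, answer="2"
)
assert picked == "the suitcase"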
