Skip to content

Commit eb93f9a

Browse files
authored
Remove deprecated functionality (#714)
Changes in this PR: (1) remove the deprecated add_fusion() and train_fusion() methods; (2) remove deprecated support for passing plain lists to adapter activation; (3) add an "adapters." prefix to the version identifier when saving adapters, to prevent confusion with adapter-transformers versions.
1 parent d3e7784 commit eb93f9a

21 files changed

+2661
-2671
lines changed

examples/pytorch/adapterfusion/run_fusion_glue.py

Lines changed: 19 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727
import numpy as np
2828

2929
import adapters
30-
from adapters import AdapterArguments, AdapterTrainer
30+
from adapters import AdapterArguments, AdapterTrainer, Fuse
3131
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer, EvalPrediction, GlueDataset
3232
from transformers import GlueDataTrainingArguments as DataTrainingArguments
3333
from transformers import (
@@ -162,28 +162,26 @@ def main():
162162
model.load_adapter("qa/boolq@ukp", config=SeqBnConfig(), with_head=False)
163163
model.load_adapter("sentiment/imdb@ukp", config=SeqBnConfig(), with_head=False)
164164

165-
adapter_setup = [
166-
[
167-
"sst-2",
168-
"mnli",
169-
"rte",
170-
"mrpc",
171-
"qqp",
172-
"cosmosqa",
173-
"csqa",
174-
"hellaswag",
175-
"socialiqa",
176-
"winogrande",
177-
"cb",
178-
"sick",
179-
"scitail",
180-
"boolq",
181-
"imdb",
182-
]
183-
]
165+
adapter_setup = Fuse(
166+
"sst-2",
167+
"mnli",
168+
"rte",
169+
"mrpc",
170+
"qqp",
171+
"cosmosqa",
172+
"csqa",
173+
"hellaswag",
174+
"socialiqa",
175+
"winogrande",
176+
"cb",
177+
"sick",
178+
"scitail",
179+
"boolq",
180+
"imdb",
181+
)
184182

185183
# Add a fusion layer and tell the model to train fusion
186-
model.add_adapter_fusion(adapter_setup[0], "dynamic")
184+
model.add_adapter_fusion(adapter_setup, "dynamic")
187185
model.train_adapter_fusion(adapter_setup)
188186

189187
# ~~~~~ Rest is again same as in standard training setup ~~~~~

examples/pytorch/text-generation/run_generation.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -225,7 +225,7 @@ def main():
225225
# Setup adapters
226226
if args.load_adapter:
227227
model.load_adapter(args.load_adapter, load_as="generation")
228-
model.set_active_adapters(["generation"])
228+
model.set_active_adapters("generation")
229229

230230
if args.fp16:
231231
model.half()

notebooks/04_Cross_Lingual_Transfer.ipynb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -761,7 +761,7 @@
761761
},
762762
"outputs": [],
763763
"source": [
764-
"model.train_adapter([\"copa\"])"
764+
"model.train_adapter(\"copa\")"
765765
]
766766
},
767767
{

notebooks/08_NER_Wikiann.ipynb

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -136,6 +136,8 @@
136136
"outputs": [],
137137
"source": [
138138
"from adapters import AdapterConfig\n",
139+
"from adapters.composition import Stack\n",
140+
"\n",
139141
"target_language = \"gn\" # Choose any language that a bert-base-multilingual-cased language adapter is available for\n",
140142
"source_language = \"en\" # We support \"en\", \"ja\", \"zh\", and \"ar\"\n",
141143
"\n",
@@ -156,7 +158,7 @@
156158
" leave_out=[11],\n",
157159
")\n",
158160
"# Set the adapters to be used in every forward pass\n",
159-
"model.set_active_adapters([lang_adapter_name, \"wikiann\"])"
161+
"model.set_active_adapters(Stack(lang_adapter_name, \"wikiann\"))"
160162
]
161163
},
162164
{

0 commit comments

Comments (0)