From 2887239727b149fb379464ba9a039561653b3b94 Mon Sep 17 00:00:00 2001
From: han
Date: Sat, 4 Dec 2021 00:31:11 +0900
Subject: [PATCH] Fix import bugs in text-transformers (#103)

* Fix import bugs in text-transformers.py

- Add torchtext version in .meta.yml

* Update .meta.yml

Co-authored-by: Jirka Borovec
---
 lightning_examples/text-transformers/.meta.yml             | 5 +++--
 lightning_examples/text-transformers/text-transformers.py | 2 +-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/lightning_examples/text-transformers/.meta.yml b/lightning_examples/text-transformers/.meta.yml
index 17ee40925..394d05be4 100644
--- a/lightning_examples/text-transformers/.meta.yml
+++ b/lightning_examples/text-transformers/.meta.yml
@@ -1,9 +1,9 @@
 title: Finetune Transformers Models with PyTorch Lightning
 author: PL team
 created: 2021-01-31
-updated: 2021-06-21
+updated: 2021-12-03
 license: CC BY-SA
-build: 1
+build: 2
 tags:
   - Text
 description: |
@@ -15,6 +15,7 @@ requirements:
   - datasets
   - scipy
   - scikit-learn
+  - torchtext>=0.9
 accelerator:
   - CPU
   - GPU
diff --git a/lightning_examples/text-transformers/text-transformers.py b/lightning_examples/text-transformers/text-transformers.py
index 9e1a49ee8..8ff65f393 100644
--- a/lightning_examples/text-transformers/text-transformers.py
+++ b/lightning_examples/text-transformers/text-transformers.py
@@ -220,7 +220,7 @@ def setup(self, stage=None) -> None:
         if stage != "fit":
             return
         # Get dataloader by calling it - train_dataloader() is called after setup() by default
-        train_loader = self.train_dataloader()
+        train_loader = self.trainer.datamodule.train_dataloader()

         # Calculate total steps
         tb_size = self.hparams.train_batch_size * max(1, self.trainer.gpus)
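For context, here is a minimal sketch of the pattern the second hunk fixes, assuming PyTorch Lightning ~1.x as used by the tutorial at the time; the class and hyperparameter names below are illustrative, not taken from the patch. The bug: when the dataloaders are defined on a `LightningDataModule` rather than on the `LightningModule` itself, `self.train_dataloader()` does not exist on the module, so the loader must be reached through the trainer's attached datamodule.

```python
# Minimal sketch of the fixed setup() pattern (hypothetical class and
# hyperparameter names; not the tutorial's full implementation).
import pytorch_lightning as pl


class SketchTransformer(pl.LightningModule):
    def setup(self, stage=None) -> None:
        if stage != "fit":
            return
        # The dataloaders live on the LightningDataModule, not on this
        # module, so self.train_dataloader() would raise AttributeError.
        # The Trainer attaches the datamodule before calling setup(),
        # so it is safe to reach the loader via self.trainer.datamodule.
        train_loader = self.trainer.datamodule.train_dataloader()

        # Estimate total optimization steps, e.g. for an LR scheduler:
        # effective batch size = per-device batch * devices * grad accumulation.
        tb_size = self.hparams.train_batch_size * max(1, self.trainer.gpus)
        ab_size = tb_size * self.trainer.accumulate_grad_batches
        self.total_steps = (len(train_loader.dataset) // ab_size) * float(self.trainer.max_epochs)
```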