diff --git a/CHANGELOG.md b/CHANGELOG.md
index 570d8a6d..2a5912fa 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,7 +4,11 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-## [0.18.8] - 2020-01-18
+## [0.18.9] - 2020-01-20
+### Fixed
+- Fixed bug with importing certain pre-trained models in `MultiLabelClassificationModel`.
+
+## [0.18.8] - 2020-01-20
 ### Added
 - Added `**kwargs` to the init methods of `ClassificationModel`, `MultiLabelClassificationModel`, `QuestionAnsweringModel`, and `NERModel`. These will be passed to the `from_pretrained()` method of the underlying model class.
@@ -192,7 +196,9 @@ Model checkpoint is now saved for all epochs again.
 
 - This CHANGELOG file to hopefully serve as an evolving example of a standardized open source project CHANGELOG.
 
-[0.18.8]: https://github.com/ThilinaRajapakse/simpletransformers/compare/44afa70...HEAD
+[0.18.9]: https://github.com/ThilinaRajapakse/simpletransformers/compare/8ade0f4...HEAD
+
+[0.18.8]: https://github.com/ThilinaRajapakse/simpletransformers/compare/44afa70...8ade0f4
 
 [0.18.6]: https://github.com/ThilinaRajapakse/simpletransformers/compare/aa7f650...44afa70
diff --git a/setup.py b/setup.py
index 273c7608..4d378b44 100755
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@ setup(
     name="simpletransformers",
-    version="0.18.8",
+    version="0.18.9",
     author="Thilina Rajapakse",
     author_email="chaturangarajapakshe@gmail.com",
     description="An easy-to-use wrapper library for the Transformers library.",
diff --git a/simpletransformers/custom_models/models.py b/simpletransformers/custom_models/models.py
index 14387d57..f2e4256c 100755
--- a/simpletransformers/custom_models/models.py
+++ b/simpletransformers/custom_models/models.py
@@ -12,18 +12,8 @@ from transformers.modeling_albert import AlbertConfig, AlbertPreTrainedModel, AlbertModel
 
-
-ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP = {
-    'roberta-base': "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-pytorch_model.bin",
-    'roberta-large': "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-large-pytorch_model.bin",
-    'roberta-large-mnli': "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-large-mnli-pytorch_model.bin",
-    'distilroberta-base': "https://s3.amazonaws.com/models.huggingface.co/bert/distilroberta-base-pytorch_model.bin",
-}
-
-DISTILBERT_PRETRAINED_MODEL_ARCHIVE_MAP = {
-    'distilbert-base-uncased': "https://s3.amazonaws.com/models.huggingface.co/bert/distilbert-base-uncased-pytorch_model.bin",
-    'distilbert-base-uncased-distilled-squad': "https://s3.amazonaws.com/models.huggingface.co/bert/distilbert-base-uncased-distilled-squad-pytorch_model.bin"
-}
+from transformers.modeling_roberta import ROBERTA_PRETRAINED_MODEL_ARCHIVE_MAP
+from transformers.modeling_distilbert import DISTILBERT_PRETRAINED_MODEL_ARCHIVE_MAP
 
 from torch.nn import BCEWithLogitsLoss
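
A minimal usage sketch of what this patch touches, for orientation only; it is not part of the diff. With 0.18.9, the custom multi-label model classes resolve pre-trained weights through the archive maps imported from transformers.modeling_roberta and transformers.modeling_distilbert rather than the hard-coded copies removed above, and, per the 0.18.8 entry, extra keyword arguments are forwarded to the underlying from_pretrained() call. The model name, num_labels, and the forwarded kwarg below are illustrative assumptions, not values taken from this diff.

# Sketch, not part of the patch. Assumes simpletransformers==0.18.9 and a
# transformers release that still exposes the *_PRETRAINED_MODEL_ARCHIVE_MAP
# constants imported in custom_models/models.py.
from simpletransformers.classification import MultiLabelClassificationModel

model = MultiLabelClassificationModel(
    "roberta",              # model_type backed by the custom RoBERTa multi-label class
    "roberta-base",         # any name present in transformers' RoBERTa archive map
    num_labels=6,           # illustrative label count
    use_cuda=False,         # keep the example CPU-only
    force_download=False,   # example **kwargs entry, forwarded to from_pretrained()
)

# Multi-label prediction returns one list of 0/1 labels per input text,
# plus the raw sigmoid outputs.
predictions, raw_outputs = model.predict(["example document to tag"])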