bug fix in ConvAIModel
ThilinaRajapakse committed Feb 29, 2020
1 parent 29994cc commit 9b8bb04
Showing 3 changed files with 19 additions and 11 deletions.
CHANGELOG.md (9 additions, 1 deletion)
@@ -4,6 +4,12 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.21.1] - 2020-02-29
+
+### Fixed
+
+- Fixed bug in ConvAIModel where `reprocess_input_data` and `use_cached_eval_features` args were ignored.
+
 ## [0.21.0] - 2020-02-29
 
 ### Added
@@ -430,7 +436,9 @@ Model checkpoint is now saved for all epochs again.
 
 - This CHANGELOG file to hopefully serve as an evolving example of a standardized open source project CHANGELOG.
 
-[0.21.0]: https://github.com/ThilinaRajapakse/simpletransformers/compare/f484717...HEAD
+[0.21.1]: https://github.com/ThilinaRajapakse/simpletransformers/compare/721c55c...HEAD
+
+[0.21.0]: https://github.com/ThilinaRajapakse/simpletransformers/compare/f484717...721c55c
 
 [0.20.3]: https://github.com/ThilinaRajapakse/simpletransformers/compare/daf5ccd...f484717
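The changelog entry above names the two affected args. As a rough illustration of where they enter from user code, here is a minimal, hedged sketch assuming the usual simpletransformers args-dict pattern; the model name, file path, and `use_cuda` value are placeholders, not taken from this commit.

```python
# Minimal sketch (not taken from this commit): how the two args named in the
# changelog entry above are typically supplied. The model name, file path,
# and use_cuda value are placeholders.
from simpletransformers.conv_ai import ConvAIModel

model = ConvAIModel(
    "gpt",
    "gpt_personachat_cache",  # placeholder pretrained-model directory
    use_cuda=False,
    args={
        "reprocess_input_data": True,       # rebuild training features instead of loading the cache
        "use_cached_eval_features": False,  # governs cached evaluation features (see the evaluate() change below)
    },
)

# Before this fix the two flags were accepted but never reached
# load_and_cache_examples(), so cached features were always reused.
model.train_model("data/train.json")  # hypothetical PERSONA-CHAT-style training file
```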
setup.py (1 addition, 1 deletion)
@@ -6,7 +6,7 @@
 
 setup(
     name="simpletransformers",
-    version="0.21.0",
+    version="0.21.1",
     author="Thilina Rajapakse",
     author_email="chaturangarajapakshe@gmail.com",
     description="An easy-to-use wrapper library for the Transformers library.",
simpletransformers/conv_ai/conv_ai_model.py (9 additions, 9 deletions)
@@ -196,7 +196,9 @@ def train_model(
         self._move_model_to_device()
 
         train_dataloader, train_sampler = self.load_and_cache_examples(
-            dataset_path=train_file, verbose=verbose, no_cache=self.args["no_cache"]
+            dataset_path=train_file,
+            verbose=verbose,
+            no_cache=self.args["no_cache"] or self.args["reprocess_input_data"],
         )
 
         if self.args["evaluate_during_training"]:
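The hunk above is the training-side fix: `reprocess_input_data` now participates in the `no_cache` decision for training features. A simplified sketch of that decision follows; `_should_skip_cache` is a hypothetical helper name, not a function in the library.

```python
# Illustration only: the boolean wiring the call above now performs for
# training features. _should_skip_cache is a hypothetical helper, not a
# function in the library.
def _should_skip_cache(args: dict) -> bool:
    # After this commit, either flag forces the features to be rebuilt;
    # previously only "no_cache" was consulted.
    return args["no_cache"] or args["reprocess_input_data"]

assert _should_skip_cache({"no_cache": False, "reprocess_input_data": True})
assert not _should_skip_cache({"no_cache": False, "reprocess_input_data": False})
```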
@@ -224,13 +226,7 @@ def train_model(
         print("Training of {} model complete. Saved to {}.".format(self.args["model_type"], output_dir))
 
     def train(
-        self,
-        train_dataloader,
-        output_dir,
-        show_running_loss=True,
-        eval_dataloader=None,
-        verbose=True,
-        **kwargs,
+        self, train_dataloader, output_dir, show_running_loss=True, eval_dataloader=None, verbose=True, **kwargs,
     ):
         """
         Trains the model on train_dataset.
@@ -510,7 +506,11 @@ def evaluate(self, eval_file, output_dir, verbose=True, silent=False, **kwargs):
         eval_output_dir = output_dir
 
         eval_dataloader, eval_sampler = self.load_and_cache_examples(
-            eval_file, evaluate=True, verbose=verbose, silent=silent
+            eval_file,
+            evaluate=True,
+            verbose=verbose,
+            silent=silent,
+            no_cache=self.args["no_cache"] or self.args["use_cached_eval_features"],
         )
         os.makedirs(eval_output_dir, exist_ok=True)
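The evaluation path now forwards the cache decision as well: as committed, evaluation features are rebuilt whenever `no_cache` or `use_cached_eval_features` is truthy. A hedged usage sketch, continuing the ConvAIModel example shown after the CHANGELOG diff and assuming `eval_model` is the public wrapper around `evaluate()`; the file path is a placeholder.

```python
# Sketch only: an evaluation call whose cached-feature behaviour is governed
# by the flag wired in above. The file path is a placeholder, and eval_model
# is assumed to be the public wrapper around evaluate().
model.eval_model("data/valid.json", verbose=True)

# As committed, evaluation features are rebuilt whenever
# self.args["no_cache"] or self.args["use_cached_eval_features"] is truthy.
```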
