Move test model folders #17034

Merged 33 commits on May 3, 2022
Changes shown from 1 of 33 commits

Commits
babdc23  test - to be revert (ydshieh, Apr 27, 2022)
8ff0bde  temp change to generate new cache - to be reverted (ydshieh, Apr 27, 2022)
d09b62b  temp change to generate new cache - to be reverted (ydshieh, Apr 27, 2022)
75ff03c  clean-up (ydshieh, Apr 27, 2022)
ffbcad9  move test model folders (TODO: fix imports and others) (ydshieh, Apr 26, 2022)
7851bdb  fix (potentially partially) imports (in model test modules) (ydshieh, Apr 26, 2022)
fed0b1d  fix (potentially partially) imports (in tokenization test modules) (ydshieh, Apr 26, 2022)
c439b16  fix (potentially partially) imports (in feature extraction test modules) (ydshieh, Apr 26, 2022)
f35f1c5  fix import utils.test_modeling_tf_core (ydshieh, Apr 26, 2022)
2d801d2  fix path ../fixtures/ (ydshieh, Apr 26, 2022)
7d1d1a5  fix imports about generation.test_generation_flax_utils (ydshieh, Apr 26, 2022)
ab59261  fix more imports (ydshieh, Apr 26, 2022)
ae4827b  fix fixture path (ydshieh, Apr 26, 2022)
faba7b6  fix get_test_dir (ydshieh, Apr 26, 2022)
8c9600e  update module_to_test_file (ydshieh, Apr 26, 2022)
732ca46  fix get_tests_dir from wrong transformers.utils (ydshieh, Apr 26, 2022)
16e269e  update config.yml (CircleCI) (ydshieh, Apr 26, 2022)
6b52d1f  fix style (ydshieh, Apr 26, 2022)
e971915  remove missing imports (ydshieh, Apr 26, 2022)
4b0320e  update new model script (ydshieh, Apr 26, 2022)
8b470d0  update check_repo (ydshieh, Apr 26, 2022)
3dad271  update SPECIAL_MODULE_TO_TEST_MAP (ydshieh, Apr 26, 2022)
790f0d9  fix style (ydshieh, Apr 26, 2022)
c31b9c9  add __init__ (ydshieh, Apr 27, 2022)
39966aa  update self-scheduled (ydshieh, Apr 27, 2022)
c90b35c  fix add_new_model scripts (ydshieh, Apr 27, 2022)
579cdbf  check one way to get location back (ydshieh, Apr 27, 2022)
fa9cfd4  python setup.py build install (ydshieh, Apr 27, 2022)
021ae85  fix import in test auto (ydshieh, May 1, 2022)
825581f  update self-scheduled.yml (ydshieh, May 1, 2022)
b83abd8  update slack notification script (ydshieh, May 1, 2022)
0e44f67  Add comments about artifact names (ydshieh, May 2, 2022)
9bc8e1a  fix for yolos (ydshieh, May 3, 2022)
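
Most of the commits above follow from a single structural change: each model's tests moved from a flat tests/ directory into tests/models/<model_name>/, so relative imports inside the test modules sit one level deeper and shared helpers such as test_modeling_common now live two directories up. A minimal sketch of the kind of adjustment the import-fix commits describe, with illustrative lines rather than ones copied from this PR:

# tests/models/bert/test_modeling_bert.py (sketch, after the move)

# While the file lived at tests/test_modeling_bert.py, the shared mixins were siblings:
#   from .test_configuration_common import ConfigTester
#   from .test_modeling_common import ModelTesterMixin

# After moving into tests/models/bert/, the same helpers are reached two package
# levels up, so the relative imports gain extra dots:
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin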
Commit e97191506ec7faa49be341975b863406ef99a295: remove missing imports
ydshieh committed May 3, 2022
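
The files below all shed the same dead imports: after the folder move, the fixture paths in these tests go through get_tests_dir from transformers.testing_utils rather than being assembled by hand, which is what leaves import os and from os.path import dirname unused, so this commit drops them. A hedged before/after sketch of that pattern; the variable names and fixture filename are examples, not lines taken from this diff:

# Hand-built fixture path, the style that needed `os` and `dirname`:
import os
from os.path import dirname

OLD_SAMPLE_VOCAB = os.path.join(
    # climb from tests/models/<model_name>/ back up to tests/
    dirname(dirname(dirname(os.path.abspath(__file__)))),
    "fixtures/test_sentencepiece.model",
)

# Helper-based path that survives the move unchanged, with no os/dirname needed:
from transformers.testing_utils import get_tests_dir

SAMPLE_VOCAB = get_tests_dir("fixtures/test_sentencepiece.model")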
2 changes: 0 additions & 2 deletions tests/models/albert/test_tokenization_albert.py
@@ -13,9 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname

from transformers import AlbertTokenizer, AlbertTokenizerFast
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow
1 change: 0 additions & 1 deletion tests/models/auto/test_configuration_auto.py
@@ -14,7 +14,6 @@
# limitations under the License.

import importlib
import os
import sys
import tempfile
import unittest
1 change: 0 additions & 1 deletion tests/models/auto/test_feature_extraction_auto.py
@@ -14,7 +14,6 @@
# limitations under the License.

import json
import os
import sys
import tempfile
import unittest
1 change: 0 additions & 1 deletion tests/models/bartpho/test_tokenization_bartpho.py
@@ -15,7 +15,6 @@

import os
import unittest
from os.path import dirname

from transformers.models.bartpho.tokenization_bartpho import VOCAB_FILES_NAMES, BartphoTokenizer
from transformers.testing_utils import get_tests_dir
@@ -13,9 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname

from transformers import BertGenerationTokenizer
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_torch, slow
2 changes: 0 additions & 2 deletions tests/models/big_bird/test_tokenization_big_bird.py
@@ -13,9 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname

from transformers import BigBirdTokenizer, BigBirdTokenizerFast
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, require_torch, slow
2 changes: 0 additions & 2 deletions tests/models/camembert/test_tokenization_camembert.py
@@ -13,9 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname

from transformers import CamembertTokenizer, CamembertTokenizerFast
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow
2 changes: 0 additions & 2 deletions tests/models/deberta_v2/test_tokenization_deberta_v2.py
@@ -13,9 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname

from transformers import DebertaV2Tokenizer, DebertaV2TokenizerFast
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow
1 change: 0 additions & 1 deletion tests/models/fnet/test_tokenization_fnet.py
@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest

from transformers import FNetTokenizer, FNetTokenizerFast
1 change: 0 additions & 1 deletion tests/models/layoutxlm/test_processor_layoutxlm.py
@@ -17,7 +17,6 @@
import shutil
import tempfile
import unittest
from os.path import dirname
from typing import List

from transformers import PreTrainedTokenizer, PreTrainedTokenizerBase, PreTrainedTokenizerFast
1 change: 0 additions & 1 deletion tests/models/layoutxlm/test_tokenization_layoutxlm.py
@@ -14,7 +14,6 @@
# limitations under the License.

import inspect
import os
import shutil
import tempfile
import unittest
2 changes: 0 additions & 2 deletions tests/models/luke/test_tokenization_luke.py
@@ -13,9 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname
from typing import Tuple

from transformers import AddedToken, LukeTokenizer
2 changes: 0 additions & 2 deletions tests/models/m2m_100/test_tokenization_m2m_100.py
@@ -12,10 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import tempfile
import unittest
from os.path import dirname
from pathlib import Path
from shutil import copyfile

2 changes: 0 additions & 2 deletions tests/models/marian/test_tokenization_marian.py
@@ -13,10 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import tempfile
import unittest
from os.path import dirname
from pathlib import Path
from shutil import copyfile

1 change: 0 additions & 1 deletion tests/models/mbart/test_tokenization_mbart.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import shutil
import tempfile
import unittest
2 changes: 0 additions & 2 deletions tests/models/mbart50/test_tokenization_mbart50.py
@@ -12,11 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import shutil
import tempfile
import unittest
from os.path import dirname

from transformers import SPIECE_UNDERLINE, BatchEncoding, MBart50Tokenizer, MBart50TokenizerFast, is_torch_available
from transformers.testing_utils import (
2 changes: 0 additions & 2 deletions tests/models/mluke/test_tokenization_mluke.py
@@ -14,9 +14,7 @@
# limitations under the License.


import os
import unittest
from os.path import dirname
from typing import Tuple

from transformers.models.mluke.tokenization_mluke import MLukeTokenizer
1 change: 0 additions & 1 deletion tests/models/plbart/test_tokenization_plbart.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import tempfile
import unittest

1 change: 0 additions & 1 deletion tests/models/rag/test_modeling_rag.py
@@ -20,7 +20,6 @@
import shutil
import tempfile
import unittest
from os.path import dirname
from unittest.mock import patch

import numpy as np
2 changes: 0 additions & 2 deletions tests/models/reformer/test_tokenization_reformer.py
@@ -12,9 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname

from transformers import SPIECE_UNDERLINE, ReformerTokenizer, ReformerTokenizerFast
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, require_torch, slow
2 changes: 0 additions & 2 deletions tests/models/speech_to_text/test_processor_speech_to_text.py
@@ -12,11 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import shutil
import tempfile
import unittest
from os.path import dirname
from pathlib import Path
from shutil import copyfile

@@ -12,9 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname
from pathlib import Path
from shutil import copyfile

1 change: 0 additions & 1 deletion tests/models/xglm/test_tokenization_xglm.py
@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import pickle
import shutil
import tempfile
@@ -13,9 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname

from transformers.models.xlm_prophetnet.tokenization_xlm_prophetnet import SPIECE_UNDERLINE, XLMProphetNetTokenizer
from transformers.testing_utils import get_tests_dir, require_sentencepiece, slow
2 changes: 0 additions & 2 deletions tests/models/xlm_roberta/test_tokenization_xlm_roberta.py
@@ -13,12 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import pickle
import shutil
import tempfile
import unittest
from os.path import dirname

from transformers import SPIECE_UNDERLINE, XLMRobertaTokenizer, XLMRobertaTokenizerFast
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow
2 changes: 0 additions & 2 deletions tests/models/xlnet/test_tokenization_xlnet.py
@@ -13,9 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import unittest
from os.path import dirname

from transformers import SPIECE_UNDERLINE, XLNetTokenizer, XLNetTokenizerFast
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow