
Commit 79444f3

Deprecate unused OpenLlama architecture (#24922)

* Resolve typo in check_repo.py
* Specify encoding when opening modeling files
* Deprecate the OpenLlama architecture
* Add disclaimer pointing to Llama (I'm open to different wordings here)
* Match the capitalisation of LLaMA

1 parent 8fd8c8e commit 79444f3

12 files changed: +64 additions, -419 deletions

docs/source/en/model_doc/open-llama.md
Lines changed: 15 additions & 0 deletions

@@ -16,6 +16,21 @@ rendered properly in your Markdown viewer.
 
 # Open-Llama
 
+<Tip warning={true}>
+
+This model is in maintenance mode only, so we won't accept any new PRs changing its code.
+
+If you run into any issues running this model, please reinstall the last version that supported this model: v4.31.0.
+You can do so by running the following command: `pip install -U transformers==4.31.0`.
+
+</Tip>
+
+<Tip warning={true}>
+
+This model differs from the [OpenLLaMA models](https://huggingface.co/models?search=openllama) on the Hugging Face Hub, which primarily use the [LLaMA](llama) architecture.
+
+</Tip>
+
 ## Overview
 
 The Open-Llama model was proposed in [Open-Llama project](https://github.com/s-JoL/Open-Llama) by community developer s-JoL.
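A hedged illustration of the disclaimer above: OpenLLaMA checkpoints on the Hub use the maintained LLaMA architecture, so they load through the regular auto classes rather than the deprecated Open-Llama code path. The checkpoint name below is an assumption for the example, not something this commit references.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical example checkpoint; any Hub OpenLLaMA repo built on the
# LLaMA architecture would load the same way.
name = "openlm-research/open_llama_3b"
tokenizer = AutoTokenizer.from_pretrained(name)
model = AutoModelForCausalLM.from_pretrained(name)
```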

src/transformers/__init__.py
Lines changed: 11 additions & 11 deletions

@@ -278,6 +278,7 @@
         "MCTCTProcessor",
     ],
     "models.deprecated.mmbt": ["MMBTConfig"],
+    "models.deprecated.open_llama": ["OPEN_LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP", "OpenLlamaConfig"],
     "models.deprecated.retribert": [
         "RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP",
         "RetriBertConfig",
@@ -445,7 +446,6 @@
         "NystromformerConfig",
     ],
     "models.oneformer": ["ONEFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "OneFormerConfig", "OneFormerProcessor"],
-    "models.open_llama": ["OPEN_LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP", "OpenLlamaConfig"],
     "models.openai": ["OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP", "OpenAIGPTConfig", "OpenAIGPTTokenizer"],
     "models.opt": ["OPTConfig"],
     "models.owlvit": [
@@ -1536,6 +1536,9 @@
         ]
     )
     _import_structure["models.deprecated.mmbt"].extend(["MMBTForClassification", "MMBTModel", "ModalEmbeddings"])
+    _import_structure["models.deprecated.open_llama"].extend(
+        ["OpenLlamaForCausalLM", "OpenLlamaForSequenceClassification", "OpenLlamaModel", "OpenLlamaPreTrainedModel"]
+    )
     _import_structure["models.deprecated.retribert"].extend(
         ["RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST", "RetriBertModel", "RetriBertPreTrainedModel"]
     )
@@ -2300,9 +2303,6 @@
             "OneFormerPreTrainedModel",
         ]
     )
-    _import_structure["models.open_llama"].extend(
-        ["OpenLlamaForCausalLM", "OpenLlamaForSequenceClassification", "OpenLlamaModel", "OpenLlamaPreTrainedModel"]
-    )
     _import_structure["models.openai"].extend(
         [
             "OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST",
@@ -4239,6 +4239,7 @@
         MCTCTProcessor,
     )
     from .models.deprecated.mmbt import MMBTConfig
+    from .models.deprecated.open_llama import OPEN_LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenLlamaConfig
     from .models.deprecated.retribert import (
         RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP,
         RetriBertConfig,
@@ -4390,7 +4391,6 @@
     from .models.nllb_moe import NLLB_MOE_PRETRAINED_CONFIG_ARCHIVE_MAP, NllbMoeConfig
     from .models.nystromformer import NYSTROMFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, NystromformerConfig
     from .models.oneformer import ONEFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, OneFormerConfig, OneFormerProcessor
-    from .models.open_llama import OPEN_LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenLlamaConfig
     from .models.openai import OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenAIGPTConfig, OpenAIGPTTokenizer
     from .models.opt import OPTConfig
     from .models.owlvit import (
@@ -5334,6 +5334,12 @@
         MCTCTPreTrainedModel,
     )
     from .models.deprecated.mmbt import MMBTForClassification, MMBTModel, ModalEmbeddings
+    from .models.deprecated.open_llama import (
+        OpenLlamaForCausalLM,
+        OpenLlamaForSequenceClassification,
+        OpenLlamaModel,
+        OpenLlamaPreTrainedModel,
+    )
     from .models.deprecated.retribert import (
         RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST,
         RetriBertModel,
@@ -5954,12 +5960,6 @@
         OneFormerModel,
         OneFormerPreTrainedModel,
     )
-    from .models.open_llama import (
-        OpenLlamaForCausalLM,
-        OpenLlamaForSequenceClassification,
-        OpenLlamaModel,
-        OpenLlamaPreTrainedModel,
-    )
    from .models.openai import (
        OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
        OpenAIGPTDoubleHeadsModel,
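The hunks above move the registrations from `models.open_llama` to `models.deprecated.open_llama` while re-exporting the same names at the top level, so public imports keep working. A minimal sketch, assuming this commit is applied and torch is installed:

```python
# The top-level package re-exports the classes from their new home under
# models.deprecated.open_llama, so the old public import path is unchanged.
from transformers import OpenLlamaConfig, OpenLlamaModel

config = OpenLlamaConfig()      # default Open-Llama configuration
model = OpenLlamaModel(config)  # randomly initialised weights
```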

src/transformers/models/__init__.py
Lines changed: 0 additions & 1 deletion

@@ -145,7 +145,6 @@
     nllb_moe,
     nystromformer,
     oneformer,
-    open_llama,
     openai,
     opt,
     owlvit,
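Dropping `open_llama` from this module list reflects the package move: the old dotted path no longer exists. A minimal sketch, assuming this commit is applied:

```python
# New location under the deprecated subpackage:
from transformers.models.deprecated import open_llama

# The pre-commit path would now fail, since the directory was moved:
# from transformers.models import open_llama  # ImportError
```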

src/transformers/models/auto/configuration_auto.py
Lines changed: 1 addition & 0 deletions

@@ -652,6 +652,7 @@
     "bort",
     "mctct",
     "mmbt",
+    "open_llama",
     "retribert",
     "tapex",
     "trajectory_transformer",
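The list extended here is the auto-configuration registry of deprecated model types. A hedged sketch, not the library's exact code, of how such a registry can redirect module lookups into the `deprecated` subpackage:

```python
# Model types listed here resolve under models.deprecated instead of models.
DEPRECATED_MODELS = ["bort", "mctct", "mmbt", "open_llama", "retribert"]

def model_type_to_module_name(model_type: str) -> str:
    """Map a model type string to the module path segment that holds it."""
    name = model_type.replace("-", "_")
    if model_type in DEPRECATED_MODELS:
        return f"deprecated.{name}"
    return name

print(model_type_to_module_name("open_llama"))  # deprecated.open_llama
print(model_type_to_module_name("opt"))         # opt
```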

src/transformers/models/open_llama/__init__.py renamed to src/transformers/models/deprecated/open_llama/__init__.py
Lines changed: 1 addition & 1 deletion

@@ -13,7 +13,7 @@
 # limitations under the License.
 from typing import TYPE_CHECKING
 
-from ...utils import (
+from ....utils import (
     OptionalDependencyNotAvailable,
     _LazyModule,
     is_sentencepiece_available,
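The only code change in each renamed file is one extra dot on the relative imports, because the package now sits one level deeper. A minimal, runnable illustration of the dot arithmetic using the standard library (the package names mirror the rename above):

```python
from importlib.util import resolve_name

# Old location: transformers/models/open_llama/ (three dots reach the root)
print(resolve_name("...utils", "transformers.models.open_llama"))
# -> transformers.utils

# New location: transformers/models/deprecated/open_llama/ (four dots needed)
print(resolve_name("....utils", "transformers.models.deprecated.open_llama"))
# -> transformers.utils
```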

src/transformers/models/open_llama/configuration_open_llama.py renamed to src/transformers/models/deprecated/open_llama/configuration_open_llama.py
Lines changed: 2 additions & 2 deletions

@@ -19,8 +19,8 @@
 # limitations under the License.
 """ Open-Llama model configuration"""
 
-from ...configuration_utils import PretrainedConfig
-from ...utils import logging
+from ....configuration_utils import PretrainedConfig
+from ....utils import logging
 
 
 logger = logging.get_logger(__name__)

src/transformers/models/open_llama/modeling_open_llama.py renamed to src/transformers/models/deprecated/open_llama/modeling_open_llama.py
Lines changed: 4 additions & 4 deletions

@@ -26,10 +26,10 @@
 from torch import nn
 from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
 
-from ...activations import ACT2FN
-from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
-from ...modeling_utils import PreTrainedModel
-from ...utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
+from ....activations import ACT2FN
+from ....modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
+from ....modeling_utils import PreTrainedModel
+from ....utils import add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings
 from .configuration_open_llama import OpenLlamaConfig

src/transformers/utils/dummy_pt_objects.py
Lines changed: 28 additions & 28 deletions

@@ -2396,6 +2396,34 @@ def __init__(self, *args, **kwargs):
         requires_backends(self, ["torch"])
 
 
+class OpenLlamaForCausalLM(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+
+class OpenLlamaForSequenceClassification(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+
+class OpenLlamaModel(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+
+class OpenLlamaPreTrainedModel(metaclass=DummyObject):
+    _backends = ["torch"]
+
+    def __init__(self, *args, **kwargs):
+        requires_backends(self, ["torch"])
+
+
 RETRIBERT_PRETRAINED_MODEL_ARCHIVE_LIST = None
 
 
@@ -5461,34 +5489,6 @@ def __init__(self, *args, **kwargs):
         requires_backends(self, ["torch"])
 
 
-class OpenLlamaForCausalLM(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-
-class OpenLlamaForSequenceClassification(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-
-class OpenLlamaModel(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-
-class OpenLlamaPreTrainedModel(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-
 OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST = None
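These dummy classes keep `from transformers import OpenLlamaModel` importable even without PyTorch; the real check happens at instantiation. A hedged usage sketch, assuming an environment without torch:

```python
from transformers import OpenLlamaModel  # resolves to the dummy without torch

try:
    OpenLlamaModel()  # requires_backends raises ImportError naming "torch"
except ImportError as err:
    print(err)
```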
tests/models/open_llama/__init__.py

Whitespace-only changes.
