Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

scripts: Use local gguf package when running from repo #2927

Merged
merged 5 commits (base and head branch names not captured in this page extract)
Aug 31, 2023
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next commit
scripts: Use local gguf when running from repo
  • Loading branch information
KerfuffleV2 committed Aug 31, 2023
commit 4320055b22ca4301d17ace8fc7e4204a98d9e1a0
5 changes: 4 additions & 1 deletion convert-falcon-hf-to-gguf.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,14 @@
from pathlib import Path
from typing import Any

import gguf
import numpy as np
import torch
from transformers import AutoTokenizer # type: ignore[import]

if os.environ.get('NO_LOCAL_GGUF') is None and Path('gguf-py', 'gguf', '__init__.py').is_file():
sys.path.insert(1, str(Path('gguf-py', 'gguf').absolute()))
import gguf


def bytes_to_unicode():
# ref: https://github.com/openai/gpt-2/blob/master/src/encoder.py
Expand Down
4 changes: 4 additions & 0 deletions convert-gptneox-hf-to-gguf.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,10 @@
import torch
from transformers import AutoTokenizer # type: ignore[import]

if os.environ.get('NO_LOCAL_GGUF') is None and Path('gguf-py', 'gguf', '__init__.py').is_file():
sys.path.insert(1, str(Path('gguf-py', 'gguf').absolute()))
import gguf

# ref: https://github.com/openai/gpt-2/blob/master/src/encoder.py


Expand Down
6 changes: 5 additions & 1 deletion convert-llama-ggmlv3-to-gguf.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,13 @@
import sys
from pathlib import Path

import gguf
import numpy as np

import os
if os.environ.get('NO_LOCAL_GGUF') is None and Path('gguf-py', 'gguf', '__init__.py').is_file():
sys.path.insert(1, str(Path('gguf-py', 'gguf').absolute()))
import gguf

# Note: Does not support GGML_QKK_64
QK_K = 256
# Items here are (block size, type size)
Expand Down
6 changes: 5 additions & 1 deletion convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,14 @@
from pathlib import Path
from typing import IO, TYPE_CHECKING, Any, Callable, Generator, Iterable, Literal, Sequence, TypeVar

import gguf
import numpy as np
from sentencepiece import SentencePieceProcessor # type: ignore[import]

import os
if os.environ.get('NO_LOCAL_GGUF') is None and Path('gguf-py', 'gguf', '__init__.py').is_file():
sys.path.insert(1, str(Path('gguf-py', 'gguf').absolute()))
import gguf

if TYPE_CHECKING:
from typing import TypeAlias

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,19 @@
# train-text-from-scratch checkpoint --> gguf conversion

import argparse
import gguf
import os
import struct
import sys
import numpy as np
from pathlib import Path

if os.environ.get('NO_LOCAL_GGUF') is None:
if Path('gguf-py', 'gguf', '__init__.py').is_file():
sys.path.insert(1, str(Path('gguf-py', 'gguf').absolute()))
elif Path('..', '..', 'gguf-py', 'gguf', '__init__.py').is_file():
sys.path.insert(1, str(Path('..', '..', 'gguf-py', 'gguf').absolute()))
import gguf

# gguf constants
LLM_KV_OPTIMIZER_TYPE = "optimizer.type"
LLM_KV_OPTIMIZER_TYPE_ADAM = "adam"
Expand Down