Commit 01ed13c

Lint
1 parent e3352fa commit 01ed13c

3 files changed, +9 -6 lines changed


examples/models/llama/runner/eager.py

Lines changed: 3 additions & 3 deletions

@@ -28,7 +28,7 @@ def __init__(self, args):
             params = json.loads(f.read())
         super().__init__(
             tokenizer_path=args.tokenizer_path,
-            tokenizer_config_path=args.tokenizer_config_path
+            tokenizer_config_path=args.tokenizer_config_path,
             max_seq_len=args.max_seq_length,
             max_batch_size=1,
             use_kv_cache=args.use_kv_cache,
@@ -73,12 +73,12 @@ def build_args_parser() -> argparse.ArgumentParser:
         action="store_true",
         default=False,
         help="Have multi-turn chat with the model",
-    )p
+    )

     parser.add_argument(
         "--tokenizer_config_path",
         type=str,
-        deafult=None,
+        default=None,
     )

     return parser
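
For a quick sanity check of the corrected flag, here is a minimal sketch (not part of the commit): it assumes the import path matches the file location above, that build_args_parser has no other required arguments, and that the config path is just a placeholder.

from executorch.examples.models.llama.runner.eager import build_args_parser

parser = build_args_parser()

# Explicit value is picked up via the "--tokenizer_config_path" flag.
args, _ = parser.parse_known_args(["--tokenizer_config_path", "tokenizer_config.json"])
assert args.tokenizer_config_path == "tokenizer_config.json"

# Omitting the flag falls back to the (now correctly spelled) default=None.
args, _ = parser.parse_known_args([])
assert args.tokenizer_config_path is None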

extension/llm/tokenizer/hf_tokenizer.py

Lines changed: 6 additions & 2 deletions

@@ -15,6 +15,7 @@ class HuggingFaceTokenizer:
     """
     Tokenizing and encoding/decoding text using the Hugging face tokenizer.
     """
+
     def __init__(self, model_path: str, config_path: Optional[str] = None):
         """
         Initializes the Tokenizer with a tokenizer.json from HuggingFace.
@@ -30,7 +31,11 @@ def __init__(self, model_path: str, config_path: Optional[str] = None):
         if config_path:
             with open(config_path) as f:
                 tokenizer_config = json.load(f)
-            self.bos_id = self.model.token_to_id(tokenizer_config["bos_token"])if tokenizer_config["bos_token"] else None
+            self.bos_id = (
+                self.model.token_to_id(tokenizer_config["bos_token"])
+                if tokenizer_config["bos_token"]
+                else None
+            )
             self.eos_id = self.model.token_to_id(tokenizer_config["eos_token"])
         else:  # Fallback guess.
             self.bos_id = self.model.token_to_id("<|begin_of_text|>")
@@ -49,4 +54,3 @@ def decode(self, t: List[int]) -> str:

     def decode_token(self, t: int) -> str:
         return self.model.decode([t])
-
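
The reformatted expression keeps the same behavior: bos_id is only looked up when the config actually defines a bos_token, otherwise it stays None. A minimal usage sketch, where the import path is inferred from the file location and both file paths are placeholders (not part of this commit):

from executorch.extension.llm.tokenizer.hf_tokenizer import HuggingFaceTokenizer

# Placeholder paths to an HF-style tokenizer.json / tokenizer_config.json pair.
tok = HuggingFaceTokenizer("tokenizer.json", config_path="tokenizer_config.json")

print(tok.bos_id)                     # None when the config's "bos_token" is empty/null
print(tok.eos_id)                     # looked up directly from the config's "eos_token"
print(tok.decode_token(tok.eos_id))   # decode/decode_token are shown unchanged above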

extension/llm/tokenizer/utils.py

Lines changed: 0 additions & 1 deletion

@@ -4,7 +4,6 @@
 # This source code is licensed under the BSD-style license found in the
 # LICENSE file in the root directory of this source tree.

-import json
 from typing import Optional

 from executorch.examples.models.llama.tokenizer.tiktoken import Tokenizer as Tiktoken
