gpt4free/g4f/api/_tokenizer.py
abc 8e7e694d81 ~ | updated g4f.api
new api and requirements
2023-10-20 19:04:13 +01:00

9 lines
272 B
Python

import tiktoken
from typing import List, Tuple, Union
def tokenize(text: str, model: str = 'gpt-3.5-turbo') -> Tuple[int, List[int]]:
    """Tokenize *text* with the tiktoken encoding for *model*.

    Args:
        text: The string to tokenize.
        model: Model name used to look up the matching tiktoken encoding
            (defaults to ``'gpt-3.5-turbo'``).

    Returns:
        A ``(num_tokens, encoded)`` tuple: the token count and the list
        of token ids.

    Raises:
        KeyError: If tiktoken has no encoding registered for *model*.
    """
    # NOTE: the original annotation was Union[int, str], but the function
    # has always returned a (count, token-id-list) tuple — fixed here.
    encoding = tiktoken.encoding_for_model(model)
    encoded = encoding.encode(text)
    return len(encoded), encoded