Spaces:
xh0o6
/
Running

hj / gpt4free-main /g4f /api /_tokenizer.py
nononno's picture
Upload 177 files
1b67eb7 verified
raw
history blame contribute delete
286 Bytes
# NOTE(review): entire module is deliberately commented out — the tokenizer is
# disabled, presumably to avoid a hard dependency on the third-party `tiktoken`
# package. If re-enabled, `tiktoken` must be added to the project dependencies.
# NOTE(review): the annotation `Union[int, str]` does not match the actual
# return value `num_tokens, encoded`, which is a tuple of (token count,
# list of token ids) — should be `tuple[int, list[int]]` if restored.
# import tiktoken
# from typing import Union
# def tokenize(text: str, model: str = 'gpt-3.5-turbo') -> Union[int, str]:
# encoding = tiktoken.encoding_for_model(model)
# encoded = encoding.encode(text)
# num_tokens = len(encoded)
# return num_tokens, encoded