{
"cells": [
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Token was found\n",
"<|im_start|>system\n",
"You are an AI assistant with access to various tools.<|im_end|>\n",
"<|im_start|>user\n",
"Hi!<|im_end|>\n",
"<|im_start|>assistant\n",
"Hi human, what can I help you with?<|im_end|>\n",
"<|im_start|>assistant\n",
"\n"
]
}
],
"source": [
"import os\n",
"\n",
"from transformers import AutoTokenizer\n",
"\n",
"messages = [\n",
"    {\"role\": \"system\", \"content\": \"You are an AI assistant with access to various tools.\"},\n",
"    {\"role\": \"user\", \"content\": \"Hi!\"},\n",
"    {\"role\": \"assistant\", \"content\": \"Hi human, what can I help you with?\"},\n",
"]\n",
"\n",
"# Read the Hugging Face token from the environment instead of hard-coding it.\n",
"token = os.getenv('HF_TOKEN')\n",
"if token is None:\n",
"    raise ValueError('You must set the HF_TOKEN environment variable')\n",
"else:\n",
"    print('Token was found')\n",
"    #print('Token:', token)\n",
"\n",
"\n",
"tokenizer = AutoTokenizer.from_pretrained(\"HuggingFaceTB/SmolLM2-1.7B-Instruct\", token=token)\n",
"\n",
"# Render the chat with the model's template; add_generation_prompt appends\n",
"# the opening assistant header so the model would answer next.\n",
"rendered_prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)\n",
"\n",
"print(rendered_prompt)"
]
},
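{
"cell_type": "markdown",
"metadata": {},
"source": [
"The next cell is a minimal sketch of one way to go from the rendered chat to an actual reply: tokenize the same `messages` with `apply_chat_template` and let the SmolLM2 model generate the assistant turn. It assumes a local `torch` install with enough memory for the 1.7B checkpoint; `max_new_tokens=64` is just an illustrative value."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from transformers import AutoModelForCausalLM\n",
"\n",
"# Load the instruct model that matches the tokenizer above\n",
"# (assumes enough local memory for the 1.7B checkpoint).\n",
"model = AutoModelForCausalLM.from_pretrained(\"HuggingFaceTB/SmolLM2-1.7B-Instruct\", token=token)\n",
"\n",
"# Tokenize the chat directly; add_generation_prompt appends the empty\n",
"# assistant header so the model knows it should answer next.\n",
"inputs = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors=\"pt\")\n",
"\n",
"# Generate a short reply (64 new tokens is an arbitrary example value).\n",
"outputs = model.generate(inputs, max_new_tokens=64)\n",
"\n",
"# Decode only the newly generated tokens, skipping the prompt.\n",
"print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))"
]
}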
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.2"
}
},
"nbformat": 4,
"nbformat_minor": 2
}