from PIL import Image
import requests
import torch
import os

# Set the cache directory for downloaded model weights
cache_dir = "F:\\huggingface_cache"

# Optionally point the Hugging Face caches at the same directory via env vars
# os.environ["TRANSFORMERS_CACHE"] = cache_dir
# os.environ["HF_HOME"] = cache_dir
# os.environ["HUGGINGFACE_HUB_CACHE"] = cache_dir

# Model ID
model_id = "google/gemma-3-4b-it"
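# A minimal sketch of one way the setup above could be completed with
# transformers. It assumes a recent transformers release with Gemma 3 support
# and the accelerate package for device_map="auto"; the image URL and the
# generation settings below are placeholders, not values from the original.
from transformers import AutoProcessor, Gemma3ForConditionalGeneration

processor = AutoProcessor.from_pretrained(model_id, cache_dir=cache_dir)
model = Gemma3ForConditionalGeneration.from_pretrained(
    model_id,
    cache_dir=cache_dir,
    torch_dtype=torch.bfloat16,
    device_map="auto",
).eval()

# Build a chat-style prompt; the URL is a placeholder for a real image.
messages = [
    {
        "role": "user",
        "content": [
            {"type": "image", "image": "https://example.com/sample.jpg"},
            {"type": "text", "text": "Describe this image in detail."},
        ],
    }
]
inputs = processor.apply_chat_template(
    messages, add_generation_prompt=True, tokenize=True,
    return_dict=True, return_tensors="pt",
).to(model.device)

with torch.inference_mode():
    output = model.generate(**inputs, max_new_tokens=100)

# Decode only the newly generated tokens, not the echoed prompt.
prompt_len = inputs["input_ids"].shape[-1]
print(processor.decode(output[0][prompt_len:], skip_special_tokens=True))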
# Separate example: chatting with a locally running model through the Ollama
# Python client (requires the Ollama server and the llama3.2 model pulled).
from ollama import chat
from ollama import ChatResponse

response: ChatResponse = chat(model='llama3.2', messages=[
    {
        'role': 'user',
        'content': 'Why is the sky blue?',
    },
])
print(response['message']['content'])
# or access fields directly from the response object
print(response.message.content)
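# A small follow-on sketch, assuming the same local Ollama setup as above:
# the chat call can also stream the reply as it is generated by passing
# stream=True and iterating over the returned chunks.
stream = chat(
    model='llama3.2',
    messages=[{'role': 'user', 'content': 'Why is the sky blue?'}],
    stream=True,
)
for chunk in stream:
    print(chunk['message']['content'], end='', flush=True)
print()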