# text-to-text / main.py
# Source: parkho87's Hugging Face repository — commit "Create main.py" (23e4a88, verified)
# Original page chrome: raw / history / blame / contribute / delete — 494 Bytes
# Use a pipeline as a high-level helper.
from transformers import pipeline

# Chat-style input: a list of {"role", "content"} message dicts.
messages = [
    {"role": "user", "content": "Who are you?"},
]

# NOTE(review): trust_remote_code=True executes Python shipped inside the
# model repository — keep it only for repos you trust.
pipe = pipeline("text-generation", model="microsoft/phi-4", trust_remote_code=True)

# Fix: the original discarded the pipeline's return value, so the script
# produced no visible output. Capture and print the generated text.
result = pipe(messages)
print(result)
# Load model directly (no pipeline wrapper).
from transformers import AutoTokenizer, AutoModelForCausalLM

# Hoist the repeated checkpoint id into one place.
checkpoint = "microsoft/phi-4"

# Fetch the tokenizer and the model weights separately from the same
# checkpoint. trust_remote_code=True permits custom code from the model
# repository to run during loading.
tokenizer = AutoTokenizer.from_pretrained(checkpoint, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(checkpoint, trust_remote_code=True)