# src/colab/huggingface_microsoft_phi_2.py
# -*- coding: utf-8 -*-
"""huggingface-microsoft-phi-2.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1HC3QEYh0TYcEnuvP1eASRZCyAwh1D8DT
https://huggingface.co/microsoft/phi-2
"""
!pip install einops
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
torch.set_default_device("cuda")
model = AutoModelForCausalLM.from_pretrained("microsoft/phi-2", torch_dtype="auto", trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-2", trust_remote_code=True)
inputs = tokenizer('''def print_prime(n):
"""
Print all primes between 1 and n
"""''', return_tensors="pt", return_attention_mask=False)
outputs = model.generate(**inputs, max_length=200)
text = tokenizer.batch_decode(outputs)[0]
print(text)
def print_prime(n):
    """Print all primes between 1 and n, inclusive, one per line.

    Prints nothing for n < 2. Uses trial division, but only tests
    divisors up to isqrt(candidate): a composite number always has a
    divisor no larger than its square root, so this is sufficient and
    avoids the original O(i) scan per candidate.
    """
    from math import isqrt  # local import keeps the module's import block unchanged

    for candidate in range(2, n + 1):
        for divisor in range(2, isqrt(candidate) + 1):
            if candidate % divisor == 0:
                break
        else:
            # No divisor found -> candidate is prime.
            print(candidate)


print_prime(100)
# Sum of the even integers between 1 and 100 inclusive.
total = sum(value for value in range(1, 101) if value % 2 == 0)
print(total)