Install the Transformers and Datasets libraries to run this notebook.
In [ ]:
!pip install datasets transformers[sentencepiece]
In [ ]:
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load a pretrained GPT-2 language model and its matching tokenizer.
model = AutoModelForCausalLM.from_pretrained("gpt2")
tokenizer = AutoTokenizer.from_pretrained("gpt2")

# Tokenize the sample sentence into PyTorch tensors.
inputs = tokenizer(
    "Hugging Face is a startup based in New York City and Paris",
    return_tensors="pt",
)

# Passing the input ids as labels makes the model return the mean
# cross-entropy loss over the sequence (next-token prediction).
# Run under no_grad: we only evaluate, so no gradients are needed.
with torch.no_grad():
    loss = model(input_ids=inputs["input_ids"], labels=inputs["input_ids"]).loss

# Perplexity is the exponential of the average cross-entropy loss.
ppl = torch.exp(loss)
print(f"Perplexity: {ppl.item():.2f}")