from causapscal.lens import Lens
lens = Lens.from_preset("gpt")
Loaded pretrained model gpt2 into HookedTransformer
[ '<|endoftext|>', 'Q', 'uel', 'le', ' est', ' la', ' cou', 'le', 'ur', ' du', ' chat', ' de', ' Hermione', ' Granger', '?' ]
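The list above matches what TransformerLens' `to_str_tokens` returns. A minimal sketch of how it could be produced, assuming `lens.model` is the `HookedTransformer` loaded above (`QUESTION` is our name for the prompt, reconstructed here from the string tokens shown):

# The question, reconstructed from the string tokens above
# ("What colour is Hermione Granger's cat?").
QUESTION = "Quelle est la couleur du chat de Hermione Granger?"

# to_str_tokens prepends the BOS token '<|endoftext|>' by default
# and returns one string per token.
print(lens.model.to_str_tokens(QUESTION))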
['!', '"', '#', '$', '%']
50257
tensor([[50256, 48, 2731, 293, 1556, 8591, 2284, 293, 333, 7043, 8537, 390, 19959, 46236, 30]], device='cuda:0')
['\n', ' (', '\n\n', ' I', ' A']
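The four outputs above (a slice of the vocabulary, the vocabulary size, the token ids of the question, and the model's top-5 next-token guesses) can be reproduced with standard TransformerLens calls. A sketch, reusing the `QUESTION` string from the previous snippet; the names `question_tokens` and `top5_strs` are ours, for illustration:

import torch

# The first few vocabulary entries, decoded by id (GPT-2 ids 0-4 are '!', '"', '#', '$', '%').
print([lens.model.tokenizer.decode([i]) for i in range(5)])

# Vocabulary size (50257 for GPT-2).
print(lens.model.cfg.d_vocab)

# Token ids of the question: shape [batch=1, pos], with BOS (50256) prepended.
question_tokens = lens.model.to_tokens(QUESTION)
print(question_tokens)

def top5_strs(logits, pos=-1):
    """Decode the 5 most likely next tokens at a given position (hypothetical helper)."""
    ids = torch.topk(logits[0, pos], k=5).indices
    return [lens.model.tokenizer.decode([int(i)]) for i in ids]

# Top-5 next-token candidates after the question.
print(top5_strs(lens.model(question_tokens)))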
PHRASE = "Ceci est une superbe phrase qui ne sert à - Sckathapscal Gorphineus Quantifilius Artificewick des Vents. Ceci est une superbe phrase qui ne sert à - Sckathapscal Gorphineus Quantifilius Artificewick des Vents."
str_tokens = filtered_to_str_tokens(lens.model, [PHRASE])
tokens = lens.model.to_tokens(PHRASE)              # [batch=1, pos] token ids, BOS prepended by default
logits, cache = lens.model.run_with_cache(tokens)  # forward pass that also records every intermediate activation in `cache`
['orph', 'morph', 'omorph', 'obi', 'omb']
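The list above looks like the same kind of top-5 readout, taken this time from the `logits` computed for PHRASE; which position was being inspected is not shown in this excerpt. Reusing the hypothetical `top5_strs` helper from the earlier sketch, any position can be checked:

# Top-5 candidates at the final position of PHRASE; change `pos`
# to inspect the prediction made after any other token.
print(top5_strs(logits, pos=-1))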