# Lecture link: (link)
def fix_torch_seed(seed=42):
    """Seed PyTorch's RNGs and force deterministic cuDNN behavior.

    Makes runs reproducible on CPU and GPU. Note that cuDNN
    determinism can slow training, and `benchmark=False` disables
    cuDNN's autotuner.

    Args:
        seed (int): seed value for all PyTorch RNGs. Defaults to 42.
    """
    torch.manual_seed(seed)              # CPU RNG
    torch.cuda.manual_seed(seed)         # current CUDA device (no-op without CUDA)
    torch.cuda.manual_seed_all(seed)     # all CUDA devices, for multi-GPU runs
    torch.backends.cudnn.deterministic = True   # only deterministic cuDNN kernels
    torch.backends.cudnn.benchmark = False      # disable autotuner (nondeterministic algo choice)
from transformers import TextStreamer

# Stream generated tokens to stdout as they are produced, instead of
# waiting for the full sequence.
streamer = TextStreamer(
    tiny_general_tokenizer,
    skip_special_tokens=True,  # drop special tokens (e.g. EOS) from the printed text
    skip_prompt=True,          # set False to echo the prompt before the generated text
)
# Run greedy (deterministic) generation, streaming tokens via `streamer`.
# NOTE(review): with do_sample=False, `temperature` is ignored by greedy
# decoding (transformers may warn about it) — set do_sample=True to sample.
outputs = tiny_general_model.generate(
**inputs,
streamer=streamer,
use_cache=True,
max_new_tokens=128, # maximum number of newly generated tokens
do_sample=False, # set True if you want random (sampled) output
temperature=0.0, # change this value if you want random output
repetition_penalty=1.1
)