PyTorch (RNN Examples)

짬그브 · April 1, 2025

RNN Ex1

import numpy as np


# Manual RNN forward pass in NumPy: a single sequence of 10 time steps,
# 4 input features, and 8 hidden units.
time_step = 10
input_size = 4
hidden_size = 8

inputs = np.random.random((time_step, input_size))
print(inputs.shape)
hidden_state_t = np.zeros((hidden_size,))   # initial hidden state h_0
print(hidden_state_t.shape)

wx = np.random.random((input_size, hidden_size))    # input-to-hidden weights
wh = np.random.random((hidden_size, hidden_size))   # hidden-to-hidden weights
b = np.random.random((hidden_size,))                # bias
print()
print(wx.shape)
print(wh.shape)
print(b.shape)

total_hidden_state = []

# h_t = tanh(x_t @ Wx + h_{t-1} @ Wh + b), collected at every time step
for input_t in inputs:
    output_t = np.tanh(np.dot(input_t, wx) + np.dot(hidden_state_t, wh) + b)
    total_hidden_state.append(list(output_t))
    hidden_state_t = output_t

print()
total_hidden_state = np.stack(total_hidden_state, axis=0)
print(total_hidden_state.shape)

(10, 4)
(8,)

(4, 8)
(8, 8)
(8,)

(10, 8)
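
For comparison, the built-in nn.RNN produces the same shapes. A minimal sketch of my own, not part of the original example, assuming the same sizes as above:

import torch
import torch.nn as nn

# nn.RNN computes the same recurrence:
# h_t = tanh(x_t W_ih^T + b_ih + h_{t-1} W_hh^T + b_hh)
rnn = nn.RNN(input_size=4, hidden_size=8, batch_first=True)
x = torch.randn(1, 10, 4)    # (batch, time_step, input_size)
outputs, h_n = rnn(x)
print(outputs.shape)         # torch.Size([1, 10, 8]) - hidden state at every step
print(h_n.shape)             # torch.Size([1, 1, 8])  - final hidden state only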

RNN Ex2

import torch
import torch.nn as nn
import numpy as np

string = 'hello pytorch. how long can a rnn cell remember? show me your limit!'
print(len(string))
chars = 'abcdefghijklmnopqrstuvwxyz ?!.,:;01'
char_list = [i for i in chars]
n_letter = len(char_list)
print(n_letter)
print(char_list)

n_hidden = 35
learning_rate = 0.01
total_epochs = 1000

def stringToOnehot(string):
    # Encode a string as a stack of one-hot rows, wrapped in
    # start ('0', index -2) and end ('1', index -1) tokens.
    start = np.zeros(n_letter, dtype=int)
    end = np.zeros(n_letter, dtype=int)
    start[-2] = 1   #[0,0,0,....,0,1,0]
    end[-1] = 1   #[0,0,0,....,0,0,1]

    for i in string:
        idx = char_list.index(i)
        odata = np.zeros(n_letter, dtype=int)
        odata[idx] = 1
        start = np.vstack([start, odata])
    output = np.vstack([start, end])
    return output

print()
print(stringToOnehot('test'))

68
35
['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', ' ', '?', '!', '.', ',', ':', ';', '0', '1']

[[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0]
 [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
 [0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
 [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
 [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]
 [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1]]
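
A quick sanity check of my own (not in the original post): the matrix for 'test' has 6 rows, start token + 4 characters + end token, each 35 wide, and argmax inverts the encoding.

# Sketch: row 0 is the start token ('0', index 33) and the last row is the
# end token ('1', index 34); the rows in between decode back to the string.
encoded = stringToOnehot('test')
print(encoded.shape)    # (6, 35)
print(''.join(char_list[i] for i in encoded.argmax(axis=1)[1:-1]))    # test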

RNN Ex2 (continued)

import torch
import torch.nn as nn
import numpy as np


string = 'hello pytorch. how long can a rnn cell remember? show me your limit!'
print(len(string))
chars = 'abcdefghijklmnopqrstuvwxyz ?!.,:;01'
char_list = [i for i in chars]
n_letter = len(char_list)
print(n_letter)
print(char_list)

n_hidden = 64
learning_rate = 0.001
total_epochs = 10000

def stringToOnehot(string):
    start = np.zeros(n_letter, dtype=int)
    end = np.zeros(n_letter, dtype=int)
    start[-2] = 1   #[0,0,0,....,0,1,0]
    end[-1] = 1   #[0,0,0,....,0,0,1]

    for i in string:
        idx = char_list.index(i)
        odata = np.zeros(n_letter, dtype=int)
        odata[idx] = 1
        start = np.vstack([start, odata])
    output = np.vstack([start, end])
    return output

print()
print(stringToOnehot('test'))

def onehotToChar(onehot_d):
    # Decode a one-hot tensor back to its character via argmax.
    onehot = onehot_d.numpy()
    return char_list[onehot.argmax()]

data = np.zeros(n_letter, dtype=int)   # a one-hot the size of the 35-character alphabet
data[5] = 1
print(onehotToChar(torch.from_numpy(data)))   # 'f'

class RNNet(nn.Module):
    def __init__(self, input_size, hidden_size, output_size):
        super().__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size

        # Classic single-cell RNN: both projections read the concatenated
        # [input, hidden] vector.
        self.i2h = nn.Linear(input_size + hidden_size, hidden_size)
        self.i2o = nn.Linear(input_size + hidden_size, output_size)
        self.ac_fn = nn.Tanh()

    def init_hidden(self):
        return torch.zeros(1, self.hidden_size)

    def forward(self, input, hidden):
        combined = torch.cat((input, hidden), dim=1)
        hidden = self.ac_fn(self.i2h(combined))
        output = self.i2o(combined)   # raw scores over the 35 characters
        return output, hidden

rnn = RNNet(n_letter, n_hidden, n_letter)
loss_func = nn.MSELoss()
optimizer = torch.optim.Adam(rnn.parameters(), lr=learning_rate)

one_hot = torch.from_numpy(stringToOnehot(string)).float()
print()
print(one_hot)

for i in range(total_epochs):
    optimizer.zero_grad()
    hidden = rnn.init_hidden()
    total_loss = 0

    # Feed characters one at a time; each step's target is the next character.
    for j in range(one_hot.size(0) - 1):
        input = one_hot[j:j+1, :]
        target = one_hot[j+1]
        hypothesis, hidden = rnn(input, hidden)
        loss = loss_func(hypothesis.view(-1), target.view(-1))
        total_loss += loss

    total_loss.backward()
    optimizer.step()

    # After every update, generate from the start token to watch progress.
    start = torch.zeros(1, n_letter)
    start[:, -2] = 1
    with torch.no_grad():
        hidden = rnn.init_hidden()
        input = start
        output_string = ''
        for _ in range(len(string)):   # '_' so the epoch counter i is not shadowed
            output, hidden = rnn(input, hidden)
            output_string += onehotToChar(output.data)
            input = output   # feed the raw output back in as the next input
    print(output_string)
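
One note from me, not the original post: MSELoss over one-hot vectors does train, but next-character prediction is usually framed as a 35-way classification. A minimal sketch of that swap, reusing rnn and one_hot from above:

# Sketch (assumption): CrossEntropyLoss takes raw logits plus a class index,
# so the one-hot target is collapsed with argmax.
ce_loss = nn.CrossEntropyLoss()

hidden = rnn.init_hidden()
total_loss = 0
for j in range(one_hot.size(0) - 1):
    input = one_hot[j:j+1, :]
    target_idx = one_hot[j+1].argmax().unsqueeze(0)   # shape (1,), dtype long
    hypothesis, hidden = rnn(input, hidden)
    total_loss += ce_loss(hypothesis, target_idx)     # logits (1, 35) vs index (1,)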



RNN Ex3

import torch
import torch.optim as optim
import numpy as np

sentence = (" if you want to build a ship, don't drum up people together to "
            " collect wood and don't assign them tasks and work, but rather "
            " teach them to long for the endless immensity of the sea. ")

print(sentence)
char_set = list(set(sentence))
print(char_set)
char_dic = {c: i for i, c in enumerate(char_set)}
print(char_dic)

dic_size = len(char_dic)

hidden_size = dic_size
sequence_length = 10
learning_rate = 0.1

x_data = []
y_data = []

for i in range(0, len(sentence) - sequence_length):
    x_str = sentence[i : i + sequence_length]
    y_str = sentence[i + 1 : i + sequence_length + 1]
    print(i, x_str, '->', y_str)

    x_data.append([char_dic[c] for c in x_str])
    y_data.append([char_dic[c] for c in y_str])

print(x_data[0])
print(y_data[0])

print(np.eye(10)[1])   # a row of the identity matrix is a one-hot vector
x_one_hot = [np.eye(dic_size)[x] for x in x_data]
print(x_one_hot)
x = torch.FloatTensor(x_one_hot)
y = torch.LongTensor(y_data)
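
As an aside (my sketch, not the original): the np.eye trick can also be done natively in PyTorch with F.one_hot, which yields the same (num_windows, sequence_length, dic_size) tensor:

import torch.nn.functional as F

# Sketch: one-hot encode the index windows directly on the tensor side.
x_alt = F.one_hot(torch.LongTensor(x_data), num_classes=dic_size).float()
print(x_alt.shape)    # torch.Size([num_windows, 10, dic_size])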

import torch.nn as nn

class RNNet(nn.Module):
    def __init__(self, input_size, hidden_size, layers):
        super().__init__()
        self.rnn = nn.RNN(input_size, hidden_size, num_layers=layers, batch_first=True)
        # hidden_size equals dic_size here, so this projects back onto the alphabet.
        self.fc = nn.Linear(hidden_size, hidden_size)

    def forward(self, x):
        output, _hidden_state = self.rnn(x)   # hidden state is not reused across batches
        y = self.fc(output)
        return y

model = RNNet(dic_size, hidden_size, layers=2)
loss_func = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=learning_rate)

outputs = model(x)
print(outputs.shape)
print(outputs.view(-1, dic_size).shape)

print(y.shape)
print(y.view(-1).shape)
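# Why the reshape (my note, not the original): CrossEntropyLoss scores
# (N, C) logits against (N,) class indices, so the (num_windows, 10, dic_size)
# output is flattened to (num_windows * 10, dic_size) and y to (num_windows * 10,).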

for epoch in range(1000):
    optimizer.zero_grad()
    hypothesis = model(x)
    loss = loss_func(hypothesis.view(-1, dic_size), y.view(-1))
    loss.backward()
    optimizer.step()


    # Stitch the prediction string together: the first window contributes all
    # of its characters; every later (overlapping) window adds only its last one.
    predictions = hypothesis.argmax(dim=2)
    predict_str = ''
    for j, result in enumerate(predictions):
        if j == 0:
            predict_str += ''.join([char_set[t] for t in result])
        else:
            predict_str += char_set[result[-1]]
        print(predict_str)



tm you wan
tm you want
tm you want 
tm you want t
tm you want to
tm you want to 
tm you want to b
tm you want to bu
tm you want to bui
tm you want to buil
tm you want to build
tm you want to build 
tm you want to build a
tm you want to build a 
tm you want to build a s
tm you want to build a sh
tm you want to build a shi
tm you want to build a ship
tm you want to build a ship,

...

tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks a
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks an
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and w
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and wo
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and wor
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work,
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, b
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, bu
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but r
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but ra
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rat
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rath
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rathe
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  t
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  te
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  tea
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teac
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach t
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach th
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach the
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them t
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to l
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to lo
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to lon
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long f
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long fo
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for t
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for th
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the e
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the en
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the end
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endl
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endle
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endles
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless i
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless im
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless imm
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless imme
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immen
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immens
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensi
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensit
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity o
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of t
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of th
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of the
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of the 
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of the s
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of the se
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of the sea
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of the sea.
tm you want to build a ship, don't drum up people together to  collect wood and don't assign them tasks and work, but rather  teach them to long for the endless immensity of the sea. 
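
Once training converges, the model can also be rolled forward greedily instead of only scoring the training windows. A sketch of my own, assuming the trained model, x, char_set, and sequence_length from above:

# Sketch: start from the first one-hot window and repeatedly append the
# argmax character, sliding the window by one each step.
with torch.no_grad():
    window = x[0:1]                            # shape (1, 10, dic_size)
    generated = sentence[:sequence_length]
    for _ in range(len(sentence) - sequence_length):
        out = model(window)                    # (1, 10, dic_size)
        next_idx = out[0, -1].argmax().item()  # most likely next character
        generated += char_set[next_idx]
        next_onehot = torch.zeros(1, 1, dic_size)
        next_onehot[0, 0, next_idx] = 1
        window = torch.cat([window[:, 1:], next_onehot], dim=1)
    print(generated)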

RNN Ex4

import torch
import torch.nn as nn
import unidecode
import string
import random

total_epochs = 2000
chunk_len = 200

hidden_size = 100
batch_size = 1
num_layer = 1
embedding = 70
learning_rate = 0.002

# print(string.printable)
all_characters = string.printable
n_characters = len(all_characters)
print(n_characters)

file = unidecode.unidecode(open('input.txt').read())
#print(file)
file_len = len(file)
print(file_len)

def random_chunk():
    # Grab chunk_len + 1 characters so input (chunk[:-1]) and target
    # (chunk[1:]) can be offset by one character.
    start_index = random.randint(0, file_len - chunk_len)
    end_index = start_index + chunk_len + 1
    return file[start_index : end_index]

def char_tensor(s):   # 's' avoids shadowing the imported string module
    tensor = torch.zeros(len(s)).long()
    for c in range(len(s)):
        tensor[c] = all_characters.index(s[c])
    return tensor

print(char_tensor('good'))
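# string.printable places the digits at indices 0-9 and the lowercase letters
# from index 10, so 'good' maps to tensor([16, 24, 24, 13]).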

class RNNet(nn.Module):
    def __init__(self, input_size, embedding_size, hidden_size, output_size, num_layers = 1):
        super().__init__()
        self.input_size = input_size
        self.embedding_size = embedding_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.num_layers = num_layers

        self.encoder = nn.Embedding(self.input_size, self.embedding_size)
        self.rnn = nn.LSTM(self.embedding_size, self.hidden_size, self.num_layers)
        self.fc = nn.Linear(self.hidden_size, self.output_size)

    def init_hidden(self):
        # An LSTM carries two states: hidden and cell, each (num_layers, batch, hidden).
        hidden = torch.zeros(self.num_layers, batch_size, self.hidden_size)
        cell = torch.zeros(self.num_layers, batch_size, self.hidden_size)
        return hidden, cell

    def forward(self, input, hidden, cell):
        x = self.encoder(input.view(1, -1))   # (seq_len=1, batch=1) -> embedding
        out, (hidden, cell) = self.rnn(x, (hidden, cell))
        y = self.fc(out.view(batch_size, -1))
        return y, hidden, cell

model = RNNet(input_size = n_characters,
              embedding_size = embedding,
              hidden_size= hidden_size,
              output_size= n_characters,
              num_layers = num_layer)

loss_func = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate)

def random_train_set():
    chunk = random_chunk()
    input = char_tensor(chunk[:-1])
    target = char_tensor(chunk[1:])
    return input, target

def test():
    start_str = 'a'
    input = char_tensor(start_str)
    hidden, cell = model.init_hidden()

    for i in range(200):
        output, hidden, cell = model(input, hidden, cell)
        # Temperature sampling: divide logits by T=0.8, exponentiate, and draw
        # from the resulting (unnormalized) distribution.
        output_dist = output.data.view(-1).div(0.8).exp()
        top_i = torch.multinomial(output_dist, 1)[0]
        predicted_char = all_characters[top_i]
        print(predicted_char, end='')
        input = char_tensor(predicted_char)

for epoch in range(total_epochs):
    input, label = random_train_set()
    hidden, cell = model.init_hidden()
    loss = torch.zeros(1)   # accumulate per-character losses over the chunk

    optimizer.zero_grad()
    for j in range(chunk_len - 1):
        x_train = input[j]
        y_train = label[j].unsqueeze(dim=0).type(torch.LongTensor)
        hypothesis, hidden, cell = model(x_train, hidden, cell)
        loss += loss_func(hypothesis, y_train)

    loss.backward()
    optimizer.step()

    if epoch % 50 == 0:
        print('='*200)
        print('loss:{:.4f}'.format(loss.item() / chunk_len), end='\n\n')
        test()
        print('\n','='*200, end='\n\n')
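
The 0.8 inside test() is a sampling temperature. A hedged variant of my own that exposes it as a parameter (lower values sample more greedily, higher values more randomly):

# Sketch (assumption): same model as above, with temperature as an argument.
def sample(length=200, temperature=0.8, start_char='a'):
    input = char_tensor(start_char)
    hidden, cell = model.init_hidden()
    result = ''
    with torch.no_grad():
        for _ in range(length):
            output, hidden, cell = model(input, hidden, cell)
            dist = output.view(-1).div(temperature).exp()   # softmax numerator
            top_i = torch.multinomial(dist, 1)[0]           # multinomial normalizes
            result += all_characters[top_i]
            input = char_tensor(result[-1])
    return result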

100
1115390
tensor([16, 24, 24, 13])
========================================================================================================================================================================================================
loss:4.5906

{!f%Y1|>	-23|eB&,gZg8qY)s0<VewG<oHNc,p/h!'r):9P|37fW6!a	V[8i=ASx #ue>kfR,S<v[_"P7x='2-M a
MUh5&E.(>;K9u:EnU(y8kYcr!N&dx*=MS/v6rx(yK2-v8`wAyV6f
 ========================================================================================================================================================================================================

========================================================================================================================================================================================================
loss:3.0332

To lowoghans oi,th
 obeno tht l wuarele Iw tthedath anhee noad
 ar tdo scs h uuac basind
Le, 3ou sewb.eeRand teit sedreol  thote theO afle t erer min be,e,y osl tse wioe cChedd tilpaTce thenu bW ewas 
 ========================================================================================================================================================================================================

========================================================================================================================================================================================================
loss:2.5604

tty y he hiso, wh fonO
:

ild wa,h de  an eand hes herat hisigees whe anonn wfois conge,
en aune wothent whcrpy ofs reag of woI mue mo, wifs lin't toIe wu ofnd lans louur:
Ios perms row ce hod an doce
 ========================================================================================================================================================================================================

========================================================================================================================================================================================================



...


lfor: Crigan that sens the the mardod of time.
'Tattiner ouse thee sin, how you not is benent.

BeENTIUS:
If briten:
No the me
Moles of he to most so secent,
Whur that and that blice be with he the an
 ========================================================================================================================================================================================================

========================================================================================================================================================================================================
loss:1.8853

ll I they halllow comsending yould the the shant nose to the firtest Iind
I look the shew, I have blens
Bow the preat hast with upust to rould furtanped,
That the sticksury to hisser hake homeen, frog
 ========================================================================================================================================================================================================

========================================================================================================================================================================================================
loss:1.8558

lk of mally in the partild----

FROMIO:
AP you you wake on to fod onter with morroce thest live aut, that condiblanber sonity me.

Forrse:
He to nour a roke bet you bue the deveracone fidaited;
Ony co
 ========================================================================================================================================================================================================

========================================================================================================================================================================================================
loss:1.7767

rves folirt, as dending! the in stong,
And be head end prither, chall gort.

FRETIO:
My wimperss, are your to forly fomen shis see?

RiRISAND:
Thwo me the vair are to eenouning to this well.

CLOUTIO:
 ========================================================================================================================================================================================================