🙃 Crawling the Sympathy-Ranking News 🙃

parkeu · September 8, 2022

ABC Bootcamp


๐Ÿผ ์ค€๋น„

# Korean font patch
import matplotlib as mpl
import matplotlib.pyplot as plt

%config InlineBackend.figure_format = 'retina'

!apt -qq -y install fonts-nanum

import matplotlib.font_manager as fm
fontpath = '/usr/share/fonts/truetype/nanum/NanumBarunGothic.ttf'
font = fm.FontProperties(fname=fontpath, size=9)
plt.rc('font', family='NanumBarunGothic') 
mpl.font_manager._rebuild()
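
One note from me: mpl.font_manager._rebuild() is a private helper and no longer exists in newer Matplotlib releases, so if that call raises an AttributeError, registering the font file directly does the same job (a sketch using the same NanumBarunGothic path as above):

import matplotlib.font_manager as fm
fm.fontManager.addfont('/usr/share/fonts/truetype/nanum/NanumBarunGothic.ttf')  # register the ttf explicitly
plt.rc('font', family='NanumBarunGothic')                                        # then select it as before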

# Selenium
!pip install selenium
!apt-get update
!apt install chromium-chromedriver  # lets Selenium simulate mouse and keyboard input
!cp /usr/lib/chromium-browser/chromedriver /usr/bin

# ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ์ž„ํฌํŠธ ๋ฐ ํ•จ์ˆ˜ ์ •์˜
from selenium import webdriver
from bs4 import BeautifulSoup
import time
from pytz import timezone
import datetime

import pandas as pd

import warnings
warnings.filterwarnings('ignore')

import matplotlib.pyplot as plt
from wordcloud import WordCloud, STOPWORDS, ImageColorGenerator
import re

def clean_text(inputString):
  text_rmv = re.sub('[-=+,#/\?:^.@*\"※~ㆍ!』‘|\(\)\[\]`\'…》\”\“\’·]', ' ', inputString)
  return text_rmv
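
For example, on a made-up headline (the string below is my own, not from the post):

# hypothetical headline, just to show what the regex strips out
sample = '[단독] 배우 A·가수 B, 깜짝 결혼 발표…"올가을"'
print(clean_text(sample))   # brackets, ·, …, quotes and commas all become spaces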

👄 Crawling the sympathy-ranking news

💋 How??

  1. Use https://entertain.naver.com/ranking/sympathy
  2. Crawl '순위' (rank), '공감종류' (sympathy type), '기사제목' (title), '기사링크' (link), '기사내용' (summary), '공감수' (sympathy count), '수집일자' (collection date)
  3. Ask the user which kind of news they want to see (love, cheer, ...)
  4. Visualize the chosen news as a word cloud

📎 The code I tried on my own (very convoluted and not exactly good code!)

# Crawl without popping up a Chrome browser window (headless)
options = webdriver.ChromeOptions()  
options.add_argument('--headless')
options.add_argument('--no-sandbox') 
options.add_argument('--disable-dev-shm-usage')
driver = webdriver.Chrome('chromedriver', options=options)
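# Side note (mine, not from the post): Selenium 4 deprecated and later removed the
# positional driver path used above, so if webdriver.Chrome('chromedriver', ...) fails
# on a newer Selenium, the Service-object form is the usual replacement, roughly:
# from selenium.webdriver.chrome.service import Service
# driver = webdriver.Chrome(service=Service('/usr/bin/chromedriver'), options=options)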

# Create the result DataFrame
data = pd.DataFrame(columns = ['순위', '공감종류', '기사제목', '기사링크', '기사내용', '공감수', '수집일자'])

# Ask the user which sympathy type they want to see
# They might type it in uppercase, so normalize with lower()
want = input('Sympathy ranking to view (love, cheer, congrats, expect, surprise, sad) : ').lower()

👀 Up to this point it was fine, but in the next step I had no idea how to pull out the sympathy type and the sympathy count. ㅜ^ㅜ

👀 The trick is that whenever the Naver sympathy type changes, the URL changes by a fixed rule, and that rule has to be exploited.


if want == 'love':
  driver.get("https://entertain.naver.com/ranking/sympathy")
  num = 0
else:
  driver.get("https://entertain.naver.com/ranking/sympathy/" + want)
  if want == 'cheer': num = 1
  elif want == 'congrats' : num = 2
  elif want == 'expect' : num = 3
  elif want == 'surprise' : num = 4
  else: num = 5

👀 I split the code up like this back when I didn't know that entering anything unrecognized just gives you the 좋아요 (love) ranking anyway. I knew that stacking up nested if statements like this isn't good code.. and the professor said so too, but when I first wrote it I genuinely couldn't think of another way, so for the time being ..........................
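
In hindsight the nested branches can be flattened into a lookup table. A minimal sketch of my own (not from the lecture), assuming the tab order on the page matches the num values above (love, cheer, congrats, expect, surprise, sad):

# map each sympathy type to its nav-tab position instead of nested ifs
tab_order = ['love', 'cheer', 'congrats', 'expect', 'surprise', 'sad']
if want not in tab_order:
  want = 'love'                                    # fall back to the default tab
num = tab_order.index(want)
suffix = '' if want == 'love' else '/' + want
driver.get('https://entertain.naver.com/ranking/sympathy' + suffix)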


driver.implicitly_wait(3)
time.sleep(1.5)

driver.execute_script('window.scrollTo(0, 800)')
time.sleep(3)

html_source = driver.page_source
soup = BeautifulSoup(html_source, 'html.parser')

li = soup.select('ul.news_lst.news_lst3.count_info > li') 
clk = soup.select('ul.likeitnews_nav_list > li > a > div.likeitnews_nav_item_name')[num]

for index_l in range(0, len(li)):
  try:
    # rank
    rank = li[index_l].find('em', {'class': 'blind'}).text.replace('\n', '').replace('\t', '').strip()
    # sympathy type
    thumb = clk.text.replace('\n', '').replace('\t', '').strip()
    # article title
    title = li[index_l].find('a', {'class': 'tit'}).text.replace('\n', '').replace('\t', '').strip()
    # article summary
    summary = li[index_l].find('p', {'class': 'summary'}).text.replace('\n', '').replace('\t', '').strip()
    # sympathy count
    like = soup.select('div.tit_area > a')[2*index_l+1].text.replace('\n', '').replace('\t', '').strip()
    like = like[3:]
    # article link
    link = li[index_l].find('a').attrs['href']

    data = data.append({'순위' : rank,
                        '기사제목' : title,
                        '공감종류' : thumb,
                        '기사링크' : 'https://entertain.naver.com'+link,
                        '기사내용' : summary,
                        '공감수' : like,
                        '수집일자' : datetime.datetime.now(timezone('Asia/Seoul')).strftime('%Y-%m-%d %H:%M:%S')}, 
                       ignore_index=True)
    print('completed ' + rank + ' : ' + title)

  except:
    pass

print('---------------------------------------------------')
print(data)


# Save
data.to_csv('one.csv', encoding='utf-8-sig')
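
One caveat from me: DataFrame.append was deprecated and then removed in pandas 2.0, so on a recent pandas the loop above will fail. A rough sketch of the usual replacement, collecting plain dicts and building the frame once (same columns as above):

rows = []
# ... inside the for-loop, instead of data = data.append(...):
rows.append({'순위' : rank, '공감종류' : thumb, '기사제목' : title,
             '기사링크' : 'https://entertain.naver.com' + link,
             '기사내용' : summary, '공감수' : like,
             '수집일자' : datetime.datetime.now(timezone('Asia/Seoul')).strftime('%Y-%m-%d %H:%M:%S')})
# ... after the loop, one construction call:
data = pd.DataFrame(rows, columns=['순위', '공감종류', '기사제목', '기사링크', '기사내용', '공감수', '수집일자'])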

👀 The sympathy type and the sympathy count here cost me four hours .......... . ..

Explaining the sympathy type

⬆️ Extract the label text from this box

# From the ul tag whose class is likeitnews_nav_list, walk down its children until you reach the div tag whose class is likeitnews_nav_item_name
# Pull out the label text using the num value assigned earlier from the sympathy type the user entered
# Honestly, even I think this code was squeezed out by brute force . . ..
clk = soup.select('ul.likeitnews_nav_list > li > a > div.likeitnews_nav_item_name')[num]
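
To double-check that num points at the intended tab, printing every label in that nav bar is a quick sanity check (my own addition, using the same selector):

labels = [d.get_text(strip=True)
          for d in soup.select('ul.likeitnews_nav_list > li > a > div.likeitnews_nav_item_name')]
print(labels)        # clk.text is simply labels[num]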

๊ณต๊ฐ์ˆ˜ ์„ค๋ช…

⬆️ Extract this sympathy-count text
👀 I needed to grab the 4929 from there, but it lives inside an a tag rather than a span tag, and I had no idea how to get at it.

# Reach the a tags that are children of the div with class name tit_area~~
like = soup.select('div.tit_area > a')

👀 After banging on it for a while I finally found a way........~~ I knew it was ugly, but it was the best I had.
⬆️ These are the articles
I found out that if you put 2*rank(0 to 29)+1 into the square brackets after soup.select, you get back the string containing that article's sympathy count ㅋㅋㅋㅋ ㅜㅜ Totally forced, right..? I know.. ^^ At the time it was the best I could do ^~^!! That's how the code below came about.

like = soup.select('div.tit_area > a')[2*index_l+1].text.replace('\n', '').replace('\t', '').strip()
# slicing off the leading '공감수' label
like = like[3:] 
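
The 2*index_l+1 trick works because each article's div.tit_area holds two a tags (the title link first, then the sympathy-count link), so the flat select() result alternates title, count, title, count. Under that same assumption, a slightly tidier sketch that grabs only the counts:

like_tags = soup.select('div.tit_area > a')[1::2]                  # every second anchor is a count link
likes = [re.sub(r'[^0-9]', '', a.get_text()) for a in like_tags]   # keep only the digits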

👁️ Word cloud visualization

data.to_csv('one.csv', encoding='utf-8-sig')

df = pd.read_csv('/content/one.csv', index_col = 0)
df = df.astype({'기사제목' : 'string'})
df['기사제목'].replace('[^\w]', ' ', regex = True, inplace = True) 

text = ' '.join(clean_text(li) for li in df.기사제목.astype(str))  

plt.subplots(figsize = (25, 15))
wordcloud = WordCloud(background_color = 'black', width = 1000, height = 700, font_path = fontpath).generate(text)
plt.axis('off')
plt.imshow(wordcloud, interpolation = 'bilinear')
plt.show()
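
If you also want to keep the image on disk, the WordCloud object can write itself to a file; a small optional addition of mine (the filename is just an example):

wordcloud.to_file('sympathy_wordcloud.png')  # hypothetical output filename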

👀 surprise (놀랐어요), crawled on 2022-09-08 at 23:02


🛎️ The professor's code

data = pd.DataFrame(columns = ['순위', '공감종류', '기사제목', '기사링크', '기사내용', '공감수', '수집일자'])

options = webdriver.ChromeOptions()
options.add_argument('--headless') 
options.add_argument('--no-sandbox')
options.add_argument('--disable-dev-shm-usage')
driver = webdriver.Chrome('chromedriver', options=options)

# First, store the sympathy-type URL suffixes in a list
url_list = ['', '/cheer', '/congrats', '/expect','/surprise','/sad']

for n in range(0, len(url_list)):
  url = 'https://entertain.naver.com/ranking/sympathy'
  url += url_list[n] # attach each sympathy-type suffix to the base url
  sympathy = 'love'

  # extract the sympathy type
  if url_list[n] !='': 
    sympathy = url_list[n].replace('/','') # drop the / from the list entry so only the English word remains
  print('Collecting .. ' + url)

  driver.get(url)
  driver.implicitly_wait(3)

  time.sleep(1.5)

  driver.execute_script('window.scrollTo(0, 800)')
  time.sleep(3)

  html_source = driver.page_source
  soup = BeautifulSoup(html_source, 'html.parser')

  li = soup.select('li._inc_news_lst3_rank_reply')
    
  for n in range(0, len(li)):
    try:
      # rank
      rank = li[n].find('em', {'class': 'blind'}).text.replace('\n', '').replace('\t', '').strip()
      # article title
      title = li[n].find('a', {'class': 'tit'}).text.replace('\n', '').replace('\t', '').strip()
      # article summary
      summary = li[n].find('p', {'class': 'summary'}).text.replace('\n', '').replace('\t', '').strip()
      # news link
      link = li[n].find('a').attrs['href']
      # sympathy count
      # doing only the step below leaves the '공감수' label attached, e.g. '공감수7'
      temp_cnt = li[n].find('a', {'class': 'likeitnews_item_likeit'}).text.replace('\n','').replace('\t', '').strip()
      # so run the step below to strip out every non-digit character
      cnt = re.sub(r'[^0-9]','',temp_cnt)

      data = data.append({'순위' : rank,
                      '기사제목' : title,
                      '공감종류' : sympathy,
                      '기사링크' : 'https://entertain.naver.com'+link,
                      '기사내용' : summary,
                      '공감수' : cnt,
                      '수집일자' : datetime.datetime.now(timezone('Asia/Seoul')).strftime('%Y-%m-%d %H:%M:%S')}, 
                      ignore_index=True)
      print('completed ' + rank + ' : ' + title)
    except:
      pass
print('-------------------')

input_sympathy = input('Enter a sympathy type : ')
# word cloud
# visualize only the rows whose 공감종류 matches the type the user entered
text = ''.join(li for li in data[data.공감종류 == input_sympathy].기사제목.astype(str))
plt.subplots(figsize=(25,15))
wordcloud = WordCloud(background_color='black', width=1000, height=700, font_path=fontpath).generate(text)
plt.axis('off')
plt.imshow(wordcloud, interpolation='bilinear')
plt.show()

👀 Code I could never have come up with myself; fascinating and new to me .. haha.

re.sub

  • Used in the professor's code to strip unwanted characters (a small runnable example follows below)
  • re.sub('pattern to find', 'what to replace each match with', 'original string')
    [^0-9] matches any character that is not a digit
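
A tiny runnable example of that exact call (the 4929 is the count from the screenshot above):

import re

temp_cnt = '공감수4929'                  # what the crawler pulls out of the a tag
cnt = re.sub(r'[^0-9]', '', temp_cnt)    # drop everything that is not a digit
print(cnt)                               # -> 4929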

🚩 Reflections 🚩

When I wrote my own version first, I already knew it was written badly enough that it would stop working if the page layout changed even slightly. But at the time I was just happy it worked at all ^!^!! Even if it was a hack, it succeeded, so I was proud of it; it was still a good attempt haha.. Looking at the professor's code made me realize how much more I need to study. At first the professor's code was hard to follow, but after writing it up on this blog I understand it completely 🤪. The more you know and understand, the more angles you can attack a problem from.. 😇 I did a solid write-up today; tomorrow I just need to write up the YouTube comment crawling ~
