# Naver Search API example - blog search
import os
import sys
import urllib.request
import urllib.parse  # explicit: quote() lives in urllib.parse, not urllib.request

# NOTE(security): credentials are hardcoded; prefer reading them from
# environment variables (os.environ) so they are not committed to source.
client_id = "1K8__p6vfg4HaGE6FmLj"
client_secret = "ktW9S7cSqB"

encText = urllib.parse.quote("크라임씬 리뷰")
url = "https://openapi.naver.com/v1/search/blog?query=" + encText  # JSON result
# url = "https://openapi.naver.com/v1/search/blog.xml?query=" + encText  # XML result

request = urllib.request.Request(url)
request.add_header("X-Naver-Client-Id", client_id)
request.add_header("X-Naver-Client-Secret", client_secret)
response = urllib.request.urlopen(request)
rescode = response.getcode()
if rescode == 200:
    response_body = response.read()
    print(response_body.decode('utf-8'))
    # When reading the body as text, decode it as UTF-8.
    # (Moved inside the success branch: response_body is only bound on 200,
    # so the original top-level print raised NameError on any error response.)
    print(response_body.decode("utf-8"))
else:
    # BUG FIX: rescode is an int; "str" + int raised TypeError in the original.
    print("Error Code:" + str(rescode))
# Naver Search API example - book search
import os
import sys
import urllib.request
import urllib.parse  # explicit: quote() lives in urllib.parse, not urllib.request

# NOTE(security): hardcoded credentials; prefer environment variables.
client_id = "1K8__p6vfg4HaGE6FmLj"
client_secret = "ktW9S7cSqB"

encText = urllib.parse.quote("코난")
url = "https://openapi.naver.com/v1/search/book?query=" + encText  # JSON result
# url = "https://openapi.naver.com/v1/search/blog.xml?query=" + encText  # XML result

request = urllib.request.Request(url)
request.add_header("X-Naver-Client-Id", client_id)
request.add_header("X-Naver-Client-Secret", client_secret)
response = urllib.request.urlopen(request)
rescode = response.getcode()
if rescode == 200:
    response_body = response.read()
    print(response_body.decode('utf-8'))
else:
    # BUG FIX: rescode is an int; "str" + int raised TypeError in the original.
    print("Error Code:" + str(rescode))
# Naver Search API example - cafe article search
import os
import sys
import urllib.request
import urllib.parse  # explicit: quote() lives in urllib.parse, not urllib.request

# NOTE(security): hardcoded credentials; prefer environment variables.
client_id = "1K8__p6vfg4HaGE6FmLj"
client_secret = "ktW9S7cSqB"

encText = urllib.parse.quote("플레이브")
url = "https://openapi.naver.com/v1/search/cafearticle?query=" + encText  # JSON result
# url = "https://openapi.naver.com/v1/search/blog.xml?query=" + encText  # XML result

request = urllib.request.Request(url)
request.add_header("X-Naver-Client-Id", client_id)
request.add_header("X-Naver-Client-Secret", client_secret)
response = urllib.request.urlopen(request)
rescode = response.getcode()
if rescode == 200:
    response_body = response.read()
    print(response_body.decode('utf-8'))
else:
    # BUG FIX: rescode is an int; "str" + int raised TypeError in the original.
    print("Error Code:" + str(rescode))
# Naver Search API example - shopping search
import os
import sys
import urllib.request
import urllib.parse  # explicit: quote() lives in urllib.parse, not urllib.request

# NOTE(security): hardcoded credentials; prefer environment variables.
client_id = "1K8__p6vfg4HaGE6FmLj"
client_secret = "ktW9S7cSqB"

encText = urllib.parse.quote("코난")
url = "https://openapi.naver.com/v1/search/shop?query=" + encText  # JSON result
# url = "https://openapi.naver.com/v1/search/blog.xml?query=" + encText  # XML result

request = urllib.request.Request(url)
request.add_header("X-Naver-Client-Id", client_id)
request.add_header("X-Naver-Client-Secret", client_secret)
response = urllib.request.urlopen(request)
rescode = response.getcode()
if rescode == 200:
    response_body = response.read()
    print(response_body.decode('utf-8'))
else:
    # BUG FIX: rescode is an int; "str" + int raised TypeError in the original.
    print("Error Code:" + str(rescode))
# Naver Search API example - encyclopedia search
import os
import sys
import urllib.request
import urllib.parse  # explicit: quote() lives in urllib.parse, not urllib.request

# NOTE(security): hardcoded credentials; prefer environment variables.
client_id = "1K8__p6vfg4HaGE6FmLj"
client_secret = "ktW9S7cSqB"

encText = urllib.parse.quote("크라임씬 리뷰")
url = "https://openapi.naver.com/v1/search/encyc?query=" + encText  # JSON result
# url = "https://openapi.naver.com/v1/search/blog.xml?query=" + encText  # XML result

request = urllib.request.Request(url)
request.add_header("X-Naver-Client-Id", client_id)
request.add_header("X-Naver-Client-Secret", client_secret)
response = urllib.request.urlopen(request)
rescode = response.getcode()
if rescode == 200:
    response_body = response.read()
    print(response_body.decode('utf-8'))
else:
    # BUG FIX: rescode is an int; "str" + int raised TypeError in the original.
    print("Error Code:" + str(rescode))
# Naver Search API example - shopping search ("몰스킨" / Moleskine)
import os
import sys
import urllib.request
import urllib.parse  # explicit: quote() lives in urllib.parse, not urllib.request

# NOTE(security): hardcoded credentials; prefer environment variables.
client_id = "1K8__p6vfg4HaGE6FmLj"
client_secret = "ktW9S7cSqB"

encText = urllib.parse.quote("몰스킨")
url = "https://openapi.naver.com/v1/search/shop?query=" + encText  # JSON result
# url = "https://openapi.naver.com/v1/search/blog.xml?query=" + encText  # XML result

request = urllib.request.Request(url)
request.add_header("X-Naver-Client-Id", client_id)
request.add_header("X-Naver-Client-Secret", client_secret)
response = urllib.request.urlopen(request)
rescode = response.getcode()
if rescode == 200:
    response_body = response.read()
    print(response_body.decode('utf-8'))
else:
    # BUG FIX: rescode is an int; "str" + int raised TypeError in the original.
    print("Error Code:" + str(rescode))
(1) gen_search_url (generate the request URL)
-encText = urllib.parse.quote("몰스킨")
-url = "https://openapi.naver.com/v1/search/shop?query=" + encText
def gen_search_url(api_node, search_text, start_num, disp_num):
    """Build a Naver Search API request URL.

    api_node: API endpoint node, e.g. "shop", "blog".
    search_text: raw query string (percent-encoded here).
    start_num: 1-based offset of the first result to fetch.
    disp_num: number of results to return on the page.
    """
    encoded_query = urllib.parse.quote(search_text)
    return (
        "https://openapi.naver.com/v1/search/"
        f"{api_node}.json"
        f"?query={encoded_query}&start={start_num}&display={disp_num}"
    )
gen_search_url("shop", "TEST", 10, 3)
-> 'https://openapi.naver.com/v1/search/shop.json?query=TEST&start=10&display=3'
(2) get_result_onpage (Get data on one page)
import json
import datetime
def get_result_onpage(url):
    """Fetch one page of Naver Search API results and return the parsed JSON.

    Authenticates with the module-level ``client_id`` / ``client_secret``
    globals; logs a timestamped success line after the request completes.
    Raises urllib.error.HTTPError / URLError on request failure.
    """
    request = urllib.request.Request(url)
    request.add_header("X-Naver-Client-Id", client_id)
    request.add_header("X-Naver-Client-Secret", client_secret)
    # BUG FIX: the original never closed the HTTP response; the context
    # manager releases the connection deterministically.
    with urllib.request.urlopen(request) as response:
        body = response.read()
    print("[%s] Url Request Success" % datetime.datetime.now())
    return json.loads(body.decode("utf-8"))
datetime.datetime.now()
-> datetime.datetime(2023, 4, 6, 0, 27, 1, 552115)
url = gen_search_url("shop", "몰스킨", 1, 5)
one_result = get_result_onpage(url)
-> [2023-04-06 00:27:56.403297] Url Request Success
one_result["items"][0]["title"]
-> '몰스킨 노트 가죽 하드커버 감성 고급 업무용 이쁜 심플'
one_result["items"][0]["link"]
-> 'https://search.shopping.naver.com/gate.nhn?id=82526953942'
one_result["items"][0]["lprice"]
-> '28800'
one_result["items"][0]["mallName"]
-> '베스트펜'
(3) get_fields (convert the JSON result to a pandas DataFrame)
one_result["items"][0]
import pandas as pd
def get_fields(json_data):
    """Flatten the ``items`` of a Naver shop-search response into a DataFrame.

    Keeps title, link, lprice and mallName (renamed to "mall"); titles are
    passed through verbatim, including any <b>…</b> highlight markup.
    """
    rows = {"title": [], "link": [], "lprice": [], "mall": []}
    for item in json_data["items"]:
        rows["title"].append(item["title"])
        rows["link"].append(item["link"])
        rows["lprice"].append(item["lprice"])
        rows["mall"].append(item["mallName"])
    # Column order fixed for the downstream Excel export.
    return pd.DataFrame(rows, columns=["title", "lprice", "link", "mall"])
get_fields(one_result)
(4) delete_tag (strip the &lt;b&gt; highlight tags from result titles)
def delete_tag(input_str):
    """Remove Naver's <b>…</b> search-highlight markers from a string."""
    for marker in ("<b>", "</b>"):
        input_str = input_str.replace(marker, "")
    return input_str
import pandas as pd
def get_fields(json_data):
title = [delete_tag(each["title"]) for each in json_data["items"]]
link = [each["link"] for each in json_data["items"]]
lprice = [each["lprice"] for each in json_data["items"]]
mall_name = [each["mallName"] for each in json_data["items"]]
result_pd = pd.DataFrame({
"title": title,
"link": link,
"lprice": lprice,
"mall": mall_name
}, columns=["title", "lprice", "link", "mall"])
return result_pd
get_fields(one_result)
url = gen_search_url("shop", "몰스킨", 1, 5)
json_result = get_result_onpage(url)
pd_result = get_fields(json_result)
(5) actMain (gather all pages of data)
# Demo of the pagination offsets used below: start = 1, 101, ..., 901.
for n in range(1, 1000, 100):
    print(n)
-> 1
101
201
301
401
501
601
701
801
901
# Gather 10 pages x 100 results of "몰스킨" shop listings into one DataFrame.
result_mol = []
for start in range(1, 1000, 100):
    page_url = gen_search_url("shop", "몰스킨", start, 100)
    page_json = get_result_onpage(page_url)
    result_mol.append(get_fields(page_json))

result_mol = pd.concat(result_mol)
# Re-number rows 0..N-1, discarding the per-page indices.
result_mol.reset_index(drop=True, inplace=True)
result_mol.info()

# Convert the price column from string to float for numeric analysis.
result_mol["lprice"] = result_mol["lprice"].astype("float")
result_mol.info()
(6) toExcel (Export to Excel)
!pip install xlsxwriter
# Export the gathered results to a column-formatted Excel file.
writer = pd.ExcelWriter("../data/06_molskin_diary_in_naver_shop.xlsx", engine="xlsxwriter")
result_mol.to_excel(writer, sheet_name="Sheet1")
workbook = writer.book
worksheet = writer.sheets["Sheet1"]
# set_column(range, width): per-column widths (index, title, price, link, mall).
worksheet.set_column("A:A", 4)
worksheet.set_column("B:B", 60)
worksheet.set_column("C:C", 10)
worksheet.set_column("D:D", 10)
worksheet.set_column("E:E", 50)
worksheet.set_column("F:F", 10)
# Three-color heat-map shading over the price column.
worksheet.conditional_format("C2:C1001", {"type": "3_color_scale"})
# BUG FIX: ExcelWriter.save() was deprecated in pandas 1.5 and removed in 2.0;
# close() writes and releases the file on every supported pandas version.
writer.close()
# Visualize how many gathered listings each shopping mall contributes.
import matplotlib.pyplot as plt
%matplotlib inline
from matplotlib import rc
# Use a Korean-capable font so Hangul mall names render correctly.
rc("font", family="Malgun Gothic")
import seaborn as sns
plt.figure(figsize=(15, 6))
sns.countplot(
    x=result_mol["mall"],  # one bar per shopping mall
    data=result_mol,
    palette="RdYlGn",
    order=result_mol["mall"].value_counts().index  # most-listed malls first
)
plt.xticks(rotation=90)  # rotate labels so long mall names stay readable
plt.show()
<제로베이스 데이터 취업 스쿨>