python_apps/crwlers/saURLbuild.py
2023-11-03 14:49:12 +09:00

361 lines
7.2 KiB
Python

#version 201904
import requests, bs4, urllib, sys, re, math, logging
from urllib import parse
from datetime import datetime
# Wall-clock start time; used at the bottom of the script to log how long
# the whole crawl of one keyword took.
startTime = datetime.now()
# iMarket goods-search endpoint that every page request targets.
url = "http://www.imarket.co.kr/display/malls.do"
def query(keyword):
    """Encode the search keyword as EUC-KR bytes, as the iMarket site expects."""
    return keyword.encode('euc-kr')
def parameters(page, query):
    """Build the query-string parameters for one search-results page.

    page  -- 1-based results-page number.
    query -- EUC-KR-encoded keyword (as produced by query()).
    """
    return {
        '_method': 'searchGoods',
        'sc.page': page,
        'sc.row': '20',          # fixed page size: 20 items per page
        'sc.viewType': 'list',
        'sc.queryText': query,
    }
def headers():
    """Return the fixed browser-like request headers sent with every page fetch."""
    # Header values (and their order) are kept exactly as the site was
    # originally crawled with; the User-Agent mimics desktop Chrome on macOS.
    return {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'ko-KR,ko;q=0.9,en-US;q=0.8,en;q=0.7',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        'Host': 'www.imarket.co.kr',
        'Pragma': 'no-cache',
        'Referer': 'http://www.imarket.co.kr/',
        'Save-Data': 'on',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36',
    }
def pageRequest(url, parameters, headers, timeout=30):
    """GET one search-results page and return its HTML decoded as EUC-KR.

    url        -- endpoint to fetch.
    parameters -- query-string dict (see parameters()).
    headers    -- request headers dict (see headers()).
    timeout    -- seconds before the request is aborted (new, defaulted, so
                  existing callers are unaffected; the original had no
                  timeout and could hang forever on a stalled connection).

    Raises requests.HTTPError on non-2xx responses.
    """
    resp = requests.get(url, params=parameters, headers=headers, timeout=timeout)
    resp.raise_for_status()
    # The site serves EUC-KR; set it explicitly so .text decodes correctly
    # instead of relying on requests' charset guess.
    resp.encoding = 'EUC-KR'
    return resp.text
# ---- Command-line arguments -------------------------------------------
# Usage: saURLbuild.py <keyword> [cpc] [utm_campaign] [utm_content]
term = str(sys.argv[1])

# "packed" is the keyword with spaces removed; "sp" flags multi-word terms
# (it is appended to utm_content at the bottom of the script).
if term.find(" ") == -1:
    packed = term
    sp = ''
else:
    packed = re.sub(r"\ ", "", term)
    sp = 'sp'

# Initialize every optional argument to '' first, then override with any
# non-empty value actually supplied.  The original bound cpc/utm_campaign
# only inside `if sys.argv[n]:` checks, so an empty argument left the name
# undefined and crashed the final print with a NameError.
cpc = ''
utm_campaign = ''
utm_content = ''
if len(sys.argv) > 2 and sys.argv[2]:
    cpc = str(sys.argv[2])
if len(sys.argv) > 3 and sys.argv[3]:
    utm_campaign = str(sys.argv[3])
if len(sys.argv) > 4 and sys.argv[4]:
    utm_content = str(sys.argv[4])
# ---- Fetch the first results page and read the total hit count --------
urlterm = parse.quote(query(term))  # EUC-KR percent-encoded keyword, reused in destURL
htmlHead = pageRequest(url, parameters(1,query(term)), headers())
bs = bs4.BeautifulSoup(htmlHead, 'html.parser')
# Total result count as displayed, e.g. "1,234" -> "1234" (kept as a string).
rc = bs.select('div.tit_category_wrap h2.tit_result span em')[0].getText().strip()
rc = re.sub(r"\,","",rc)
if rc == '0' : print(term + "\t" + urlterm + "\t" + "NoResult")
else :
    # ---- Scan the first page's listings for price / MOQ / stock ------
    _list = bs.select('ul.prd_list_type li')
    outofStockCount = 0
    priceArray = []  # price * minimum-order-qty for each purchasable item
    for i in range(len(_list)) :
        price = _list[i].select('div.price_box span.sale_price em.num')[0].getText().strip()
        price = re.sub(r"\,","",price)
        # The literal below means "special discount price": no numeric
        # price is shown, so treat it as 0 (excluded from the average).
        if price == '특별할인가' : price = 0
        # Minimum order quantity comes from the quantity input's value.
        moq = _list[i].select('div.amount_box span.btn_wrap label input.pr-number')[0].get('value')
        # Button text meaning "add to cart" marks an in-stock item.
        outofStock = _list[i].select('div.btns a')[0].getText().strip()
        if price != 0 and outofStock == '장바구니' :
            # insert(i, ...) with i >= len(priceArray) behaves like append.
            priceArray.insert(i,int(price)*int(moq))
        if outofStock == "장바구니" : outofStockCount_ = 0
        else : outofStockCount_ = 1
        outofStockCount = outofStockCount + outofStockCount_
    # NOTE(review): the valid_rc formula looks odd — when rc > 20 and
    # fewer than 20 items were priced it reports 20 - len(priceArray),
    # and the final branch keeps rc as a *string*.  Confirm this is the
    # intended "valid result count" before relying on the column.
    valid_rc = 0
    if int(rc) <= 20 : valid_rc = len(priceArray)
    elif int(rc) > 20 and len(priceArray) < 20 : valid_rc = 20 - len(priceArray)
    else : valid_rc = rc
    if len(priceArray) != 0 : priceAvg = round(sum(priceArray)/len(priceArray))
    else : priceAvg = 0
    # ---- Category facet: collect (name, count), keep the top 3 -------
    categories = {}
    _category = bs.select('div.filter_wrap ul li dl.category dd ul li a')
    for i in range(len(_category)) :
        categoryName = _category[i].getText().strip()
        count = _category[i].select('em')[0].getText().strip()
        categoryName = re.sub(r"\([0-9]+\)","",categoryName).strip()  # drop "(123)" suffix
        count = re.sub(r"\(|\)","",count)
        categories[categoryName] = int(count)
    cate_tuple = sorted(categories.items(), key=lambda t : t[1], reverse=True)
    top3cate = ''  # tab-joined "name  count  share-of-rc" for up to 3 categories
    for j in range(len(cate_tuple)) :
        if j < 3 :
            cates = list(cate_tuple[j])
            top3cate = top3cate + cates[0] + "\t" + str(cates[1]) + "\t" + str(int(cates[1])/int(rc)) + "\t"
            # The biggest category becomes the default utm_content.
            if j == 0 : utm_content = cates[0]
        else : top3cate = top3cate + ""
    # Pad with empty columns when fewer than 3 categories were found so
    # the output always has the same number of tab-separated fields.
    # NOTE(review): if the page has no category facet at all, j is never
    # bound and this raises NameError — confirm that case cannot occur.
    if j < 3 :
        loop = -(j - 3) - 1
        for k in range(loop) :
            top3cate = top3cate + "\t" + "\t" + "\t"
    # ---- UTM / destination URL assembly ------------------------------
    # NOTE(review): this rebinds the module-level name `query` (the helper
    # function above) to a string; the function is not called again after
    # this point, but the shadowing is fragile.
    query = urllib.parse.quote_plus(term)
    # The blocks below (Naver spell-correction lookup and powerlink-rank
    # scraping for competitor sites) are disabled; kept for reference.
    #nTongUrl = 'https://search.naver.com/search.naver'
    #nTongUrl2 = '?sm=tab_hty.top&where=nexearch&query=' + query + '&oquery=' + query
    #nTongUrl = nTongUrl + nTongUrl2
    #nTongResp = requests.get(nTongUrl)
    #nTongResp.raise_for_status()
    #nTongResp.encoding='UTF-8'
    #nTongHtml = nTongResp.text
    #nTongbs = bs4.BeautifulSoup(nTongHtml, 'html.parser')
    #correctedTerm = nTongbs.select('div.sp_keyword dl dd em')
    #if len(correctedTerm) != 0 :
    #    correctedKeyword = correctedTerm[0].getText().strip()
    #else : correctedKeyword = 'N'
    #tong_powerlink = nTongbs.select('div#power_link_body ul.lst_type li.lst')
    #tong_misumi_txt = 'noComboMisumi'
    #tong_navimro_txt = 'noComboNaviMRO'
    #tong_imarket_txt = 'noComboiMarket'
    #tong_speedmall_txt = "noSpeedMall"
    #if len(tong_powerlink) > 0 :
    #    for i in range(len(tong_powerlink)) :
    #        site = tong_powerlink[i].select('div.inner a.lnk_url')[0].getText().strip()
    #        if site == 'www.imarket.co.kr' : tong_imarket_txt = str(i + 1)
    #        if site == 'kr.misumi-ec.com' : tong_misumi_txt = str(i + 1)
    #        if site == 'www.navimro.com' : tong_navimro_txt = str(i + 1)
    #        if site == 'www.speedmall.co.kr' : tong_speedmall_txt = str(i + 1)
    #pwUrl = "https://ad.search.naver.com/search.naver?where=ad&sm=svc_nrs&query=" + query
    #pwResp = requests.get(pwUrl)
    #pwResp.raise_for_status()
    #pwResp.encoding='UTF-8'
    #pwHtml = pwResp.text
    #pwbs = bs4.BeautifulSoup(pwHtml, 'html.parser')
    #pwResultCount = pwbs.select('div.search_result div.inner span.num_result')[0].getText().strip()
    #pwResultCount = re.sub(r"[0-9]+\-[0-9]+\ \/\s","",pwResultCount)
    #pwResultCount = re.sub(r"건","",pwResultCount)
    #pw_misumi_txt = 'noPwMisumi'
    #pw_navimro_txt = 'noPwNaviMRO'
    #pw_imarket_txt = 'noPwiMarket'
    #pw_speedmall_txt = "noSpeedMall"
    #if pwResultCount != 0 :
    #    pw_list = pwbs.select('div.ad_section ol.lst_type li.lst')
    #    for i in range(len(pw_list)) :
    #        site = pw_list[i].select('div.inner div.url_area a.url')[0].getText().strip()
    #        if site == 'http://www.imarket.co.kr' : pw_imarket_txt = str(i + 1)
    #        if site == 'http://kr.misumi-ec.com' : pw_misumi_txt = str(i + 1)
    #        if site == 'http://www.navimro.com' : pw_navimro_txt = str(i + 1)
    #        if site == 'http://www.speedmall.co.kr' : pw_speedmall_txt = str(i + 1)
    # utm_content: percent-encoded top category (or CLI override) plus the
    # multi-word marker from the argument-parsing section.
    utm_content = urllib.parse.quote_plus(utm_content) + sp
    destURL = "http://www.imarket.co.kr/display/malls.do?_method=searchGoods&BIZ_CD=1010187&utm_source=naverPowerlink&utm_medium=prdcpc&sc.queryText=" + urlterm + "&utm_keyword=" + query + "&utm_campaign=" + urllib.parse.quote_plus(utm_campaign) + "&utm_content=" + utm_content
    # One tab-separated output row per keyword (commented columns belong
    # to the disabled Naver checks above).
    print(term + "\t" + packed + "\t"
        + str(rc) + "\t"
        + str(valid_rc) + "\t"
        + str(priceAvg) + "\t"
        + cpc + "\t"
        + top3cate + "\t"
        # + correctedKeyword + "\t"
        # + str(len(tong_powerlink)) + "\t"
        # + str(pwResultCount) + "\t"
        # + tong_imarket_txt + "\t"
        # + pw_imarket_txt + "\t"
        # + tong_navimro_txt + "\t"
        # + pw_navimro_txt + "\t"
        # + tong_misumi_txt + "\t"
        # + pw_misumi_txt + "\t"
        # + tong_speedmall_txt + "\t"
        # + pw_speedmall_txt + "\t"
        + destURL
        )
# Log the total wall-clock time spent on this keyword (warning level so it
# shows up without configuring the root logger).
elapsed = datetime.now() - startTime
logging.warning(term + "\t" + str(elapsed))