swire_shop_item_list.py 11 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351
  1. # -*- coding: utf-8 -*-
  2. #from selenium import webdriver
  3. from seleniumwire import webdriver
  4. from selenium.webdriver.common.action_chains import ActionChains
  5. from selenium.webdriver.common.keys import Keys
  6. from selenium.webdriver.support import expected_conditions as EC
  7. from selenium.webdriver.support.wait import WebDriverWait
  8. from selenium.webdriver.common.by import By
  9. import selenium
  10. import traceback
  11. from bs4 import BeautifulSoup
  12. from utility import database_access as DA
  13. from utility.parseutils import *
  14. from utility.connect import *
  15. from datetime import datetime
  16. import pandas as pd
  17. import dataset
  18. import time
  19. import json
  20. import re
  21. import sys, os
  22. import socket
  23. import brotli
  24. from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
  25. import urllib.parse
# --- Module-wide crawler state (read and mutated by main() / brower_start()) ---
chrome_window=False  # True: drive a local visible Chrome; False: connect to a remote docker grid
globalkw=None  # optional keyword override taken from sys.argv[1] in main()
proxyport=8787  # selenium-wire capture proxy port (overridden by sys.argv[2] when 3 args are given)
  29. def brower_start(port):
  30. global proxyport
  31. global chrome_window
  32. print(proxyport)
  33. options = webdriver.ChromeOptions()
  34. if chrome_window:
  35. browser = webdriver.Chrome(
  36. desired_capabilities=options.to_capabilities()
  37. )
  38. else:
  39. chrome_options = webdriver.ChromeOptions()
  40. chrome_options.add_argument('--proxy-server=host.docker.internal:'+str(proxyport)) # Specify your Kubernetes service-name here
  41. chrome_options.add_argument('--ignore-certificate-errors')
  42. chrome_options.add_argument("--no-sandbox")
  43. chrome_options.add_argument("--disable-dev-shm-usage")
  44. browser = webdriver.Remote(
  45. command_executor='http://127.0.0.1:'+str(port)+'/wd/hub',
  46. desired_capabilities=chrome_options.to_capabilities(),
  47. seleniumwire_options={'addr':'0.0.0.0','port':proxyport,'auto_config': False}
  48. )
  49. # seleniumwire_options = {'addr': '172.17.0.2','port':4444})
  50. browser.set_window_size(1400,1000)
  51. return browser
  52. def page_down_(driver, xpath_css, time_):
  53. e = driver.find_element_by_css_selector('span[class="Jl2AFb"]')
  54. result_count = e.text.split('-')[1].replace(' 項結果','')
  55. print(result_count)
  56. if int(result_count) > 5:
  57. for i in range(time_):
  58. e = driver.find_elements_by_css_selector('div[class="TFQHme"]')
  59. action = webdriver.common.action_chains.ActionChains(driver)
  60. action.move_to_element_with_offset(e[-1], e[-1].size['width'] + 1 , 0)
  61. action.click()
  62. action.perform()
  63. time.sleep(0.5)
  64. def get_url_list(driver):
  65. page_down_(driver, '//div[@class="TFQHme"]', 8)
  66. url_soup = BeautifulSoup(driver.page_source, 'html.parser')
  67. url_list = []
  68. for i in url_soup.find_all('a'):
  69. try:
  70. if i['href'].find('maps/place') != -1:
  71. url_list += [[i['href'], i['aria-label']]]
  72. except:
  73. pass
  74. # print(len(url_list))
  75. return url_list
  76. def keyin_keyword(driver, keyword):
  77. button = driver.find_element_by_id("searchbox")
  78. driver.implicitly_wait(30)
  79. ActionChains(driver).move_to_element(button).send_keys(keyword).send_keys(Keys.RETURN).perform()
  80. time.sleep(3)
  81. def get_next_job(db,repeat=False,repkw=None,repnum=None):
  82. global globalkw
  83. result={}
  84. # if globalkw is not None:
  85. # cursor = db.query('select distinct(kw),num+1 as num from swire_progress_list where kw="'+globalkw+'"')
  86. # else:
  87. # cursor = db.query('select distinct(kw),num+1 as num from swire_progress_list where num < 367 order by rand() limit 1')
  88. cursor = db.query('select kw,num from areacodes where expand=0 order by rand()')
  89. for c in cursor:
  90. result['kw']=c['kw']
  91. result['num']=c['num']
  92. break
  93. if repeat:
  94. result['kw']=repkw
  95. result['num']=repnum
  96. if repkw=='REP':
  97. cursor = db.query('select lat_txt,lon_txt,keyword,num from swire_store_list order by rand() limit 1')
  98. for c in cursor:
  99. result['kw']=c['keyword']
  100. result['num']=c['num']
  101. result['lat']=c['lat_txt']
  102. result['lon']=c['lon_txt']
  103. result['loc']=''
  104. return result
  105. cursor = db.query('select lat,lon,loc from lat_lon_loc where num ="'+str(result['num'])+'"')
  106. for c in cursor:
  107. result['lat']=c['lat']
  108. result['lon']=c['lon']
  109. result['loc']=c['loc']
  110. break
  111. if repeat:
  112. cursor = db.query('select lat_txt,lon_txt from swire_store_list where num ="'+str(result['num'])+'" and keyword="'+result['kw']+'" order by rand() limit 1')
  113. for c in cursor:
  114. result['lat']=c['lat_txt']
  115. result['lon']=c['lon_txt']
  116. return result
  117. def write_to_file(jsobj,fname):
  118. import codecs
  119. fw=codecs.open(fname,'w','utf-8')
  120. fw.write(str(jsobj))
  121. fw.close()
  122. def parsing_js(orig):
  123. resultobj=[]
  124. content=""
  125. lines=orig.split('\n')
  126. for l in lines:
  127. newl=l.replace('\\"','"')
  128. # if '\\\\"' in newl:
  129. # print(newl)
  130. # newl=newl.repace('\\\\"','')
  131. newl=newl.replace('\\"','"')
  132. content+=newl
  133. result=re.search(r'\[\["',content)
  134. print(result)
  135. content_begin=result.start()
  136. result=re.search(r'\]\]"',content)
  137. print(result)
  138. content_end=result.end()
  139. jscontent=content[content_begin:content_end-1]
  140. # write_to_file(jscontent,'c:/tmp/debug.txt')
  141. jsobj=json.loads(jscontent)
  142. for x in jsobj[0][1][1:]:
  143. print(x[14][11])
  144. print(x[14][9])
  145. reviews_cnt=None
  146. photo=None
  147. rating=None
  148. biz_id=None
  149. loc_x=None
  150. loc_y=None
  151. addr_elmts=None
  152. tel=None
  153. try:
  154. rating=x[14][4][7]
  155. reviews_cnt=x[14][4][8]
  156. except:
  157. traceback.print_exc()
  158. try:
  159. photo=x[14][37][0][0][0]
  160. num_photos=x[14][37][0][0][6][1]
  161. except:
  162. traceback.print_exc()
  163. try:
  164. loc_x=x[14][37][0][0][29][0]
  165. loc_y=x[14][37][0][0][29][1]
  166. except:
  167. traceback.print_exc()
  168. try:
  169. biz_id=x[14][57][2]
  170. tel=x[14][178][0][3]
  171. except:
  172. traceback.print_exc()
  173. try:
  174. addr_elmts=str(x[14][82])
  175. except:
  176. traceback.print_exc()
  177. category=str(x[14][13])
  178. topic=str(x[14][89])
  179. print(x[14][13])
  180. print(x[14][10])
  181. print(x[14][2])
  182. print(x[14][78])
  183. try:
  184. resultobj.append({'name':x[14][11],'fid':x[14][10],'addr':x[14][2][0],'addr_elmts':addr_elmts,'place_id':x[14][78],'category':category,'rating':rating,'reviews_cnt':reviews_cnt,'lat':x[14][9][2],'lat_txt':str(x[14][9][2]),'lon':x[14][9][3],'lon_txt':str(x[14][9][3]),'topic':topic,'photo':photo,'num_photos':num_photos,'loc_x':loc_x,'loc_y':loc_y,'biz_id':biz_id,'tel':tel,'crawler_date':datetime.today().strftime("%Y/%m/%d %H:%M")})
  185. except:
  186. traceback.print_exc()
  187. return resultobj
  188. def save_js_to_db(jsobj,num,keyword):
  189. global store_list_table
  190. for r in jsobj:
  191. r['num']=num
  192. r['keyword']=keyword
  193. try:
  194. store_list_table.insert(r)
  195. # store_list_table.upsert(r,keys=['place_id'])
  196. except:
  197. traceback.print_exc()
  198. # store_list_table.upsert(r,keys=['place_id'])
  199. def process_web_request(driver,area_num,keyword):
  200. # query = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.XPATH, '//button[@vet="19128"]')))
  201. time.sleep(0.8)
  202. time.sleep(3)
  203. print("ppppppppp&**********************")
  204. for request in driver.requests:
  205. if 'search?' in request.url :
  206. print('searching.....')
  207. # else:
  208. # print(request.url[20:60])
  209. if request.response:
  210. # if 'https://www.google.com.tw/search?tbm=map' in request.url :
  211. if 'search?' in request.url :
  212. print('parsing js:')
  213. resp = brotli.decompress(request.response.body)
  214. jstext=resp.decode('utf-8')
  215. resultobj=parsing_js(jstext)
  216. print("before",datetime.now())
  217. save_js_to_db(resultobj,area_num,keyword)
  218. print("after",datetime.now())
  219. # time.sleep(9999)
def main():
    """Crawl loop: pick an area/keyword job, open Google Maps, capture and
    store search results, mark the area expanded; repeat until 15 failures.

    argv conventions (positional, overloaded):
      - 1 arg : argv[1] -> globalkw keyword override
      - 3 args: argv[1] -> grid port, argv[2] -> selenium-wire proxy port
      - >3 args: argv[1]/argv[2] -> repeat-mode repkw/repnum
    """
    global chrome_window
    global store_list_table
    global globalkw
    global proxyport
    if len(sys.argv)>1:
        globalkw=sys.argv[1]
    failcnt=0  # consecutive-ish failure counter; process exits at 15
    localip=socket.gethostbyname(socket.gethostname())
    # if localip=='192.168.1.108':
    #     chrome_window=True
    # chrome_window=False
    # NOTE(review): credentials hard-coded in the DSN — move to env/config.
    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/google_poi?charset=utf8mb4')
    store_list_table = db['swire_store_list']
    # table2 = db['swire_progress_list']
    table2 = db['swire_area_progress']
    port=4444
    if len(sys.argv) == 3 :
        port=int(sys.argv[1])
        proxyport=int(sys.argv[2])
    if not chrome_window:
        # Headless/remote mode: restart the per-port grid container first.
        print('restart docker p{}'.format(port))
        # os.system('sudo docker container restart p'+str(port))
        os.system('docker container restart p'+str(port))
        time.sleep(10)
    print('drvier start...')
    driver = brower_start(port)
    while True:
        try:
            if len(sys.argv) > 3 :
                # Repeat mode: re-crawl a specific keyword/area from argv.
                repkw=sys.argv[1]
                repnum=sys.argv[2]
                job=get_next_job(db,repeat=True,repkw=repkw,repnum=repnum)
            else:
                job=get_next_job(db)
            print(job)
            keyword = job['kw']
            latitude = job['lat']  # latitude
            longitude = job['lon']  # longitude
            area_num=job['num']
            # NOTE(review): safe_string is currently unused — the search-URL
            # variants that needed it are commented out below.
            safe_string = urllib.parse.quote_plus(keyword)
            url = 'https://www.google.com.tw/maps/@{},{},18z?hl=zh-TW'.format(latitude, longitude)
            # url = 'https://www.google.com/maps/search/'+safe_string+'/@{},{},16z/data=!3m1!4b1'.format(latitude, longitude)
            # url='https://www.google.com/maps/search/'+safe_string+'/@24.7962279,121.0449762,15z/data=!3m1!4b1?hl=zh-TW'
            # print(url)
            # url='https://www.google.com/maps/search/%E7%81%AB%E9%8D%8B%E9%A4%90%E5%BB%B3/@24.772608,121.0515456,13z'
            driver.get(url)
            # time.sleep(3)
            keyin_keyword(driver, keyword)
            process_web_request(driver,area_num,keyword)
            # Page through results until the "next" button is disabled.
            while True:
                element = driver.find_element_by_id('ppdPk-Ej1Yeb-LgbsSe-tJiF1e')
                if element.get_attribute('disabled'):
                    break
                # driver.implicitly_wait(30)
                ActionChains(driver).move_to_element(element).click(element).perform()
                process_web_request(driver,area_num,keyword)
            # table2.upsert({'kw':keyword,'num':job['num']},['kw'])
            # NOTE(review): insert()'s second positional arg is not a key list
            # in the dataset API — looks like a leftover from upsert; confirm.
            table2.insert({'kw':keyword,'num':job['num']},['kw'])
            db.query('update areacodes set expand = 1 where num="'+str(job['num'])+'" and kw="'+keyword+'" ')
        except:
            # Any failure (stale element, DB hiccup, layout change) is logged;
            # give up for good after 15 of them.
            traceback.print_exc()
            failcnt+=1
            if failcnt>=15:
                sys.exit()
            pass

if __name__ == '__main__':
    main()