swire_shop_item_list.py 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433
  1. # -*- coding: utf-8 -*-
  2. #from selenium import webdriver
  3. from seleniumwire import webdriver
  4. from selenium.webdriver.common.action_chains import ActionChains
  5. from selenium.webdriver.common.keys import Keys
  6. from selenium.webdriver.support import expected_conditions as EC
  7. from selenium.webdriver.support.wait import WebDriverWait
  8. from selenium.webdriver.common.by import By
  9. import selenium
  10. import traceback
  11. from bs4 import BeautifulSoup
  12. from utility import database_access as DA
  13. from utility.parseutils import *
  14. from utility.connect import *
  15. from datetime import datetime
  16. import pandas as pd
  17. import dataset
  18. import time
  19. import json
  20. import re
  21. import sys, os
  22. import socket
  23. import brotli
  24. from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
  25. import urllib.parse
# --- crawler configuration globals ---
chrome_window=False  # True = drive a local visible Chrome; False = remote docker hub (see brower_start)
#chrome_window=True
globalkw=None  # search keyword forced via argv[1] in main(); None = pick one from the DB
proxyport=8787  # selenium-wire capture-proxy port; overridden by argv[3] in main()
  30. def build_cache(db):
  31. id_dict={}
  32. cursor = db.query('SELECT place_id FROM google_poi.swire_store_list;')
  33. for c in cursor:
  34. id_dict[c['place_id']]=1
  35. return id_dict
  36. #
def brower_start(port):
    """Start a Chrome session (local or remote) and return the driver.

    port: port of the remote Selenium hub on 127.0.0.1 (used only when
        chrome_window is False).
    Traffic is routed through this process's selenium-wire proxy on
    `proxyport` so response bodies can later be read via driver.requests.
    """
    global proxyport
    global chrome_window
    print(proxyport)
    options = webdriver.ChromeOptions()
    if chrome_window:
        # Local, visible Chrome — used for debugging on a workstation.
        browser = webdriver.Chrome(
            desired_capabilities=options.to_capabilities()
        )
    else:
        # Remote Chrome in a docker container; all HTTP is forced through the
        # selenium-wire proxy this process listens on (0.0.0.0:proxyport).
        chrome_options = webdriver.ChromeOptions()
        chrome_options.add_argument('--proxy-server=host.docker.internal:'+str(proxyport)) # Specify your Kubernetes service-name here
        chrome_options.add_argument('--ignore-certificate-errors')
        chrome_options.add_argument("--no-sandbox")
        chrome_options.add_argument("--disable-dev-shm-usage")
        browser = webdriver.Remote(
            command_executor='http://127.0.0.1:'+str(port)+'/wd/hub',
            desired_capabilities=chrome_options.to_capabilities(),
            seleniumwire_options={'addr':'0.0.0.0','port':proxyport,'auto_config': False}
        )
        # seleniumwire_options = {'addr': '172.17.0.2','port':4444})
    browser.set_window_size(1400,1000)
    return browser
def page_down_(driver, xpath_css, time_):
    """Scroll the Google Maps result list to trigger lazy loading.

    driver: selenium driver currently showing a Maps search-result list.
    xpath_css: unused here (the CSS selector below is hard-coded instead);
        kept so existing callers that pass it keep working.
    time_: number of scroll/click iterations to perform.
    """
    # 'Jl2AFb' holds text like "1-20 項結果" ("results 1-20"); the part after
    # the '-' (total shown) decides whether scrolling is worthwhile.
    e = driver.find_element_by_css_selector('span[class="Jl2AFb"]')
    result_count = e.text.split('-')[1].replace(' 項結果','')
    print(result_count)
    if int(result_count) > 5:
        for i in range(time_):
            # Click 1px past the right edge of the last spacer div — this
            # lands inside the scrollable list and advances it one step.
            e = driver.find_elements_by_css_selector('div[class="TFQHme"]')
            action = webdriver.common.action_chains.ActionChains(driver)
            action.move_to_element_with_offset(e[-1], e[-1].size['width'] + 1 , 0)
            action.click()
            action.perform()
            time.sleep(0.5)
  72. def get_url_list(driver):
  73. page_down_(driver, '//div[@class="TFQHme"]', 8)
  74. url_soup = BeautifulSoup(driver.page_source, 'html.parser')
  75. url_list = []
  76. for i in url_soup.find_all('a'):
  77. try:
  78. if i['href'].find('maps/place') != -1:
  79. url_list += [[i['href'], i['aria-label']]]
  80. except:
  81. pass
  82. # print(len(url_list))
  83. return url_list
def keyin_keyword(driver, keyword):
    """Type `keyword` into the Google Maps search box and press Enter."""
    button = driver.find_element_by_id("searchbox")
    driver.implicitly_wait(30)
    ActionChains(driver).move_to_element(button).send_keys(keyword).send_keys(Keys.RETURN).perform()
    # Give the result page a moment to load before the caller inspects it.
    time.sleep(3)
  89. def scan_job(db,kw):
  90. result={'kw':kw}
  91. cursor = db.query('select t1.num,next-prev as diff from google_poi.conv_log t1, (SELECT num,max(id) mid FROM google_poi.conv_log group by num ) t2 where t1.id=t2.mid having diff>0 order by rand()')
  92. for c in cursor:
  93. result['num']=c['num']
  94. break
  95. cursor = db.query('select lat,lon,loc from lat_lon_loc where num ="'+str(result['num'])+'"')
  96. for c in cursor:
  97. result['lat']=c['lat']
  98. result['lon']=c['lon']
  99. result['loc']=c['loc']
  100. return result
def get_next_job(db, repeat=False, repkw=None, repnum=None):
    """Select the next (keyword, area) crawl job.

    Normal mode: pick a random row from `areacodes` and resolve its lat/lon
    from `lat_lon_loc`.  Repeat mode (repeat=True) re-crawls a given
    keyword/area.  The magic keyword 'REP' switches to replaying a random (or
    given) area from `swire_store_list` instead.

    NOTE(review): the original paste lost all indentation; the branch nesting
    below is a reconstruction — verify against a pristine copy if available.

    Returns a dict with keys 'kw', 'num' and usually 'lat'/'lon'/'loc'.
    """
    global globalkw
    result={}
    # if globalkw is not None:
    #     cursor = db.query('select distinct(kw),num+1 as num from swire_progress_list where kw="'+globalkw+'"')
    # else:
    #     cursor = db.query('select distinct(kw),num+1 as num from swire_progress_list where num < 367 order by rand() limit 1')
    # cursor = db.query('select kw,num from areacodes where expand=0 order by rand()')
    cursor = db.query('select kw,num from areacodes order by rand()')
    for c in cursor:
        # Take the first random row only.
        repkw=c['kw']
        result['kw']=c['kw']
        result['num']=c['num']
        break
    if repkw is not None:
        # A caller-supplied keyword overrides the randomly picked one.
        result['kw']=repkw
    if result.get('num') is not None:
        # Resolve the area's coordinates and location name.
        cursor = db.query('select lat,lon,loc from lat_lon_loc where num ="'+str(result['num'])+'"')
        for c in cursor:
            result['lat']=c['lat']
            result['lon']=c['lon']
            result['loc']=c['loc']
            break
    if repeat and repkw!= 'REP':
        # Plain repeat: force the caller's keyword/area number.
        result['kw']=repkw
        result['num']=repnum
    if 'REP' in repkw:
        # Replay mode: crawl a stored area again; 'REP' as repnum means
        # "pick a random stored area".
        if repnum=='REP':
            repnum=None
            # cursor = db.query('select num from swire_store_list where num not in (select num from conv_log) order by rand() limit 1')
            cursor = db.query('select num from swire_store_list order by rand() limit 1')
            for c in cursor:
                repnum=c['num']
                break
        if repnum is None:
            cursor = db.query('select num from swire_store_list order by rand() limit 1')
            for c in cursor:
                repnum=c['num']
                break
        # cursor = db.query('select lat_txt,lon_txt,keyword,num from swire_store_list order by rand() limit 1')
        cursor = db.query('select lat_txt,lon_txt,keyword,num from swire_store_list where num="'+str(repnum)+'" limit 1')
        for c in cursor:
            result['kw']=c['keyword']
            result['num']=c['num']
            result['lat']=c['lat_txt']
            result['lon']=c['lon_txt']
            result['loc']=''
            return result
    if repeat:
        # Repeat with an unknown location: borrow coordinates from a random
        # stored record.
        # cursor = db.query('select lat_txt,lon_txt from swire_store_list where num ="'+str(result['num'])+'" and keyword="'+result['kw']+'" order by rand() limit 1')
        cursor = db.query('select lat_txt,lon_txt from swire_store_list order by rand() limit 1')
        for c in cursor:
            result['lat']=c['lat_txt']
            result['lon']=c['lon_txt']
    return result
  156. def write_to_file(jsobj,fname):
  157. import codecs
  158. fw=codecs.open(fname,'w','utf-8')
  159. fw.write(str(jsobj))
  160. fw.close()
def parsing_js(orig):
    """Extract store records from a Google Maps search?tbm=map response body.

    orig: decoded response text — a JS payload embedding a JSON array.
    Returns a list of dicts (name, fid, addr, place_id, rating, lat/lon, ...)
    ready for DB insertion.

    The numeric indices below (x[14][11], x[14][9][2], ...) address positions
    in Google's undocumented response array; they were presumably found by
    inspection and can break whenever Google reshuffles the format — every
    lookup is therefore wrapped in its own best-effort try/except.
    """
    resultobj=[]
    content=""
    lines=orig.split('\n')
    for l in lines:
        # Unescape embedded quotes so the JSON slice parses.
        newl=l.replace('\\"','"')
        # if '\\\\"' in newl:
        #     print(newl)
        #     newl=newl.repace('\\\\"','')
        newl=newl.replace('\\"','"')  # second pass is a no-op repeat of the first
        content+=newl
    # The JSON payload starts at the first '[["' and ends at ']]"'
    # (the trailing quote is excluded via content_end-1).
    result=re.search(r'\[\["',content)
    print(result)
    content_begin=result.start()
    result=re.search(r'\]\]"',content)
    print(result)
    content_end=result.end()
    jscontent=content[content_begin:content_end-1]
    # write_to_file(jscontent,'c:/tmp/debug.txt')
    jsobj=json.loads(jscontent)
    # jsobj[0][1][1:] — one entry per store; element 14 holds the details.
    for x in jsobj[0][1][1:]:
        print(x[14][11])  # store name
        print(x[14][9])   # coordinate array
        reviews_cnt=None
        photo=None
        rating=None
        biz_id=None
        loc_x=None
        loc_y=None
        addr_elmts=None
        tel=None
        try:
            rating=x[14][4][7]
            reviews_cnt=x[14][4][8]
        except:
            traceback.print_exc()
        try:
            photo=x[14][37][0][0][0]
            num_photos=x[14][37][0][0][6][1]
        except:
            traceback.print_exc()
        try:
            loc_x=x[14][37][0][0][29][0]
            loc_y=x[14][37][0][0][29][1]
        except:
            traceback.print_exc()
        try:
            biz_id=x[14][57][2]
            tel=x[14][178][0][3]
        except:
            traceback.print_exc()
        try:
            addr_elmts=str(x[14][82])
        except:
            traceback.print_exc()
        category=str(x[14][13])
        topic=str(x[14][89])
        print(x[14][13])
        print(x[14][10])
        print(x[14][2])
        print(x[14][78])
        # NOTE(review): num_photos may be unbound if the photo block above
        # failed — the NameError is then swallowed by this try/except and the
        # whole record is dropped.
        try:
            resultobj.append({'name':x[14][11],'fid':x[14][10],'addr':x[14][2][0],'addr_elmts':addr_elmts,'place_id':x[14][78],'category':category,'rating':rating,'reviews_cnt':reviews_cnt,'lat':x[14][9][2],'lat_txt':str(x[14][9][2]),'lon':x[14][9][3],'lon_txt':str(x[14][9][3]),'topic':topic,'photo':photo,'num_photos':num_photos,'loc_x':loc_x,'loc_y':loc_y,'biz_id':biz_id,'tel':tel,'crawler_date':datetime.today().strftime("%Y/%m/%d %H:%M")})
        except:
            traceback.print_exc()
    return resultobj
  227. def save_js_to_db(jsobj,num,keyword):
  228. global store_list_table
  229. global iddict
  230. for r in jsobj:
  231. if iddict.get(r['place_id']) is not None:
  232. continue
  233. r['num']=num
  234. r['keyword']=keyword
  235. try:
  236. store_list_table.insert(r)
  237. # store_list_table.upsert(r,keys=['place_id'])
  238. except:
  239. traceback.print_exc()
  240. # store_list_table.upsert(r,keys=['place_id'])
def process_web_request(db, driver, area_num, keyword):
    """Scan selenium-wire's captured traffic for Maps search responses.

    Every response whose URL contains 'search?' is brotli-decompressed,
    parsed with parsing_js(), and the resulting stores are saved via
    save_js_to_db().  Afterwards a row is written to conv_log recording the
    store count before (global prev_cnt, set by main()) and after.

    NOTE(review): indentation was lost in this paste; the conv_log insert is
    reconstructed as per-matching-response — confirm against a pristine copy.
    """
    global prev_cnt
    # query = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.XPATH, '//button[@vet="19128"]')))
    # Give in-flight XHRs time to complete before reading driver.requests.
    time.sleep(0.8)
    time.sleep(3)
    print("ppppppppp&**********************")
    for request in driver.requests:
        if 'search?' in request.url :
            print('searching.....')
        # else:
        #     print(request.url[20:60])
        if request.response:
            # if 'https://www.google.com.tw/search?tbm=map' in request.url :
            if 'search?' in request.url :
                print('parsing js:')
                # Maps responses are brotli-compressed.
                resp = brotli.decompress(request.response.body)
                jstext=resp.decode('utf-8')
                resultobj=parsing_js(jstext)
                print("before",datetime.now())
                print("num: "+str(area_num))
                save_js_to_db(resultobj,area_num,keyword)
                print("after",datetime.now())
                # Log how many stores this area has now vs. before the crawl.
                aft_cnt=0
                cursor = db.query('select count(*) as cnt from swire_store_list where num="'+str(area_num)+'" ')
                for c in cursor:
                    aft_cnt=c['cnt']
                    break
                db['conv_log'].insert({'num':area_num,'prev':prev_cnt,'next':aft_cnt,'dt':datetime.now()})
    # time.sleep(9999)
def main():
    """Crawl Google Maps POIs area by area and store them in MySQL.

    CLI: argv[1]=keyword (or 'REP...'/'SCAN...' control words),
         argv[2]=selenium hub port, argv[3]=proxy port;
         with >4 args argv[1]/argv[2] are treated as repeat keyword/area.
    Loops forever: pick a job, search, paginate through results, persist
    stores; exits after 15 consecutive failures.
    """
    global chrome_window
    global store_list_table
    global globalkw
    global proxyport
    global iddict
    global prev_cnt
    port=4444
    # if len(sys.argv) == 3 :
    #     port=int(sys.argv[1])
    #     proxyport=int(sys.argv[2])
    if len(sys.argv)>1:
        globalkw=sys.argv[1]
        port=int(sys.argv[2])
        proxyport=int(sys.argv[3])
    print(globalkw, port, proxyport)
    failcnt=0
    # NOTE(review): localip is computed but no longer used (the ip-based
    # chrome_window switch below is commented out).
    localip=socket.gethostbyname(socket.gethostname())
    # if localip=='192.168.1.108':
    #     chrome_window=True
    # chrome_window=False
    # WARNING: hard-coded production DB credentials in source.
    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/google_poi?charset=utf8mb4')
    iddict=build_cache(db)
    store_list_table = db['swire_store_list']
    # table2 = db['swire_progress_list']
    table2 = db['swire_area_progress']
    if not chrome_window:
        # Restart the per-port browser container to get a clean session.
        print('restart docker p{}'.format(port))
        # os.system('sudo docker container restart p'+str(port))
        os.system('docker container restart p'+str(port))
        time.sleep(10)
    print('drvier start...')
    driver = brower_start(port)
    area_num=None
    while True:
        try:
            if len(sys.argv) > 4 :
                repkw=sys.argv[1]
                repnum=sys.argv[2]
                if 'SCAN' in repkw:
                    job=scan_job(db,repnum)
                else:
                    job=get_next_job(db,repeat=True,repkw=repkw,repnum=repnum)
            else:
                job=get_next_job(db, repkw=globalkw)
            print(job)
            keyword = job['kw']
            latitude = job['lat']    # latitude
            longitude = job['lon']   # longitude (original comment '精度' was likely a typo for '經度')
            area_num=job['num']
            safe_string = urllib.parse.quote_plus(keyword)
            url = 'https://www.google.com.tw/maps/@{},{},18z?hl=zh-TW'.format(latitude, longitude)
            # Remember the store count before crawling; process_web_request
            # logs prev/next counts into conv_log.
            prev_cnt=0
            cursor = db.query('select count(*) as cnt from swire_store_list where num="'+str(area_num)+'" ')
            for c in cursor:
                prev_cnt=c['cnt']
                break
            # url = 'https://www.google.com/maps/search/'+safe_string+'/@{},{},16z/data=!3m1!4b1'.format(latitude, longitude)
            # url='https://www.google.com/maps/search/'+safe_string+'/@24.7962279,121.0449762,15z/data=!3m1!4b1?hl=zh-TW'
            # print(url)
            # url='https://www.google.com/maps/search/%E7%81%AB%E9%8D%8B%E9%A4%90%E5%BB%B3/@24.772608,121.0515456,13z'
            driver.get(url)
            # time.sleep(3)
            keyin_keyword(driver, keyword)
            process_web_request(db,driver,area_num,keyword)
            # Page through the result list via the "next page" button,
            # capturing each page's responses; stop when the button is
            # disabled or after 5 extra pages.
            pagecnt=0
            while True:
                element = driver.find_element_by_id('ppdPk-Ej1Yeb-LgbsSe-tJiF1e')
                if element.get_attribute('disabled'):
                    break
                # driver.implicitly_wait(30)
                ActionChains(driver).move_to_element(element).click(element).perform()
                process_web_request(db,driver,area_num,keyword)
                pagecnt+=1
                if pagecnt>=5:
                    break
            # table2.upsert({'kw':keyword,'num':job['num']},['kw'])
            table2.insert({'kw':keyword,'num':job['num']},['kw'])
            db.query('update areacodes set expand = 1 where num="'+str(job['num'])+'" and kw="'+keyword+'" ')
        except:
            # Best-effort loop: log the failure and keep going, but bail out
            # after 15 failures so a dead driver doesn't spin forever.
            traceback.print_exc()
            failcnt+=1
            if failcnt>=15:
                sys.exit()
            pass
# Script entry point.
if __name__ == '__main__':
    main()