# swire_shop_item_list.py
  1. # -*- coding: utf-8 -*-
  2. #from selenium import webdriver
  3. from seleniumwire import webdriver
  4. from selenium.webdriver.common.action_chains import ActionChains
  5. from selenium.webdriver.common.keys import Keys
  6. from selenium.webdriver.support import expected_conditions as EC
  7. from selenium.webdriver.support.wait import WebDriverWait
  8. from selenium.webdriver.common.by import By
  9. import selenium
  10. import traceback
  11. from bs4 import BeautifulSoup
  12. from utility import database_access as DA
  13. from utility.parseutils import *
  14. from utility.connect import *
  15. from datetime import datetime
  16. import pandas as pd
  17. import dataset
  18. import time
  19. import json
  20. import re
  21. import sys, os
  22. import socket
  23. import brotli
  24. from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
  25. import urllib.parse
# True -> drive a local visible Chrome; False -> remote docker Chrome behind the proxy.
chrome_window=False
#chrome_window=True
# Search keyword from the command line (set in main()); None -> pick one from the DB.
globalkw=None
# Port the embedded selenium-wire proxy listens on (overridden by argv in main()).
proxyport=8787
  30. def build_cache(db):
  31. id_dict={}
  32. cursor = db.query('SELECT place_id FROM google_poi.swire_store_list;')
  33. for c in cursor:
  34. id_dict[c['place_id']]=1
  35. return id_dict
  36. #
  37. def brower_start(port):
  38. global proxyport
  39. global chrome_window
  40. print(proxyport)
  41. options = webdriver.ChromeOptions()
  42. if chrome_window:
  43. browser = webdriver.Chrome(
  44. desired_capabilities=options.to_capabilities()
  45. )
  46. else:
  47. chrome_options = webdriver.ChromeOptions()
  48. chrome_options.add_argument('--proxy-server=host.docker.internal:'+str(proxyport)) # Specify your Kubernetes service-name here
  49. chrome_options.add_argument('--ignore-certificate-errors')
  50. chrome_options.add_argument("--no-sandbox")
  51. chrome_options.add_argument("--disable-dev-shm-usage")
  52. browser = webdriver.Remote(
  53. command_executor='http://127.0.0.1:'+str(port)+'/wd/hub',
  54. desired_capabilities=chrome_options.to_capabilities(),
  55. seleniumwire_options={'addr':'0.0.0.0','port':proxyport,'auto_config': False}
  56. )
  57. # seleniumwire_options = {'addr': '172.17.0.2','port':4444})
  58. browser.set_window_size(1400,1000)
  59. return browser
def page_down_(driver, xpath_css, time_):
    """Scroll the maps result list by clicking just past the last spacer element.

    driver    -- selenium webdriver positioned on a maps search-result page
    xpath_css -- unused; kept only for call-site compatibility
    time_     -- number of scroll (click) iterations to perform
    """
    # Result counter, e.g. "1-20 項結果" ("... results"); take the total part.
    e = driver.find_element_by_css_selector('span[class="Jl2AFb"]')
    result_count = e.text.split('-')[1].replace(' 項結果','')
    print(result_count)
    if int(result_count) > 5:
        for i in range(time_):
            # Click one pixel to the right of the last spacer div to force
            # the virtualized list to load the next chunk of results.
            e = driver.find_elements_by_css_selector('div[class="TFQHme"]')
            action = webdriver.common.action_chains.ActionChains(driver)
            action.move_to_element_with_offset(e[-1], e[-1].size['width'] + 1 , 0)
            action.click()
            action.perform()
            time.sleep(0.5)
  72. def get_url_list(driver):
  73. page_down_(driver, '//div[@class="TFQHme"]', 8)
  74. url_soup = BeautifulSoup(driver.page_source, 'html.parser')
  75. url_list = []
  76. for i in url_soup.find_all('a'):
  77. try:
  78. if i['href'].find('maps/place') != -1:
  79. url_list += [[i['href'], i['aria-label']]]
  80. except:
  81. pass
  82. # print(len(url_list))
  83. return url_list
  84. def keyin_keyword(driver, keyword):
  85. button = driver.find_element_by_id("searchbox")
  86. driver.implicitly_wait(30)
  87. ActionChains(driver).move_to_element(button).send_keys(keyword).send_keys(Keys.RETURN).perform()
  88. time.sleep(3)
  89. def scan_job(db,kw):
  90. result={'kw':kw}
  91. cursor = db.query('select t1.num,next-prev as diff from google_poi.conv_log t1, (SELECT num,max(id) mid FROM google_poi.conv_log group by num ) t2 where t1.id=t2.mid having diff>0 order by rand()')
  92. for c in cursor:
  93. result['num']=c['num']
  94. break
  95. cursor = db.query('select lat,lon,loc from lat_lon_loc where num ="'+str(result['num'])+'"')
  96. for c in cursor:
  97. result['lat']=c['lat']
  98. result['lon']=c['lon']
  99. result['loc']=c['loc']
  100. return result
def get_next_job(db, repeat=False, repkw=None, repnum=None):
    """Pick the next (keyword, area) crawl job.

    Default: one random areacodes row, enriched with lat/lon/loc.
    repkw/repnum override the randomly drawn values; the special value
    'REP' switches to replaying a random already-scraped store location.
    Returns a dict with keys kw/num and (when resolvable) lat/lon/loc.

    NOTE(review): the original file's indentation was lost; the nesting of
    the 'REP' branches below is the most consistent reconstruction -- verify
    against version control before relying on the empty-cursor edge cases.
    """
    global globalkw
    result = {}
    # Draw one random candidate area.
    cursor = db.query('select kw,num from areacodes order by rand()')
    for c in cursor:
        if repkw is None:
            repkw = c['kw']
        result['kw'] = c['kw']
        result['num'] = c['num']
        break
    # An explicitly requested keyword always wins over the random draw.
    if repkw is not None:
        result['kw'] = repkw
    if result.get('num') is not None:
        cursor = db.query('select lat,lon,loc from lat_lon_loc where num ="' + str(result['num']) + '"')
        for c in cursor:
            result['lat'] = c['lat']
            result['lon'] = c['lon']
            result['loc'] = c['loc']
            break
    if repeat and repkw != 'REP':
        result['kw'] = repkw
        result['num'] = repnum
    if 'REP' in repkw:
        # Replay mode: resolve a swire_store_list row instead of an area row.
        if repnum == 'REP':
            repnum = None
            cursor = db.query('select num from swire_store_list order by rand() limit 1')
            for c in cursor:
                repnum = c['num']
                break
        if repnum is None:
            cursor = db.query('select num from swire_store_list order by rand() limit 1')
            for c in cursor:
                repnum = c['num']
                break
        cursor = db.query('select lat_txt,lon_txt,keyword,num from swire_store_list where num="' + str(repnum) + '" limit 1')
        for c in cursor:
            result['kw'] = c['keyword']
            result['num'] = c['num']
            result['lat'] = c['lat_txt']
            result['lon'] = c['lon_txt']
            result['loc'] = ''
        return result
    if repeat:
        # Plain repeat: any random already-scraped store.
        cursor = db.query('select lat_txt,lon_txt,keyword from swire_store_list order by rand() limit 1')
        for c in cursor:
            result['kw'] = c['keyword']
            result['lat'] = c['lat_txt']
            result['lon'] = c['lon_txt']
    return result
  159. def write_to_file(jsobj,fname):
  160. import codecs
  161. fw=codecs.open(fname,'w','utf-8')
  162. fw.write(str(jsobj))
  163. fw.close()
  164. def parsing_js(orig):
  165. resultobj=[]
  166. content=""
  167. lines=orig.split('\n')
  168. for l in lines:
  169. newl=l.replace('\\"','"')
  170. # if '\\\\"' in newl:
  171. # print(newl)
  172. # newl=newl.repace('\\\\"','')
  173. newl=newl.replace('\\"','"')
  174. content+=newl
  175. result=re.search(r'\[\["',content)
  176. print(result)
  177. content_begin=result.start()
  178. result=re.search(r'\]\]"',content)
  179. print(result)
  180. content_end=result.end()
  181. jscontent=content[content_begin:content_end-1]
  182. # write_to_file(jscontent,'c:/tmp/debug.txt')
  183. jsobj=json.loads(jscontent)
  184. for x in jsobj[0][1][1:]:
  185. print(x[14][11])
  186. print(x[14][9])
  187. reviews_cnt=None
  188. photo=None
  189. rating=None
  190. biz_id=None
  191. loc_x=None
  192. loc_y=None
  193. addr_elmts=None
  194. tel=None
  195. try:
  196. rating=x[14][4][7]
  197. reviews_cnt=x[14][4][8]
  198. except:
  199. traceback.print_exc()
  200. try:
  201. photo=x[14][37][0][0][0]
  202. num_photos=x[14][37][0][0][6][1]
  203. except:
  204. traceback.print_exc()
  205. try:
  206. loc_x=x[14][37][0][0][29][0]
  207. loc_y=x[14][37][0][0][29][1]
  208. except:
  209. traceback.print_exc()
  210. try:
  211. biz_id=x[14][57][2]
  212. tel=x[14][178][0][3]
  213. except:
  214. traceback.print_exc()
  215. try:
  216. addr_elmts=str(x[14][82])
  217. except:
  218. traceback.print_exc()
  219. category=str(x[14][13])
  220. topic=str(x[14][89])
  221. print(x[14][13])
  222. print(x[14][10])
  223. print(x[14][2])
  224. print(x[14][78])
  225. try:
  226. resultobj.append({'name':x[14][11],'fid':x[14][10],'addr':x[14][2][0],'addr_elmts':addr_elmts,'place_id':x[14][78],'category':category,'rating':rating,'reviews_cnt':reviews_cnt,'lat':x[14][9][2],'lat_txt':str(x[14][9][2]),'lon':x[14][9][3],'lon_txt':str(x[14][9][3]),'topic':topic,'photo':photo,'num_photos':num_photos,'loc_x':loc_x,'loc_y':loc_y,'biz_id':biz_id,'tel':tel,'crawler_date':datetime.today().strftime("%Y/%m/%d %H:%M")})
  227. except:
  228. traceback.print_exc()
  229. return resultobj
  230. def save_js_to_db(jsobj,num,keyword):
  231. global store_list_table
  232. global iddict
  233. for r in jsobj:
  234. if iddict.get(r['place_id']) is not None:
  235. continue
  236. r['num']=num
  237. r['keyword']=keyword
  238. try:
  239. store_list_table.insert(r)
  240. # store_list_table.upsert(r,keys=['place_id'])
  241. except:
  242. traceback.print_exc()
  243. # store_list_table.upsert(r,keys=['place_id'])
def process_web_request(db, driver, area_num, keyword):
    """Drain the selenium-wire request buffer, parse every maps 'search?'
    response, persist the stores, and append a prev/next count to conv_log.

    db       -- dataset DB handle
    driver   -- selenium-wire webdriver whose .requests buffer is consumed
    area_num -- area number the current search belongs to
    keyword  -- keyword stamped onto each stored record
    """
    global prev_cnt  # row count for this area before the search; set in main()
    # query = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.XPATH, '//button[@vet="19128"]')))
    time.sleep(0.8)
    time.sleep(3)
    print("ppppppppp&**********************")
    for request in driver.requests:
        if 'search?' in request.url :
            print('searching.....')
        if request.response:
            if 'search?' in request.url :
                print('parsing js:')
                # Response body is brotli-compressed JS; decompress then parse.
                resp = brotli.decompress(request.response.body)
                jstext = resp.decode('utf-8')
                resultobj = parsing_js(jstext)
                print("before", datetime.now())
                print("num: " + str(area_num))
                save_js_to_db(resultobj, area_num, keyword)
                print("after", datetime.now())
                # Re-count rows for this area and log the prev -> next delta.
                aft_cnt = 0
                cursor = db.query('select count(*) as cnt from swire_store_list where num="' + str(area_num) + '" ')
                for c in cursor:
                    aft_cnt = c['cnt']
                    break
                db['conv_log'].insert({'num': area_num, 'prev': prev_cnt, 'next': aft_cnt, 'dt': datetime.now()})
    # Clear the captured request buffer so the next page starts clean.
    del driver.requests
def main():
    """Crawl google-maps store listings area by area and persist them to MySQL.

    Command line (optional): <keyword|REP|SCAN...> <selenium-port> <proxy-port>
    [extra args switch to replay/scan mode].  Loops forever, one area per
    iteration, paging each keyword search up to 5 times; exits after 15
    failures.
    """
    global chrome_window
    global store_list_table
    global globalkw
    global proxyport
    global iddict
    global prev_cnt
    port = 4444
    if len(sys.argv) > 1:
        globalkw = sys.argv[1]
        port = int(sys.argv[2])
        proxyport = int(sys.argv[3])
    print(globalkw, port, proxyport)
    failcnt = 0
    localip = socket.gethostbyname(socket.gethostname())
    # NOTE(review): DB credentials are hard-coded; move to config/env.
    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/google_poi?charset=utf8mb4')
    iddict = build_cache(db)  # place_ids already scraped -> skipped on insert
    store_list_table = db['swire_store_list']
    table2 = db['swire_area_progress']
    if not chrome_window:
        # Docker mode: restart the paired selenium container before connecting.
        print('restart docker p{}'.format(port))
        os.system('docker container restart p' + str(port))
        time.sleep(10)
    print('drvier start...')
    driver = brower_start(port)
    area_num = None
    while True:
        try:
            if len(sys.argv) > 4:
                # Replay mode: argv[1] = keyword or mode flag, argv[2] = area number.
                repkw = sys.argv[1]
                repnum = sys.argv[2]
                if 'SCAN' in repkw:
                    job = scan_job(db, repnum)
                else:
                    job = get_next_job(db, repeat=True, repkw=repkw, repnum=repnum)
            else:
                job = get_next_job(db, repkw=globalkw)
            print(job)
            keyword = job['kw']
            latitude = job['lat']    # latitude
            longitude = job['lon']   # longitude
            area_num = job['num']
            safe_string = urllib.parse.quote_plus(keyword)
            url = 'https://www.google.com.tw/maps/@{},{},18z?hl=zh-TW'.format(latitude, longitude)
            # Row count for this area before crawling; read by process_web_request.
            prev_cnt = 0
            cursor = db.query('select count(*) as cnt from swire_store_list where num="' + str(area_num) + '" ')
            for c in cursor:
                prev_cnt = c['cnt']
                break
            driver.get(url)
            keyin_keyword(driver, keyword)
            process_web_request(db, driver, area_num, keyword)
            # Page through the results (next button) at most 5 more times.
            pagecnt = 0
            while True:
                element = driver.find_element_by_id('ppdPk-Ej1Yeb-LgbsSe-tJiF1e')
                if element.get_attribute('disabled'):
                    break  # no further result pages
                ActionChains(driver).move_to_element(element).click(element).perform()
                process_web_request(db, driver, area_num, keyword)
                pagecnt += 1
                if pagecnt >= 5:
                    break
            # Record progress for this keyword/area and mark it expanded.
            table2.insert({'kw': keyword, 'num': job['num']}, ['kw'])
            db.query('update areacodes set expand = 1 where num="' + str(job['num']) + '" and kw="' + keyword + '" ')
        except:
            traceback.print_exc()
            failcnt += 1
            # Give up for good after 15 failures.
            if failcnt >= 15:
                sys.exit()
            pass
# Script entry point.
if __name__ == '__main__':
    main()