jared_shop_item_list.py

# -*- coding: utf-8 -*-
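"""Google Maps POI crawler: for each lat/lon point stored in the `lat_lon_loc`
table, search a keyword on Google Maps, scrape the shop/place URLs from the
results pane, and track per-keyword progress in `progress_list`."""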
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By
from bs4 import BeautifulSoup
from utility import database_access as DA
from utility.parseutils import *
from utility.connect import *
import dataset
import sys
from datetime import datetime
import time


def browser_start():
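    """Launch a local Chrome instance (a remote Selenium node is left commented out)."""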
    options = webdriver.ChromeOptions()
    browser = webdriver.Chrome(options=options)
    # browser = webdriver.Remote(
    #     command_executor='http://192.53.174.202:4444/wd/hub',
    #     desired_capabilities=options.to_capabilities()
    # )
    return browser


def get_url_list(driver):
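    """Scroll the results pane so Maps lazy-loads its entries, then return
    [url, name] pairs for every 'maps/place' link on the page."""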
    # Press DOWN on successive result entries so the pane scrolls and loads all results.
    for i in range(5, 43, 2):
        try:
            wait = WebDriverWait(driver, 60)
            wait.until(
                EC.element_to_be_clickable((By.XPATH, '//*[@id="pane"]/div/div[1]/div/div/div[2]/div[1]/div[{}]/div/a'.format(i)))
            )
            driver.find_element(By.XPATH, '//*[@id="pane"]/div/div[1]/div/div/div[2]/div[1]/div[{}]/div/a'.format(i)).send_keys(Keys.DOWN)
            time.sleep(1)
        except Exception:
            # Some indices are absent on shorter result lists; skip them.
            pass

    # Collect every place link together with its aria-label (the shop name).
    url_soup = BeautifulSoup(driver.page_source, 'html.parser')
    url_list = []
    for i in url_soup.find_all('a'):
        try:
            if i['href'].find('maps/place') != -1:
                url_list += [[i['href'], i['aria-label']]]
        except KeyError:
            pass
    return url_list


def keyin_keyword(driver, keyword):
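    """Type the search keyword into the Maps search box and press Enter."""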
    search_box = driver.find_element(By.ID, 'searchbox')
    driver.implicitly_wait(30)
    ActionChains(driver).move_to_element(search_box).send_keys(keyword).send_keys(Keys.RETURN).perform()
    time.sleep(3)


def main():
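    """Iterate over the stored locations, search the keyword at each point, and
    print (optionally insert into `shop_item_list`) the scraped results."""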
    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/google_poi?charset=utf8mb4')
    print('driver start...')
    driver = browser_start()
    # Default search keyword: '麻辣火鍋' (spicy hot pot); can be overridden on the command line.
    keyword = '麻辣火鍋'
    if len(sys.argv) > 1:
        keyword = sys.argv[1]
    # Resume from the last position recorded for this keyword in progress_list.
    num = 0
    cursor = db.query('select num from progress_list where kw = "' + keyword + '"')
    for c in cursor:
        num = c['num']
        break
    table2 = db['progress_list']
    cursor = db.query('select * from lat_lon_loc where num >= ' + str(num))
    # cursor = db.query('select * from lat_lon_loc')
    lst = []
    for c in cursor:
        lst.append({'num': c['num'], 'loc': c['loc'], 'lat': c['lat'], 'lon': c['lon']})
    for r in lst:
        latitude = r['lat']    # latitude
        longitude = r['lon']   # longitude
        table2.upsert({'kw': keyword, 'num': r['num']}, ['kw'])  # save current position

        url = 'https://www.google.com.tw/maps/@{},{},15z?hl=zh-TW'.format(latitude, longitude)
        driver.get(url)
        keyin_keyword(driver, keyword)
        for page in range(4):
            print(r['loc'], latitude, longitude, page)
            url_list = get_url_list(driver)
            print(url_list)

            shop_item_list_col = ['name', 'lon', 'lat', 'keyword', 'item_url', 'crawler_date']
            for item in url_list:
                result = [item[1], longitude, latitude, keyword, item[0], datetime.today().strftime("%Y/%m/%d %H:%M")]
                print(result)
                # insert_sql = """INSERT IGNORE INTO {}{} VALUES {}"""\
                #     .format('shop_item_list', str(tuple(shop_item_list_col)).replace('\'', ''), tuple(result))
                # DA.mysql_insert_data(db, insert_sql)
            # Click the next-page control for the first few result pages.
            if page < 2:
                element = driver.find_element(By.ID, 'ppdPk-Ej1Yeb-LgbsSe-tJiF1e')
                driver.implicitly_wait(30)
                ActionChains(driver).move_to_element(element).click(element).perform()


if __name__ == '__main__':
    main()