zooey 2 years ago
parent
commit
705b46ece2
7 changed files with 54 additions and 56 deletions
  1. SEO/clickbot_100.py (+10 -10)
  2. SEO/clickbot_100_one.py (+2 -2)
  3. SEO/csv_to_sql.py (+20 -16)
  4. SEO/google_rank.py (+7 -9)
  5. SEO/notify_nda.py (+6 -12)
  6. SEO/ranking_report.py (+1 -1)
  7. SEO/ranking_world.py (+8 -6)

+ 10 - 10
SEO/clickbot_100.py

@@ -43,26 +43,26 @@ def restart_browser():
 
 def process_one():
     db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/seo?charset=utf8mb4')
-    lst=[]
+    lst=['台中 留學代辦']
     table=db['google_rank']
-    cursor = db.query('select term,domain from seo.select_kw where client="loginheart"')
+    # cursor = db.query('select term,domain from seo.selected_kw where client="CLIQ露營椅"')
     # cursor = db.query('select term,url from seo.sns_kw where client="loginheart"')
     # cursor = db.query('select term from seo.selected_kw where client="鑫富"')
     # cursor = db.query('select term from seo.selected_kw where id between 1902 and 1923')
     # cursor=db.query('select term from selected_kw and term not in (SELECT distinct(keyword) FROM ig_tags.save_result where url like "%beastparadise.net%" and datediff(now(),dt)=0)')
-    for c in cursor:
-        lst.append([c['term'],c['domain']])
+    # for c in cursor:
+    #     lst.append([c['term'],c['domain']])
 
-    # domain = 'vickybrain.com'
-    for i in lst[13::]:
+    domain = 'hsinfei'
+    for i in lst:
         print(i)
         driver=restart_browser()
-        escaped_search_term=urllib.parse.quote(i[0])
+        escaped_search_term=urllib.parse.quote(i)
         googleurl = 'https://www.google.com/search?q={}&num={}&hl={}'.format(escaped_search_term, 100,'zh-TW')
         driver.get(googleurl)
         time.sleep(60)
         print(driver.current_url)
-        # driver.save_screenshot('C:\/Users\/s1301\/Pictures\/Saved Pictures\/鑫富\/'+term+'.png')
+        # driver.save_screenshot('C:\/Users\/s1301\/Pictures\/Saved Pictures\/CLIQ\/'+i[0]+'.png')
         df=pd.DataFrame()
         elmts=driver.find_elements(By.XPATH,"//div[@class='yuRUbf']/a")
         print('結果數量',len(elmts))
@@ -72,8 +72,8 @@ def process_one():
         for elmt in elmts:
             try:
                 href=elmt.get_attribute('href')
-                if i[1] == href:
-                    table.insert({'title':elmt.text,'url':href,'keyword':i[0],'dt':datetime.datetime.now(),'ranking':cnt})
+                if domain in href:
+                    table.insert({'title':elmt.text,'url':href,'keyword':i,'dt':datetime.datetime.now(),'ranking':cnt})
                     print(href)
                     print(elmt.text)
                 # datadict['搜尋詞'].append(term)
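
A minimal sketch of the matching rule this hunk switches to in process_one(): instead of comparing each result URL against an exact target URL per keyword, the loop now checks whether a hard-coded domain substring ('hsinfei') appears anywhere in the href and records the running position as the ranking. The helper and sample URLs below are hypothetical, for illustration only.

def rank_of_domain(hrefs, domain):
    # Return the 1-based position of the first result whose URL contains `domain`, or -1.
    for ranking, href in enumerate(hrefs, start=1):
        if domain in href:
            return ranking
    return -1

# Example: the tracked domain shows up as the third organic result.
results = ['https://a.example/', 'https://b.example/', 'https://www.hsinfei.com.tw/']
print(rank_of_domain(results, 'hsinfei'))   # -> 3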

+ 2 - 2
SEO/clickbot_100_one.py

@@ -41,8 +41,8 @@ def restart_browser():
     return driver
 
 def process_one():
-    lst=['護佳國際']
-    date='1209'
+    lst=['雙響泡','雙響砲','双響泡']
+    date='1214'
     for term in lst:
         driver=restart_browser()
         escaped_search_term=urllib.parse.quote(term)

+ 20 - 16
SEO/csv_to_sql.py

@@ -7,32 +7,36 @@ pymysql.install_as_MySQLdb()
 
 
 db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/seo?charset=utf8mb4')
-# table=db['seo_jobs']
-table=db['select_kw']
-client='loginheart'
+table=db['seo_jobs']
+# table=db['selected_kw']
+client='泡麵'
 domain=''
 # for index,row in df.iterrows():
 # with open("C:\/Users\/s1301\/Documents\/新飛國際遊學SEO - 關鍵字12.08.csv") as f:
 #     data_all = f.readlines()
 # print(data_all)
-f = open("C:\/Users\/s1301\/Documents\/關鍵字建議.csv",encoding='utf-8')
-df = pd.read_csv(f,header=None, names=['kw', 'url'])
+f = open("C:\/Users\/s1301\/Documents\/456.csv",encoding='utf-8')
+# df = pd.read_csv(f,header=None, names=['kw', 'url'])
+df = pd.read_csv(f,header=None, names=['prefix', 'positive','rnd','URL'])
 df=df.fillna('')
 # print(df)
+url='cliqproducts.com.tw'
+
 for index,row in df.iterrows():
-    print(row['kw'],row['url'])
-    # prefix='"'+row['prefix']+'"'
-    # positive='"'+row['positive']+'"'
-    # domain='"'+row['URL']+'"'
-    # rnd='"'+str(row['rnd'])+'"'
-    # postfix='""'
-    # data = f'"prefix":{prefix},"postfix":{postfix},"positive":[{positive}],"domain":[{domain}],"rnd":[{rnd}]'
-    # json='{'+data+'}'
-    # print(json)
-    # table.insert({'cust':client,'plan':'形象SEO','json':json})
-    table.insert({'term':row['kw'],'client':client,'domain':row['url']})
+    # print(row['kw'],url)
+    prefix='"'+row['prefix']+'"'
+    positive='"'+row['positive']+'"'
+    domain='"'+row['URL']+'"'
+    rnd='"'+str(row['rnd'])+'"'
+    postfix='""'
+    data = f'"prefix":{prefix},"postfix":{postfix},"positive":[{positive}],"domain":[{domain}],"rnd":[{rnd}]'
+    json='{'+data+'}'
+    print(json)
+    table.insert({'cust':client,'plan':'形象SEO','json':json})
+    # table.insert({'term':row['kw'],'client':client,'domain':url})
 
 {"prefix": "護佳", "postfix": "", "domain": ["www.hujiai-international.com"], "positive": ["細胞食物","紅蔘"], "rnd": [""]}
 
 db.close()
 
+####先從雲端下載csv 再用記事本打開另存一個csv#########
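
The trailing comment added here notes the intended workflow: download the csv from the cloud drive first, then open it in Notepad and re-save it as another csv (to normalize the encoding). Below is a hedged sketch of the JSON payload the re-enabled seo_jobs branch writes from the new prefix/positive/rnd/URL columns, using json.dumps instead of the manual string concatenation in the diff; the sample row is illustrative only.

import json

row = {'prefix': '護佳', 'positive': '細胞食物', 'rnd': '', 'URL': 'www.hujiai-international.com'}
payload = json.dumps({
    'prefix': row['prefix'],
    'postfix': '',
    'positive': [row['positive']],
    'domain': [row['URL']],
    'rnd': [str(row['rnd'])],
}, ensure_ascii=False)
print(payload)
# table.insert({'cust': client, 'plan': '形象SEO', 'json': payload})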

+ 7 - 9
SEO/google_rank.py

@@ -18,7 +18,8 @@ import time
 import traceback
 import sys
 import fire
-
+import pymysql
+pymysql.install_as_MySQLdb()
 
 driver = None
 
@@ -46,7 +47,7 @@ def process_query(qs):
     idx = 1
     ranking = -1
     print(len(elmts))
-    driver.save_screenshot('c:/tmp/test.png')
+    # driver.save_screenshot('C:\/Users\/s1301\/Pictures\/Saved Pictures\/')
     
     for elmt in elmts:
         href = elmt.get_attribute('href')
@@ -60,7 +61,7 @@ def run_once(q):
     global driver
     result = []
     user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36'
-    s = Service(r'C:\Users\Administrator\Downloads\chromedriver_108\chromedriver')
+    s = Service('C:\/Users\/s1301\/Downloads\/chromedriver_107\/chromedriver')
     options = webdriver.ChromeOptions()
     options.add_argument('--headless')
     # options.add_argument('--remote-debugging-port=9222')
@@ -90,11 +91,8 @@ def run_once(q):
 #    sleepint=random.randint(320,520)
 #    time.sleep(sleepint)
 
-lst = ['波囍','信義房屋','護佳國際','信義房仲','信義 房屋','信義 房仲']
+lst = ['龔國權','信義房屋','護佳國際','信義房仲','信義 房屋','信義 房仲','雙響泡']
 for i in lst:
-    while True:
-        try:
-            run_once(i)
-        except:
-            continue
+    run_once(i)
+
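
The loop removed in this hunk retried each keyword forever and swallowed every exception. If a retry is still wanted, a bounded wrapper that keeps the traceback visible is a safer pattern; run_with_retry below is a hypothetical sketch, not part of this commit.

import traceback

def run_with_retry(fn, q, attempts=3):
    # Call fn(q) up to `attempts` times, logging each failure instead of hiding it.
    for _ in range(attempts):
        try:
            return fn(q)
        except Exception:
            traceback.print_exc()
    print('giving up on', q)

# Usage in the loop above would be: run_with_retry(run_once, i)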
 

+ 6 - 12
SEO/notify_nda.py

@@ -31,7 +31,7 @@ headers = {
 def send_msg(kw):
     params = {"message": kw}
     print('通知結果', params)
-    r = requests.post("https://notify-api.line.me/api/notify", headers=headers, params=params)
+    # r = requests.post("https://notify-api.line.me/api/notify", headers=headers, params=params)
 
 
 db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/seo?charset=utf8mb4')
@@ -39,8 +39,8 @@ cursor = db.query('SELECT * FROM seo.nda_log where datediff(now(),dt)=0 order by
 
 result = "\n"
 idx = 0
-d = {'波囍': 0, '龔國權': 0, '信義': 0, '護': 0, 'angelo koo': 0}
-d_computer = {'64G':0,'p1':0,'p2':0,'i5':0,'7000':0,'139':0}
+d = {'龔國權': 0, '信義': 0, '護': 0, 'angelo koo': 0,'雙響':0,'双響泡':0}
+d_computer = {'64G':0,'p1':0,'p2':0,'i5':0,'156':0,'139':0}
 for c in cursor:
     custom_name = c['kw']
     client = c['client']
@@ -53,8 +53,9 @@ for c in cursor:
         if check_2:
             d_computer[j]+=1
 
-per_total_click = f"波囍:{d['波囍']}\nKNIGHT:{d['龔國權']}\n信義房屋:{d['信義']}\n護佳:{d['護']}\n美東:{d['angelo koo']}\n"
-computer_total_click = f"64G:{d_computer['64G']}\np1:{d_computer['p1']}\np2:{d_computer['p2']}\ni5:{d_computer['i5']}\n7000:{d_computer['7000']}\n139:{d_computer['139']}\n"
+per_total_click = f"雙響泡:{d['雙響']}\n双響泡:{d['双響泡']}\nKNIGHT:{d['龔國權']}\n信義房屋:{d['信義']}\n護佳:{d['護']}\n美東:{d['angelo koo']}\n"
+computer_total_click = f"64G:{d_computer['64G']}\np1:{d_computer['p1']}\np2:{d_computer['p2']}\ni5:{d_computer['i5']}\n156" \
+                       f":{d_computer['156']}\n139:{d_computer['139']}\n"
 print(per_total_click)
 print(computer_total_click)
 msg_per_total_click = '\n點擊次數總計:\n' + per_total_click
@@ -69,10 +70,3 @@ db.close()
 
 SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
 sys.path.append(os.path.dirname(SCRIPT_DIR))
-
-options = webdriver.ChromeOptions()
-options.add_argument('--headless')
-options.add_argument("--no-sandbox")
-options.add_argument("--disable-dev-shm-usage")
-options.add_argument("--incognito")
-driver = webdriver.Remote(options=options, command_executor=f'http://127.0.0.1:4497/wd/hub')
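
A minimal sketch of the tally the loop above maintains: every key of d is a substring, and each nda_log row increments every key it contains, which is how the new 雙響 and 双響泡 counters are filled. The sample rows are made up for illustration.

rows = ['雙響泡 廚房剪刀', '信義房屋 台北', 'angelo koo 美東']
d = {'龔國權': 0, '信義': 0, '護': 0, 'angelo koo': 0, '雙響': 0, '双響泡': 0}
for kw in rows:
    for key in d:
        if key in kw:
            d[key] += 1
print(d)   # 信義, angelo koo and 雙響 are each counted once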

+ 1 - 1
SEO/ranking_report.py

@@ -4,7 +4,7 @@ import pymysql
 pymysql.install_as_MySQLdb()
 
 name='宏康'
-date = '1212'
+date = '1215'
 
 
 def day():

+ 8 - 6
SEO/ranking_world.py

@@ -64,7 +64,7 @@ def re_get_webdriver():
         options = webdriver.ChromeOptions()
         options.add_argument("--no-sandbox")
         options.add_argument("--disable-dev-shm-usage")
-        options.add_argument("--headless")
+        # options.add_argument("--headless")
         print(user_agent)
         options.add_experimental_option('prefs', {'intl.accept_languages': 'en,en_US'})
         options.add_argument("--incognito")
@@ -74,8 +74,7 @@ def re_get_webdriver():
                 driver = webdriver.Chrome(options=options)
 
             else:
-                driver = webdriver.Chrome(
-                    executable_path=r'C:\Users\Administrator\Downloads\chromedriver_108\chromedriver', options=options)
+                driver = webdriver.Chrome(executable_path=r'C:\Users\Administrator\Downloads\chromedriver_108\chromedriver', options=options)
         except:
             traceback.print_exc()
             return
@@ -134,7 +133,7 @@ def run_once(jsobj):
         driver.execute_cdp_cmd("Emulation.setGeolocationOverride", Map_coordinates)
         driver.get('https://www.google.com?num=100&lr=lang_en')
 
-        time.sleep(3)
+        time.sleep(30)
         print(driver.current_url)
         elmt = driver.find_element(By.XPATH, "//input[@name='q']")
         time.sleep(1)
@@ -146,6 +145,7 @@ def run_once(jsobj):
         elmts = driver.find_elements(By.XPATH, "//div[@class='yuRUbf']/a")
         numresults = len(elmts)
         print('搜尋結果數量', numresults)
+        time.sleep(30)
         driver.save_screenshot('C:\/Users\/s1301\/Pictures\/Saved Pictures\/' +date +fname + ".png")
         if numresults == 0:
             send_msg('stop working...')
@@ -168,7 +168,7 @@ def run_once(jsobj):
                 desc = None
             #            print(desc)
             table.insert(
-                {'title': elmt.text, 'url': href, 'keyword': kw, 'dt': datetime.datetime.now(), 'ranking': idx})
+                {'title': elmt.text, 'url': href, 'keyword': kw, 'dt': datetime.datetime.now(), 'ranking': idx,'description':fname})
             datadict['搜尋詞'].append(kw)
             datadict['結果標題'].append(txt)
             datadict['結果網址'].append(href)
@@ -233,4 +233,6 @@ def run_once(jsobj):
     sys.exit()
 
 
-run_once({'kw':'angelo koo','fname':'miami','date':'1208'})
+run_once({'kw':'angelo koo','fname':'ny','date':'1215_2'})
+
+
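
run_once() relies on the Emulation.setGeolocationOverride call shown in the context above to make the query look local to the chosen city (the fname argument switches from 'miami' to 'ny' here). A hedged sketch of that override with Selenium 4's execute_cdp_cmd follows; the coordinates are illustrative values for New York, not taken from the script.

from selenium import webdriver

options = webdriver.ChromeOptions()
options.add_argument('--incognito')
driver = webdriver.Chrome(options=options)
# Chrome DevTools Protocol call: later geolocation lookups report these coordinates.
driver.execute_cdp_cmd('Emulation.setGeolocationOverride', {
    'latitude': 40.71, 'longitude': -74.01, 'accuracy': 100,
})
driver.get('https://www.google.com?num=100&lr=lang_en')
print(driver.current_url)
driver.quit()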