import rpyc
import time
from fastapi import FastAPI
import sys
import os
import dataset
import requests
import datetime
import json
import ast
from fastapi.responses import JSONResponse, FileResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from googlesearch import search  # used by /kw_dm below
import asyncio
from google.ads.googleads.client import GoogleAdsClient

sys.path.append('.')
import kw_planner
import uvicorn

# Offsets applied to "hhh" domains to present adjusted rank/traffic figures.
fake_rank_plus = 700
fake_traffic_weighted = 1.3

app = FastAPI()

origins = [
    "http://www.googo.org",
    "http://www.googo.org:8080",
    "http://0.0.0.0:8080",
    "http://googo.org:8080",
    "http://googo.org",
    "http://139.162.121.30",
]

# uvicorn main:app --host 0.0.0.0 --port 8001

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


class q_req(BaseModel):
    domain_name: str


class kw_req(BaseModel):
    keyword: str


def fake_traffic(jsdict):
    # Inflate traffic and improve rank figures by the configured offsets.
    jsdict['totalVisits'] = jsdict['totalVisits'] * fake_traffic_weighted
    for k, v in jsdict['EstimatedMonthlyVisits'].items():
        jsdict['EstimatedMonthlyVisits'][k] = int(float(v) * fake_traffic_weighted)
    jsdict['CountryRank'] -= fake_rank_plus
    jsdict['GlobalRank'] -= fake_rank_plus * 66
    return jsdict


def domain_filter(url_array):
    # Drop social-media and wiki URLs from a list of search-result URLs.
    exclude_list = ['facebook', 'youtube', 'twitter', 'linkedin', 'instagram', 'wiki']
    list_filtered = []
    for url in url_array:
        is_social_media = False
        for ex in exclude_list:
            if ex in url:
                is_social_media = True
        if not is_social_media:
            list_filtered.append(url)
    return list_filtered


#@app.get("/index")
#async def index():
#    return FileResponse('index.html')

@app.get("/keywords")
async def keywords_page():
    return FileResponse('kw_new.html')


@app.get("/echarts.min.js")
async def echarts_js():
    return FileResponse('echarts.min.js')


@app.get("/reset.css")
async def reset_css():
    return FileResponse('reset.css')


@app.get("/main.css")
async def main_css():
    return FileResponse('main.css')


@app.get("/")
async def root():
    return FileResponse('index3.html')


@app.get("/index")
async def index():
    return FileResponse('index2.html')


@app.get("/keyword/{keyword}")
async def keyword_metrics(keyword):
    # Hard-coded shortcut for this keyword; everything else goes to the keyword planner.
    if '幸福空間' in keyword:
        return {'competition': 'LOW', 'msearch': '16300'}
    print(keyword)
    client = GoogleAdsClient.load_from_storage("./ads.yaml")
    list_keywords = kw_planner.main(client, "7400705511", ["2158"], "1018", [keyword], None)
    print(list_keywords)
    competition = list_keywords[0].keyword_idea_metrics.competition.name
    msearch = list_keywords[0].keyword_idea_metrics.avg_monthly_searches
    months = []
    for v in list_keywords[0].keyword_idea_metrics.monthly_search_volumes:
        months.append({'year': v.year,
                       'month': str(v.month).replace('MonthOfYear.', ''),
                       'vol': v.monthly_searches})
    return {'competition': competition, 'msearch': msearch, 'months': months}


@app.get("/random_kw")
async def random_kw():
    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/yodb?charset=utf8mb4')
    statement = 'SELECT * FROM trending_searches order by rand() limit 20'
    logs = []
    for row in db.query(statement):
        logs.append({'kw': row['ts_word']})
    return logs
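
# The /kw_dm and /dm handlers below look the cache up with string-concatenated SQL
# and rebuild EstimatedMonthlyVisits with eval(). A minimal sketch of an equivalent
# lookup using dataset's find_one() and ast.literal_eval (both already imported);
# the helper name is hypothetical and is not wired into the handlers:
def cached_overview_sketch(db, site_name):
    row = db['storage_similar_web'].find_one(SiteName=site_name)
    if row is None:
        return None
    return {'SiteName': row['SiteName'], 'Description': row['Description'],
            'GlobalRank': row['GlobalRank'], 'Title': row['Title'],
            'Category': row['Category'], 'CountryRank': row['CountryRank'],
            'EstimatedMonthlyVisits': ast.literal_eval(row['EstimatedMonthlyVisits']),
            'totalVisits': row['totalVisits']}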
@app.post("/kw_dm")
async def get_domain_by_keyword(req: kw_req):
    # Take the top non-social-media Google result for the keyword and return its
    # SimilarWeb overview, caching fetched results in storage_similar_web.
    ls = domain_filter(search(req.keyword, num_results=20))
    raw_domain = ls[0]
    url = "https://similarweb2.p.rapidapi.com/pageoverview"
    domain_name = raw_domain
    print('The domain name is ' + domain_name)
    if 'http' not in domain_name:
        domain_name = 'http://' + domain_name
    domain_name = domain_name.replace('%3A', ':')
    domain_name = domain_name.replace('%2F', '/')
    querystring = {"website": domain_name}

    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/hhh?charset=utf8mb4')
    statement = 'SELECT * FROM storage_similar_web where SiteName ="' + domain_name + '"'
    jsdict = None
    for row in db.query(statement):
        jsdict = {'SiteName': row['SiteName'], 'Description': row['Description'],
                  'GlobalRank': row['GlobalRank'], 'Title': row['Title'],
                  'Category': row['Category'], 'CountryRank': row['CountryRank'],
                  'EstimatedMonthlyVisits': eval(row['EstimatedMonthlyVisits']),
                  'totalVisits': row['totalVisits']}

    if jsdict is None:
        # Not cached yet: fetch the page overview, then the traffic overview.
        headers = {"x-rapidapi-key": "6dd30886e0msh7aefc9a0a794398p1896f2jsn275c45475609",
                   "x-rapidapi-host": "similarweb2.p.rapidapi.com"}
        response = requests.request("GET", url, headers=headers, params=querystring)
        print(response.text)
        js = json.loads(response.text)
        jsdict = {'SiteName': js['name'], 'Description': js['siteDescription'],
                  'GlobalRank': js['globalRank'], 'Title': js['name'],
                  'Category': js['categoryRank']['taxonomy'],
                  'CountryRank': js['countryRank']['rank']}

        url = "https://similarweb2.p.rapidapi.com/trafficoverview"
        querystring = {"website": domain_name}
        time.sleep(5)
        try:
            response = requests.request("GET", url, headers=headers, params=querystring)
            print(response.text)
            js2 = json.loads(response.text)
            jsdict['totalVisits'] = js2['engagement']['totalVisits']
            jsdict['EstimatedMonthlyVisits'] = js2['monthlyVisitsEstimate']
        except Exception:
            jsdict['totalVisits'] = -1
            jsdict['EstimatedMonthlyVisits'] = '[]'

        log_table = db['storage_similar_web']
        log_table.insert({'SiteName': raw_domain, 'Description': jsdict['Description'],
                          'GlobalRank': jsdict['GlobalRank'], 'Title': jsdict['Title'],
                          'Category': jsdict['Category'], 'CountryRank': jsdict['CountryRank'],
                          'EstimatedMonthlyVisits': jsdict['EstimatedMonthlyVisits'],
                          'totalVisits': jsdict['totalVisits']})

    if 'hhh' in domain_name:
        jsdict = fake_traffic(jsdict)
    return JSONResponse(content=jsdict)


@app.post("/dm")
async def get_domain_data(req: q_req):
    # Return the SimilarWeb overview for a given domain, using the same caching scheme.
    raw_domain = req.domain_name
    url = "https://similarweb2.p.rapidapi.com/pageoverview"
    domain_name = raw_domain
    if 'http' not in domain_name:
        domain_name = 'http://' + domain_name
    domain_name = domain_name.replace('%3A', ':')
    domain_name = domain_name.replace('%2F', '/')
    print(domain_name)
    querystring = {"website": domain_name}

    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/hhh?charset=utf8mb4')
    statement = 'SELECT * FROM storage_similar_web where SiteName ="' + raw_domain + '"'
    jsdict = None
    for row in db.query(statement):
        jsdict = {'SiteName': row['SiteName'], 'Description': row['Description'],
                  'GlobalRank': row['GlobalRank'], 'Title': row['Title'],
                  'Category': row['Category'], 'CountryRank': row['CountryRank'],
                  'EstimatedMonthlyVisits': eval(row['EstimatedMonthlyVisits']),
                  'totalVisits': row['totalVisits']}

    if jsdict is None:
        headers = {"x-rapidapi-key": "6dd30886e0msh7aefc9a0a794398p1896f2jsn275c45475609",
                   "x-rapidapi-host": "similarweb2.p.rapidapi.com"}
        response = requests.request("GET", url, headers=headers, params=querystring)
        js = json.loads(response.text)
        jsdict = {'SiteName': js['name'], 'Description': js['siteDescription'],
                  'GlobalRank': js['globalRank'], 'Title': js['name'],
                  'Category': js['categoryRank']['taxonomy'],
                  'CountryRank': js['countryRank']['rank']}

        url = "https://similarweb2.p.rapidapi.com/trafficoverview"
        querystring = {"website": domain_name}
        response = requests.request("GET", url, headers=headers, params=querystring)
        js2 = json.loads(response.text)
        jsdict['totalVisits'] = js2['engagement']['totalVisits']
        jsdict['EstimatedMonthlyVisits'] = js2['monthlyVisitsEstimate']

        log_table = db['storage_similar_web']
        log_table.insert({'SiteName': jsdict['SiteName'], 'Description': jsdict['Description'],
                          'GlobalRank': jsdict['GlobalRank'], 'Title': jsdict['Title'],
                          'Category': jsdict['Category'], 'CountryRank': jsdict['CountryRank'],
                          'EstimatedMonthlyVisits': jsdict['EstimatedMonthlyVisits'],
                          'totalVisits': jsdict['totalVisits']})

    if 'hhh' in domain_name:
        jsdict = fake_traffic(jsdict)
    return JSONResponse(content=jsdict)


if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8081)
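
# Usage sketch (host and port taken from the uvicorn.run call above; the keyword
# and domain values are placeholders):
#
#   python main.py          # or: uvicorn main:app --host 0.0.0.0 --port 8081
#
#   # keyword metrics from the Google Ads keyword planner
#   curl http://127.0.0.1:8081/keyword/some%20keyword
#
#   # SimilarWeb overview for a domain
#   curl -X POST http://127.0.0.1:8081/dm \
#        -H "Content-Type: application/json" -d '{"domain_name": "example.com"}'
#
#   # overview for the top non-social-media search result of a keyword
#   curl -X POST http://127.0.0.1:8081/kw_dm \
#        -H "Content-Type: application/json" -d '{"keyword": "some keyword"}'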