import time
import sys
import json
import ast

import dataset
import requests
from fastapi import FastAPI
from fastapi.responses import JSONResponse, FileResponse
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from googlesearch import search  # needed by /kw_dm below; was previously commented out
from google.ads.googleads.client import GoogleAdsClient

sys.path.append('.')
import kw_planner

# Offsets used to embellish the metrics of "hhh" domains (see fake_traffic).
fake_rank_plus = 700
fake_traffic_weighted = 1.3

app = FastAPI()

origins = [
    "http://www.googo.org",
    "http://www.googo.org:8080",
    "http://0.0.0.0:8080",
    "http://googo.org:8080",
    "http://googo.org",
    "http://139.162.121.30"
]

# uvicorn main:app --host 0.0.0.0 --port 8001

app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


class q_req(BaseModel):
    domain_name: str


class kw_req(BaseModel):
    keyword: str


def fake_traffic(jsdict):
    """Inflate traffic numbers and improve ranks before returning them."""
    print('im here')
    jsdict['totalVisits'] = jsdict['totalVisits'] * fake_traffic_weighted
    for k, v in jsdict['EstimatedMonthlyVisits'].items():
        jsdict['EstimatedMonthlyVisits'][k] = int(float(v) * fake_traffic_weighted)
    jsdict['CountryRank'] -= fake_rank_plus
    jsdict['GlobalRank'] -= fake_rank_plus * 66
    return jsdict


# Unused, broken stub (it returned an undefined variable); kept for reference only.
# def get_domain_data(raw_domain):
#     return jsdict


def domain_filter(url_array):
    """Drop social-media and wiki URLs from a list of search results."""
    exclude_list = ['facebook', 'youtube', 'twitter', 'linkedin', 'instagram', 'wiki']
    list_filtered = []
    for url in url_array:
        is_social_media = any(ex in url for ex in exclude_list)
        if not is_social_media:
            list_filtered.append(url)
    return list_filtered


# @app.get("/index")
# async def index():
#     return FileResponse('index.html')

@app.get("/keywords")
async def keywords_page():
    return FileResponse('kw_new.html')


@app.get("/echarts.min.js")
async def echarts_js():
    return FileResponse('echarts.min.js')


@app.get("/reset.css")
async def reset_css():
    return FileResponse('reset.css')


@app.get("/main.css")
async def main_css():
    return FileResponse('main.css')


@app.get("/")
async def root():
    return FileResponse('index3.html')


@app.get("/index")
async def index():
    return FileResponse('index2.html')


@app.get("/keyword/{keyword}")
async def keyword_metrics(keyword):
    # Hard-coded answer for the "幸福空間" (Gorgeous Space) brand keyword.
    if '幸福空間' in keyword:
        return {'competition': 'LOW', 'msearch': '16300'}
    print(keyword)
    # Query the Google Ads Keyword Planner via the local kw_planner helper.
    client = GoogleAdsClient.load_from_storage("./ads.yaml")
    list_keywords = kw_planner.main(client, "7400705511", ["2158"], "1018", [keyword], None)
    print(list_keywords)
    competition = list_keywords[0].keyword_idea_metrics.competition.name
    msearch = list_keywords[0].keyword_idea_metrics.avg_monthly_searches
    months = []
    for v in list_keywords[0].keyword_idea_metrics.monthly_search_volumes:
        months.append({'year': v.year,
                       'month': str(v.month).replace('MonthOfYear.', ''),
                       'vol': v.monthly_searches})
        print(v.month)
        print(v.year)
        print(v.monthly_searches)
    return {'competition': competition, 'msearch': msearch, 'months': months}


@app.get("/random_kw")
async def random_kw():
    # Pull 20 random trending-search keywords from the yodb database.
    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/yodb?charset=utf8mb4')
    statement = 'SELECT * FROM trending_searches ORDER BY rand() LIMIT 20'
    logs = []
    for row in db.query(statement):
        logs.append({'kw': row['ts_word']})
    return logs


@app.post("/kw_dm")
async def get_domain_by_keyword(req: kw_req):
    # Google the keyword, take the first non-social-media result, then look up
    # its SimilarWeb metrics (cached in the storage_similar_web table).
    ls = domain_filter(search(req.keyword, num_results=20))
    raw_domain = ls[0]
    url = "https://similarweb2.p.rapidapi.com/pageoverview"
    domain_name = raw_domain
    print('The domain name is ' + domain_name)
    if 'http' not in domain_name:
        domain_name = 'http://' + domain_name
    domain_name = domain_name.replace('%3A', ':')
    domain_name = domain_name.replace('%2F', '/')
    querystring = {"website": domain_name}
    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/hhh?charset=utf8mb4')
    # NOTE: string concatenation is vulnerable to SQL injection; kept as in the original.
    statement = 'SELECT * FROM storage_similar_web where SiteName ="' + domain_name + '"'
    jsdict = None
    for row in db.query(statement):
        jsdict = {'SiteName': row['SiteName'],
                  'Description': row['Description'],
                  'GlobalRank': row['GlobalRank'],
                  'Title': row['Title'],
                  'Category': row['Category'],
                  'CountryRank': row['CountryRank'],
                  # literal_eval is a safer replacement for eval on the stored dict string.
                  'EstimatedMonthlyVisits': ast.literal_eval(row['EstimatedMonthlyVisits']),
                  'totalVisits': row['totalVisits']}
    if jsdict is None:
        # Cache miss: fetch page overview and traffic overview from the SimilarWeb API.
        headers = {"x-rapidapi-key": "6dd30886e0msh7aefc9a0a794398p1896f2jsn275c45475609",
                   "x-rapidapi-host": "similarweb2.p.rapidapi.com"}
        response = requests.request("GET", url, headers=headers, params=querystring)
        print(response.text)
        js = json.loads(response.text)
        jsdict = {'SiteName': js['name'],
                  'Description': js['siteDescription'],
                  'GlobalRank': js['globalRank'],
                  'Title': js['name'],
                  'Category': js['categoryRank']['taxonomy'],
                  'CountryRank': js['countryRank']['rank']}
        url = "https://similarweb2.p.rapidapi.com/trafficoverview"
        querystring = {"website": domain_name}
        time.sleep(5)
        try:
            response = requests.request("GET", url, headers=headers, params=querystring)
            print(response.text)
            js2 = json.loads(response.text)
            jsdict['totalVisits'] = js2['engagement']['totalVisits']
            jsdict['EstimatedMonthlyVisits'] = js2['monthlyVisitsEstimate']
        except Exception:
            jsdict['totalVisits'] = -1
            jsdict['EstimatedMonthlyVisits'] = '[]'
        # Cache the result for the next request.
        log_table = db['storage_similar_web']
        log_table.insert({'SiteName': raw_domain,
                          'Description': jsdict['Description'],
                          'GlobalRank': jsdict['GlobalRank'],
                          'Title': jsdict['Title'],
                          'Category': jsdict['Category'],
                          'CountryRank': jsdict['CountryRank'],
                          'EstimatedMonthlyVisits': jsdict['EstimatedMonthlyVisits'],
                          'totalVisits': jsdict['totalVisits']})
    if 'hhh' in domain_name:
        jsdict = fake_traffic(jsdict)
    return JSONResponse(content=jsdict)
@app.post("/dm")async def get_domain_data(req:q_req):    raw_domain=req.domain_name    url = "https://similarweb2.p.rapidapi.com/pageoverview"    domain_name=raw_domain    if 'http' not in domain_name:        domain_name='http://'+domain_name    domain_name = domain_name.replace('%3A',':')    domain_name = domain_name.replace('%2F','/')    print(domain_name)    querystring = {"website":domain_name}    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/hhh?charset=utf8mb4')        statement = 'SELECT * FROM storage_similar_web where SiteName ="'+ raw_domain+'"'    jsdict = None    for row in db.query(statement):        jsdict = {'SiteName':row['SiteName'],'Description':row['Description'],'GlobalRank':row['GlobalRank']        ,'Title':row['Title'],'Category':row['Category'],'CountryRank':row['CountryRank'],'EstimatedMonthlyVisits':eval(row['EstimatedMonthlyVisits'])        ,'totalVisits':row['totalVisits']}    if jsdict==None:        headers = {"x-rapidapi-key": "6dd30886e0msh7aefc9a0a794398p1896f2jsn275c45475609",            "x-rapidapi-host": "similarweb2.p.rapidapi.com"}        response = requests.request("GET", url, headers=headers, params=querystring)        js=json.loads(response.text)        jsdict={'SiteName':js['name'],'Description':js['siteDescription'],'GlobalRank':js['globalRank'],'Title':js['name'],'Category':js['categoryRank']['taxonomy'],'CountryRank':js['countryRank']['rank']}        url = "https://similarweb2.p.rapidapi.com/trafficoverview"        querystring = {"website":domain_name}        response = requests.request("GET", url, headers=headers, params=querystring)        js2=json.loads(response.text)        jsdict['totalVisits'] = js2['engagement']['totalVisits']        jsdict['EstimatedMonthlyVisits']=js2['monthlyVisitsEstimate']        log_table = db['storage_similar_web']        log_table.insert({'SiteName':jsdict['SiteName'],'Description':jsdict['Description'],'GlobalRank':jsdict['GlobalRank']        ,'Title':jsdict['Title'],'Category':jsdict['Category'],'CountryRank':jsdict['CountryRank'],'EstimatedMonthlyVisits':jsdict['EstimatedMonthlyVisits']        ,'totalVisits':jsdict['totalVisits']})    if 'hhh' in domain_name:           jsdict = fake_traffic(jsdict)    return JSONResponse(content=jsdict)            