# main.py
  1. import rpyc
  2. import time
  3. from fastapi import FastAPI
  4. import sys
  5. import os
  6. import dataset
  7. import requests
  8. import datetime
  9. import json
  10. import ast
  11. from fastapi.responses import JSONResponse, FileResponse
  12. from fastapi.middleware.cors import CORSMiddleware
  13. from pydantic import BaseModel
  14. from googlesearch import search
# Offsets applied by fake_traffic() to make demo domains look better:
# ranks are improved by this amount, traffic multiplied by this factor.
fake_rank_plus = 700
fake_traffic_weighted = 1.3

app = FastAPI()

# Origins allowed to call this API from a browser (CORS allow-list).
origins = [
    "http://www.googo.org",
    "http://www.googo.org:8080",
    "http://0.0.0.0:8080",
    "http://googo.org:8080",
    "http://googo.org",
    "http://139.162.121.30"
]

# uvicorn main:app --host 0.0.0.0 --port 8001
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
class q_req(BaseModel):
    """Request body for POST /dm: the domain to look up."""
    # Domain name, e.g. "example.com"; scheme is optional and may be URL-encoded.
    domain_name: str
class kw_req(BaseModel):
    """Request body for POST /kw_dm: the keyword to search for."""
    # Search phrase passed verbatim to Google search.
    keyword: str
  38. def fake_traffic(jsdict):
  39. print('im here')
  40. jsdict['totalVisits'] = jsdict['totalVisits']*fake_traffic_weighted
  41. for k,v in jsdict['EstimatedMonthlyVisits'].items():
  42. jsdict['EstimatedMonthlyVisits'][k]=int(float(v)*fake_traffic_weighted)
  43. jsdict['CountryRank']-=fake_rank_plus
  44. jsdict['GlobalRank']-=fake_rank_plus*66
  45. return jsdict
  46. async def get_domain_data(raw_domain):
  47. url = "https://similarweb2.p.rapidapi.com/pageoverview"
  48. domain_name=raw_domain
  49. if 'http' not in domain_name:
  50. domain_name='http://'+domain_name
  51. domain_name = domain_name.replace('%3A',':')
  52. domain_name = domain_name.replace('%2F','/')
  53. print(domain_name)
  54. querystring = {"website":domain_name}
  55. db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/hhh?charset=utf8mb4')
  56. statement = 'SELECT * FROM storage_similar_web where SiteName ="'+ raw_domain+'"'
  57. jsdict = None
  58. for row in db.query(statement):
  59. jsdict = {'SiteName':row['SiteName'],'Description':row['Description'],'GlobalRank':row['GlobalRank']
  60. ,'Title':row['Title'],'Category':row['Category'],'CountryRank':row['CountryRank'],'EstimatedMonthlyVisits':eval(row['EstimatedMonthlyVisits'])
  61. ,'totalVisits':row['totalVisits']}
  62. if jsdict==None:
  63. headers = {"x-rapidapi-key": "6dd30886e0msh7aefc9a0a794398p1896f2jsn275c45475609",
  64. "x-rapidapi-host": "similarweb2.p.rapidapi.com"}
  65. response = requests.request("GET", url, headers=headers, params=querystring)
  66. js=json.loads(response.text)
  67. jsdict={'SiteName':js['name'],'Description':js['siteDescription'],'GlobalRank':js['globalRank'],'Title':js['name'],'Category':js['categoryRank']['taxonomy'],'CountryRank':js['countryRank']['rank']}
  68. url = "https://similarweb2.p.rapidapi.com/trafficoverview"
  69. querystring = {"website":domain_name}
  70. response = requests.request("GET", url, headers=headers, params=querystring)
  71. js2=json.loads(response.text)
  72. jsdict['totalVisits'] = js2['engagement']['totalVisits']
  73. jsdict['EstimatedMonthlyVisits']=js2['monthlyVisitsEstimate']
  74. log_table = db['storage_similar_web']
  75. log_table.insert({'SiteName':jsdict['SiteName'],'Description':jsdict['Description'],'GlobalRank':jsdict['GlobalRank']
  76. ,'Title':jsdict['Title'],'Category':jsdict['Category'],'CountryRank':jsdict['CountryRank'],'EstimatedMonthlyVisits':jsdict['EstimatedMonthlyVisits']
  77. ,'totalVisits':jsdict['totalVisits']})
  78. if 'hhh' in domain_name:
  79. jsdict = fake_traffic(jsdict)
  80. return jsdict
  81. def domain_filter(url_array):
  82. exclude_list = ['facebook','youtube','twitter','linkedin','instagram']
  83. list_filted = []
  84. for url in url_array:
  85. a_social_media = False
  86. for ex in exclude_list:
  87. if ex in url:
  88. a_social_media = True
  89. if not a_social_media:
  90. list_filted+=[url]
  91. return list_filted
  92. @app.get("/index")
  93. async def index():
  94. return FileResponse('index.html')
  95. @app.get("/keywords")
  96. async def keyword():
  97. return FileResponse('kw_new.html')
  98. @app.get("/echarts.min.js")
  99. async def index():
  100. return FileResponse('echarts.min.js')
  101. @app.get("/reset.css")
  102. async def index():
  103. return FileResponse('reset.css')
  104. @app.get("/main.css")
  105. async def index():
  106. return FileResponse('main.css')
  107. @app.post("/kw_dm")
  108. async def get_domain_by_keyword(req:kw_req):
  109. ls = domain_filter(search(req.keyword,num_results=20))
  110. jsdict = asyncio.run(get_domain_data(ls[0]))
  111. return JSONResponse(content=jsdict)
  112. @app.post("/dm")
  113. async def get_domain_data(req:q_req):
  114. jsdict = asyncio.run(get_domain_data(req.domain_name))
  115. print(jsdict)
  116. return JSONResponse(content=jsdict)