ming 4 роки тому
батько
коміт
8a9e146193
2 змінених файлів з 34 додано та 31 видалено
  1. 17 0
      create_fake.py
  2. 17 31
      main.py

+ 17 - 0
create_fake.py

@@ -0,0 +1,17 @@

import dataset
import datetime

# One-off maintenance script: inspect the tag_table and dump its rows.
# NOTE(review): credentials are hard-coded in the DSN — move them to
# environment variables or a secrets store before sharing this further.
db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/cmm_test?charset=utf8mb4')
news_table = db['tag_table']
print(type(news_table))
# Seed loop kept for reference; uncomment to insert fake tags.
#for idx in range(5):
#    news_table.insert({'name':'標籤'+str(idx),'remark':'remark'+str(idx)})

# Query directly — the previous db.load_table('tag_table') handle was never
# used, so it has been dropped.
statement = 'SELECT id,name FROM tag_table'
for row in db.query(statement):
    print(row['name'], row['id'])

+ 17 - 31
main.py

@@ -1,37 +1,23 @@
-from GoogleNews import GoogleNews
-import dataset
-import datetime
-from celery import Celery
 
-app = Celery('tasks', broker='redis://db.ptt.cx')
+from enum import Enum
+from typing import Optional
 
-@app.task
-def crawl_keyword_news(keyword):
-    db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/cmm_test?charset=utf8mb4')
-    news_table = db['news_table']
-    googlenews = GoogleNews(lang='zh-TW')
-    #kw='裝潢'
-    kw=keyword
-    googlenews.set_lang('zh-TW')
-    #googlenews.search('建材')
-    googlenews.search(kw)
+from fastapi import FastAPI, Query
+import dataset,json
 
 
-    rs=googlenews.results()
-    #print(rs)
-    for r in rs:
-        print(r['title'])
-        print(r['desc'])
-        print(r['link'])
-        print(r['datetime'])
-        news_table.insert({'kw':kw,'dt':r['datetime'],'title':r['title'],'desc':r['desc'],'link':r['link'],'crawl_dt':datetime.datetime.now()})
app = FastAPI()

# NOTE(review): credentials are hard-coded in the connection URL — move to
# environment variables / a secrets store before this ships.
db = dataset.connect('mysql://choozmo:pAssw0rd@db.ptt.cx:3306/cmm_test?charset=utf8mb4')

@app.get("/tags")
async def get_tags():
    """Return every row of tag_table as a JSON string mapping id -> name.

    Fixes over the previous version:
    - serialize ONCE after the loop instead of on every row (was O(n^2));
    - an empty tag_table no longer raises UnboundLocalError (json_dump was
      only assigned inside the loop);
    - the unused db.load_table('tag_table') handle is removed.
    """
    tag_dict = {}
    for row in db.query('SELECT id,name FROM tag_table'):
        tag_dict[row['id']] = row['name']
    # NOTE(review): returning a pre-serialized string makes FastAPI JSON-encode
    # it AGAIN (clients receive a quoted/escaped string). Returning tag_dict
    # directly would be the idiomatic fix; kept as a string here to preserve
    # the existing response shape for current callers — confirm and switch.
    return json.dumps(tag_dict, ensure_ascii=False)
 
-    for i in range(2,6):
-        rs = googlenews.page_at(i)
-        for r in rs:
-            print(r['title'])
-            print(r['desc'])
-            print(r['link'])
-            print(r['datetime'])
-            news_table.insert({'kw':kw,'dt':r['datetime'],'title':r['title'],'desc':r['desc'],'link':r['link'],'crawl_dt':datetime.datetime.now()})