# main.py -- weekly Google Ads report generator
from __future__ import print_function

import datetime as dt
import math
import os.path

import google.auth
import gspread
import pandas as pd
import pymysql
import requests
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from oauth2client.service_account import ServiceAccountCredentials

# pymysql masquerades as MySQLdb for any downstream consumers.
pymysql.install_as_MySQLdb()
  19. # SETTINGS, FOR DEBUG USE ONLY -- SET TO 0 IF UNUSED, ANY OTHER VALUE WILL TRIGGER THE FUNCTION
  20. makepres = 0 # generates presentation files, files will be under service/報告 folder
  21. notifygroup = 0 # notify to LINE group
  22. # If modifying these scopes, delete the file token.json.
  23. GOOGLE_APPLICATION_CREDENTIALS="choozmomain-6c1dffe1ee7e.json"
  24. SCOPES = ['https://www.googleapis.com/auth/drive']
  25. creds = None
  26. # The file token.json stores the user's access and refresh tokens, and is
  27. # created automatically when the authorization flow completes for the first
  28. # time.
  29. if os.path.exists('token.json'):
  30. creds = Credentials.from_authorized_user_file('token.json', SCOPES)
  31. # If there are no (valid) credentials available, let the user log in.
  32. if not creds or not creds.valid:
  33. if creds and creds.expired and creds.refresh_token:
  34. creds.refresh(Request())
  35. else:
  36. flow = InstalledAppFlow.from_client_secrets_file(
  37. 'credentials.json', SCOPES)
  38. creds = flow.run_local_server(port=0)
  39. # Save the credentials for the next run
  40. with open('token.json', 'w') as token:
  41. token.write(creds.to_json())
  42. gc = gspread.authorize(creds)
  43. sheet = gc.open_by_key('1zpE8NpF7wzGppLlx95V785rnQotapzP-8fZSFOP8Ouk')
  44. sheet_instance = sheet.get_worksheet(0)
  45. records_data = sheet_instance.get_all_records()
  46. clients = []
  47. for rd in records_data:
  48. clients.append([rd["client"], rd["alias"].split(','), rd['sheetsid'], rd['discontinued']])
  49. print(clients) # from left to right: client name, client alias, sheets ID, discontinued flag
  50. # The ID of the base presentation.
  51. PRESENTATION_ID = '1dLmTtHF0ZmrXkEDLXOcXaH7NgL6qYZtELVHMGT1Qe7E'
  52. def notify_group(msg):
  53. gid='WekCRfnAirSiSxALiD6gcm0B56EejsoK89zFbIaiZQD'
  54. headers = {"Authorization": "Bearer " + gid,"Content-Type": "application/x-www-form-urlencoded"}
  55. r = requests.post("https://notify-api.line.me/api/notify",headers=headers, params={"message": msg})
  56. def copy_presentation(presentation_id, copy_title):
  57. """
  58. Creates the copy Presentation the user has access to.
  59. Load pre-authorized user credentials from the environment.
  60. TODO(developer) - See https://developers.google.com/identity
  61. for guides on implementing OAuth2 for the application.
  62. """
  63. #creds, _ = google.auth.default()
  64. # pylint: disable=maybe-no-member
  65. try:
  66. drive_service = build('drive', 'v3', credentials=creds)
  67. body = {
  68. 'name': copy_title
  69. }
  70. drive_response = drive_service.files().copy(
  71. fileId=presentation_id, body=body).execute()
  72. presentation_copy_id = drive_response.get('id')
  73. except HttpError as error:
  74. print(f"An error occurred: {error}")
  75. print("Presentations not copied")
  76. return error
  77. return presentation_copy_id
  78. def totaling(data):
  79. d = list(zip(*data))
  80. res = []
  81. for i in d:
  82. try:
  83. total=0
  84. for j in i:
  85. total = total + float(j)
  86. res.append(total)
  87. except:
  88. res.append("NA") # we dont need all the fields anyway, some fields are in text
  89. print(res)
  90. return res
  91. def process_worksheet(ws, client, n):
  92. data = []
  93. r = ws.get_all_values()
  94. r1 = r[12]
  95. r2 = totaling(list(r[13:]))
  96. data.append(r1)
  97. data.append(r2)
  98. f = pd.DataFrame(data)
  99. f.columns = f.iloc[0]
  100. f = f[1:]
  101. f.to_excel(client + str(n) + " f.xlsx")
  102. print(f)
  103. return f
  104. def getValue(before, after):
  105. res = after-before
  106. if res > 0:
  107. value = "{:,.2f}".format(res)
  108. return "↑ " + (value.rstrip('0').rstrip('.') if '.' in value else value)
  109. elif res == 0:
  110. return "→ 0"
  111. else:
  112. value = "{:,.2f}".format(abs(res))
  113. return "↓ " + (value.rstrip('0').rstrip('.') if '.' in value else value)
  114. def checkFloat(x):
  115. print(x)
  116. if x % 1 == 0.99 or x % 1 == 0.01:
  117. return round(x)
  118. return x
  119. def gather(client, source):
  120. data = {}
  121. #ads data here
  122. fields1 = ["Clicks (Compare to)", "Impr. (Compare to)", "Avg. CPC (Compare to)", "Avg. CPM (Compare to)", "Conversions (Compare to)", "Cost (Compare to)", "Clicks", "Impr.", "Avg. CPC", "Avg. CPM", "Conversions", "Cost", ]
  123. fields2 = ["點擊 (比較對象)", "曝光 (比較對象)", "平均單次點擊出價 (比較對象)", "平均單次點擊出價 (比較對象)", "所有轉換 (比較對象)", "費用 (比較對象)","點擊", "曝光", "平均單次點擊出價", "平均單次點擊出價", "所有轉換", "費用"]
  124. credentials = ServiceAccountCredentials.from_json_keyfile_name(GOOGLE_APPLICATION_CREDENTIALS, SCOPES)
  125. gc = gspread.authorize(credentials)
  126. sh = gc.open_by_key(source)
  127. msg = ""
  128. file = process_worksheet(sh.worksheet(client + ' 前一週D'), client, 1)
  129. file2 = process_worksheet(sh.worksheet(client + ' 前一週S'), client, 2)
  130. # quick testing to determine which field names to use -- in English (fields1) or Chinese (fields2)
  131. try:
  132. print(file[fields1[0]].values[0])
  133. fields = fields1
  134. except:
  135. fields = fields2
  136. for f in fields: # if NA then it is assumed to be 0 -- we still need to calculate the delta
  137. if file[f].values[0] == "NA":
  138. val1 = 0.00
  139. file[f].values[0] = 0.00
  140. else:
  141. val1 = file[f].values[0]
  142. if file2[f].values[0] == "NA":
  143. val2 = 0.00
  144. file2[f].values[0] = 0.00
  145. else:
  146. val2 = file2[f].values[0]
  147. msg += f + ": " + str(val1) + " / " + str(val2) + "\n"
  148. # well, you cant have decimals in clicks or impressions or conversions
  149. file[fields[0]].values[0] = checkFloat(file[fields[0]].values[0])
  150. file[fields[6]].values[0] = checkFloat(file[fields[6]].values[0])
  151. file2[fields[0]].values[0] = checkFloat(file2[fields[0]].values[0])
  152. file2[fields[6]].values[0] = checkFloat(file2[fields[6]].values[0])
  153. file[fields[1]].values[0] = checkFloat(file[fields[1]].values[0])
  154. file[fields[7]].values[0] = checkFloat(file[fields[7]].values[0])
  155. file2[fields[1]].values[0] = checkFloat(file2[fields[1]].values[0])
  156. file2[fields[7]].values[0] = checkFloat(file2[fields[7]].values[0])
  157. file[fields[4]].values[0] = checkFloat(file[fields[4]].values[0])
  158. file[fields[10]].values[0] = checkFloat(file[fields[10]].values[0])
  159. file2[fields[4]].values[0] = checkFloat(file2[fields[4]].values[0])
  160. file2[fields[10]].values[0] = checkFloat(file2[fields[10]].values[0])
  161. data["{{v111}}"] = file[fields[0]].values[0]
  162. data["{{v211}}"] = file[fields[1]].values[0]
  163. data["{{v311}}"] = file[fields[2]].values[0]
  164. data["{{v411}}"] = file[fields[3]].values[0]
  165. data["{{v511}}"] = file[fields[4]].values[0]
  166. data["{{v611}}"] = file[fields[5]].values[0]
  167. data["{{v121}}"] = file[fields[6]].values[0]
  168. data["{{v221}}"] = file[fields[7]].values[0]
  169. data["{{v321}}"] = file[fields[8]].values[0]
  170. data["{{v421}}"] = file[fields[9]].values[0]
  171. data["{{v521}}"] = file[fields[10]].values[0]
  172. data["{{v621}}"] = file[fields[11]].values[0]
  173. data["{{v112}}"] = file2[fields[0]].values[0]
  174. data["{{v212}}"] = file2[fields[1]].values[0]
  175. data["{{v312}}"] = file2[fields[2]].values[0]
  176. data["{{v412}}"] = file2[fields[3]].values[0]
  177. data["{{v512}}"] = file2[fields[4]].values[0]
  178. data["{{v612}}"] = file2[fields[5]].values[0]
  179. data["{{v122}}"] = file2[fields[6]].values[0]
  180. data["{{v222}}"] = file2[fields[7]].values[0]
  181. data["{{v322}}"] = file2[fields[8]].values[0]
  182. data["{{v422}}"] = file2[fields[9]].values[0]
  183. data["{{v522}}"] = file2[fields[10]].values[0]
  184. data["{{v622}}"] = file2[fields[11]].values[0]
  185. data["{{v113}}"] = float(file[fields[0]].values[0]) + float(file2[fields[0]].values[0])
  186. data["{{v213}}"] = float(file[fields[1]].values[0]) + float(file2[fields[1]].values[0])
  187. data["{{v313}}"] = float(file[fields[2]].values[0]) + float(file2[fields[2]].values[0])
  188. data["{{v413}}"] = float(file[fields[3]].values[0]) + float(file2[fields[3]].values[0])
  189. data["{{v513}}"] = float(file[fields[4]].values[0]) + float(file2[fields[4]].values[0])
  190. data["{{v613}}"] = float(file[fields[5]].values[0]) + float(file2[fields[5]].values[0])
  191. data["{{v123}}"] = float(file[fields[6]].values[0]) + float(file2[fields[6]].values[0])
  192. data["{{v223}}"] = float(file[fields[7]].values[0]) + float(file2[fields[7]].values[0])
  193. data["{{v323}}"] = float(file[fields[8]].values[0]) + float(file2[fields[8]].values[0])
  194. data["{{v423}}"] = float(file[fields[9]].values[0]) + float(file2[fields[9]].values[0])
  195. data["{{v523}}"] = float(file[fields[10]].values[0]) + float(file2[fields[10]].values[0])
  196. data["{{v623}}"] = float(file[fields[11]].values[0]) + float(file2[fields[11]].values[0])
  197. data["{{1d1}}"] = getValue(file[fields[0]].values[0],file[fields[6]].values[0])
  198. data["{{2d1}}"] = getValue(file[fields[1]].values[0],file[fields[7]].values[0])
  199. data["{{3d1}}"] = getValue(file[fields[2]].values[0],file[fields[8]].values[0])
  200. data["{{4d1}}"] = getValue(file[fields[3]].values[0],file[fields[9]].values[0])
  201. data["{{5d1}}"] = getValue(file[fields[4]].values[0],file[fields[10]].values[0])
  202. data["{{1d2}}"] = getValue(file2[fields[0]].values[0],file2[fields[6]].values[0])
  203. data["{{2d2}}"] = getValue(file2[fields[1]].values[0],file2[fields[7]].values[0])
  204. data["{{3d2}}"] = getValue(file2[fields[2]].values[0],file2[fields[8]].values[0])
  205. data["{{4d2}}"] = getValue(file2[fields[3]].values[0],file2[fields[9]].values[0])
  206. data["{{5d2}}"] = getValue(file2[fields[4]].values[0],file2[fields[10]].values[0])
  207. data["{{1d3}}"] = getValue(file[fields[0]].values[0] + file2[fields[0]].values[0],file[fields[6]].values[0] + file2[fields[6]].values[0])
  208. data["{{2d3}}"] = getValue(file[fields[1]].values[0] + file2[fields[1]].values[0],file[fields[7]].values[0] + file2[fields[7]].values[0])
  209. data["{{3d3}}"] = getValue(file[fields[2]].values[0] + file2[fields[2]].values[0],file[fields[8]].values[0] + file2[fields[8]].values[0])
  210. data["{{4d3}}"] = getValue(file[fields[3]].values[0] + file2[fields[3]].values[0],file[fields[9]].values[0] + file2[fields[9]].values[0])
  211. data["{{5d3}}"] = getValue(file[fields[4]].values[0] + file2[fields[4]].values[0],file[fields[10]].values[0] + file2[fields[10]].values[0])
  212. for i in data:
  213. try:
  214. value = "{:,.2f}".format(data[i])
  215. data[i] = value.rstrip('0').rstrip('.') if '.' in value else value
  216. except:
  217. pass
  218. return data
  219. def check(client, data):
  220. if client == data[0]:
  221. return True
  222. for alias in data[1]:
  223. if client.upper() == alias:
  224. return True
  225. return False
  226. def getClientData(): # currently unused
  227. client = input("請輸入客戶名稱: ")
  228. for c in clients:
  229. if check(client, c):
  230. return c[0], c[2]
  231. source = input('請輸入Google Ads 資料來源的Google Sheets ID (應有 "[客戶名稱] 前一周" 的活頁簿 : ')
  232. return client, source
  233. def process(c, service):
  234. client = c[0]
  235. DATA_ID = c[2]
  236. print (client, DATA_ID)
  237. # GATHER DATA HERE
  238. data = gather(client, DATA_ID)
  239. print(data)
  240. new_pres = "0000000000"
  241. if makepres != 0:
  242. new_pres = copy_presentation(PRESENTATION_ID, client + "週報" + str(dt.date.today().strftime("%Y%m%d")))
  243. # Call the Slides API
  244. presentation = service.presentations().get(
  245. presentationId=new_pres).execute()
  246. slides = presentation.get('slides')
  247. print('The presentation contains {} slides:'.format(len(slides)))
  248. for i, slide in enumerate(slides):
  249. content = slide.get('pageElements')
  250. print('- Slide #{} contains {} elements.'.format(
  251. i + 1, len(content)))
  252. print(content[0]['objectId'])
  253. # ALL THE STUFF YOU WANT TO DO GOES HERE
  254. reqs = [
  255. {'replaceAllText': {'containsText': {'text': '{{client}}'}, 'replaceText': client}},
  256. {'replaceAllText': {'containsText': {'text': '{{reportdate}}'}, 'replaceText': str(dt.date.today().strftime("%Y/%m/%d"))}},
  257. {'replaceAllText': {'containsText': {'text': '{{startdate}}'},
  258. 'replaceText': str(((dt.date.today()) - dt.timedelta(days=7)).strftime("%m/%d"))}},
  259. {'replaceAllText': {'containsText': {'text': '{{enddate}}'},
  260. 'replaceText': str(((dt.date.today()) - dt.timedelta(days=1)).strftime("%m/%d"))}},
  261. {'replaceAllText': {'containsText': {'text': '{{v111}}'}, 'replaceText': data["{{v111}}"]}},
  262. {'replaceAllText': {'containsText': {'text': '{{v211}}'}, 'replaceText': data["{{v211}}"]}},
  263. {'replaceAllText': {'containsText': {'text': '{{v311}}'}, 'replaceText': data["{{v311}}"]}},
  264. {'replaceAllText': {'containsText': {'text': '{{v411}}'}, 'replaceText': data["{{v411}}"]}},
  265. {'replaceAllText': {'containsText': {'text': '{{v511}}'}, 'replaceText': data["{{v511}}"]}},
  266. {'replaceAllText': {'containsText': {'text': '{{v611}}'}, 'replaceText': data["{{v611}}"]}},
  267. {'replaceAllText': {'containsText': {'text': '{{v121}}'}, 'replaceText': data["{{v121}}"]}},
  268. {'replaceAllText': {'containsText': {'text': '{{v221}}'}, 'replaceText': data["{{v221}}"]}},
  269. {'replaceAllText': {'containsText': {'text': '{{v321}}'}, 'replaceText': data["{{v321}}"]}},
  270. {'replaceAllText': {'containsText': {'text': '{{v421}}'}, 'replaceText': data["{{v421}}"]}},
  271. {'replaceAllText': {'containsText': {'text': '{{v521}}'}, 'replaceText': data["{{v521}}"]}},
  272. {'replaceAllText': {'containsText': {'text': '{{v621}}'}, 'replaceText': data["{{v621}}"]}},
  273. {'replaceAllText': {'containsText': {'text': '{{v112}}'}, 'replaceText': data["{{v112}}"]}},
  274. {'replaceAllText': {'containsText': {'text': '{{v212}}'}, 'replaceText': data["{{v212}}"]}},
  275. {'replaceAllText': {'containsText': {'text': '{{v312}}'}, 'replaceText': data["{{v312}}"]}},
  276. {'replaceAllText': {'containsText': {'text': '{{v412}}'}, 'replaceText': data["{{v412}}"]}},
  277. {'replaceAllText': {'containsText': {'text': '{{v512}}'}, 'replaceText': data["{{v512}}"]}},
  278. {'replaceAllText': {'containsText': {'text': '{{v612}}'}, 'replaceText': data["{{v612}}"]}},
  279. {'replaceAllText': {'containsText': {'text': '{{v122}}'}, 'replaceText': data["{{v122}}"]}},
  280. {'replaceAllText': {'containsText': {'text': '{{v222}}'}, 'replaceText': data["{{v222}}"]}},
  281. {'replaceAllText': {'containsText': {'text': '{{v322}}'}, 'replaceText': data["{{v322}}"]}},
  282. {'replaceAllText': {'containsText': {'text': '{{v422}}'}, 'replaceText': data["{{v422}}"]}},
  283. {'replaceAllText': {'containsText': {'text': '{{v522}}'}, 'replaceText': data["{{v522}}"]}},
  284. {'replaceAllText': {'containsText': {'text': '{{v622}}'}, 'replaceText': data["{{v622}}"]}},
  285. {'replaceAllText': {'containsText': {'text': '{{v113}}'}, 'replaceText': data["{{v113}}"]}},
  286. {'replaceAllText': {'containsText': {'text': '{{v213}}'}, 'replaceText': data["{{v213}}"]}},
  287. {'replaceAllText': {'containsText': {'text': '{{v313}}'}, 'replaceText': data["{{v313}}"]}},
  288. {'replaceAllText': {'containsText': {'text': '{{v413}}'}, 'replaceText': data["{{v413}}"]}},
  289. {'replaceAllText': {'containsText': {'text': '{{v513}}'}, 'replaceText': data["{{v513}}"]}},
  290. {'replaceAllText': {'containsText': {'text': '{{v613}}'}, 'replaceText': data["{{v613}}"]}},
  291. {'replaceAllText': {'containsText': {'text': '{{v123}}'}, 'replaceText': data["{{v123}}"]}},
  292. {'replaceAllText': {'containsText': {'text': '{{v223}}'}, 'replaceText': data["{{v223}}"]}},
  293. {'replaceAllText': {'containsText': {'text': '{{v323}}'}, 'replaceText': data["{{v323}}"]}},
  294. {'replaceAllText': {'containsText': {'text': '{{v423}}'}, 'replaceText': data["{{v423}}"]}},
  295. {'replaceAllText': {'containsText': {'text': '{{v523}}'}, 'replaceText': data["{{v523}}"]}},
  296. {'replaceAllText': {'containsText': {'text': '{{v623}}'}, 'replaceText': data["{{v623}}"]}},
  297. {'replaceAllText': {'containsText': {'text': '{{1d1}}'}, 'replaceText': data["{{1d1}}"]}},
  298. {'replaceAllText': {'containsText': {'text': '{{2d1}}'}, 'replaceText': data["{{2d1}}"]}},
  299. {'replaceAllText': {'containsText': {'text': '{{3d1}}'}, 'replaceText': data["{{3d1}}"]}},
  300. {'replaceAllText': {'containsText': {'text': '{{4d1}}'}, 'replaceText': data["{{4d1}}"]}},
  301. {'replaceAllText': {'containsText': {'text': '{{5d1}}'}, 'replaceText': data["{{5d1}}"]}},
  302. {'replaceAllText': {'containsText': {'text': '{{1d2}}'}, 'replaceText': data["{{1d2}}"]}},
  303. {'replaceAllText': {'containsText': {'text': '{{2d2}}'}, 'replaceText': data["{{2d2}}"]}},
  304. {'replaceAllText': {'containsText': {'text': '{{3d2}}'}, 'replaceText': data["{{3d2}}"]}},
  305. {'replaceAllText': {'containsText': {'text': '{{4d2}}'}, 'replaceText': data["{{4d2}}"]}},
  306. {'replaceAllText': {'containsText': {'text': '{{5d2}}'}, 'replaceText': data["{{5d2}}"]}},
  307. {'replaceAllText': {'containsText': {'text': '{{1d3}}'}, 'replaceText': data["{{1d3}}"]}},
  308. {'replaceAllText': {'containsText': {'text': '{{2d3}}'}, 'replaceText': data["{{2d3}}"]}},
  309. {'replaceAllText': {'containsText': {'text': '{{3d3}}'}, 'replaceText': data["{{3d3}}"]}},
  310. {'replaceAllText': {'containsText': {'text': '{{4d3}}'}, 'replaceText': data["{{4d3}}"]}},
  311. {'replaceAllText': {'containsText': {'text': '{{5d3}}'}, 'replaceText': data["{{5d3}}"]}},
  312. ]
  313. service.presentations().batchUpdate(body={'requests':reqs}, presentationId=new_pres, fields='').execute()
  314. return "https://docs.google.com/presentation/d/" + new_pres # returns link of presentation
  315. def main():
  316. msg = "以下是客戶的Ads週報,相關負責人員請完成填寫報告內容,謝謝。\n\n"
  317. try:
  318. service = build('slides', 'v1', credentials=creds)
  319. #client, DATA_ID = getClientData()
  320. for c in clients:
  321. if c[3]!=1: # if client is not discontinued
  322. link = process(c, service)
  323. msg += c[0] + ": " + link + "\n"
  324. print(msg)
  325. if notifygroup != 0:
  326. notify_group(msg)
  327. except HttpError as err:
  328. print(err)
  329. if __name__ == '__main__':
  330. main()