report.py 25 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617
  1. import requests
  2. import json
  3. #import csv
  4. from tabulate import tabulate
  5. from itertools import groupby
  6. from operator import itemgetter
  7. import numpy as np
  8. import matplotlib.pyplot as plt
# GraphQL endpoint of the Joystream query node used by all queries below.
url = 'https://joystream2.yyagi.cloud/graphql'
#url = 'https://query.joystream.org/graphql'
# Filename template for the daily object listing published by each operator.
file_name = "{}-12:00-objects.txt"
# HTTP server hosting the per-operator object listings.
file_server = "http://87.236.146.74:8000/"
# Storage working-group operators and the storage bucket each one runs.
operators = [{'id':"0x2bc", 'bucket': 0},{'id':"alexznet", 'bucket': 2},{'id':"Craci_BwareLabs", 'bucket': 10},{'id':"GodsHunter", 'bucket': 6},{'id':"joystreamstats", 'bucket': 1},{'id':"l1dev", 'bucket': 4},{'id':"maxlevush", 'bucket': 3},{'id':"mmx1916", 'bucket': 9},{'id':"razumv", 'bucket': 11},{'id':"yyagi", 'bucket': 8}, {'id':"sieemma", 'bucket': 12} ]
# Basic-auth credentials for the file server (empty username).
credential = {'username': '', 'password' :'joystream'}
# Working-group id used in the GraphQL filters.
query_group = "storageWorkingGroup"
  16. #def queryGrapql(query, url= 'https://query.joystream.org/graphql' ):
  17. def queryGrapql(query, url= 'https://joystream2.yyagi.cloud/graphql' ):
  18. headers = {'Accept-Encoding': 'gzip, deflate, br', 'Content-Type': 'application/json',
  19. 'Accept': 'application/json', 'Connection': 'keep-alive', 'DNT': '1',
  20. 'Origin': 'https://query.joystream.org' }
  21. response = requests.post(url, headers=headers, json=query)
  22. return response.json()['data']
  23. def get_councils_period(url):
  24. query = {"query":'query MyQuery{ electedCouncils { electedAtBlock endedAtBlock endedAtTime electedAtTime } }'}
  25. data = queryGrapql(query, url)['electedCouncils']
  26. #data = sorted(data, key = itemgetter('endedAtBlock'), reverse=True)
  27. if data[-1]['endedAtTime'] == None:
  28. data.pop(-1)
  29. data = sorted(data, key = itemgetter('endedAtBlock'))
  30. period = len(data)
  31. return data[-1], data[-2], data[0], period
  32. def get_backets(url, start_time = '', end_time = '', createdat = False, deletedat = False):
  33. if start_time and end_time :
  34. if createdat :
  35. query = {"query":'query MyQuery {{ storageBuckets ( where: {{createdAt_gt: "{}" , createdAt_lt: "{}"}}){{ id dataObjectsSize dataObjectsSizeLimit dataObjectsCount bags {{ id createdAt }} }}}}'.format(start_time, end_time)}
  36. elif deletedat:
  37. query = {"query":'query MyQuery {{ storageBuckets ( where: {{deletedAt_gt: "{}" , deletedAt_lt: "{}"}}){{ id dataObjectsSize dataObjectsSizeLimit dataObjectsCount bags {{ id createdAt }} }}}}'.format(start_time, end_time)}
  38. else:
  39. query = {"query":"query MyQuery { storageBuckets { id dataObjectsSize dataObjectsSizeLimit dataObjectsCount bags { id createdAt } }}"}
  40. data = queryGrapql(query, url)['storageBuckets']
  41. for record in data:
  42. record['bags'] = len(record['bags'])
  43. record['Utilization'] = int(record['dataObjectsSize'])/int(record['dataObjectsSizeLimit'])
  44. record['dataObjectsSize, GB'] = int(record['dataObjectsSize']) / 1074790400
  45. #keys = list(data[0].keys())
  46. #file_name= 'backets_info_'+ time.strftime("%Y%m%d%H%M%S")+'.csv'
  47. # with open(file_name, 'w') as csvfile:
  48. # writer = csv.DictWriter(csvfile, fieldnames = keys)
  49. # writer.writeheader()
  50. # writer.writerows(data)
  51. #return file_name
  52. return data
  53. def get_rewards(start_time, end_time):
  54. query = '{{ rewardPaidEvents(limit: 33000, offset: 0, where: {{group: {{id_eq: "storageWorkingGroup"}}, createdAt_gt: "{}", createdAt_lt: "{}"}}) {{ paymentType amount workerId }} }}'.format(start_time, end_time)
  55. query_dict = {"query": query}
  56. data = queryGrapql(query_dict,url)['rewardPaidEvents']
  57. total = 0
  58. result = []
  59. sorted_data = sorted(data, key = itemgetter('workerId'))
  60. for key, values in groupby(sorted_data, key = itemgetter('workerId')):
  61. worker_total = 0
  62. for value in list(values):
  63. worker_total += int(value["amount"])
  64. result.append({'workerId':key, 'worker_total':worker_total})
  65. total += worker_total
  66. return total,result
  67. def get_new_opening(start_time, end_time):
  68. query = '{{ openingAddedEvents(where: {{group: {{id_eq: "storageWorkingGroup"}}, createdAt_gt: "{}", createdAt_lt: "{}"}}) {{ opening {{ createdAt id openingcanceledeventopening {{ createdAt }} }} }} }}'.format(start_time, end_time)
  69. query_dict = {"query": query}
  70. data = queryGrapql(query_dict,url)['openingAddedEvents']
  71. result = []
  72. if len(data) == 0:
  73. return 0,result
  74. for record in data:
  75. if len(record['opening']['openingcanceledeventopening']) == 0:
  76. result.append({'id': record['opening']['id'], 'createdAt': record['opening']['createdAt']})
  77. length = len(result)
  78. return length,result
  79. def get_new_hire(start_time, end_time):
  80. query = '{{ openingFilledEvents(where: {{group: {{id_eq: "storageWorkingGroup"}}, createdAt_gt: "{}", createdAt_lt: "{}"}}) {{ createdAt workersHired {{ id membershipId}}}}}}'.format(start_time, end_time)
  81. query_dict = {"query": query}
  82. data = queryGrapql(query_dict,url)['openingFilledEvents']
  83. result = []
  84. if len(data) == 0:
  85. return 0,result
  86. for record in data:
  87. record['workersHired'][0]['createdAt'] = record['createdAt']
  88. result.append(record['workersHired'][0])
  89. length = len(result)
  90. return length, result
  91. def get_slashes(start_time, end_time):
  92. query = '{{ stakeSlashedEvents(where: {{group: {{id_eq: "storageWorkingGroup", createdAt_gt: "{}", createdAt_lt: "{}"}}}}) {{ createdAt worker {{ membershipId }} slashedAmount workerId }}}}'.format(start_time, end_time)
  93. query_dict = {"query": query}
  94. data = queryGrapql(query_dict,url)['stakeSlashedEvents']
  95. length = len(data)
  96. if length > 0:
  97. for record in data:
  98. record["worker"] = record["worker"]["membershipId"]
  99. return length,data
  100. def get_termination(start_time, end_time):
  101. query = '{{ terminatedWorkerEvents(where: {{group: {{id_eq: "storageWorkingGroup"}}, createdAt_gt: "{}", createdAt_lt: "{}"}}) {{createdAt workerId worker {{membershipId}} }}}}'.format(start_time, end_time)
  102. query_dict = {"query": query}
  103. data = queryGrapql(query_dict,url)['terminatedWorkerEvents']
  104. length = len(data)
  105. if length > 0:
  106. for record in data:
  107. record["worker"] = record["worker"]["membershipId"]
  108. return length,data
  109. def get_bags_nums(start_time = '', end_time = ''):
  110. if start_time and end_time :
  111. query_created = {"query": 'query MyQuery {{ storageBags( limit: 33000, offset: 0, where: {{createdAt_gt: "{}" , createdAt_lt: "{}"}}) {{ id }} }}'.format(start_time, end_time) }
  112. query_deleted = {"query": 'query MyQuery {{ storageBags( limit: 33000, offset: 0, where: {{deletedAt_gt: "{}" , deletedAt_lt: "{}"}}) {{ id }} }}'.format(start_time, end_time) }
  113. else :
  114. query_created = {"query": 'query MyQuery { storageBags(limit: 3000, offset:0) { id } }'}
  115. query_deleted = {"query": 'query MyQuery { storageBags(limit: 3000, offset:0) { id } }'}
  116. data_created = queryGrapql(query_created)['storageBags']
  117. data_deleted = queryGrapql(query_deleted)['storageBags']
  118. num_bags_created = len(data_created)
  119. num_bags_deleted = len(data_deleted)
  120. return {"bag created": num_bags_created, "bags deleted": num_bags_deleted}
  121. def get_bags(start_time='', end_time=''):
  122. if start_time and end_time :
  123. query = {"query": 'query MyQuery {{ storageBags( limit: 33000, offset: 0, where: {{createdAt_gt: "{}" , createdAt_lt: "{}"}}) {{ id createdAt deletedAt }} }}'.format(start_time, end_time) }
  124. else:
  125. query = {"query": 'query MyQuery { storageBags( limit: 33000, offset: 0) { id createdAt deletedAt }} ' }
  126. data = queryGrapql(query)['storageBags']
  127. return len(data), data
  128. def get_objects(start_time='',end_time=''):
  129. if start_time and end_time :
  130. query_created = {"query":'query MyQuery {{ storageDataObjects(limit: 33000, offset: 0,where: {{createdAt_gt: "{}" , createdAt_lt: "{}"}}) {{ createdAt size id storageBagId }} }}'.format(start_time, end_time) }
  131. else :
  132. query_created = {"query":'query MyQuery { storageDataObjects(limit: 33000, offset: 0) { createdAt deletedAt size id storageBagId } }' }
  133. objects_created = queryGrapql(query_created)['storageDataObjects']
  134. for obj in objects_created:
  135. obj['storageBagId'] = obj['storageBagId'].split(":")[2]
  136. return objects_created
  137. def get_objects_files(file_server, operators, end_date, credential):
  138. result= []
  139. file = end_date+"-12:00-objects.txt"
  140. for operator in operators:
  141. url = file_server+operator['id']+"/"+file
  142. response = requests.get(url, auth=(credential['username'], credential['password']))
  143. if response.status_code == 200 and not response.text.startswith('<!DOCTYPE html>'):
  144. result.append({'operator':operator['id'], 'file': file, 'response': response.content})
  145. return result
  146. def load_objects(lines):
  147. objects_file = []
  148. for line in lines:
  149. if line.startswith('d') or line.startswith('total') or not line.strip():
  150. continue
  151. line_split = line.split(",")
  152. objects_file.append({'size': line_split[4], 'id': line_split[8].strip('\n')})
  153. return objects_file
  154. def load_objects_from_server(data):
  155. objects_file = []
  156. for operator in data:
  157. opertor_response = operator['response'].decode("utf-8")
  158. lines = opertor_response.split('\r\n')
  159. objects_file.append({'operator': operator['operator'],'objects':load_objects(lines)})
  160. return objects_file
  161. def load_objects_from_file(file_name):
  162. objects_file = []
  163. with open(file_name) as f:
  164. lines = f.readlines()
  165. objects_file = objects_file = load_objects(lines)
  166. return objects_file
  167. def compare_objects(file_objects, objects):
  168. lost = []
  169. for obj in objects:
  170. found = False
  171. for file_obj in file_objects:
  172. if obj['id'] == file_obj['id']:
  173. found = True
  174. break
  175. if not found:
  176. lost.append(obj)
  177. return lost
  178. def get_lost(start_time, end_time):
  179. query = '{{ storageDataObjects(limit: 3000, offset: 0, where: {{isAccepted_eq: false, createdAt_gt: "{}", createdAt_lt: "{}"}}) {{ createdAt size id storageBagId }}}}'.format(start_time, end_time)
  180. query_dict = {"query": query}
  181. data = queryGrapql(query_dict,url)['storageDataObjects']
  182. for obj in data:
  183. obj['storageBagId'] = obj['storageBagId'].split(":")[2]
  184. length = len(data)
  185. return length,data
  186. def objects_stats(start_time='',end_time=''):
  187. data_created = get_objects(start_time,end_time)
  188. num_objects_created = len(data_created)
  189. total_size = 0
  190. sizes = {'<10 MB': 0,'<100 MB': 0,'<1000 MB': 0,'<10000 MB': 0,'<100000 MB': 0,'<1000000 MB': 0}
  191. sizes_range = {'0-10 MB': 0,'10-100 MB': 0,'100-1000 MB': 0,'1000-10000 MB': 0,'10000-100000 MB': 0,'100000-10000000 MB': 0}
  192. total_size,sizes,sizes_range =get_0bjects_ranges(data_created,total_size,sizes,sizes_range)
  193. bags_stats = bag_stats(data_created)
  194. return num_objects_created, total_size,sizes,sizes_range,bags_stats
  195. def get_0bjects_ranges(data_created,total_size,sizes,sizes_range):
  196. for record in data_created:
  197. size = int(record['size'])
  198. total_size += size
  199. size = size / 1048576
  200. if size < 10:
  201. sizes['<10 MB'] += 1
  202. sizes['<100 MB'] += 1
  203. sizes['<1000 MB'] += 1
  204. sizes['<10000 MB'] += 1
  205. sizes['<100000 MB'] += 1
  206. sizes['<1000000 MB'] += 1
  207. elif size < 100:
  208. sizes['<100 MB'] += 1
  209. sizes['<1000 MB'] += 1
  210. sizes['<10000 MB'] += 1
  211. sizes['<100000 MB'] += 1
  212. sizes['<1000000 MB'] += 1
  213. elif size < 1000:
  214. sizes['<1000 MB'] += 1
  215. sizes['<10000 MB'] += 1
  216. sizes['<100000 MB'] += 1
  217. sizes['<1000000 MB'] += 1
  218. elif size < 10000:
  219. sizes['<10000 MB'] += 1
  220. sizes['<100000 MB'] += 1
  221. sizes['<1000000 MB'] += 1
  222. elif size < 100000:
  223. sizes['<100000 MB'] += 1
  224. sizes['<1000000 MB'] += 1
  225. else:
  226. sizes['<1000000 MB'] += 1
  227. if size < 10:
  228. sizes_range['0-10 MB'] += 1
  229. elif size < 100:
  230. sizes_range['10-100 MB'] += 1
  231. elif size < 1000:
  232. sizes_range['100-1000 MB'] += 1
  233. elif size < 10000:
  234. sizes_range['1000-10000 MB'] += 1
  235. elif size < 100000:
  236. sizes_range['10000-100000 MB'] += 1
  237. else:
  238. sizes_range['100000-10000000 MB'] += 1
  239. return total_size, sizes, sizes_range
  240. def get_grouped_obj_dates(data, action):
  241. result = {}
  242. data = sorted(data, key = itemgetter(action))
  243. for key, records in groupby(data, key = itemgetter(action)):
  244. records = list(records)
  245. size = 0
  246. num_objects = len(records)
  247. for record in records:
  248. size += int(record['size'])
  249. result[key] = { 'size': size, 'num_objects': num_objects}
  250. return result
  251. def get_draw_objects(file1name, file2name):
  252. data = get_objects()
  253. created_objects = []
  254. deleted_objects = []
  255. for record in data:
  256. record['createdAt'] = record['createdAt'].split('T')[0]
  257. created_objects.append({'createdAt': record['createdAt'], 'size': record['size']})
  258. if record['deletedAt']:
  259. record['deletedAt'] = record['deletedAt'].split('T')[0]
  260. deleted_objects.append({'deletedAt': record['deletedAt'], 'size': record['size']})
  261. num_created_objects = len(created_objects)
  262. num_deleted_objects = len(deleted_objects)
  263. if num_created_objects > 0:
  264. created_objects = get_grouped_obj_dates(created_objects, 'createdAt')
  265. if num_deleted_objects > 0:
  266. deleted_objects = get_grouped_obj_dates(deleted_objects, 'deletedAt')
  267. for key, value in deleted_objects.items:
  268. created_objects[key]['size'] -= value['size']
  269. created_objects[key]['num_objects'] -= value['num_objects']
  270. dates = list(created_objects.keys())
  271. sizes = [round(int(k['size'])/1074790400, 2) for k in created_objects.values()]
  272. for index, size in enumerate(sizes):
  273. if index == 0:
  274. continue
  275. sizes[index] += sizes[index-1]
  276. num_objects = [k['num_objects'] for k in created_objects.values()]
  277. for index, num_object in enumerate(num_objects):
  278. if index == 0:
  279. continue
  280. num_objects[index] += num_objects[index-1]
  281. plot(dates[1:], sizes[1:], 'Size (Sum, GB)', 'Dates', 'Size', 0, 750 , 10, 25,file1name)
  282. plot(dates[1:], num_objects[1:], 'Number of Objects', 'Dates', 'Number of Objects', 0, 12000, 10, 500,file2name)
  283. def plot(x, y, title, x_label, y_label, x_start, y_start, x_spacing, y_spacing,filename):
  284. fig, ax = plt.subplots()
  285. fig.set_size_inches(15, 10)
  286. plt.plot(x, y)
  287. ax.set_xticks(np.arange(x_start, len(x)+1, x_spacing))
  288. ax.set_yticks(np.arange(y_start, max(y), y_spacing))
  289. ax.set_title(title)
  290. ax.set(xlabel=x_label, ylabel=y_label)
  291. plt.xticks(rotation=45)
  292. plt.yticks(rotation=45)
  293. fig.set_dpi(100)
  294. fig.savefig(filename)
  295. plt.close()
  296. def get_created_deleted_bags(data):
  297. created_bags = []
  298. deleted_bags = []
  299. for record in data:
  300. record['createdAt'] = record['createdAt'].split('T')[0]
  301. created_bags.append({'createdAt': record['createdAt'], 'id': record['id']})
  302. if record['deletedAt']:
  303. record['deletedAt'] = record['deletedAt'].split('T')[0]
  304. deleted_bags.append({'deletedAt': record['deletedAt'], 'id': record['id']})
  305. return created_bags,deleted_bags
  306. def get_draw_bags(filename):
  307. num, data = get_bags()
  308. created_bags ,deleted_bags = get_created_deleted_bags(data)
  309. num_created_bags = len(created_bags)
  310. num_deleted_bags = len(deleted_bags)
  311. bags = {}
  312. if num_created_bags > 0:
  313. created_bags = sort_bags(created_bags, 'createdAt')
  314. for key, record in created_bags.items():
  315. bags[key] = len(record)
  316. if num_deleted_bags > 0:
  317. deleted_bags = sort_bags(deleted_bags, 'deletedAt')
  318. for key, record in deleted_objects.items():
  319. bags[key] -= len(record)
  320. dates = list(bags.keys())
  321. num_bags = [k for k in bags.values()]
  322. for index, num_bag in enumerate(num_bags):
  323. if index == 0:
  324. continue
  325. num_bags[index] += num_bags[index-1]
  326. plot(dates[1:], num_bags[1:], 'Number of Bags {}'.format(num_created_bags - num_deleted_bags), 'Dates', 'Number of Bags', 0, 250 , 3, 50,filename)
  327. def sort_bags(data, key):
  328. bags = {}
  329. sorted_data = sorted(data, key = itemgetter(key))
  330. for key, value in groupby(sorted_data, key = itemgetter(key)):
  331. #key = key.split(":")[2]
  332. bags[key]= list(value)
  333. return(bags)
  334. def bag_stats(data_created):
  335. bags = sort_bags(data_created, 'storageBagId')
  336. #print(bags)
  337. result= []
  338. for key, value in bags.items():
  339. bag = {}
  340. bag['id'] = key
  341. total_size = 0
  342. bag['objects_num'] = len(value)
  343. for obj in value:
  344. total_size += int(obj['size'])
  345. bag['total_size bytes'] = total_size
  346. bag['average_size bytes'] = int(total_size / bag['objects_num'])
  347. result.append(bag)
  348. return result
  349. def print_table(data, master_key = '', sort_key = ''):
  350. if sort_key:
  351. data = sorted(data, key = itemgetter(sort_key), reverse=True)
  352. headers = [*data[0]]
  353. if master_key:
  354. headers.append(master_key)
  355. headers.remove(master_key)
  356. headers = [master_key] + headers
  357. table = []
  358. for line in data:
  359. row = []
  360. if master_key:
  361. value = line.pop(master_key)
  362. row.append(value)
  363. for key in [*line]:
  364. row.append(line[key])
  365. table.append(row)
  366. try:
  367. result = tabulate(table, headers, tablefmt="github")
  368. print(result)
  369. return result
  370. except UnicodeEncodeError:
  371. result = tabulate(table, headers, tablefmt="grid")
  372. print(result)
  373. return result
  374. if __name__ == '__main__':
  375. last_council,previous_council,first_council, period = get_councils_period(url)
  376. report = ''
  377. first_time = first_council['electedAtTime']
  378. start_time = last_council['electedAtTime']
  379. end_time = last_council['endedAtTime']
  380. start_date = start_time.split('T')[0]
  381. end_date = end_time.split('T')[0]
  382. previous_start_time = previous_council['electedAtTime']
  383. previous_end_time = previous_council['endedAtTime']
  384. file_name = 'report-'+end_time
  385. print(start_time)
  386. print(end_time)
  387. print('Full report for the Term: {} \n\n'.format(period-1))
  388. print('Start date: {} \n'.format(start_date))
  389. print('End date: {} \n'.format(end_date))
  390. report += 'Full report for the Term: {} \n\n'.format(period-1)
  391. report += 'Start date: {} \n\n'.format(start_date)
  392. report += 'End date: {} \n\n'.format(end_date)
  393. print('Start Time: {}\n'.format(start_time))
  394. print('End Time: {}\n'.format(end_time))
  395. print('Start Block: {}\n'.format(last_council['electedAtBlock']))
  396. print('End Block: {}\n'.format(last_council['endedAtBlock']))
  397. report += 'Start Block: {} \n\n'.format(last_council['electedAtBlock'])
  398. report += 'End Block: {} \n\n'.format(last_council['endedAtBlock'])
  399. print('# Opening')
  400. num_openings, openings = get_new_opening(start_time, end_time)
  401. print('Number of openings: {}'.format(num_openings))
  402. report += '# Opening \n'
  403. report += 'Number of openings: {} \n'.format(num_openings)
  404. if num_openings > 0:
  405. tble = print_table(openings)
  406. report += tble+'\n'
  407. print('# Hiring')
  408. num_workers, hired_workers = get_new_hire(start_time, end_time)
  409. print('Number of hired works: {}'.format(num_workers))
  410. report += '# Hiring\n'
  411. report += 'Number of hired works: {}\n'.format(num_workers)
  412. if num_workers > 0:
  413. tble = print_table(hired_workers)
  414. report += tble+'\n'
  415. print('# Terminated workers')
  416. num_workers, terminated_workers = get_termination(start_time, end_time)
  417. print('Number of terminated workers: {}'.format(num_workers))
  418. report += '# Terminated workers \n'
  419. report += 'Number of terminated workers: {} \n'.format(num_workers)
  420. if num_workers > 0:
  421. tble = print_table(terminated_workers)
  422. report += tble+'\n'
  423. print('# Slashed workers')
  424. num_workers, slashed_workers = get_slashes(start_time, end_time)
  425. print('Number of slashed workers: {}'.format(num_workers))
  426. report += '# Slashed workers \n'
  427. report += 'Number of slashed workers: {} \n'.format(num_workers)
  428. if num_workers > 0:
  429. tble = print_table(slashed_workers)
  430. report += tble+'\n'
  431. print('# Rewards')
  432. report += '# Rewards\n'
  433. total_rewards,rewards = get_rewards(start_time, end_time)
  434. print('Total Rewards: {}'.format(total_rewards))
  435. report += 'Total Rewards: {}\n'.format(total_rewards)
  436. tble = print_table(rewards)
  437. report += tble+'\n'
  438. print('# BUCKETS Info ')
  439. report += '# BUCKETS Info \n'
  440. buckets = get_backets(url)
  441. buckets_file = 'buckets_'+end_time
  442. with open(buckets_file, 'w') as file:
  443. json.dump(buckets, file)
  444. file.close()
  445. tble = print_table(buckets)
  446. report += tble+'\n'
  447. print('## BUCKETS CREATED')
  448. report += '## BUCKETS CREATED\n'
  449. buckets_created = get_backets(url,start_time,end_time,createdat = True)
  450. number_buckets_created = len(buckets_created)
  451. print('Bucket Created: {}'.format(number_buckets_created))
  452. report += 'Bucket Created: {}\n'.format(number_buckets_created)
  453. if number_buckets_created > 0:
  454. tble = print_table(buckets_created)
  455. report += tble+'\n'
  456. print('## BUCKETS DELETED')
  457. report += '## BUCKETS DELETED\n'
  458. buckets_deleted = get_backets(url,start_time,end_time,deletedat = True)
  459. number_buckets_deleted = len(buckets_deleted)
  460. print('Bucket Deleted: {}\n'.format(number_buckets_deleted))
  461. report += 'Bucket Deleted: {}\n'.format(number_buckets_deleted)
  462. if number_buckets_deleted > 0:
  463. tble = print_table(buckets_deleted)
  464. report += tble+'\n'
  465. print('## Bags')
  466. report += '## Bags\n'
  467. bags = get_bags_nums(start_time, end_time)
  468. print('Bags Created: {} \n'.format(bags['bag created']))
  469. print('Bags Deleted: {} \n'.format(bags['bags deleted']))
  470. report += 'Bags Created: {} \n\n'.format(bags['bag created'])
  471. report += 'Bags Deleted: {} \n\n'.format(bags['bags deleted'])
  472. print('# Objects Info during this Council Period')
  473. report += '# Objects Info during this Council Period \n'
  474. #print(get_objects(start_time,end_time))
  475. objects_num, total_size,sizes,sizes_range,bags_stats = objects_stats(start_time,end_time)
  476. print('Total Objects Size: {}\n'.format(objects_num))
  477. report += 'Total Objects Size: {} \n\n'.format(objects_num)
  478. print('Total Objects Size: {}\n'.format(total_size))
  479. report += 'Total Objects Size: {} bytes \n\n'.format(total_size)
  480. print('## Objects Size Distribution')
  481. report += '## Objects Size Distribution\n'
  482. tble = print_table([sizes])
  483. report += tble+'\n \r\n'
  484. print('\n')
  485. tble = print_table([sizes_range])
  486. report += tble+'\n'
  487. print('## Objects Size Distribution Per Bag')
  488. tble = print_table(bags_stats)
  489. report += '## Objects Size Distribution Per Bag \n'
  490. report += tble+'\n'
  491. print('# Total object Info')
  492. report += '# Total object Info \n'
  493. #print(get_objects(start_time,end_time))
  494. objects_num, total_size,sizes,sizes_range,bags_stats = objects_stats()
  495. print('Total Objects: {}\n'.format(objects_num))
  496. report += 'Total Objects: {} \n\n'.format(objects_num)
  497. print('Total Objects Size: {}\n'.format(total_size))
  498. report += 'Total Objects Size: {} bytes\n\n'.format(total_size)
  499. total_num_bags = len(bags_stats)
  500. print('Total Number of Bags in use: {}\n'.format(total_num_bags))
  501. report += 'Total Number of Bags in use: {} bytes\n\n'.format(total_num_bags)
  502. num, data = get_bags()
  503. created_bags ,deleted_bags = get_created_deleted_bags(data)
  504. num_created_bags = len(created_bags)
  505. num_deleted_bags = len(deleted_bags)
  506. total_num_bags = num_created_bags - num_deleted_bags
  507. print('Grand Total Number of Bags: {}\n'.format(total_num_bags))
  508. report += 'Grand Total Number of Bags: {} bytes\n\n'.format(total_num_bags)
  509. print('## Objects Size Distribution')
  510. report += '## Objects Size Distribution \n'
  511. tble = print_table([sizes])
  512. report += tble+'\n \r\n'
  513. print('\n')
  514. tble = print_table([sizes_range])
  515. report += tble+'\n'
  516. print('## Objects Size Distribution Per Bag')
  517. report += '## Objects Size Distribution Per Bag \n'
  518. tble = print_table(bags_stats, sort_key = 'total_size bytes')
  519. report += tble+'\n\n\n'
  520. image1_file = 'objects_size_{}'.format(end_date)
  521. image2_file = 'objects_number_{}'.format(end_date)
  522. get_draw_objects(image1_file, image2_file)
  523. report += '![objects sizes](./{}.png) \n'.format(image1_file)
  524. report += '![objects number](./{}.png) \n'.format(image2_file)
  525. image3_file = 'bags_number_{}'.format(end_date)
  526. get_draw_bags(image3_file)
  527. report += '![objects sizes](./{}.png) \n'.format(image3_file)
  528. #print('# Lost Objects - Server compare')
  529. #report += '# Lost Objects - Server compare \n'
  530. master_objects = get_objects(start_time,end_time)
  531. #data = get_objects_files(file_server, operators, end_date, credential)
  532. #operators = load_objects_from_server(data)
  533. #operators_objects = []
  534. #for operator in operators:
  535. # operators_objects = operators_objects + operator['objects']
  536. #lost = compare_objects(operators_objects, master_objects)
  537. total_objects = len(master_objects)
  538. #lost_object = len(lost)
  539. #print('Total Objects: {}\n'.format(total_objects))
  540. #print('Total Lost Objects: {}\n'.format(lost_object))
  541. #print('Percentage Lost Objects: %{}\n'.format(100*lost_object/total_objects))
  542. #if lost_object > 0:
  543. # tble = print_table(lost, master_key = 'id')
  544. #report += 'Total Objects: {} \n\n'.format(total_objects)
  545. #report += 'Total Lost Objects: {} \n\n'.format(lost_object)
  546. #report += 'Percentage Lost Objects: %{} \n\n'.format(100*lost_object/total_objects)
  547. # report += tble+' \n'
  548. print('# Lost Objects - GraphQl')
  549. report += '# Lost Objects - GraphQl \n'
  550. number_lost, lost = get_lost(start_time,end_time)
  551. print('Total Objects: {}\n'.format(total_objects))
  552. print('Total Lost Objects: {}\n'.format(number_lost))
  553. print('Percentage Lost Objects: %{}\n'.format(100*number_lost/total_objects))
  554. if number_lost > 0:
  555. tble = print_table(lost, master_key = 'id')
  556. report += 'Total Objects: {} \n\n'.format(total_objects)
  557. report += 'Total Lost Objects: {} \n\n'.format(number_lost)
  558. report += 'Percentage Lost Objects: %{} \n\n'.format(100*number_lost/total_objects)
  559. report += tble+' \n'
  560. file_name = 'report_'+end_time+'.md'
  561. with open(file_name, 'w') as file:
  562. file.write(report)
  563. file.close()