http.py

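"""Flask HTTP API for Frigate.

Endpoints defined below:

    /                                  health check
    /config                            current Frigate config as JSON
    /stats                             per-camera and per-detector runtime stats
    /events                            recent events, filterable via query string
    /events/summary                    event counts grouped by camera/label/day/zones
    /events/<id>                       a single event
    /events/<id>/snapshot.jpg          thumbnail for a stored or in-progress event
    /<camera_name>                     MJPEG debug stream
    /<camera_name>/latest.jpg          most recent frame as JPEG
    /<camera_name>/<label>/best.jpg    best recent frame for a label on a camera
"""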

import base64
import datetime
import logging
import os
import time
from functools import reduce

import cv2
import numpy as np
from flask import (Blueprint, Flask, Response, current_app, jsonify,
                   make_response, request)
from peewee import SqliteDatabase, operator, fn, DoesNotExist
from playhouse.shortcuts import model_to_dict

from frigate.models import Event

logger = logging.getLogger(__name__)

bp = Blueprint('frigate', __name__)

def create_app(frigate_config, database: SqliteDatabase, camera_metrics, detectors, detected_frames_processor):
    app = Flask(__name__)

    @app.before_request
    def _db_connect():
        database.connect()

    @app.teardown_request
    def _db_close(exc):
        if not database.is_closed():
            database.close()

    app.frigate_config = frigate_config
    app.camera_metrics = camera_metrics
    app.detectors = detectors
    app.detected_frames_processor = detected_frames_processor

    app.register_blueprint(bp)

    return app
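
# A minimal usage sketch for the factory above (the names and the run() call
# here are illustrative, not taken from the rest of Frigate):
#
#   app = create_app(config, SqliteDatabase('frigate.db'), camera_metrics,
#                    detectors, detected_frames_processor)
#   app.run(host='0.0.0.0', port=5000)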

@bp.route('/')
def is_healthy():
    return "Frigate is running. Alive and healthy!"

@bp.route('/events/summary')
def events_summary():
    groups = (
        Event
        .select(
            Event.camera,
            Event.label,
            fn.strftime('%Y-%m-%d', fn.datetime(Event.start_time, 'unixepoch', 'localtime')).alias('day'),
            Event.zones,
            fn.COUNT(Event.id).alias('count')
        )
        .group_by(
            Event.camera,
            Event.label,
            fn.strftime('%Y-%m-%d', fn.datetime(Event.start_time, 'unixepoch', 'localtime')),
            Event.zones
        )
    )

    return jsonify([e for e in groups.dicts()])

@bp.route('/events/<id>')
def event(id):
    try:
        return model_to_dict(Event.get(Event.id == id))
    except DoesNotExist:
        return "Event not found", 404

@bp.route('/events/<id>/snapshot.jpg')
def event_snapshot(id):
    format = request.args.get('format', 'ios')
    thumbnail_bytes = None
    try:
        event = Event.get(Event.id == id)
        thumbnail_bytes = base64.b64decode(event.thumbnail)
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            for camera_state in current_app.detected_frames_processor.camera_states.values():
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        thumbnail_bytes = tracked_obj.get_jpg_bytes()
        except Exception:
            return "Event not found", 404

    if thumbnail_bytes is None:
        return "Event not found", 404

    # android notifications prefer a 2:1 ratio
    if format == 'android':
        jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
        img = cv2.imdecode(jpg_as_np, flags=1)
        # pad the sides with black borders; value= must be passed by keyword,
        # otherwise the tuple is treated as copyMakeBorder's dst argument
        thumbnail = cv2.copyMakeBorder(img, 0, 0, int(img.shape[1]*0.5), int(img.shape[1]*0.5),
                                       cv2.BORDER_CONSTANT, value=(0, 0, 0))
        ret, jpg = cv2.imencode('.jpg', thumbnail)
        thumbnail_bytes = jpg.tobytes()

    response = make_response(thumbnail_bytes)
    response.headers['Content-Type'] = 'image/jpeg'
    return response
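
# Illustrative queries for the route below; all filters are optional and are
# combined with AND, with `after`/`before` given as unix timestamps:
#   /events?camera=back_yard&label=person&limit=50
#   /events?zone=driveway&after=1609459200&before=1609545600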

@bp.route('/events')
def events():
    limit = request.args.get('limit', 100, type=int)
    camera = request.args.get('camera')
    label = request.args.get('label')
    zone = request.args.get('zone')
    after = request.args.get('after', type=int)
    before = request.args.get('before', type=int)

    clauses = []

    if camera:
        clauses.append((Event.camera == camera))

    if label:
        clauses.append((Event.label == label))

    if zone:
        clauses.append((Event.zones.cast('text') % f"*\"{zone}\"*"))

    if after:
        clauses.append((Event.start_time >= after))

    if before:
        clauses.append((Event.start_time <= before))

    if len(clauses) == 0:
        # no filters requested, match everything
        clauses.append((1 == 1))

    events = (Event.select()
              .where(reduce(operator.and_, clauses))
              .order_by(Event.start_time.desc())
              .limit(limit))

    return jsonify([model_to_dict(e) for e in events])

@bp.route('/config')
def config():
    return jsonify(current_app.frigate_config.to_dict())
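
# The stats payload below reports camera_fps/process_fps/skipped_fps/
# detection_fps and process pids per camera, inference speed in milliseconds
# per detector, and an overall detection_fps total.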

@bp.route('/stats')
def stats():
    camera_metrics = current_app.camera_metrics
    stats = {}

    total_detection_fps = 0

    for name, camera_stats in camera_metrics.items():
        total_detection_fps += camera_stats['detection_fps'].value
        stats[name] = {
            'camera_fps': round(camera_stats['camera_fps'].value, 2),
            'process_fps': round(camera_stats['process_fps'].value, 2),
            'skipped_fps': round(camera_stats['skipped_fps'].value, 2),
            'detection_fps': round(camera_stats['detection_fps'].value, 2),
            'pid': camera_stats['process'].pid,
            'capture_pid': camera_stats['capture_process'].pid
        }

    stats['detectors'] = {}
    for name, detector in current_app.detectors.items():
        stats['detectors'][name] = {
            'inference_speed': round(detector.avg_inference_speed.value * 1000, 2),
            'detection_start': detector.detection_start.value,
            'pid': detector.detect_process.pid
        }

    stats['detection_fps'] = round(total_detection_fps, 2)

    return jsonify(stats)
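
# Query parameters for best.jpg below: ?h= resizes to the given height while
# preserving aspect ratio, and ?crop=1 crops to the object's region before
# resizing.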

@bp.route('/<camera_name>/<label>/best.jpg')
def best(camera_name, label):
    if camera_name in current_app.frigate_config.cameras:
        best_object = current_app.detected_frames_processor.get_best(camera_name, label)
        best_frame = best_object.get('frame')
        if best_frame is None:
            best_frame = np.zeros((720, 1280, 3), np.uint8)
        else:
            best_frame = cv2.cvtColor(best_frame, cv2.COLOR_YUV2BGR_I420)

        crop = bool(request.args.get('crop', 0, type=int))
        if crop:
            region = best_object.get('region', [0, 0, 300, 300])
            best_frame = best_frame[region[1]:region[3], region[0]:region[2]]

        height = int(request.args.get('h', str(best_frame.shape[0])))
        width = int(height * best_frame.shape[1] / best_frame.shape[0])

        best_frame = cv2.resize(best_frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
        ret, jpg = cv2.imencode('.jpg', best_frame)
        response = make_response(jpg.tobytes())
        response.headers['Content-Type'] = 'image/jpeg'
        return response
    else:
        return "Camera named {} not found".format(camera_name), 404

@bp.route('/<camera_name>')
def mjpeg_feed(camera_name):
    fps = int(request.args.get('fps', '3'))
    height = int(request.args.get('h', '360'))
    if camera_name in current_app.frigate_config.cameras:
        # return a multipart response
        return Response(imagestream(current_app.detected_frames_processor, camera_name, fps, height),
                        mimetype='multipart/x-mixed-replace; boundary=frame')
    else:
        return "Camera named {} not found".format(camera_name), 404

@bp.route('/<camera_name>/latest.jpg')
def latest_frame(camera_name):
    if camera_name in current_app.frigate_config.cameras:
        frame = current_app.detected_frames_processor.get_current_frame(camera_name)
        if frame is None:
            frame = np.zeros((720, 1280, 3), np.uint8)

        height = int(request.args.get('h', str(frame.shape[0])))
        width = int(height * frame.shape[1] / frame.shape[0])

        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
        ret, jpg = cv2.imencode('.jpg', frame)
        response = make_response(jpg.tobytes())
        response.headers['Content-Type'] = 'image/jpeg'
        return response
    else:
        return "Camera named {} not found".format(camera_name), 404

def imagestream(detected_frames_processor, camera_name, fps, height):
    while True:
        # max out at specified FPS
        time.sleep(1/fps)
        frame = detected_frames_processor.get_current_frame(camera_name, draw=True)
        if frame is None:
            frame = np.zeros((height, int(height*16/9), 3), np.uint8)

        width = int(height * frame.shape[1] / frame.shape[0])
        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_LINEAR)
        ret, jpg = cv2.imencode('.jpg', frame)
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + jpg.tobytes() + b'\r\n\r\n')