http.py

import base64
import datetime
import logging
import os
import time
from functools import reduce

import cv2
import numpy as np
from flask import (Blueprint, Flask, Response, current_app, jsonify,
                   make_response, request)
from peewee import SqliteDatabase, operator, fn, DoesNotExist
from playhouse.shortcuts import model_to_dict

from frigate.models import Event
from frigate.version import VERSION

logger = logging.getLogger(__name__)

bp = Blueprint('frigate', __name__)

def create_app(frigate_config, database: SqliteDatabase, camera_metrics, detectors, detected_frames_processor):
    app = Flask(__name__)

    @app.before_request
    def _db_connect():
        database.connect()

    @app.teardown_request
    def _db_close(exc):
        if not database.is_closed():
            database.close()

    app.frigate_config = frigate_config
    app.camera_metrics = camera_metrics
    app.detectors = detectors
    app.detected_frames_processor = detected_frames_processor

    app.register_blueprint(bp)

    return app
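
# A rough sketch of how this factory might be wired up by the caller (the argument
# names below are illustrative, not taken from this file):
#
#   app = create_app(frigate_config, SqliteDatabase(db_path),
#                    camera_metrics, detectors, detected_frames_processor)
#   app.run(host='0.0.0.0', port=5000)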

@bp.route('/')
def is_healthy():
    return "Frigate is running. Alive and healthy!"

@bp.route('/events/summary')
def events_summary():
    groups = (
        Event
            .select(
                Event.camera,
                Event.label,
                fn.strftime('%Y-%m-%d', fn.datetime(Event.start_time, 'unixepoch', 'localtime')).alias('day'),
                Event.zones,
                fn.COUNT(Event.id).alias('count')
            )
            .group_by(
                Event.camera,
                Event.label,
                fn.strftime('%Y-%m-%d', fn.datetime(Event.start_time, 'unixepoch', 'localtime')),
                Event.zones
            )
        )

    return jsonify([e for e in groups.dicts()])
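
# Each summary row is a dict of the selected/aliased columns, e.g.
#   {"camera": "back_yard", "label": "person", "day": "2021-01-01", "zones": "[]", "count": 5}
# (the values shown are illustrative).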

@bp.route('/events/<id>')
def event(id):
    try:
        return model_to_dict(Event.get(Event.id == id))
    except DoesNotExist:
        return "Event not found", 404

@bp.route('/events/<id>/snapshot.jpg')
def event_snapshot(id):
    format = request.args.get('format', 'ios')
    thumbnail_bytes = None
    try:
        event = Event.get(Event.id == id)
        thumbnail_bytes = base64.b64decode(event.thumbnail)
    except DoesNotExist:
        # see if the object is currently being tracked
        try:
            for camera_state in current_app.detected_frames_processor.camera_states.values():
                if id in camera_state.tracked_objects:
                    tracked_obj = camera_state.tracked_objects.get(id)
                    if tracked_obj is not None:
                        thumbnail_bytes = tracked_obj.get_jpg_bytes()
        except Exception:
            return "Event not found", 404

    if thumbnail_bytes is None:
        return "Event not found", 404

    # android notifications prefer a 2:1 ratio
    if format == 'android':
        jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
        img = cv2.imdecode(jpg_as_np, flags=1)
        # pad the sides with black borders; pass the color via the `value` keyword so
        # it is not mistaken for the optional `dst` argument
        thumbnail = cv2.copyMakeBorder(img, 0, 0, int(img.shape[1]*0.5), int(img.shape[1]*0.5), cv2.BORDER_CONSTANT, value=(0, 0, 0))
        ret, jpg = cv2.imencode('.jpg', thumbnail)
        thumbnail_bytes = jpg.tobytes()

    response = make_response(thumbnail_bytes)
    response.headers['Content-Type'] = 'image/jpg'
    return response
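
# Example usage (the event id is illustrative): a client wanting the padded 2:1 image
# requests /events/<id>/snapshot.jpg?format=android; any other format value falls
# through to the stored thumbnail unchanged.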

@bp.route('/events')
def events():
    limit = request.args.get('limit', 100)
    camera = request.args.get('camera')
    label = request.args.get('label')
    zone = request.args.get('zone')
    after = request.args.get('after', type=int)
    before = request.args.get('before', type=int)

    clauses = []

    if camera:
        clauses.append((Event.camera == camera))
    if label:
        clauses.append((Event.label == label))
    if zone:
        clauses.append((Event.zones.cast('text') % f"*\"{zone}\"*"))
    if after:
        clauses.append((Event.start_time >= after))
    if before:
        clauses.append((Event.start_time <= before))

    if len(clauses) == 0:
        clauses.append((1 == 1))

    events = (Event.select()
              .where(reduce(operator.and_, clauses))
              .order_by(Event.start_time.desc())
              .limit(limit))

    return jsonify([model_to_dict(e) for e in events])
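
# All filters are optional and combine with AND, e.g. (parameter values illustrative):
#   GET /events?camera=back_yard&label=person&zone=driveway&after=1609459200&limit=10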

@bp.route('/config')
def config():
    return jsonify(current_app.frigate_config.to_dict())


@bp.route('/version')
def version():
    return VERSION

@bp.route('/stats')
def stats():
    camera_metrics = current_app.camera_metrics
    stats = {}

    total_detection_fps = 0

    for name, camera_stats in camera_metrics.items():
        total_detection_fps += camera_stats['detection_fps'].value
        stats[name] = {
            'camera_fps': round(camera_stats['camera_fps'].value, 2),
            'process_fps': round(camera_stats['process_fps'].value, 2),
            'skipped_fps': round(camera_stats['skipped_fps'].value, 2),
            'detection_fps': round(camera_stats['detection_fps'].value, 2),
            'pid': camera_stats['process'].pid,
            'capture_pid': camera_stats['capture_process'].pid
        }

    stats['detectors'] = {}
    for name, detector in current_app.detectors.items():
        stats['detectors'][name] = {
            'inference_speed': round(detector.avg_inference_speed.value*1000, 2),
            'detection_start': detector.detection_start.value,
            'pid': detector.detect_process.pid
        }
    stats['detection_fps'] = round(total_detection_fps, 2)

    return jsonify(stats)
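
# The response maps each camera name to its fps/pid stats and adds two extra keys:
# 'detectors' (per-detector inference speed, scaled by 1000, plus detection pids) and
# the aggregate 'detection_fps' across all cameras.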

@bp.route('/<camera_name>/<label>/best.jpg')
def best(camera_name, label):
    if camera_name in current_app.frigate_config.cameras:
        best_object = current_app.detected_frames_processor.get_best(camera_name, label)
        best_frame = best_object.get('frame')
        if best_frame is None:
            best_frame = np.zeros((720, 1280, 3), np.uint8)
        else:
            best_frame = cv2.cvtColor(best_frame, cv2.COLOR_YUV2BGR_I420)

        crop = bool(request.args.get('crop', 0, type=int))
        if crop:
            region = best_object.get('region', [0, 0, 300, 300])
            best_frame = best_frame[region[1]:region[3], region[0]:region[2]]

        height = int(request.args.get('h', str(best_frame.shape[0])))
        width = int(height*best_frame.shape[1]/best_frame.shape[0])

        best_frame = cv2.resize(best_frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
        ret, jpg = cv2.imencode('.jpg', best_frame)
        response = make_response(jpg.tobytes())
        response.headers['Content-Type'] = 'image/jpg'
        return response
    else:
        return "Camera named {} not found".format(camera_name), 404
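
# Example request (camera and label names are illustrative):
#   GET /back_yard/person/best.jpg?crop=1&h=300
# crops the best frame to its detected region and resizes it to 300px tall.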

@bp.route('/<camera_name>')
def mjpeg_feed(camera_name):
    fps = int(request.args.get('fps', '3'))
    height = int(request.args.get('h', '360'))
    if camera_name in current_app.frigate_config.cameras:
        # return a multipart response
        return Response(imagestream(current_app.detected_frames_processor, camera_name, fps, height),
                        mimetype='multipart/x-mixed-replace; boundary=frame')
    else:
        return "Camera named {} not found".format(camera_name), 404
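
# The multipart/x-mixed-replace stream can be dropped straight into an <img> tag, e.g.
#   <img src="http://frigate:5000/back_yard?fps=5&h=480">
# (the host, port, camera name and query values here are illustrative).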

@bp.route('/<camera_name>/latest.jpg')
def latest_frame(camera_name):
    if camera_name in current_app.frigate_config.cameras:
        frame = current_app.detected_frames_processor.get_current_frame(camera_name)
        if frame is None:
            frame = np.zeros((720, 1280, 3), np.uint8)

        height = int(request.args.get('h', str(frame.shape[0])))
        width = int(height*frame.shape[1]/frame.shape[0])

        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
        ret, jpg = cv2.imencode('.jpg', frame)
        response = make_response(jpg.tobytes())
        response.headers['Content-Type'] = 'image/jpg'
        return response
    else:
        return "Camera named {} not found".format(camera_name), 404

def imagestream(detected_frames_processor, camera_name, fps, height):
    while True:
        # max out at specified FPS
        time.sleep(1/fps)
        frame = detected_frames_processor.get_current_frame(camera_name, draw=True)
        if frame is None:
            frame = np.zeros((height, int(height*16/9), 3), np.uint8)

        width = int(height*frame.shape[1]/frame.shape[0])
        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_LINEAR)
        ret, jpg = cv2.imencode('.jpg', frame)
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + jpg.tobytes() + b'\r\n\r\n')