detect_objects.py

import cv2
import time
import queue
import yaml
import numpy as np
from flask import Flask, Response, make_response, jsonify
import paho.mqtt.client as mqtt

from frigate.video import Camera
from frigate.object_detection import PreppedQueueProcessor
from frigate.util import EventsPerSecond

with open('/config/config.yml') as f:
    CONFIG = yaml.safe_load(f)

MQTT_HOST = CONFIG['mqtt']['host']
MQTT_PORT = CONFIG.get('mqtt', {}).get('port', 1883)
MQTT_TOPIC_PREFIX = CONFIG.get('mqtt', {}).get('topic_prefix', 'frigate')
MQTT_USER = CONFIG.get('mqtt', {}).get('user')
MQTT_PASS = CONFIG.get('mqtt', {}).get('password')
MQTT_CLIENT_ID = CONFIG.get('mqtt', {}).get('client_id', 'frigate')
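
# Example 'mqtt' section of config.yml (illustrative values only, not read from this file):
#   mqtt:
#     host: mqtt.example.com
#     port: 1883
#     topic_prefix: frigate
#     user: frigate_user      # optional
#     password: secret        # optional
#     client_id: frigate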

# Set the default FFmpeg config
FFMPEG_CONFIG = CONFIG.get('ffmpeg', {})
FFMPEG_DEFAULT_CONFIG = {
    'global_args': FFMPEG_CONFIG.get('global_args',
        ['-hide_banner', '-loglevel', 'panic']),
    'hwaccel_args': FFMPEG_CONFIG.get('hwaccel_args',
        []),
    'input_args': FFMPEG_CONFIG.get('input_args',
        ['-avoid_negative_ts', 'make_zero',
         '-fflags', 'nobuffer',
         '-flags', 'low_delay',
         '-strict', 'experimental',
         '-fflags', '+genpts+discardcorrupt',
         '-vsync', 'drop',
         '-rtsp_transport', 'tcp',
         '-stimeout', '5000000',
         '-use_wallclock_as_timestamps', '1']),
    'output_args': FFMPEG_CONFIG.get('output_args',
        ['-vf', 'mpdecimate',
         '-f', 'rawvideo',
         '-pix_fmt', 'rgb24'])
}
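
# Each default above is only used when the corresponding key is missing from
# the 'ffmpeg' section of config.yml.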

GLOBAL_OBJECT_CONFIG = CONFIG.get('objects', {})

WEB_PORT = CONFIG.get('web_port', 5000)
DEBUG = (CONFIG.get('debug', '0') == '1')

def main():
    # connect to mqtt and set up the last will
    def on_connect(client, userdata, flags, rc):
        print("On connect called")
        if rc != 0:
            if rc == 3:
                print("MQTT Server unavailable")
            elif rc == 4:
                print("MQTT Bad username or password")
            elif rc == 5:
                print("MQTT Not authorized")
            else:
                print("Unable to connect to MQTT: Connection refused. Error code: " + str(rc))
        # publish a message to signal that the service is running
        client.publish(MQTT_TOPIC_PREFIX + '/available', 'online', retain=True)

    client = mqtt.Client(client_id=MQTT_CLIENT_ID)
    client.on_connect = on_connect
    client.will_set(MQTT_TOPIC_PREFIX + '/available', payload='offline', qos=1, retain=True)
    if MQTT_USER is not None:
        client.username_pw_set(MQTT_USER, password=MQTT_PASS)
    client.connect(MQTT_HOST, MQTT_PORT, 60)
    client.loop_start()
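
    # loop_start() runs the paho-mqtt network loop in a background thread, so
    # publishes made elsewhere in the process are delivered asynchronously.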

    # Queue of prepped frames waiting for object detection (unbounded here)
    prepped_frame_queue = queue.Queue()
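
    # Create one Camera per entry in the 'cameras' section of config.yml; each
    # shares the prepped frame queue and the MQTT client.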
    cameras = {}
    for name, config in CONFIG['cameras'].items():
        cameras[name] = Camera(name, FFMPEG_DEFAULT_CONFIG, GLOBAL_OBJECT_CONFIG, config,
            prepped_frame_queue, client, MQTT_TOPIC_PREFIX)

    fps_tracker = EventsPerSecond()
    prepped_queue_processor = PreppedQueueProcessor(
        cameras,
        prepped_frame_queue,
        fps_tracker
    )
    prepped_queue_processor.start()
    fps_tracker.start()
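
    # The processor pulls prepped frames off the shared queue and runs object
    # detection on them; fps_tracker measures how many frames per second make
    # it through detection, reported under 'coral' by /debug/stats below.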

    for name, camera in cameras.items():
        camera.start()
        print("Capture process for {}: {}".format(name, camera.get_capture_pid()))

    # create a flask app that encodes frames as mjpeg on demand
    app = Flask(__name__)

    @app.route('/')
    def ishealthy():
        # return a health check response
        return "Frigate is running. Alive and healthy!"

    @app.route('/debug/stats')
    def stats():
        stats = {
            'coral': {
                'fps': fps_tracker.eps(),
                'inference_speed': prepped_queue_processor.avg_inference_speed,
                'queue_length': prepped_frame_queue.qsize()
            }
        }

        for name, camera in cameras.items():
            stats[name] = camera.stats()

        return jsonify(stats)
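
    # Example response shape (values illustrative; per-camera keys depend on
    # what Camera.stats() returns):
    #   {
    #     "coral": {"fps": 10.0, "inference_speed": 12.5, "queue_length": 0},
    #     "back_yard": { ... }    # one entry per configured camera
    #   }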

    @app.route('/<camera_name>/<label>/best.jpg')
    def best(camera_name, label):
        if camera_name in cameras:
            best_frame = cameras[camera_name].get_best(label)
            if best_frame is None:
                best_frame = np.zeros((720, 1280, 3), np.uint8)
            best_frame = cv2.cvtColor(best_frame, cv2.COLOR_RGB2BGR)
            ret, jpg = cv2.imencode('.jpg', best_frame)
            response = make_response(jpg.tobytes())
            response.headers['Content-Type'] = 'image/jpg'
            return response
        else:
            return "Camera named {} not found".format(camera_name), 404
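
    # e.g. GET /back_yard/person/best.jpg ('back_yard' and 'person' are example
    # names) returns the camera's current best frame for that label, or a blank
    # 720p image if there is none yet.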

    @app.route('/<camera_name>')
    def mjpeg_feed(camera_name):
        if camera_name in cameras:
            # return a multipart response
            return Response(imagestream(camera_name),
                            mimetype='multipart/x-mixed-replace; boundary=frame')
        else:
            return "Camera named {} not found".format(camera_name), 404

    def imagestream(camera_name):
        while True:
            # max out at 1 FPS
            time.sleep(1)
            frame = cameras[camera_name].get_current_frame_with_objects()
            yield (b'--frame\r\n'
                   b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n\r\n')
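
    # Each yielded chunk is one part of the multipart/x-mixed-replace stream;
    # the browser replaces the previous JPEG with each new frame, producing a
    # simple MJPEG feed.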

    app.run(host='0.0.0.0', port=WEB_PORT, debug=False)

    camera.join()


if __name__ == '__main__':
    main()