http.py 22 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732
  1. import base64
  2. from collections import OrderedDict
  3. from datetime import datetime, timedelta
  4. import json
  5. import glob
  6. import logging
  7. import os
  8. import re
  9. import subprocess as sp
  10. import time
  11. from functools import reduce
  12. from pathlib import Path
  13. import cv2
  14. from flask.helpers import send_file
  15. import numpy as np
  16. from flask import (
  17. Blueprint,
  18. Flask,
  19. Response,
  20. current_app,
  21. jsonify,
  22. make_response,
  23. request,
  24. )
  25. from peewee import SqliteDatabase, operator, fn, DoesNotExist, Value
  26. from playhouse.shortcuts import model_to_dict
  27. from frigate.const import CLIPS_DIR, RECORD_DIR
  28. from frigate.models import Event, Recordings
  29. from frigate.stats import stats_snapshot
  30. from frigate.util import calculate_region
  31. from frigate.version import VERSION
# Module-level logger for this HTTP API module.
logger = logging.getLogger(__name__)
# Blueprint that every route below attaches to; registered on the app in create_app().
bp = Blueprint("frigate", __name__)
def create_app(
    frigate_config,
    database: SqliteDatabase,
    stats_tracking,
    detected_frames_processor,
):
    """Build and return the Flask application.

    Wires per-request SQLite connection open/close hooks, attaches shared
    Frigate state (config, stats tracker, frame processor) onto the app
    object so views can reach it via ``current_app``, and registers the
    blueprint carrying all routes.
    """
    app = Flask(__name__)

    @app.before_request
    def _db_connect():
        # Lazily open the DB connection at the start of each request.
        if database.is_closed():
            database.connect()

    @app.teardown_request
    def _db_close(exc):
        # Always release the connection, even when the request raised.
        if not database.is_closed():
            database.close()

    # Shared state consumed by view functions through current_app.<attr>.
    app.frigate_config = frigate_config
    app.stats_tracking = stats_tracking
    app.detected_frames_processor = detected_frames_processor

    app.register_blueprint(bp)

    return app
  54. @bp.route("/")
  55. def is_healthy():
  56. return "Frigate is running. Alive and healthy!"
  57. @bp.route("/events/summary")
  58. def events_summary():
  59. has_clip = request.args.get("has_clip", type=int)
  60. has_snapshot = request.args.get("has_snapshot", type=int)
  61. clauses = []
  62. if not has_clip is None:
  63. clauses.append((Event.has_clip == has_clip))
  64. if not has_snapshot is None:
  65. clauses.append((Event.has_snapshot == has_snapshot))
  66. if len(clauses) == 0:
  67. clauses.append((True))
  68. groups = (
  69. Event.select(
  70. Event.camera,
  71. Event.label,
  72. fn.strftime(
  73. "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
  74. ).alias("day"),
  75. Event.zones,
  76. fn.COUNT(Event.id).alias("count"),
  77. )
  78. .where(reduce(operator.and_, clauses))
  79. .group_by(
  80. Event.camera,
  81. Event.label,
  82. fn.strftime(
  83. "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
  84. ),
  85. Event.zones,
  86. )
  87. )
  88. return jsonify([e for e in groups.dicts()])
  89. @bp.route("/events/<id>", methods=("GET",))
  90. def event(id):
  91. try:
  92. return model_to_dict(Event.get(Event.id == id))
  93. except DoesNotExist:
  94. return "Event not found", 404
  95. @bp.route("/events/<id>", methods=("DELETE",))
  96. def delete_event(id):
  97. try:
  98. event = Event.get(Event.id == id)
  99. except DoesNotExist:
  100. return make_response(
  101. jsonify({"success": False, "message": "Event" + id + " not found"}), 404
  102. )
  103. media_name = f"{event.camera}-{event.id}"
  104. if event.has_snapshot:
  105. media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
  106. media.unlink(missing_ok=True)
  107. if event.has_clip:
  108. media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
  109. media.unlink(missing_ok=True)
  110. event.delete_instance()
  111. return make_response(
  112. jsonify({"success": True, "message": "Event" + id + " deleted"}), 200
  113. )
  114. @bp.route("/events/<id>/thumbnail.jpg")
  115. def event_thumbnail(id):
  116. format = request.args.get("format", "ios")
  117. thumbnail_bytes = None
  118. try:
  119. event = Event.get(Event.id == id)
  120. thumbnail_bytes = base64.b64decode(event.thumbnail)
  121. except DoesNotExist:
  122. # see if the object is currently being tracked
  123. try:
  124. camera_states = current_app.detected_frames_processor.camera_states.values()
  125. for camera_state in camera_states:
  126. if id in camera_state.tracked_objects:
  127. tracked_obj = camera_state.tracked_objects.get(id)
  128. if not tracked_obj is None:
  129. thumbnail_bytes = tracked_obj.get_thumbnail()
  130. except:
  131. return "Event not found", 404
  132. if thumbnail_bytes is None:
  133. return "Event not found", 404
  134. # android notifications prefer a 2:1 ratio
  135. if format == "android":
  136. jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
  137. img = cv2.imdecode(jpg_as_np, flags=1)
  138. thumbnail = cv2.copyMakeBorder(
  139. img,
  140. 0,
  141. 0,
  142. int(img.shape[1] * 0.5),
  143. int(img.shape[1] * 0.5),
  144. cv2.BORDER_CONSTANT,
  145. (0, 0, 0),
  146. )
  147. ret, jpg = cv2.imencode(".jpg", thumbnail, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
  148. thumbnail_bytes = jpg.tobytes()
  149. response = make_response(thumbnail_bytes)
  150. response.headers["Content-Type"] = "image/jpg"
  151. return response
  152. @bp.route("/events/<id>/snapshot.jpg")
  153. def event_snapshot(id):
  154. download = request.args.get("download", type=bool)
  155. jpg_bytes = None
  156. try:
  157. event = Event.get(Event.id == id)
  158. if not event.has_snapshot:
  159. return "Snapshot not available", 404
  160. # read snapshot from disk
  161. with open(
  162. os.path.join(CLIPS_DIR, f"{event.camera}-{id}.jpg"), "rb"
  163. ) as image_file:
  164. jpg_bytes = image_file.read()
  165. except DoesNotExist:
  166. # see if the object is currently being tracked
  167. try:
  168. camera_states = current_app.detected_frames_processor.camera_states.values()
  169. for camera_state in camera_states:
  170. if id in camera_state.tracked_objects:
  171. tracked_obj = camera_state.tracked_objects.get(id)
  172. if not tracked_obj is None:
  173. jpg_bytes = tracked_obj.get_jpg_bytes(
  174. timestamp=request.args.get("timestamp", type=int),
  175. bounding_box=request.args.get("bbox", type=int),
  176. crop=request.args.get("crop", type=int),
  177. height=request.args.get("h", type=int),
  178. quality=request.args.get("quality", default=70, type=int),
  179. )
  180. except:
  181. return "Event not found", 404
  182. except:
  183. return "Event not found", 404
  184. if jpg_bytes is None:
  185. return "Event not found", 404
  186. response = make_response(jpg_bytes)
  187. response.headers["Content-Type"] = "image/jpg"
  188. if download:
  189. response.headers[
  190. "Content-Disposition"
  191. ] = f"attachment; filename=snapshot-{id}.jpg"
  192. return response
  193. @bp.route("/events/<id>/clip.mp4")
  194. def event_clip(id):
  195. download = request.args.get("download", type=bool)
  196. try:
  197. event: Event = Event.get(Event.id == id)
  198. except DoesNotExist:
  199. return "Event not found.", 404
  200. if not event.has_clip:
  201. return "Clip not available", 404
  202. event_config = current_app.frigate_config.cameras[event.camera].record.events
  203. start_ts = event.start_time - event_config.pre_capture
  204. end_ts = event.end_time + event_config.post_capture
  205. clip_path = os.path.join(CLIPS_DIR, f"{event.camera}-{id}.mp4")
  206. if not os.path.isfile(clip_path):
  207. return recording_clip(event.camera, start_ts, end_ts)
  208. return send_file(
  209. clip_path,
  210. mimetype="video/mp4",
  211. as_attachment=download,
  212. attachment_filename=f"{event.camera}_{start_ts}-{end_ts}.mp4",
  213. )
  214. @bp.route("/events")
  215. def events():
  216. limit = request.args.get("limit", 100)
  217. camera = request.args.get("camera")
  218. label = request.args.get("label")
  219. zone = request.args.get("zone")
  220. after = request.args.get("after", type=float)
  221. before = request.args.get("before", type=float)
  222. has_clip = request.args.get("has_clip", type=int)
  223. has_snapshot = request.args.get("has_snapshot", type=int)
  224. include_thumbnails = request.args.get("include_thumbnails", default=1, type=int)
  225. clauses = []
  226. excluded_fields = []
  227. if camera:
  228. clauses.append((Event.camera == camera))
  229. if label:
  230. clauses.append((Event.label == label))
  231. if zone:
  232. clauses.append((Event.zones.cast("text") % f'*"{zone}"*'))
  233. if after:
  234. clauses.append((Event.start_time >= after))
  235. if before:
  236. clauses.append((Event.start_time <= before))
  237. if not has_clip is None:
  238. clauses.append((Event.has_clip == has_clip))
  239. if not has_snapshot is None:
  240. clauses.append((Event.has_snapshot == has_snapshot))
  241. if not include_thumbnails:
  242. excluded_fields.append(Event.thumbnail)
  243. if len(clauses) == 0:
  244. clauses.append((True))
  245. events = (
  246. Event.select()
  247. .where(reduce(operator.and_, clauses))
  248. .order_by(Event.start_time.desc())
  249. .limit(limit)
  250. )
  251. return jsonify([model_to_dict(e, exclude=excluded_fields) for e in events])
  252. @bp.route("/config")
  253. def config():
  254. config = current_app.frigate_config.dict()
  255. # add in the ffmpeg_cmds
  256. for camera_name, camera in current_app.frigate_config.cameras.items():
  257. camera_dict = config["cameras"][camera_name]
  258. camera_dict["ffmpeg_cmds"] = camera.ffmpeg_cmds
  259. for cmd in camera_dict["ffmpeg_cmds"]:
  260. cmd["cmd"] = " ".join(cmd["cmd"])
  261. return jsonify(config)
  262. @bp.route("/config/schema")
  263. def config_schema():
  264. return current_app.response_class(
  265. current_app.frigate_config.schema_json(), mimetype="application/json"
  266. )
  267. @bp.route("/version")
  268. def version():
  269. return VERSION
  270. @bp.route("/stats")
  271. def stats():
  272. stats = stats_snapshot(current_app.stats_tracking)
  273. return jsonify(stats)
  274. @bp.route("/<camera_name>/<label>/best.jpg")
  275. def best(camera_name, label):
  276. if camera_name in current_app.frigate_config.cameras:
  277. best_object = current_app.detected_frames_processor.get_best(camera_name, label)
  278. best_frame = best_object.get("frame")
  279. if best_frame is None:
  280. best_frame = np.zeros((720, 1280, 3), np.uint8)
  281. else:
  282. best_frame = cv2.cvtColor(best_frame, cv2.COLOR_YUV2BGR_I420)
  283. crop = bool(request.args.get("crop", 0, type=int))
  284. if crop:
  285. box = best_object.get("box", (0, 0, 300, 300))
  286. region = calculate_region(
  287. best_frame.shape, box[0], box[1], box[2], box[3], 1.1
  288. )
  289. best_frame = best_frame[region[1] : region[3], region[0] : region[2]]
  290. height = int(request.args.get("h", str(best_frame.shape[0])))
  291. width = int(height * best_frame.shape[1] / best_frame.shape[0])
  292. resize_quality = request.args.get("quality", default=70, type=int)
  293. best_frame = cv2.resize(
  294. best_frame, dsize=(width, height), interpolation=cv2.INTER_AREA
  295. )
  296. ret, jpg = cv2.imencode(
  297. ".jpg", best_frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
  298. )
  299. response = make_response(jpg.tobytes())
  300. response.headers["Content-Type"] = "image/jpg"
  301. return response
  302. else:
  303. return "Camera named {} not found".format(camera_name), 404
  304. @bp.route("/<camera_name>")
  305. def mjpeg_feed(camera_name):
  306. fps = int(request.args.get("fps", "3"))
  307. height = int(request.args.get("h", "360"))
  308. draw_options = {
  309. "bounding_boxes": request.args.get("bbox", type=int),
  310. "timestamp": request.args.get("timestamp", type=int),
  311. "zones": request.args.get("zones", type=int),
  312. "mask": request.args.get("mask", type=int),
  313. "motion_boxes": request.args.get("motion", type=int),
  314. "regions": request.args.get("regions", type=int),
  315. }
  316. if camera_name in current_app.frigate_config.cameras:
  317. # return a multipart response
  318. return Response(
  319. imagestream(
  320. current_app.detected_frames_processor,
  321. camera_name,
  322. fps,
  323. height,
  324. draw_options,
  325. ),
  326. mimetype="multipart/x-mixed-replace; boundary=frame",
  327. )
  328. else:
  329. return "Camera named {} not found".format(camera_name), 404
  330. @bp.route("/<camera_name>/latest.jpg")
  331. def latest_frame(camera_name):
  332. draw_options = {
  333. "bounding_boxes": request.args.get("bbox", type=int),
  334. "timestamp": request.args.get("timestamp", type=int),
  335. "zones": request.args.get("zones", type=int),
  336. "mask": request.args.get("mask", type=int),
  337. "motion_boxes": request.args.get("motion", type=int),
  338. "regions": request.args.get("regions", type=int),
  339. }
  340. resize_quality = request.args.get("quality", default=70, type=int)
  341. if camera_name in current_app.frigate_config.cameras:
  342. frame = current_app.detected_frames_processor.get_current_frame(
  343. camera_name, draw_options
  344. )
  345. if frame is None:
  346. frame = np.zeros((720, 1280, 3), np.uint8)
  347. height = int(request.args.get("h", str(frame.shape[0])))
  348. width = int(height * frame.shape[1] / frame.shape[0])
  349. frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
  350. ret, jpg = cv2.imencode(
  351. ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
  352. )
  353. response = make_response(jpg.tobytes())
  354. response.headers["Content-Type"] = "image/jpg"
  355. return response
  356. else:
  357. return "Camera named {} not found".format(camera_name), 404
@bp.route("/<camera_name>/recordings")
def recordings(camera_name):
    """Summarize a camera's recordings grouped by day and hour.

    For each hour the response carries a "delay" map (seconds-into-hour ->
    gap length, marking holes in recording coverage) and the list of merged
    events whose start time falls in that hour.
    """
    dates = OrderedDict()

    # Retrieve all recordings for this camera
    recordings = (
        Recordings.select()
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

    last_end = 0
    recording: Recordings
    for recording in recordings:
        date = datetime.fromtimestamp(recording.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")

        # Create Day Record
        if key not in dates:
            dates[key] = OrderedDict()

        # Create Hour Record
        if hour not in dates[key]:
            dates[key][hour] = {"delay": {}, "events": []}

        # Check for delay
        the_hour = datetime.strptime(f"{key} {hour}", "%Y-%m-%d %H").timestamp()
        # diff current recording start time and the greater of the previous end time or top of the hour
        diff = recording.start_time - max(last_end, the_hour)
        # Determine seconds into recording
        seconds = 0
        if datetime.fromtimestamp(last_end).strftime("%H") == hour:
            seconds = int(last_end - the_hour)
        # Determine the delay (capped so it never runs past the hour)
        delay = min(int(diff), 3600 - seconds)
        if delay > 1:
            # Add an offset for any delay greater than a second
            dates[key][hour]["delay"][seconds] = delay
        last_end = recording.end_time

    # Packing intervals to return all events with same label and overlapping times as one row.
    # See: https://blogs.solidq.com/en/sqlserver/packing-intervals/
    # NOTE: end_time is padded by 15s so nearly-adjacent events merge too.
    events = Event.raw(
        """WITH C1 AS
        (
        SELECT id, label, camera, top_score, start_time AS ts, +1 AS type, 1 AS sub
        FROM event
        WHERE camera = ?
        UNION ALL
        SELECT id, label, camera, top_score, end_time + 15 AS ts, -1 AS type, 0 AS sub
        FROM event
        WHERE camera = ?
        ),
        C2 AS
        (
        SELECT C1.*,
        SUM(type) OVER(PARTITION BY label ORDER BY ts, type DESC
        ROWS BETWEEN UNBOUNDED PRECEDING
        AND CURRENT ROW) - sub AS cnt
        FROM C1
        ),
        C3 AS
        (
        SELECT id, label, camera, top_score, ts,
        (ROW_NUMBER() OVER(PARTITION BY label ORDER BY ts) - 1) / 2 + 1
        AS grpnum
        FROM C2
        WHERE cnt = 0
        )
        SELECT MIN(id) as id, label, camera, MAX(top_score) as top_score, MIN(ts) AS start_time, max(ts) AS end_time
        FROM C3
        GROUP BY label, grpnum
        ORDER BY start_time;""",
        camera_name,
        camera_name,
    )

    # Attach each merged event to the day/hour bucket its start falls in.
    event: Event
    for event in events:
        date = datetime.fromtimestamp(event.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")
        if key in dates and hour in dates[key]:
            dates[key][hour]["events"].append(
                model_to_dict(
                    event,
                    exclude=[
                        Event.false_positive,
                        Event.zones,
                        Event.thumbnail,
                        Event.has_clip,
                        Event.has_snapshot,
                    ],
                )
            )

    return jsonify(
        [
            {
                "date": date,
                "events": sum([len(value["events"]) for value in hours.values()]),
                "recordings": [
                    {"hour": hour, "delay": value["delay"], "events": value["events"]}
                    for hour, value in hours.items()
                ],
            }
            for date, hours in dates.items()
        ]
    )
  460. @bp.route("/<camera>/start/<int:start_ts>/end/<int:end_ts>/clip.mp4")
  461. @bp.route("/<camera>/start/<float:start_ts>/end/<float:end_ts>/clip.mp4")
  462. def recording_clip(camera, start_ts, end_ts):
  463. download = request.args.get("download", type=bool)
  464. recordings = (
  465. Recordings.select()
  466. .where(
  467. (Recordings.start_time.between(start_ts, end_ts))
  468. | (Recordings.end_time.between(start_ts, end_ts))
  469. | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
  470. )
  471. .where(Recordings.camera == camera)
  472. .order_by(Recordings.start_time.asc())
  473. )
  474. playlist_lines = []
  475. clip: Recordings
  476. for clip in recordings:
  477. playlist_lines.append(f"file '{clip.path}'")
  478. # if this is the starting clip, add an inpoint
  479. if clip.start_time < start_ts:
  480. playlist_lines.append(f"inpoint {int(start_ts - clip.start_time)}")
  481. # if this is the ending clip, add an outpoint
  482. if clip.end_time > end_ts:
  483. playlist_lines.append(f"outpoint {int(end_ts - clip.start_time)}")
  484. path = f"/tmp/cache/tmp_clip_{camera}_{start_ts}-{end_ts}.mp4"
  485. ffmpeg_cmd = [
  486. "ffmpeg",
  487. "-y",
  488. "-protocol_whitelist",
  489. "pipe,file",
  490. "-f",
  491. "concat",
  492. "-safe",
  493. "0",
  494. "-i",
  495. "-",
  496. "-c",
  497. "copy",
  498. "-movflags",
  499. "+faststart",
  500. path,
  501. ]
  502. p = sp.run(
  503. ffmpeg_cmd,
  504. input="\n".join(playlist_lines),
  505. encoding="ascii",
  506. capture_output=True,
  507. )
  508. if p.returncode != 0:
  509. logger.error(p.stderr)
  510. return f"Could not create clip from recordings for {camera}.", 500
  511. mp4_bytes = None
  512. try:
  513. # read clip from disk
  514. with open(path, "rb") as mp4_file:
  515. mp4_bytes = mp4_file.read()
  516. # delete after we have the bytes
  517. os.remove(path)
  518. except DoesNotExist:
  519. return f"Could not create clip from recordings for {camera}.", 500
  520. response = make_response(mp4_bytes)
  521. response.mimetype = "video/mp4"
  522. if download:
  523. response.headers[
  524. "Content-Disposition"
  525. ] = f"attachment; filename={camera}_{start_ts}-{end_ts}.mp4"
  526. return response
  527. @bp.route("/vod/<camera>/start/<int:start_ts>/end/<int:end_ts>")
  528. @bp.route("/vod/<camera>/start/<float:start_ts>/end/<float:end_ts>")
  529. def vod_ts(camera, start_ts, end_ts):
  530. recordings = (
  531. Recordings.select()
  532. .where(
  533. Recordings.start_time.between(start_ts, end_ts)
  534. | Recordings.end_time.between(start_ts, end_ts)
  535. | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
  536. )
  537. .where(Recordings.camera == camera)
  538. .order_by(Recordings.start_time.asc())
  539. )
  540. clips = []
  541. durations = []
  542. recording: Recordings
  543. for recording in recordings:
  544. clip = {"type": "source", "path": recording.path}
  545. duration = int(recording.duration * 1000)
  546. # Determine if offset is needed for first clip
  547. if recording.start_time < start_ts:
  548. offset = int((start_ts - recording.start_time) * 1000)
  549. clip["clipFrom"] = offset
  550. duration -= offset
  551. # Determine if we need to end the last clip early
  552. if recording.end_time > end_ts:
  553. duration -= int((recording.end_time - end_ts) * 1000)
  554. clips.append(clip)
  555. durations.append(duration)
  556. if not clips:
  557. return "No recordings found.", 404
  558. hour_ago = datetime.now() - timedelta(hours=1)
  559. return jsonify(
  560. {
  561. "cache": hour_ago.timestamp() > start_ts,
  562. "discontinuity": False,
  563. "durations": durations,
  564. "sequences": [{"clips": clips}],
  565. }
  566. )
  567. @bp.route("/vod/<year_month>/<day>/<hour>/<camera>")
  568. def vod_hour(year_month, day, hour, camera):
  569. start_date = datetime.strptime(f"{year_month}-{day} {hour}", "%Y-%m-%d %H")
  570. end_date = start_date + timedelta(hours=1) - timedelta(milliseconds=1)
  571. start_ts = start_date.timestamp()
  572. end_ts = end_date.timestamp()
  573. return vod_ts(camera, start_ts, end_ts)
  574. @bp.route("/vod/event/<id>")
  575. def vod_event(id):
  576. try:
  577. event: Event = Event.get(Event.id == id)
  578. except DoesNotExist:
  579. return "Event not found.", 404
  580. if not event.has_clip:
  581. return "Clip not available", 404
  582. event_config = current_app.frigate_config.cameras[event.camera].record.events
  583. start_ts = event.start_time - event_config.pre_capture
  584. end_ts = event.end_time + event_config.post_capture
  585. clip_path = os.path.join(CLIPS_DIR, f"{event.camera}-{id}.mp4")
  586. if not os.path.isfile(clip_path):
  587. return vod_ts(event.camera, start_ts, end_ts)
  588. duration = int((end_ts - start_ts) * 1000)
  589. return jsonify(
  590. {
  591. "cache": True,
  592. "discontinuity": False,
  593. "durations": [duration],
  594. "sequences": [{"clips": [{"type": "source", "path": clip_path}]}],
  595. }
  596. )
  597. def imagestream(detected_frames_processor, camera_name, fps, height, draw_options):
  598. while True:
  599. # max out at specified FPS
  600. time.sleep(1 / fps)
  601. frame = detected_frames_processor.get_current_frame(camera_name, draw_options)
  602. if frame is None:
  603. frame = np.zeros((height, int(height * 16 / 9), 3), np.uint8)
  604. width = int(height * frame.shape[1] / frame.shape[0])
  605. frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_LINEAR)
  606. ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
  607. yield (
  608. b"--frame\r\n"
  609. b"Content-Type: image/jpeg\r\n\r\n" + jpg.tobytes() + b"\r\n\r\n"
  610. )