# http.py — Frigate HTTP API (approx. 24 KB)
  1. import base64
  2. from collections import OrderedDict
  3. from datetime import datetime, timedelta
  4. import copy
  5. import json
  6. import glob
  7. import logging
  8. import os
  9. import re
  10. import subprocess as sp
  11. import time
  12. from functools import reduce
  13. from pathlib import Path
  14. import cv2
  15. from flask.helpers import send_file
  16. import numpy as np
  17. from flask import (
  18. Blueprint,
  19. Flask,
  20. Response,
  21. current_app,
  22. jsonify,
  23. make_response,
  24. request,
  25. )
  26. from peewee import SqliteDatabase, operator, fn, DoesNotExist, Value
  27. from playhouse.shortcuts import model_to_dict
  28. from frigate.const import CLIPS_DIR, RECORD_DIR
  29. from frigate.models import Event, Recordings
  30. from frigate.stats import stats_snapshot
  31. from frigate.util import calculate_region
  32. from frigate.version import VERSION
  33. logger = logging.getLogger(__name__)
  34. bp = Blueprint("frigate", __name__)
def create_app(
    frigate_config,
    database: SqliteDatabase,
    stats_tracking,
    detected_frames_processor,
):
    """Create the Flask app serving Frigate's HTTP API.

    Installs per-request database connect/teardown hooks and stores the
    shared config, stats tracker, and frame processor on the app object so
    route handlers can reach them through ``current_app``.
    """
    app = Flask(__name__)

    @app.before_request
    def _db_connect():
        # Open the SQLite connection lazily at the start of each request.
        if database.is_closed():
            database.connect()

    @app.teardown_request
    def _db_close(exc):
        # Release the connection even when the request handler raised.
        if not database.is_closed():
            database.close()

    app.frigate_config = frigate_config
    app.stats_tracking = stats_tracking
    app.detected_frames_processor = detected_frames_processor

    app.register_blueprint(bp)

    return app
  55. @bp.route("/")
  56. def is_healthy():
  57. return "Frigate is running. Alive and healthy!"
  58. @bp.route("/events/summary")
  59. def events_summary():
  60. has_clip = request.args.get("has_clip", type=int)
  61. has_snapshot = request.args.get("has_snapshot", type=int)
  62. clauses = []
  63. if not has_clip is None:
  64. clauses.append((Event.has_clip == has_clip))
  65. if not has_snapshot is None:
  66. clauses.append((Event.has_snapshot == has_snapshot))
  67. if len(clauses) == 0:
  68. clauses.append((True))
  69. groups = (
  70. Event.select(
  71. Event.camera,
  72. Event.label,
  73. fn.strftime(
  74. "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
  75. ).alias("day"),
  76. Event.zones,
  77. fn.COUNT(Event.id).alias("count"),
  78. )
  79. .where(reduce(operator.and_, clauses))
  80. .group_by(
  81. Event.camera,
  82. Event.label,
  83. fn.strftime(
  84. "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", "localtime")
  85. ),
  86. Event.zones,
  87. )
  88. )
  89. return jsonify([e for e in groups.dicts()])
  90. @bp.route("/events/<id>", methods=("GET",))
  91. def event(id):
  92. try:
  93. return model_to_dict(Event.get(Event.id == id))
  94. except DoesNotExist:
  95. return "Event not found", 404
  96. @bp.route("/events/<id>", methods=("DELETE",))
  97. def delete_event(id):
  98. try:
  99. event = Event.get(Event.id == id)
  100. except DoesNotExist:
  101. return make_response(
  102. jsonify({"success": False, "message": "Event" + id + " not found"}), 404
  103. )
  104. media_name = f"{event.camera}-{event.id}"
  105. if event.has_snapshot:
  106. media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
  107. media.unlink(missing_ok=True)
  108. media = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
  109. media.unlink(missing_ok=True)
  110. if event.has_clip:
  111. media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
  112. media.unlink(missing_ok=True)
  113. event.delete_instance()
  114. return make_response(
  115. jsonify({"success": True, "message": "Event" + id + " deleted"}), 200
  116. )
  117. @bp.route("/events/<id>/thumbnail.jpg")
  118. def event_thumbnail(id):
  119. format = request.args.get("format", "ios")
  120. thumbnail_bytes = None
  121. try:
  122. event = Event.get(Event.id == id)
  123. thumbnail_bytes = base64.b64decode(event.thumbnail)
  124. except DoesNotExist:
  125. # see if the object is currently being tracked
  126. try:
  127. camera_states = current_app.detected_frames_processor.camera_states.values()
  128. for camera_state in camera_states:
  129. if id in camera_state.tracked_objects:
  130. tracked_obj = camera_state.tracked_objects.get(id)
  131. if not tracked_obj is None:
  132. thumbnail_bytes = tracked_obj.get_thumbnail()
  133. except:
  134. return "Event not found", 404
  135. if thumbnail_bytes is None:
  136. return "Event not found", 404
  137. # android notifications prefer a 2:1 ratio
  138. if format == "android":
  139. jpg_as_np = np.frombuffer(thumbnail_bytes, dtype=np.uint8)
  140. img = cv2.imdecode(jpg_as_np, flags=1)
  141. thumbnail = cv2.copyMakeBorder(
  142. img,
  143. 0,
  144. 0,
  145. int(img.shape[1] * 0.5),
  146. int(img.shape[1] * 0.5),
  147. cv2.BORDER_CONSTANT,
  148. (0, 0, 0),
  149. )
  150. ret, jpg = cv2.imencode(".jpg", thumbnail, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
  151. thumbnail_bytes = jpg.tobytes()
  152. response = make_response(thumbnail_bytes)
  153. response.headers["Content-Type"] = "image/jpeg"
  154. return response
  155. @bp.route("/events/<id>/snapshot.jpg")
  156. def event_snapshot(id):
  157. download = request.args.get("download", type=bool)
  158. jpg_bytes = None
  159. try:
  160. event = Event.get(Event.id == id, Event.end_time != None)
  161. if not event.has_snapshot:
  162. return "Snapshot not available", 404
  163. # read snapshot from disk
  164. with open(
  165. os.path.join(CLIPS_DIR, f"{event.camera}-{id}.jpg"), "rb"
  166. ) as image_file:
  167. jpg_bytes = image_file.read()
  168. except DoesNotExist:
  169. # see if the object is currently being tracked
  170. try:
  171. camera_states = current_app.detected_frames_processor.camera_states.values()
  172. for camera_state in camera_states:
  173. if id in camera_state.tracked_objects:
  174. tracked_obj = camera_state.tracked_objects.get(id)
  175. if not tracked_obj is None:
  176. jpg_bytes = tracked_obj.get_jpg_bytes(
  177. timestamp=request.args.get("timestamp", type=int),
  178. bounding_box=request.args.get("bbox", type=int),
  179. crop=request.args.get("crop", type=int),
  180. height=request.args.get("h", type=int),
  181. quality=request.args.get("quality", default=70, type=int),
  182. )
  183. except:
  184. return "Event not found", 404
  185. except:
  186. return "Event not found", 404
  187. if jpg_bytes is None:
  188. return "Event not found", 404
  189. response = make_response(jpg_bytes)
  190. response.headers["Content-Type"] = "image/jpeg"
  191. if download:
  192. response.headers[
  193. "Content-Disposition"
  194. ] = f"attachment; filename=snapshot-{id}.jpg"
  195. return response
  196. @bp.route("/events/<id>/clip.mp4")
  197. def event_clip(id):
  198. download = request.args.get("download", type=bool)
  199. try:
  200. event: Event = Event.get(Event.id == id)
  201. except DoesNotExist:
  202. return "Event not found.", 404
  203. if not event.has_clip:
  204. return "Clip not available", 404
  205. file_name = f"{event.camera}-{id}.mp4"
  206. clip_path = os.path.join(CLIPS_DIR, file_name)
  207. if not os.path.isfile(clip_path):
  208. return recording_clip(event.camera, event.start_time, event.end_time)
  209. response = make_response()
  210. response.headers["Content-Description"] = "File Transfer"
  211. response.headers["Cache-Control"] = "no-cache"
  212. response.headers["Content-Type"] = "video/mp4"
  213. if download:
  214. response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
  215. response.headers["Content-Length"] = os.path.getsize(clip_path)
  216. response.headers[
  217. "X-Accel-Redirect"
  218. ] = f"/clips/{file_name}" # nginx: http://wiki.nginx.org/NginxXSendfile
  219. return response
  220. @bp.route("/events")
  221. def events():
  222. limit = request.args.get("limit", 100)
  223. camera = request.args.get("camera")
  224. label = request.args.get("label")
  225. zone = request.args.get("zone")
  226. after = request.args.get("after", type=float)
  227. before = request.args.get("before", type=float)
  228. has_clip = request.args.get("has_clip", type=int)
  229. has_snapshot = request.args.get("has_snapshot", type=int)
  230. include_thumbnails = request.args.get("include_thumbnails", default=1, type=int)
  231. clauses = []
  232. excluded_fields = []
  233. if camera:
  234. clauses.append((Event.camera == camera))
  235. if label:
  236. clauses.append((Event.label == label))
  237. if zone:
  238. clauses.append((Event.zones.cast("text") % f'*"{zone}"*'))
  239. if after:
  240. clauses.append((Event.start_time >= after))
  241. if before:
  242. clauses.append((Event.start_time <= before))
  243. if not has_clip is None:
  244. clauses.append((Event.has_clip == has_clip))
  245. if not has_snapshot is None:
  246. clauses.append((Event.has_snapshot == has_snapshot))
  247. if not include_thumbnails:
  248. excluded_fields.append(Event.thumbnail)
  249. if len(clauses) == 0:
  250. clauses.append((True))
  251. events = (
  252. Event.select()
  253. .where(reduce(operator.and_, clauses))
  254. .order_by(Event.start_time.desc())
  255. .limit(limit)
  256. )
  257. return jsonify([model_to_dict(e, exclude=excluded_fields) for e in events])
  258. @bp.route("/config")
  259. def config():
  260. config = current_app.frigate_config.dict()
  261. # add in the ffmpeg_cmds
  262. for camera_name, camera in current_app.frigate_config.cameras.items():
  263. camera_dict = config["cameras"][camera_name]
  264. camera_dict["ffmpeg_cmds"] = copy.deepcopy(camera.ffmpeg_cmds)
  265. for cmd in camera_dict["ffmpeg_cmds"]:
  266. cmd["cmd"] = " ".join(cmd["cmd"])
  267. return jsonify(config)
  268. @bp.route("/config/schema")
  269. def config_schema():
  270. return current_app.response_class(
  271. current_app.frigate_config.schema_json(), mimetype="application/json"
  272. )
  273. @bp.route("/version")
  274. def version():
  275. return VERSION
  276. @bp.route("/stats")
  277. def stats():
  278. stats = stats_snapshot(current_app.stats_tracking)
  279. return jsonify(stats)
  280. @bp.route("/<camera_name>/<label>/best.jpg")
  281. def best(camera_name, label):
  282. if camera_name in current_app.frigate_config.cameras:
  283. best_object = current_app.detected_frames_processor.get_best(camera_name, label)
  284. best_frame = best_object.get("frame")
  285. if best_frame is None:
  286. best_frame = np.zeros((720, 1280, 3), np.uint8)
  287. else:
  288. best_frame = cv2.cvtColor(best_frame, cv2.COLOR_YUV2BGR_I420)
  289. crop = bool(request.args.get("crop", 0, type=int))
  290. if crop:
  291. box_size = 300
  292. box = best_object.get("box", (0, 0, box_size, box_size))
  293. region = calculate_region(
  294. best_frame.shape,
  295. box[0],
  296. box[1],
  297. box[2],
  298. box[3],
  299. box_size,
  300. multiplier=1.1,
  301. )
  302. best_frame = best_frame[region[1] : region[3], region[0] : region[2]]
  303. height = int(request.args.get("h", str(best_frame.shape[0])))
  304. width = int(height * best_frame.shape[1] / best_frame.shape[0])
  305. resize_quality = request.args.get("quality", default=70, type=int)
  306. best_frame = cv2.resize(
  307. best_frame, dsize=(width, height), interpolation=cv2.INTER_AREA
  308. )
  309. ret, jpg = cv2.imencode(
  310. ".jpg", best_frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
  311. )
  312. response = make_response(jpg.tobytes())
  313. response.headers["Content-Type"] = "image/jpeg"
  314. return response
  315. else:
  316. return "Camera named {} not found".format(camera_name), 404
  317. @bp.route("/<camera_name>")
  318. def mjpeg_feed(camera_name):
  319. fps = int(request.args.get("fps", "3"))
  320. height = int(request.args.get("h", "360"))
  321. draw_options = {
  322. "bounding_boxes": request.args.get("bbox", type=int),
  323. "timestamp": request.args.get("timestamp", type=int),
  324. "zones": request.args.get("zones", type=int),
  325. "mask": request.args.get("mask", type=int),
  326. "motion_boxes": request.args.get("motion", type=int),
  327. "regions": request.args.get("regions", type=int),
  328. }
  329. if camera_name in current_app.frigate_config.cameras:
  330. # return a multipart response
  331. return Response(
  332. imagestream(
  333. current_app.detected_frames_processor,
  334. camera_name,
  335. fps,
  336. height,
  337. draw_options,
  338. ),
  339. mimetype="multipart/x-mixed-replace; boundary=frame",
  340. )
  341. else:
  342. return "Camera named {} not found".format(camera_name), 404
  343. @bp.route("/<camera_name>/latest.jpg")
  344. def latest_frame(camera_name):
  345. draw_options = {
  346. "bounding_boxes": request.args.get("bbox", type=int),
  347. "timestamp": request.args.get("timestamp", type=int),
  348. "zones": request.args.get("zones", type=int),
  349. "mask": request.args.get("mask", type=int),
  350. "motion_boxes": request.args.get("motion", type=int),
  351. "regions": request.args.get("regions", type=int),
  352. }
  353. resize_quality = request.args.get("quality", default=70, type=int)
  354. if camera_name in current_app.frigate_config.cameras:
  355. frame = current_app.detected_frames_processor.get_current_frame(
  356. camera_name, draw_options
  357. )
  358. if frame is None:
  359. frame = np.zeros((720, 1280, 3), np.uint8)
  360. height = int(request.args.get("h", str(frame.shape[0])))
  361. width = int(height * frame.shape[1] / frame.shape[0])
  362. frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
  363. ret, jpg = cv2.imencode(
  364. ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
  365. )
  366. response = make_response(jpg.tobytes())
  367. response.headers["Content-Type"] = "image/jpeg"
  368. return response
  369. else:
  370. return "Camera named {} not found".format(camera_name), 404
@bp.route("/<camera_name>/recordings")
def recordings(camera_name):
    """Summarize a camera's recordings grouped by day and hour.

    Each hour entry carries ``delay`` (a map of seconds-into-hour -> length
    of a gap in recording coverage) and ``events`` (merged event rows that
    started in that hour).
    """
    dates = OrderedDict()

    # Retrieve all recordings for this camera
    recordings = (
        Recordings.select()
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

    last_end = 0
    recording: Recordings
    for recording in recordings:
        date = datetime.fromtimestamp(recording.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")

        # Create Day Record
        if key not in dates:
            dates[key] = OrderedDict()

        # Create Hour Record
        if hour not in dates[key]:
            dates[key][hour] = {"delay": {}, "events": []}

        # Check for delay
        the_hour = datetime.strptime(f"{key} {hour}", "%Y-%m-%d %H").timestamp()
        # diff current recording start time and the greater of the previous end time or top of the hour
        diff = recording.start_time - max(last_end, the_hour)
        # Determine seconds into recording
        seconds = 0
        if datetime.fromtimestamp(last_end).strftime("%H") == hour:
            seconds = int(last_end - the_hour)
        # Determine the delay
        delay = min(int(diff), 3600 - seconds)
        if delay > 1:
            # Add an offset for any delay greater than a second
            dates[key][hour]["delay"][seconds] = delay
        last_end = recording.end_time

    # Packing intervals to return all events with same label and overlapping times as one row.
    # See: https://blogs.solidq.com/en/sqlserver/packing-intervals/
    # NOTE(review): end_time + 15 pads each event by 15s so near-adjacent
    # events of the same label merge — presumably intentional; confirm.
    events = Event.raw(
        """WITH C1 AS
        (
        SELECT id, label, camera, top_score, start_time AS ts, +1 AS type, 1 AS sub
        FROM event
        WHERE camera = ?
        UNION ALL
        SELECT id, label, camera, top_score, end_time + 15 AS ts, -1 AS type, 0 AS sub
        FROM event
        WHERE camera = ?
        ),
        C2 AS
        (
        SELECT C1.*,
        SUM(type) OVER(PARTITION BY label ORDER BY ts, type DESC
        ROWS BETWEEN UNBOUNDED PRECEDING
        AND CURRENT ROW) - sub AS cnt
        FROM C1
        ),
        C3 AS
        (
        SELECT id, label, camera, top_score, ts,
        (ROW_NUMBER() OVER(PARTITION BY label ORDER BY ts) - 1) / 2 + 1
        AS grpnum
        FROM C2
        WHERE cnt = 0
        )
        SELECT id, label, camera, top_score, start_time, end_time
        FROM event
        WHERE camera = ? AND end_time IS NULL
        UNION ALL
        SELECT MIN(id) as id, label, camera, MAX(top_score) as top_score, MIN(ts) AS start_time, max(ts) AS end_time
        FROM C3
        GROUP BY label, grpnum
        ORDER BY start_time;""",
        camera_name,
        camera_name,
        camera_name,
    )

    event: Event
    for event in events:
        date = datetime.fromtimestamp(event.start_time)
        key = date.strftime("%Y-%m-%d")
        hour = date.strftime("%H")
        # events are attached only to hours that already have recordings
        if key in dates and hour in dates[key]:
            dates[key][hour]["events"].append(
                model_to_dict(
                    event,
                    exclude=[
                        Event.false_positive,
                        Event.zones,
                        Event.thumbnail,
                        Event.has_clip,
                        Event.has_snapshot,
                    ],
                )
            )

    return jsonify(
        [
            {
                "date": date,
                "events": sum([len(value["events"]) for value in hours.values()]),
                "recordings": [
                    {"hour": hour, "delay": value["delay"], "events": value["events"]}
                    for hour, value in hours.items()
                ],
            }
            for date, hours in dates.items()
        ]
    )
  478. @bp.route("/<camera>/start/<int:start_ts>/end/<int:end_ts>/clip.mp4")
  479. @bp.route("/<camera>/start/<float:start_ts>/end/<float:end_ts>/clip.mp4")
  480. def recording_clip(camera, start_ts, end_ts):
  481. download = request.args.get("download", type=bool)
  482. recordings = (
  483. Recordings.select()
  484. .where(
  485. (Recordings.start_time.between(start_ts, end_ts))
  486. | (Recordings.end_time.between(start_ts, end_ts))
  487. | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
  488. )
  489. .where(Recordings.camera == camera)
  490. .order_by(Recordings.start_time.asc())
  491. )
  492. playlist_lines = []
  493. clip: Recordings
  494. for clip in recordings:
  495. playlist_lines.append(f"file '{clip.path}'")
  496. # if this is the starting clip, add an inpoint
  497. if clip.start_time < start_ts:
  498. playlist_lines.append(f"inpoint {int(start_ts - clip.start_time)}")
  499. # if this is the ending clip, add an outpoint
  500. if clip.end_time > end_ts:
  501. playlist_lines.append(f"outpoint {int(end_ts - clip.start_time)}")
  502. file_name = f"clip_{camera}_{start_ts}-{end_ts}.mp4"
  503. path = f"/tmp/cache/{file_name}"
  504. ffmpeg_cmd = [
  505. "ffmpeg",
  506. "-y",
  507. "-protocol_whitelist",
  508. "pipe,file",
  509. "-f",
  510. "concat",
  511. "-safe",
  512. "0",
  513. "-i",
  514. "-",
  515. "-c",
  516. "copy",
  517. "-movflags",
  518. "+faststart",
  519. path,
  520. ]
  521. p = sp.run(
  522. ffmpeg_cmd,
  523. input="\n".join(playlist_lines),
  524. encoding="ascii",
  525. capture_output=True,
  526. )
  527. if p.returncode != 0:
  528. logger.error(p.stderr)
  529. return f"Could not create clip from recordings for {camera}.", 500
  530. response = make_response()
  531. response.headers["Content-Description"] = "File Transfer"
  532. response.headers["Cache-Control"] = "no-cache"
  533. response.headers["Content-Type"] = "video/mp4"
  534. if download:
  535. response.headers["Content-Disposition"] = "attachment; filename=%s" % file_name
  536. response.headers["Content-Length"] = os.path.getsize(path)
  537. response.headers[
  538. "X-Accel-Redirect"
  539. ] = f"/cache/{file_name}" # nginx: http://wiki.nginx.org/NginxXSendfile
  540. return response
  541. @bp.route("/vod/<camera>/start/<int:start_ts>/end/<int:end_ts>")
  542. @bp.route("/vod/<camera>/start/<float:start_ts>/end/<float:end_ts>")
  543. def vod_ts(camera, start_ts, end_ts):
  544. recordings = (
  545. Recordings.select()
  546. .where(
  547. Recordings.start_time.between(start_ts, end_ts)
  548. | Recordings.end_time.between(start_ts, end_ts)
  549. | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
  550. )
  551. .where(Recordings.camera == camera)
  552. .order_by(Recordings.start_time.asc())
  553. )
  554. clips = []
  555. durations = []
  556. recording: Recordings
  557. for recording in recordings:
  558. clip = {"type": "source", "path": recording.path}
  559. duration = int(recording.duration * 1000)
  560. # Determine if offset is needed for first clip
  561. if recording.start_time < start_ts:
  562. offset = int((start_ts - recording.start_time) * 1000)
  563. clip["clipFrom"] = offset
  564. duration -= offset
  565. # Determine if we need to end the last clip early
  566. if recording.end_time > end_ts:
  567. duration -= int((recording.end_time - end_ts) * 1000)
  568. if duration > 0:
  569. clips.append(clip)
  570. durations.append(duration)
  571. else:
  572. logger.warning(f"Recording clip is missing or empty: {recording.path}")
  573. if not clips:
  574. logger.error("No recordings found for the requested time range")
  575. return "No recordings found.", 404
  576. hour_ago = datetime.now() - timedelta(hours=1)
  577. return jsonify(
  578. {
  579. "cache": hour_ago.timestamp() > start_ts,
  580. "discontinuity": False,
  581. "durations": durations,
  582. "sequences": [{"clips": clips}],
  583. }
  584. )
  585. @bp.route("/vod/<year_month>/<day>/<hour>/<camera>")
  586. def vod_hour(year_month, day, hour, camera):
  587. start_date = datetime.strptime(f"{year_month}-{day} {hour}", "%Y-%m-%d %H")
  588. end_date = start_date + timedelta(hours=1) - timedelta(milliseconds=1)
  589. start_ts = start_date.timestamp()
  590. end_ts = end_date.timestamp()
  591. return vod_ts(camera, start_ts, end_ts)
  592. @bp.route("/vod/event/<id>")
  593. def vod_event(id):
  594. try:
  595. event: Event = Event.get(Event.id == id)
  596. except DoesNotExist:
  597. logger.error(f"Event not found: {id}")
  598. return "Event not found.", 404
  599. if not event.has_clip:
  600. logger.error(f"Event does not have recordings: {id}")
  601. return "Recordings not available", 404
  602. clip_path = os.path.join(CLIPS_DIR, f"{event.camera}-{id}.mp4")
  603. if not os.path.isfile(clip_path):
  604. end_ts = (
  605. datetime.now().timestamp() if event.end_time is None else event.end_time
  606. )
  607. vod_response = vod_ts(event.camera, event.start_time, end_ts)
  608. # If the recordings are not found, set has_clip to false
  609. if (
  610. type(vod_response) == tuple
  611. and len(vod_response) == 2
  612. and vod_response[1] == 404
  613. ):
  614. Event.update(has_clip=False).where(Event.id == id).execute()
  615. return vod_response
  616. duration = int((event.end_time - event.start_time) * 1000)
  617. return jsonify(
  618. {
  619. "cache": True,
  620. "discontinuity": False,
  621. "durations": [duration],
  622. "sequences": [{"clips": [{"type": "source", "path": clip_path}]}],
  623. }
  624. )
  625. def imagestream(detected_frames_processor, camera_name, fps, height, draw_options):
  626. while True:
  627. # max out at specified FPS
  628. time.sleep(1 / fps)
  629. frame = detected_frames_processor.get_current_frame(camera_name, draw_options)
  630. if frame is None:
  631. frame = np.zeros((height, int(height * 16 / 9), 3), np.uint8)
  632. width = int(height * frame.shape[1] / frame.shape[0])
  633. frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_LINEAR)
  634. ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
  635. yield (
  636. b"--frame\r\n"
  637. b"Content-Type: image/jpeg\r\n\r\n" + jpg.tobytes() + b"\r\n\r\n"
  638. )