events.py

import datetime
import json
import logging
import os
import queue
import shutil
import subprocess as sp
import threading
import time
from collections import defaultdict
from pathlib import Path

import psutil
from peewee import fn

from frigate.config import FrigateConfig
from frigate.const import RECORD_DIR, CLIPS_DIR, CACHE_DIR
from frigate.models import Event

logger = logging.getLogger(__name__)
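
# EventProcessor consumes tracked object events from event_queue, assembles
# clips for finished events from cached ffmpeg segments, and persists events
# to the database.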
class EventProcessor(threading.Thread):
    def __init__(
        self, config, camera_processes, event_queue, event_processed_queue, stop_event
    ):
        threading.Thread.__init__(self)
        self.name = "event_processor"
        self.config = config
        self.camera_processes = camera_processes
        self.cached_clips = {}
        self.event_queue = event_queue
        self.event_processed_queue = event_processed_queue
        self.events_in_process = {}
        self.stop_event = stop_event
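
    # A clip is only created for true positives that entered at least one
    # required zone (when required zones are configured).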
    def should_create_clip(self, camera, event_data):
        if event_data["false_positive"]:
            return False

        # if there are required zones and there is no overlap
        required_zones = self.config.cameras[camera].clips.required_zones
        if len(required_zones) > 0 and not set(event_data["entered_zones"]) & set(
            required_zones
        ):
            logger.debug(
                f"Not creating clip for {event_data['id']} because it did not enter required zones"
            )
            return False

        return True
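
    # Scan CACHE_DIR for newly finished recording segments (skipping any files
    # still open by ffmpeg), probe their duration with ffprobe, and prune cached
    # segments that can no longer be needed by an in-progress event.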
    def refresh_cache(self):
        cached_files = os.listdir(CACHE_DIR)

        files_in_use = []
        for process in psutil.process_iter():
            try:
                if process.name() != "ffmpeg":
                    continue
                flist = process.open_files()
                if flist:
                    for nt in flist:
                        if nt.path.startswith(CACHE_DIR):
                            files_in_use.append(nt.path.split("/")[-1])
            except:
                continue

        for f in cached_files:
            if f in files_in_use or f in self.cached_clips:
                continue

            basename = os.path.splitext(f)[0]
            camera, date = basename.rsplit("-", maxsplit=1)
            start_time = datetime.datetime.strptime(date, "%Y%m%d%H%M%S")

            ffprobe_cmd = [
                "ffprobe",
                "-v",
                "error",
                "-show_entries",
                "format=duration",
                "-of",
                "default=noprint_wrappers=1:nokey=1",
                f"{os.path.join(CACHE_DIR, f)}",
            ]
            p = sp.run(ffprobe_cmd, capture_output=True)
            if p.returncode == 0:
                duration = float(p.stdout.decode().strip())
            else:
                logger.info(f"bad file: {f}")
                os.remove(os.path.join(CACHE_DIR, f))
                continue

            self.cached_clips[f] = {
                "path": f,
                "camera": camera,
                "start_time": start_time.timestamp(),
                "duration": duration,
            }

        if len(self.events_in_process) > 0:
            earliest_event = min(
                self.events_in_process.values(), key=lambda x: x["start_time"]
            )["start_time"]
        else:
            earliest_event = datetime.datetime.now().timestamp()

        # if the earliest event is more than max_seconds (from the global clips
        # config) ago, cap it
        max_seconds = self.config.clips.max_seconds
        earliest_event = max(
            earliest_event, datetime.datetime.now().timestamp() - max_seconds
        )

        for f, data in list(self.cached_clips.items()):
            if earliest_event - 90 > data["start_time"] + data["duration"]:
                del self.cached_clips[f]
                logger.debug(f"Cleaning up cached file {f}")
                os.remove(os.path.join(CACHE_DIR, f))

        # if we are still using more than 90% of the cache, proactively cleanup
        cache_usage = shutil.disk_usage("/tmp/cache")
        if (
            cache_usage.used / cache_usage.total > 0.9
            and cache_usage.free < 200000000
            and len(self.cached_clips) > 0
        ):
            logger.warning("More than 90% of the cache is used.")
            logger.warning(
                "Consider increasing space available at /tmp/cache or reducing max_seconds in your clips config."
            )
            logger.warning("Proactively cleaning up the cache...")
            while cache_usage.used / cache_usage.total > 0.9:
                oldest_clip = min(
                    self.cached_clips.values(), key=lambda x: x["start_time"]
                )
                del self.cached_clips[oldest_clip["path"]]
                os.remove(os.path.join(CACHE_DIR, oldest_clip["path"]))
                cache_usage = shutil.disk_usage("/tmp/cache")
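
    # Build an ffmpeg concat playlist from the cached segments that cover the
    # event (plus pre/post capture padding) and copy them into a single mp4 in
    # CLIPS_DIR, waiting up to ~25 seconds for the final segment to appear.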
    def create_clip(self, camera, event_data, pre_capture, post_capture):
        # get all clips from the camera with the event sorted
        sorted_clips = sorted(
            [c for c in self.cached_clips.values() if c["camera"] == camera],
            key=lambda i: i["start_time"],
        )

        # if there are no clips in the cache or we are still waiting on a needed file check every 5 seconds
        wait_count = 0
        while (
            len(sorted_clips) == 0
            or sorted_clips[-1]["start_time"] + sorted_clips[-1]["duration"]
            < event_data["end_time"] + post_capture
        ):
            if wait_count > 4:
                logger.warning(
                    f"Unable to create clip for {camera} and event {event_data['id']}. There were no cache files for this event."
                )
                return False

            logger.debug(f"No cache clips for {camera}. Waiting...")
            time.sleep(5)
            self.refresh_cache()
            # get all clips from the camera with the event sorted
            sorted_clips = sorted(
                [c for c in self.cached_clips.values() if c["camera"] == camera],
                key=lambda i: i["start_time"],
            )
            wait_count += 1

        playlist_start = event_data["start_time"] - pre_capture
        playlist_end = event_data["end_time"] + post_capture
        playlist_lines = []
        for clip in sorted_clips:
            # clip ends before playlist start time, skip
            if clip["start_time"] + clip["duration"] < playlist_start:
                continue
            # clip starts after playlist ends, finish
            if clip["start_time"] > playlist_end:
                break
            playlist_lines.append(f"file '{os.path.join(CACHE_DIR, clip['path'])}'")
            # if this is the starting clip, add an inpoint
            if clip["start_time"] < playlist_start:
                playlist_lines.append(
                    f"inpoint {int(playlist_start - clip['start_time'])}"
                )
            # if this is the ending clip, add an outpoint
            if clip["start_time"] + clip["duration"] > playlist_end:
                playlist_lines.append(
                    f"outpoint {int(playlist_end - clip['start_time'])}"
                )

        clip_name = f"{camera}-{event_data['id']}"
        ffmpeg_cmd = [
            "ffmpeg",
            "-y",
            "-protocol_whitelist",
            "pipe,file",
            "-f",
            "concat",
            "-safe",
            "0",
            "-i",
            "-",
            "-c",
            "copy",
            "-movflags",
            "+faststart",
            f"{os.path.join(CLIPS_DIR, clip_name)}.mp4",
        ]

        p = sp.run(
            ffmpeg_cmd,
            input="\n".join(playlist_lines),
            encoding="ascii",
            capture_output=True,
        )

        if p.returncode != 0:
            logger.error(p.stderr)
            return False

        return True
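
    # Main loop: track "start" events, and on "end" optionally create a clip
    # and record the event in the database.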
    def run(self):
        while not self.stop_event.is_set():
            try:
                event_type, camera, event_data = self.event_queue.get(timeout=10)
            except queue.Empty:
                if not self.stop_event.is_set():
                    self.refresh_cache()
                continue

            logger.debug(f"Event received: {event_type} {camera} {event_data['id']}")
            self.refresh_cache()

            if event_type == "start":
                self.events_in_process[event_data["id"]] = event_data

            if event_type == "end":
                clips_config = self.config.cameras[camera].clips

                clip_created = False
                if self.should_create_clip(camera, event_data):
                    if clips_config.enabled and (
                        clips_config.objects is None
                        or event_data["label"] in clips_config.objects
                    ):
                        clip_created = self.create_clip(
                            camera,
                            event_data,
                            clips_config.pre_capture,
                            clips_config.post_capture,
                        )

                if clip_created or event_data["has_snapshot"]:
                    Event.create(
                        id=event_data["id"],
                        label=event_data["label"],
                        camera=camera,
                        start_time=event_data["start_time"],
                        end_time=event_data["end_time"],
                        top_score=event_data["top_score"],
                        false_positive=event_data["false_positive"],
                        zones=list(event_data["entered_zones"]),
                        thumbnail=event_data["thumbnail"],
                        has_clip=clip_created,
                        has_snapshot=event_data["has_snapshot"],
                    )

                del self.events_in_process[event_data["id"]]
                self.event_processed_queue.put((event_data["id"], camera))

        logger.info(f"Exiting event processor...")
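
# EventCleanup periodically expires clips and snapshots per the retention
# config and purges duplicate events from the database.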
class EventCleanup(threading.Thread):
    def __init__(self, config: FrigateConfig, stop_event):
        threading.Thread.__init__(self)
        self.name = "event_cleanup"
        self.config = config
        self.stop_event = stop_event
        self.camera_keys = list(self.config.cameras.keys())
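
    # Delete expired media from disk and clear the has_clip/has_snapshot flag
    # on the corresponding events: first for cameras no longer in the config
    # (using the global retention), then per camera.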
    def expire(self, media):
        ## Expire events from unlisted cameras based on the global config
        if media == "clips":
            retain_config = self.config.clips.retain
            file_extension = "mp4"
            update_params = {"has_clip": False}
        else:
            retain_config = self.config.snapshots.retain
            file_extension = "jpg"
            update_params = {"has_snapshot": False}

        distinct_labels = (
            Event.select(Event.label)
            .where(Event.camera.not_in(self.camera_keys))
            .distinct()
        )

        # loop over object types in db
        for l in distinct_labels:
            # get expiration time for this label
            expire_days = retain_config.objects.get(l.label, retain_config.default)
            expire_after = (
                datetime.datetime.now() - datetime.timedelta(days=expire_days)
            ).timestamp()
            # grab all events older than the expiration cutoff
            expired_events = Event.select().where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.label == l.label,
            )
            # delete the media from disk
            for event in expired_events:
                media_name = f"{event.camera}-{event.id}"
                media_path = Path(
                    f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                )
                media_path.unlink(missing_ok=True)
            # update the has_clip/has_snapshot attribute for the db entries
            update_query = Event.update(update_params).where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.label == l.label,
            )
            update_query.execute()

        ## Expire events from cameras based on the camera config
        for name, camera in self.config.cameras.items():
            if media == "clips":
                retain_config = camera.clips.retain
            else:
                retain_config = camera.snapshots.retain

            # get distinct objects in database for this camera
            distinct_labels = (
                Event.select(Event.label).where(Event.camera == name).distinct()
            )

            # loop over object types in db
            for l in distinct_labels:
                # get expiration time for this label
                expire_days = retain_config.objects.get(l.label, retain_config.default)
                expire_after = (
                    datetime.datetime.now() - datetime.timedelta(days=expire_days)
                ).timestamp()
                # grab all events older than the expiration cutoff
                expired_events = Event.select().where(
                    Event.camera == name,
                    Event.start_time < expire_after,
                    Event.label == l.label,
                )
                # delete the expired media from disk
                for event in expired_events:
                    media_name = f"{event.camera}-{event.id}"
                    media_path = Path(
                        f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                    )
                    media_path.unlink(missing_ok=True)
                # update the has_clip/has_snapshot attribute for the db entries
                update_query = Event.update(update_params).where(
                    Event.camera == name,
                    Event.start_time < expire_after,
                    Event.label == l.label,
                )
                update_query.execute()
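
    # Events for the same label and camera whose start times round to the same
    # 5 second bucket are treated as duplicates; only the longest copy is kept.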
    def purge_duplicates(self):
        duplicate_query = """with grouped_events as (
          select id,
            label,
            camera,
            has_snapshot,
            has_clip,
            row_number() over (
              partition by label, camera, round(start_time/5,0)*5
              order by end_time-start_time desc
            ) as copy_number
          from event
        )

        select distinct id, camera, has_snapshot, has_clip from grouped_events
        where copy_number > 1;"""

        duplicate_events = Event.raw(duplicate_query)
        for event in duplicate_events:
            logger.debug(f"Removing duplicate: {event.id}")
            media_name = f"{event.camera}-{event.id}"
            if event.has_snapshot:
                media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
                media.unlink(missing_ok=True)
            if event.has_clip:
                media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
                media.unlink(missing_ok=True)

        (
            Event.delete()
            .where(Event.id << [event.id for event in duplicate_events])
            .execute()
        )

    def run(self):
        # only expire events every 5 minutes
        while not self.stop_event.wait(300):
            self.expire("clips")
            self.expire("snapshots")
            self.purge_duplicates()

            # drop events from db where has_clip and has_snapshot are false
            delete_query = Event.delete().where(
                Event.has_clip == False, Event.has_snapshot == False
            )
            delete_query.execute()

        logger.info(f"Exiting event cleanup...")