record.py 9.7 KB

import datetime
import itertools
import logging
import os
import random
import shutil
import string
import subprocess as sp
import threading
from pathlib import Path

import psutil
from peewee import JOIN, DoesNotExist

from frigate.config import FrigateConfig
from frigate.const import CACHE_DIR, RECORD_DIR
from frigate.models import Event, Recordings

logger = logging.getLogger(__name__)

SECONDS_IN_DAY = 60 * 60 * 24


def remove_empty_directories(directory):
    # list all directories recursively and sort them by path,
    # longest first
    paths = sorted(
        [x[0] for x in os.walk(directory)],
        key=lambda p: len(str(p)),
        reverse=True,
    )
    for path in paths:
        # don't delete the parent
        if path == directory:
            continue
        if len(os.listdir(path)) == 0:
            os.rmdir(path)


class RecordingMaintainer(threading.Thread):
    def __init__(self, config: FrigateConfig, stop_event):
        threading.Thread.__init__(self)
        self.name = "recording_maint"
        self.config = config
        self.stop_event = stop_event
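
    # Move completed segments out of the cache:
    # - skip any .mp4 an ffmpeg process still has open
    # - delete segments for cameras that no longer record
    # - probe the duration with ffprobe, copy the file into
    #   RECORD_DIR/<YYYY-MM>/<DD>/<HH>/<camera>/ and index it in Recordings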
    def move_files(self):
        recordings = [
            d
            for d in os.listdir(CACHE_DIR)
            if os.path.isfile(os.path.join(CACHE_DIR, d))
            and d.endswith(".mp4")
            and not d.startswith("clip_")
        ]

        files_in_use = []
        for process in psutil.process_iter():
            try:
                if process.name() != "ffmpeg":
                    continue
                flist = process.open_files()
                if flist:
                    for nt in flist:
                        if nt.path.startswith(CACHE_DIR):
                            files_in_use.append(nt.path.split("/")[-1])
            except psutil.Error:
                continue

        for f in recordings:
            # Skip files currently in use
            if f in files_in_use:
                continue

            cache_path = os.path.join(CACHE_DIR, f)
            basename = os.path.splitext(f)[0]
            camera, date = basename.rsplit("-", maxsplit=1)
            start_time = datetime.datetime.strptime(date, "%Y%m%d%H%M%S")

            # Just delete files if recordings are turned off
            if (
                camera not in self.config.cameras
                or not self.config.cameras[camera].record.enabled
            ):
                Path(cache_path).unlink(missing_ok=True)
                continue

            ffprobe_cmd = [
                "ffprobe",
                "-v",
                "error",
                "-show_entries",
                "format=duration",
                "-of",
                "default=noprint_wrappers=1:nokey=1",
                cache_path,
            ]
            p = sp.run(ffprobe_cmd, capture_output=True)
            if p.returncode == 0:
                duration = float(p.stdout.decode().strip())
                end_time = start_time + datetime.timedelta(seconds=duration)
            else:
                logger.info(f"bad file: {f}")
                Path(cache_path).unlink(missing_ok=True)
                continue

            directory = os.path.join(
                RECORD_DIR, start_time.strftime("%Y-%m/%d/%H"), camera
            )

            if not os.path.exists(directory):
                os.makedirs(directory)

            file_name = start_time.strftime("%M.%S.mp4")
            file_path = os.path.join(directory, file_name)

            # copy then delete is required when recordings are stored on some network drives
            shutil.copyfile(cache_path, file_path)
            os.remove(cache_path)
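
            # random suffix keeps ids unique when segments share a start timestamp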
            rand_id = "".join(
                random.choices(string.ascii_lowercase + string.digits, k=6)
            )
            Recordings.create(
                id=f"{start_time.timestamp()}-{rand_id}",
                camera=camera,
                path=file_path,
                start_time=start_time.timestamp(),
                end_time=end_time.timestamp(),
                duration=duration,
            )

    def run(self):
        # Check for new files every 5 seconds
        while not self.stop_event.wait(5):
            self.move_files()

        logger.info("Exiting recording maintenance...")


class RecordingCleanup(threading.Thread):
    def __init__(self, config: FrigateConfig, stop_event):
        threading.Thread.__init__(self)
        self.name = "recording_cleanup"
        self.config = config
        self.stop_event = stop_event

    def clean_tmp_clips(self):
        # delete any tmp clips more than a minute old
        for p in Path("/tmp/cache").rglob("clip_*.mp4"):
            logger.debug(f"Checking tmp clip {p}.")
            if p.stat().st_mtime < (datetime.datetime.now().timestamp() - 60 * 1):
                logger.debug("Deleting tmp clip.")
                p.unlink(missing_ok=True)
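
    # Expire rows in the Recordings table:
    # first delete segments belonging to cameras removed from the config,
    # then, per camera, delete segments past the retention window unless
    # they overlap an event that still has a clip.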
    def expire_recordings(self):
        logger.debug("Start expire recordings (new).")

        logger.debug("Start deleted cameras.")
        # Handle deleted cameras
        expire_days = self.config.record.retain_days
        expire_before = (
            datetime.datetime.now() - datetime.timedelta(days=expire_days)
        ).timestamp()
        no_camera_recordings: Recordings = Recordings.select().where(
            Recordings.camera.not_in(list(self.config.cameras.keys())),
            Recordings.end_time < expire_before,
        )

        for recording in no_camera_recordings:
            Path(recording.path).unlink(missing_ok=True)
            Recordings.delete_by_id(recording.id)
        logger.debug("End deleted cameras.")

        logger.debug("Start all cameras.")
        for camera, config in self.config.cameras.items():
            logger.debug(f"Start camera: {camera}.")
            # When deleting recordings without events, we have to keep at LEAST the configured max clip duration
            min_end = (
                datetime.datetime.now()
                - datetime.timedelta(seconds=config.record.events.max_seconds)
            ).timestamp()
            expire_days = config.record.retain_days
            expire_before = (
                datetime.datetime.now() - datetime.timedelta(days=expire_days)
            ).timestamp()
            expire_date = min(min_end, expire_before)

            # Get recordings to check for expiration
            recordings: Recordings = Recordings.select().where(
                Recordings.camera == camera,
                Recordings.end_time < expire_date,
            )

            # Get all the events to check against
            events: Event = Event.select().where(
                Event.camera == camera, Event.end_time < expire_date, Event.has_clip
            )

            # loop over recordings and see if they overlap with any non-expired events
            for recording in recordings:
                keep = False
                for event in events:
                    if (
                        (  # event starts in this segment
                            event.start_time > recording.start_time
                            and event.start_time < recording.end_time
                        )
                        or (  # event ends in this segment
                            event.end_time > recording.start_time
                            and event.end_time < recording.end_time
                        )
                        or (  # event spans this segment
                            recording.start_time > event.start_time
                            and recording.end_time < event.end_time
                        )
                    ):
                        keep = True
                        break

                # Delete recordings outside of the retention window
                if not keep:
                    Path(recording.path).unlink(missing_ok=True)
                    Recordings.delete_by_id(recording.id)

            logger.debug(f"End camera: {camera}.")

        logger.debug("End all cameras.")
        logger.debug("End expire recordings (new).")
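
    # Legacy expiration for files not tracked in the database: only files older
    # than the oldest Recordings row are considered, and they are deleted based
    # on file mtime against the per-camera (or default) retain_days.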
    def expire_files(self):
        logger.debug("Start expire files (legacy).")

        default_expire = (
            datetime.datetime.now().timestamp()
            - SECONDS_IN_DAY * self.config.record.retain_days
        )
        delete_before = {}
        for name, camera in self.config.cameras.items():
            delete_before[name] = (
                datetime.datetime.now().timestamp()
                - SECONDS_IN_DAY * camera.record.retain_days
            )

        # find all the recordings older than the oldest recording in the db
        try:
            oldest_recording = (
                Recordings.select().order_by(Recordings.start_time.asc()).get()
            )
            oldest_timestamp = oldest_recording.start_time
        except DoesNotExist:
            oldest_timestamp = datetime.datetime.now().timestamp()

        logger.debug(f"Oldest recording in the db: {oldest_timestamp}")
        process = sp.run(
            ["find", RECORD_DIR, "-type", "f", "!", "-newermt", f"@{oldest_timestamp}"],
            capture_output=True,
            text=True,
        )
        files_to_check = process.stdout.splitlines()

        for f in files_to_check:
            p = Path(f)
            if p.stat().st_mtime < delete_before.get(p.parent.name, default_expire):
                p.unlink(missing_ok=True)

        logger.debug("End expire files (legacy).")

    def run(self):
        # Expire recordings every minute, clean directories every hour.
        for counter in itertools.cycle(range(60)):
            if self.stop_event.wait(60):
                logger.info("Exiting recording cleanup...")
                break

            self.expire_recordings()
            self.clean_tmp_clips()

            if counter == 0:
                self.expire_files()
                remove_empty_directories(RECORD_DIR)