* Change the API boundary between stream and camera Shift more of the stream lifecycle management to the camera. The motivation is to support stream urls that expire giving the camera the ability to change the stream once it is created. * Document stream lifecycle and simplify stream/camera interaction * Reorder create_stream function to reduce diffs * Increase test coverage for camera_sdm.py * Fix ffmpeg typo. * Add a stream identifier for each stream, managed by camera * Remove stream record service * Update homeassistant/components/stream/__init__.py Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io> * Unroll changes to Stream interface back into camera component * Fix preload stream to actually start the background worker * Reduce unnecessary diffs for readability * Remove redundant camera stream start code Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
256 lines
8.6 KiB
Python
256 lines
8.6 KiB
Python
"""Provide functionality to stream video source.
|
|
|
|
Components use create_stream with a stream source (e.g. an rtsp url) to create
|
|
a new Stream object. Stream manages:
|
|
- Background work to fetch and decode a stream
|
|
- Desired output formats
|
|
- Home Assistant URLs for viewing a stream
|
|
- Access tokens for URLs for viewing a stream
|
|
|
|
A Stream consists of a background worker, and one or more output formats each
|
|
with their own idle timeout managed by the stream component. When an output
|
|
format is no longer in use, the stream component will expire it. When there
|
|
are no active output formats, the background worker is shut down and access
|
|
tokens are expired. Alternatively, a Stream can be configured with keepalive
|
|
to always keep workers active.
|
|
"""
|
|
import logging
|
|
import secrets
|
|
import threading
|
|
import time
|
|
from types import MappingProxyType
|
|
|
|
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
|
|
from homeassistant.core import callback
|
|
from homeassistant.exceptions import HomeAssistantError
|
|
|
|
from .const import (
|
|
ATTR_ENDPOINTS,
|
|
ATTR_STREAMS,
|
|
DOMAIN,
|
|
MAX_SEGMENTS,
|
|
OUTPUT_IDLE_TIMEOUT,
|
|
STREAM_RESTART_INCREMENT,
|
|
STREAM_RESTART_RESET_TIME,
|
|
)
|
|
from .core import PROVIDERS, IdleTimer
|
|
from .hls import async_setup_hls
|
|
|
|
_LOGGER = logging.getLogger(__name__)
|
|
|
|
|
|
def create_stream(hass, stream_source, options=None):
    """Create and register a stream for the specified source url.

    The stream_source is typically an rtsp url and options are passed into
    pyav / ffmpeg as options.

    Raises HomeAssistantError if the stream integration is not set up.
    """
    if DOMAIN not in hass.config.components:
        raise HomeAssistantError("Stream integration is not set up.")

    if options is None:
        options = {}

    # For RTSP streams, prefer TCP. Explicit caller options still win since
    # they are merged in last.
    if isinstance(stream_source, str) and stream_source.startswith("rtsp://"):
        options = {
            "rtsp_flags": "prefer_tcp",
            "stimeout": "5000000",
            **options,
        }

    stream = Stream(hass, stream_source, options=options)
    # Track the stream so it can be shut down on Home Assistant stop
    hass.data[DOMAIN][ATTR_STREAMS].append(stream)
    return stream
|
|
|
|
|
|
async def async_setup(hass, config):
    """Set up the stream component."""
    # libav is noisy; only surface errors from it
    for noisy_logger in ("libav", "libav.mp4"):
        logging.getLogger(noisy_logger).setLevel(logging.ERROR)

    # Keep import here so that we can import stream integration without installing reqs
    # pylint: disable=import-outside-toplevel
    from .recorder import async_setup_recorder

    hass.data[DOMAIN] = {
        ATTR_ENDPOINTS: {},
        ATTR_STREAMS: [],
    }

    # Setup HLS
    hass.data[DOMAIN][ATTR_ENDPOINTS]["hls"] = async_setup_hls(hass)

    # Setup Recorder
    async_setup_recorder(hass)

    @callback
    def shutdown(event):
        """Stop all stream workers."""
        for stream in hass.data[DOMAIN][ATTR_STREAMS]:
            stream.keepalive = False
            stream.stop()
        _LOGGER.info("Stopped stream workers")

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)

    return True
|
|
|
|
|
|
class Stream:
    """Represents a single stream."""

    def __init__(self, hass, source, options=None):
        """Initialize a stream.

        source is typically an rtsp url; options are passed through to
        pyav / ffmpeg when the background worker opens the source.
        """
        self.hass = hass
        self.source = source
        # Normalize here so the rest of the class can assume a dict
        self.options = options if options is not None else {}
        # When True, the worker is restarted on failure and is not stopped
        # when the last output expires.
        self.keepalive = False
        self.access_token = None
        self._thread = None
        self._thread_quit = None
        self._outputs = {}

    def endpoint_url(self, fmt):
        """Return an access-token protected url for viewing the output format.

        Raises ValueError if the stream has no output for fmt.
        """
        if fmt not in self._outputs:
            raise ValueError(f"Stream is not configured for format '{fmt}'")
        if not self.access_token:
            self.access_token = secrets.token_hex()
        return self.hass.data[DOMAIN][ATTR_ENDPOINTS][fmt].format(self.access_token)

    @property
    def outputs(self):
        """Return a copy of the stream outputs."""
        # A copy is returned so the caller can iterate through the outputs
        # without concern about self._outputs being modified from another thread.
        return MappingProxyType(self._outputs.copy())

    def add_provider(self, fmt, timeout=OUTPUT_IDLE_TIMEOUT):
        """Add provider output stream, creating it on first use.

        The output is expired by IdleTimer after `timeout` seconds of
        inactivity unless the stream has keepalive set.
        """
        if not self._outputs.get(fmt):

            @callback
            def idle_callback():
                # Expire the idle output; keepalive streams keep their outputs
                if not self.keepalive and fmt in self._outputs:
                    self.remove_provider(self._outputs[fmt])
                self.check_idle()

            provider = PROVIDERS[fmt](
                self.hass, IdleTimer(self.hass, timeout, idle_callback)
            )
            self._outputs[fmt] = provider
        return self._outputs[fmt]

    def remove_provider(self, provider):
        """Remove provider output stream, stopping the stream if none remain."""
        if provider.name in self._outputs:
            self._outputs[provider.name].cleanup()
            del self._outputs[provider.name]

        if not self._outputs:
            self.stop()

    def check_idle(self):
        """Reset access token if all providers are idle."""
        # Generator expression avoids the intermediate list the original
        # all([...]) built.
        if all(p.idle for p in self._outputs.values()):
            self.access_token = None

    def start(self):
        """Start the background worker thread if it is not already running."""
        if self._thread is None or not self._thread.is_alive():
            if self._thread is not None:
                # The thread must have crashed/exited. Join to clean up the
                # previous thread.
                self._thread.join(timeout=0)
            self._thread_quit = threading.Event()
            self._thread = threading.Thread(
                name="stream_worker",
                target=self._run_worker,
            )
            self._thread.start()
            _LOGGER.info("Started stream: %s", self.source)

    def _run_worker(self):
        """Handle consuming streams and restart keepalive streams."""
        # Keep import here so that we can import stream integration without installing reqs
        # pylint: disable=import-outside-toplevel
        from .worker import stream_worker

        wait_timeout = 0
        while not self._thread_quit.wait(timeout=wait_timeout):
            start_time = time.time()
            stream_worker(self.hass, self, self._thread_quit)
            # Only keepalive streams restart; everything else exits the loop
            if not self.keepalive or self._thread_quit.is_set():
                break

            # To avoid excessive restarts, wait before restarting
            # As the required recovery time may be different for different setups, start
            # with trying a short wait_timeout and increase it on each reconnection attempt.
            # Reset the wait_timeout after the worker has been up for several minutes
            if time.time() - start_time > STREAM_RESTART_RESET_TIME:
                wait_timeout = 0
            wait_timeout += STREAM_RESTART_INCREMENT
            _LOGGER.debug(
                "Restarting stream worker in %d seconds: %s",
                wait_timeout,
                self.source,
            )
        self._worker_finished()

    def _worker_finished(self):
        """Schedule cleanup of all outputs."""

        @callback
        def remove_outputs():
            for provider in self.outputs.values():
                self.remove_provider(provider)

        # remove_provider must run on the event loop; the worker runs in a thread
        self.hass.loop.call_soon_threadsafe(remove_outputs)

    def stop(self):
        """Remove outputs and access token."""
        self._outputs = {}
        self.access_token = None

        # keepalive streams keep their worker running even with no outputs
        if not self.keepalive:
            self._stop()

    def _stop(self):
        """Stop worker thread."""
        if self._thread is not None:
            self._thread_quit.set()
            self._thread.join()
            self._thread = None
            _LOGGER.info("Stopped stream: %s", self.source)

    async def async_record(self, video_path, duration=30, lookback=5):
        """Make a .mp4 recording from a provided stream.

        Raises HomeAssistantError if video_path is not writable or a
        recording is already in progress.
        """

        # Check for file access
        if not self.hass.config.is_allowed_path(video_path):
            raise HomeAssistantError(f"Can't write {video_path}, no access to path!")

        # Add recorder
        recorder = self.outputs.get("recorder")
        if recorder:
            raise HomeAssistantError(
                f"Stream already recording to {recorder.video_path}!"
            )
        # The recorder output expires (finishing the recording) after `duration`
        recorder = self.add_provider("recorder", timeout=duration)
        recorder.video_path = video_path

        self.start()

        # Take advantage of lookback
        hls = self.outputs.get("hls")
        if lookback > 0 and hls:
            num_segments = min(int(lookback // hls.target_duration), MAX_SEGMENTS)
            # Wait for latest segment, then add the lookback
            await hls.recv()
            recorder.prepend(list(hls.get_segment())[-num_segments:])