Recording playback & timeline seek
This example shows how to list recordings for a camera, start a playback stream from a chosen point in the timeline, seek to a different position, and keep your UI clock in sync with the actual stream position.
1. List recordings for a camera
Section titled “1. List recordings for a camera”

A camera can have multiple recording configurations (e.g. continuous + motion-triggered). Fetch them all first.
const TOKEN = 'YOUR_PERSONAL_ACCESS_TOKEN';
async function getRecordings(cameraId) { const res = await fetch(`https://api.angelcam.com/v1/cameras/${cameraId}/recording/`, { headers: { Authorization: `PersonalAccessToken ${TOKEN}` }, }); const data = await res.json(); return data.results;}import requests
TOKEN = 'YOUR_PERSONAL_ACCESS_TOKEN'
def get_recordings(camera_id): res = requests.get( f'https://api.angelcam.com/v1/cameras/{camera_id}/recording/', headers={'Authorization': f'PersonalAccessToken {TOKEN}'}, ) res.raise_for_status() return res.json()['results']2. Load the timeline
Section titled “2. Load the timeline”

The timeline endpoint returns the recorded segments within a given time range (maximum 24 hours per request). Use it to know what footage is available and to draw a scrubber.
async function getTimeline(recordingId, from, to) { // from / to are ISO 8601 strings, e.g. '2024-06-01T10:00:00Z' const params = new URLSearchParams({ start: from, end: to }); const res = await fetch( `https://api.angelcam.com/v1/recording/${recordingId}/timeline/?${params}`, { headers: { Authorization: `PersonalAccessToken ${TOKEN}` } }, ); const data = await res.json(); return data.segments; // [{ start, end }, ...]}def get_timeline(recording_id, from_time, to_time): # from_time / to_time are ISO 8601 strings, e.g. '2024-06-01T10:00:00Z' res = requests.get( f'https://api.angelcam.com/v1/recording/{recording_id}/timeline/', params={'start': from_time, 'end': to_time}, headers={'Authorization': f'PersonalAccessToken {TOKEN}'}, ) res.raise_for_status() return res.json()['segments'] # [{'start': ..., 'end': ...}, ...]Gaps between segments represent periods without recorded footage (camera offline, manual stop, etc.).
3. Start a playback stream
Section titled “3. Start a playback stream”

Create a stream starting from the desired point. Omit `end` to let it continue to the end of recorded footage, or include it to limit the window.
async function createPlaybackStream(recordingId, startTime, endTime = null) { const params = new URLSearchParams({ start: startTime }); if (endTime) params.set('end', endTime);
const res = await fetch( `https://api.angelcam.com/v1/recording/${recordingId}/stream/?${params}`, { headers: { Authorization: `PersonalAccessToken ${TOKEN}` } }, ); return res.json(); // returns { url, base_url, play, pause, speed, ... }}def create_playback_stream(recording_id, start_time, end_time=None): params = {'start': start_time} if end_time: params['end'] = end_time
res = requests.get( f'https://api.angelcam.com/v1/recording/{recording_id}/stream/', params=params, headers={'Authorization': f'PersonalAccessToken {TOKEN}'}, ) res.raise_for_status() return res.json() # returns {'url': ..., 'base_url': ..., 'play': ..., 'pause': ..., 'speed': ...}4. Play the stream with hls.js
Section titled “4. Play the stream with hls.js”

The `url` from the stream response is an HLS playlist — pass it to hls.js in the browser. The backend (Node.js or Python) only needs to supply this URL to the frontend.
<video id="player" controls></video><script src="https://cdn.jsdelivr.net/npm/hls.js@latest"></script><script> let hlsInstance = null;
function attachStream(videoEl, streamUrl) { if (hlsInstance) hlsInstance.destroy();
if (Hls.isSupported()) { hlsInstance = new Hls({ liveSyncDurationCount: 3 }); hlsInstance.loadSource(streamUrl); hlsInstance.attachMedia(videoEl); } else if (videoEl.canPlayType('application/vnd.apple.mpegurl')) { videoEl.src = streamUrl; } }
async function play(recordingId, startTime) { const stream = await createPlaybackStream(recordingId, startTime); attachStream(document.getElementById('player'), stream.url); startTimelineSync(stream.base_url); }</script># Flask — create the stream server-side and inject the URL into the pagefrom flask import Flask, render_template_string
# Flask application serving the playback page.
app = Flask(__name__)

# Inline HTML page: hls.js player plus a clock element for timeline sync.
# The `stream` object (created server-side) is injected as JSON via Jinja's
# `tojson` filter; its `url` feeds hls.js and its `base_url` drives the
# clock sync described in section 5.
# NOTE: the template is a runtime string literal — the JS inside is kept
# byte-for-byte as published.
PLAYER_PAGE = """<!doctype html><title>Recording playback</title><video id="player" controls></video><div id="timeline-clock"></div><script src="https://cdn.jsdelivr.net/npm/hls.js@latest"></script><script> let hlsInstance = null;
function attachStream(videoEl, streamUrl) { if (hlsInstance) hlsInstance.destroy(); if (Hls.isSupported()) { hlsInstance = new Hls({ liveSyncDurationCount: 3 }); hlsInstance.loadSource(streamUrl); hlsInstance.attachMedia(videoEl); } else if (videoEl.canPlayType('application/vnd.apple.mpegurl')) { videoEl.src = streamUrl; } }
const stream = {{ stream | tojson }}; attachStream(document.getElementById('player'), stream.url); startTimelineSync(stream.base_url); // defined in section 5</script>"""
@app.route('/playback/<recording_id>')def playback(recording_id): from datetime import datetime, timezone, timedelta now = datetime.now(timezone.utc) day_start = now.replace(hour=0, minute=0, second=0, microsecond=0) stream = create_playback_stream(recording_id, day_start.isoformat()) return render_template_string(PLAYER_PAGE, stream=stream)5. Keep the timeline in sync
Section titled “5. Keep the timeline in sync”

Network buffering means wall-clock time and stream time diverge. Periodically call the stream info endpoint (`base_url` from the stream response) to get the exact current position. This runs in the browser regardless of backend language.
let syncInterval = null;
function startTimelineSync(baseUrl) { stopTimelineSync(); syncInterval = setInterval(() => syncTimeline(baseUrl), 2000);}
function stopTimelineSync() { if (syncInterval) clearInterval(syncInterval);}
async function syncTimeline(baseUrl) { const res = await fetch(baseUrl, { headers: { Authorization: `PersonalAccessToken ${TOKEN}` }, }); const info = await res.json(); // info.current_time is the ISO 8601 timestamp currently shown by the stream document.getElementById('timeline-clock').textContent = new Date(info.current_time).toLocaleTimeString();}6. Seek to a different position
Section titled “6. Seek to a different position”

Seeking means creating a new stream starting from the desired timestamp and replacing the current hls.js instance. This also runs in the browser.
async function seekTo(recordingId, targetTime) { stopTimelineSync();
const stream = await createPlaybackStream(recordingId, targetTime); attachStream(document.getElementById('player'), stream.url); startTimelineSync(stream.base_url);}
// Trigger seek when the user moves the scrubberdocument.getElementById('scrubber').addEventListener('change', e => { const targetTime = scrubberValueToIso(e.target.value); // your conversion seekTo(RECORDING_ID, targetTime);});7. Full flow — putting it together
Section titled “7. Full flow — putting it together”async function initPlayback(cameraId) { const recordings = await getRecordings(cameraId); if (!recordings.length) return console.warn('No recordings found');
const recording = recordings[0];
const now = new Date(); const dayStart = new Date(now); dayStart.setHours(0, 0, 0, 0);
const segments = await getTimeline( recording.id, dayStart.toISOString(), now.toISOString(), ); if (!segments.length) return console.warn('No footage in range');
drawScrubber(segments); // render your timeline UI
await play(recording.id, segments[0].start);}from datetime import datetime, timezone
@app.route('/playback/camera/<camera_id>')def playback_camera(camera_id): recordings = get_recordings(camera_id) if not recordings: return 'No recordings found', 404
recording = recordings[0]
now = datetime.now(timezone.utc) day_start = now.replace(hour=0, minute=0, second=0, microsecond=0)
segments = get_timeline( recording['id'], day_start.isoformat(), now.isoformat(), ) if not segments: return 'No footage in range', 404
stream = create_playback_stream(recording['id'], segments[0]['start']) return render_template_string(PLAYER_PAGE, stream=stream)- The stream
`url` is a short-lived HLS playlist. Re-request it if the stream stalls after a long pause.
- Use the `play`, `pause`, and `speed` URLs from the stream response to control playback server-side — useful when synchronizing multiple cameras.
- For multi-camera sync, poll the stream info endpoint for each stream and calculate the offset between their `current_time` values.