Live camera view
This example walks through fetching a user’s cameras via the API and embedding a live HLS stream.
1. Fetch the camera list
```js
const TOKEN = 'YOUR_PERSONAL_ACCESS_TOKEN'; // or a Bearer token from OAuth2

// Fetch the authenticated user's cameras and return the `results` array
// from the paginated response.
async function getCameras() {
  const res = await fetch('https://api.angelcam.com/v1/cameras/', {
    headers: { Authorization: `PersonalAccessToken ${TOKEN}` },
  });
  // Surface HTTP errors — mirrors raise_for_status() in the Python sample.
  if (!res.ok) {
    throw new Error(`Camera list request failed with status ${res.status}`);
  }
  const data = await res.json();
  return data.results;
}
```

```python
import requests

TOKEN = 'YOUR_PERSONAL_ACCESS_TOKEN'  # or a Bearer token from OAuth2

def get_cameras():
    """Return the authenticated user's cameras (the ``results`` list)."""
    res = requests.get(
        'https://api.angelcam.com/v1/cameras/',
        headers={'Authorization': f'PersonalAccessToken {TOKEN}'},
    )
    res.raise_for_status()
    return res.json()['results']
```

Each camera object contains an id, name, type (h264, h265, or mjpeg), and a snapshot URL for a preview image.
2. Get the live stream URL
For H.264/H.265 cameras, request the HLS stream. For MJPEG cameras, an MJPEG stream is the only option.
```js
// Return { hls, mjpeg } stream URLs for a camera; either may be undefined
// when the camera does not offer that format.
async function getLiveStream(cameraId) {
  const res = await fetch(`https://api.angelcam.com/v1/cameras/${cameraId}/streams/`, {
    headers: { Authorization: `PersonalAccessToken ${TOKEN}` },
  });
  // Surface HTTP errors — mirrors raise_for_status() in the Python sample.
  if (!res.ok) {
    throw new Error(`Stream list request failed with status ${res.status}`);
  }
  const { streams } = await res.json();

  const hls = streams.find(s => s.format === 'hls');
  const mjpeg = streams.find(s => s.format === 'mjpeg');

  return { hls: hls?.url, mjpeg: mjpeg?.url };
}
```

```python
def get_live_stream(camera_id):
    """Return {'hls': url-or-None, 'mjpeg': url-or-None} for a camera."""
    res = requests.get(
        f'https://api.angelcam.com/v1/cameras/{camera_id}/streams/',
        headers={'Authorization': f'PersonalAccessToken {TOKEN}'},
    )
    res.raise_for_status()
    streams = res.json()['streams']

    hls = next((s['url'] for s in streams if s['format'] == 'hls'), None)
    mjpeg = next((s['url'] for s in streams if s['format'] == 'mjpeg'), None)

    return {'hls': hls, 'mjpeg': mjpeg}
```

3. Display the stream
HLS (H.264 / H.265)
Use hls.js for broad browser support.
```html
<video id="player" controls autoplay muted></video>
<script src="https://cdn.jsdelivr.net/npm/hls.js@latest"></script>
<script>
  // Start playback of a camera's HLS stream in the #player element.
  async function startLiveView(cameraId) {
    const { hls: hlsUrl } = await getLiveStream(cameraId);
    // Guard: MJPEG-only cameras expose no HLS stream at all.
    if (!hlsUrl) {
      throw new Error('This camera does not offer an HLS stream');
    }
    const video = document.getElementById('player');

    if (Hls.isSupported()) {
      const hls = new Hls();
      hls.loadSource(hlsUrl);
      hls.attachMedia(video);
    } else if (video.canPlayType('application/vnd.apple.mpegurl')) {
      // Native HLS (Safari)
      video.src = hlsUrl;
    }
  }

  startLiveView('CAMERA_ID');
</script>
```

```python
# Flask — fetch the stream URL on the server and inject it into the page
from flask import Flask, render_template_string

app = Flask(__name__)

PLAYER_PAGE = """<!doctype html>
<title>Live view</title>
<video id="player" controls autoplay muted></video>
<script src="https://cdn.jsdelivr.net/npm/hls.js@latest"></script>
<script>
  const url = {{ hls_url | tojson }};
  const video = document.getElementById('player');
  if (Hls.isSupported()) {
    const hls = new Hls();
    hls.loadSource(url);
    hls.attachMedia(video);
  } else if (video.canPlayType('application/vnd.apple.mpegurl')) {
    video.src = url;
  }
</script>"""

@app.route('/live/<camera_id>')
def live(camera_id):
    streams = get_live_stream(camera_id)
    return render_template_string(PLAYER_PAGE, hls_url=streams['hls'])
```

MJPEG streams work as a plain <img> src — no player library needed.
```js
// Point the #mjpeg-view <img> at the camera's MJPEG stream URL.
async function startMjpegView(cameraId) {
  const { mjpeg: mjpegUrl } = await getLiveStream(cameraId);
  document.getElementById('mjpeg-view').src = mjpegUrl;
}
```

```python
@app.route('/live-mjpeg/<camera_id>')
def live_mjpeg(camera_id):
    # An MJPEG stream plays directly as an <img> source.
    streams = get_live_stream(camera_id)
    return f'<img src="{streams["mjpeg"]}" alt="Live view">'
```

4. Putting it together — camera picker
// Build a player for every camera the account can see.
async function init() {
  const cameras = await getCameras();

  // Fetch all stream URLs in parallel instead of awaiting one camera at a
  // time inside the loop; players are still rendered in camera-list order.
  const streams = await Promise.all(cameras.map((cam) => getLiveStream(cam.id)));

  cameras.forEach((cam, i) => {
    const { hls: hlsUrl, mjpeg: mjpegUrl } = streams[i];
    if (hlsUrl) {
      renderHlsPlayer(cam.name, hlsUrl);
    } else if (mjpegUrl) {
      renderMjpegPlayer(cam.name, mjpegUrl);
    }
  });
}
// Append a titled HLS <video> player for one camera to the document body.
function renderHlsPlayer(name, url) {
  const heading = document.createElement('h3');
  heading.textContent = name;

  const video = document.createElement('video');
  video.controls = true;
  video.autoplay = true;
  video.muted = true;

  document.body.appendChild(heading);
  document.body.appendChild(video);

  const player = new Hls();
  player.loadSource(url);
  player.attachMedia(video);
}
```js
// Append a titled MJPEG <img> view for one camera to the document body.
function renderMjpegPlayer(name, url) {
  const img = document.createElement('img');
  img.src = url;
  img.alt = name;

  document.body.appendChild(Object.assign(document.createElement('h3'), { textContent: name }));
  document.body.appendChild(img);
}
```

```python
@app.route('/')
def camera_list():
    """Render every camera on one page: HLS via hls.js, MJPEG as <img>."""
    # Camera names and URLs come from the API response — escape them before
    # interpolating into HTML to avoid markup injection.
    from html import escape

    cameras = get_cameras()

    items = []
    for cam in cameras:
        streams = get_live_stream(cam['id'])
        if streams['hls']:
            items.append({'name': cam['name'], 'fmt': 'hls', 'url': streams['hls']})
        elif streams['mjpeg']:
            items.append({'name': cam['name'], 'fmt': 'mjpeg', 'url': streams['mjpeg']})

    rows = []
    for item in items:
        name = escape(item['name'])
        url = escape(item['url'])
        rows.append(f"<h3>{name}</h3>")
        if item['fmt'] == 'hls':
            rows.append(f'<video data-src="{url}" controls autoplay muted></video>')
        else:
            rows.append(f'<img src="{url}" alt="{name}">')

    page = (
        """<!doctype html>
<script src="https://cdn.jsdelivr.net/npm/hls.js@latest"></script>"""
        + '\n'.join(rows)
        + """<script>
  document.querySelectorAll('video[data-src]').forEach(v => {
    if (Hls.isSupported()) {
      const hls = new Hls();
      hls.loadSource(v.dataset.src);
      hls.attachMedia(v);
    } else {
      v.src = v.dataset.src;
    }
  });
</script>"""
    )
    return page
```

- Each camera supports up to 10 concurrent consumers. Your Angelcam web/mobile sessions count toward this limit.
- Use snapshots for thumbnail previews — they don’t count toward the concurrency limit.
- For public or high-concurrency use cases, look at the Broadcasting service instead.