Light Painting With Android
Fleeting — A PWA for light painting on Android. Uses the rear camera to capture video frames and accumulates them with a “lighten” blend (keep the brighter pixel), producing light-trail effects in real time.
Initially considered using kivy on android, but a PWA is simpler to build and deploy — no APK build needed, works directly in Chrome, installable on home screen.
The core technique is frame stacking: each new video frame is compared pixel-by-pixel with an accumulator canvas, keeping whichever value is brighter. This means only light sources leave trails while the dark background stays dark.
Lessons learned
Camera access on Android Chrome
`navigator.mediaDevices` requires a secure context: must use `localhost` or HTTPS, not a bare IP address. `facingMode: { exact: "environment" }` and `facingMode: "environment"` both hang on Xiaomi mojito / LineageOS. Workaround: open any camera with `{ video: true }`, then `enumerateDevices()` to get labels, and let the user pick via a `<select>`. `enumerateDevices()` returns empty labels before the first `getUserMedia` succeeds (permission not yet granted).
- Calling `getUserMedia` a second time right after `stop()`-ing the first stream can hang — the device needs time to release the camera hardware.
- The `loadedmetadata` event listener must be set before calling `video.play()`, otherwise dimensions stay 0.
Exposure control via Web API
`MediaStreamTrack.getCapabilities()` reports `exposureMode: ["continuous", "manual"]`, `exposureTime` up to ~300 s, ISO 100–1550. `track.applyConstraints({ advanced: [{ exposureMode: 'manual', exposureTime: X }] })` sets the values but does NOT affect the video stream — `getSettings().exposureMode` returns `"none"`, frame rate stays 30 fps.
- Exposure constraints are ImageCapture-only in Chrome Android: only `ImageCapture.takePhoto()` respects them; `grabFrame()` does not. `takePhoto()` with `exposureTime: 3000` genuinely takes ~3.3 s — confirmed working.
- Cannot mix ImageCapture constraints (`exposureTime`) with stream constraints (`frameRate`) in `applyConstraints`.
Kivy alternative
Kivy + Pyjnius can access the Camera2 API directly, which supports per-frame manual exposure on the preview stream (not just photo capture). But requires APK build via buildozer.
Manifest
{
"name": "Light Painting",
"short_name": "LightPaint",
"description": "Light painting with frame stacking",
"start_url": "./index.html",
"display": "fullscreen",
"background_color": "#000000",
"theme_color": "#000000",
"icons": [],
"prefer_related_applications": false,
"scope": "./",
"categories": []
}
Service worker
// Cache name for assets precached at install time; bump the version
// suffix to invalidate old precaches on the next activate.
const PRECACHE = 'precache-v1';
// Cache name for resources cached lazily at runtime.
const RUNTIME = 'runtime';
// Assets added to the precache during the install event.
const PRECACHE_URLS = [
"index.html",
];
// On install: precache the app shell, then activate this worker
// immediately instead of waiting for open tabs to close.
self.addEventListener('install', event => {
  event.waitUntil(
    caches.open(PRECACHE)
      .then(cache => cache.addAll(PRECACHE_URLS))
      // Must pass a function: the original `.then(self.skipWaiting())`
      // invoked skipWaiting() at chain-construction time and passed its
      // promise as the (ignored) handler, detaching it from addAll's
      // success and from waitUntil's lifetime.
      .then(() => self.skipWaiting())
  );
});
// On activate: delete caches left over from previous versions, then
// take control of all open clients without a reload.
self.addEventListener('activate', event => {
  const keep = [PRECACHE, RUNTIME];
  event.waitUntil((async () => {
    const names = await caches.keys();
    const stale = names.filter(name => !keep.includes(name));
    await Promise.all(stale.map(name => caches.delete(name)));
    await self.clients.claim();
  })());
});
[[progressive_web_app.org:network_first-ex()]]
Application
<html>
<head>
<meta charset="UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no" />
<link rel="manifest" href="./manifest.json">
<script defer src="https://ipfs.konubinix.eu/p/bafkreic33rowgvvugzgzajagkuwidfnit2dyqyn465iygfs67agsukk24i?orig=https://cdn.jsdelivr.net/npm/alpinejs@3.x.x/dist/cdn.min.js"></script>
<script src="https://ipfs.konubinix.eu/p/bafybeihp5kzlgqt56dmy5l4z7kpymfc4kn3fnehrrtr7cid7cn7ra36yha?orig=https://cdn.tailwindcss.com/3.4.3"></script>
<style>
body {
overscroll-behavior-y: contain;
background: #000;
margin: 0;
padding: 0;
}
canvas, video {
display: block;
width: 100%;
height: auto;
}
</style>
<script>
document.addEventListener('alpine:init', () => {
Alpine.data('app', () => ({
running: false,
video: null,
accCanvas: null,
accCtx: null,
displayCanvas: null,
displayCtx: null,
tmpCanvas: null,
tmpCtx: null,
wakelock: null,
threshold: 30,
width: 0,
height: 0,
message: '',
stacking: true,
exposureMode: 'continuous',
exposureTime: 166,
exposureTimeMin: 0,
exposureTimeMax: 3000,
iso: 100,
isoMin: 100,
isoMax: 1600,
hasExposureControl: false,
loadSettings() {
try {
const s = JSON.parse(localStorage.getItem('lightpaint') || '{}');
if (s.threshold != null) this.threshold = s.threshold;
if (s.stacking != null) this.stacking = s.stacking;
if (s.exposureMode) this.exposureMode = s.exposureMode;
if (s.exposureTime != null) this.exposureTime = s.exposureTime;
if (s.iso != null) this.iso = s.iso;
if (s.selectedCam) this.selectedCam = s.selectedCam;
} catch(e) {}
},
saveSettings() {
localStorage.setItem('lightpaint', JSON.stringify({
threshold: this.threshold,
stacking: this.stacking,
exposureMode: this.exposureMode,
exposureTime: this.exposureTime,
iso: this.iso,
selectedCam: this.selectedCam,
}));
},
async init() {
this.loadSettings();
if ('serviceWorker' in navigator) {
navigator.serviceWorker
.register('./sw.js')
.then(() => console.log('Service Worker Registered'));
}
this.video = this.$refs.video;
this.displayCanvas = this.$refs.displayCanvas;
this.displayCtx = this.displayCanvas.getContext('2d');
this.accCanvas = document.createElement('canvas');
this.accCtx = this.accCanvas.getContext('2d');
this.tmpCanvas = document.createElement('canvas');
this.tmpCtx = this.tmpCanvas.getContext('2d');
try {
const wl = await navigator.wakeLock.request("screen");
this.wakelock = wl;
} catch (err) {
console.warn('WakeLock failed:', err);
}
this.message = 'Tap "Camera" to connect';
for (const key of ['threshold', 'stacking', 'exposureMode', 'exposureTime', 'iso', 'selectedCam']) {
this.$watch(key, () => this.saveSettings());
}
},
cameras: [],
selectedCam: '',
async setupCamera() {
this.message = 'Opening camera...';
const stream = await navigator.mediaDevices.getUserMedia({ video: true });
// Populate camera list now that permission is granted
const devices = await navigator.mediaDevices.enumerateDevices();
this.cameras = devices.filter(d => d.kind === 'videoinput');
this.selectedCam = stream.getTracks()[0].getSettings().deviceId || '';
this.video.srcObject = stream;
await new Promise(resolve => {
this.video.addEventListener('loadedmetadata', resolve, { once: true });
});
this.width = this.video.videoWidth;
this.height = this.video.videoHeight;
this.displayCanvas.width = this.width;
this.displayCanvas.height = this.height;
this.accCanvas.width = this.width;
this.accCanvas.height = this.height;
this.tmpCanvas.width = this.width;
this.tmpCanvas.height = this.height;
await this.video.play();
this.readExposureCaps();
this.message = '';
},
readExposureCaps() {
const track = this.video.srcObject.getVideoTracks()[0];
const caps = track.getCapabilities();
const settings = track.getSettings();
if (caps.exposureMode && caps.exposureMode.includes('manual')) {
this.hasExposureControl = true;
this.exposureTimeMin = caps.exposureTime.min;
this.exposureTimeMax = caps.exposureTime.max;
this.exposureTime = settings.exposureTime;
this.isoMin = caps.iso.min;
this.isoMax = caps.iso.max;
this.iso = settings.iso;
this.exposureMode = settings.exposureMode;
} else {
this.hasExposureControl = false;
}
},
async applyExposure() {
const track = this.video.srcObject.getVideoTracks()[0];
const constraints = {};
if (this.exposureMode === 'manual') {
constraints.exposureMode = 'manual';
constraints.exposureTime = Number(this.exposureTime);
constraints.iso = Number(this.iso);
} else {
constraints.exposureMode = 'continuous';
}
await track.applyConstraints({ advanced: [constraints] });
const s = track.getSettings();
console.log('Exposure applied:', s.exposureMode, 'time:', s.exposureTime, 'iso:', s.iso);
},
// Begin a capture: reset the accumulator to black, then run the frame
// loop until stop() clears the flag.
// NOTE(review): loop() is async and not awaited here; rejections inside
// it surface as unhandled unless loop() handles them itself.
start() {
this.clearAcc();
this.running = true;
this.loop();
},
// Ask the frame loop to exit at its next `running` check.
stop() {
this.running = false;
},
// Paint both the accumulator and the visible canvas solid black,
// discarding any stacked light trails.
clearAcc() {
this.accCtx.fillStyle = '#000';
this.accCtx.fillRect(0, 0, this.width, this.height);
this.displayCtx.fillStyle = '#000';
this.displayCtx.fillRect(0, 0, this.width, this.height);
},
async switchCamera() {
if (!this.selectedCam) return;
this.message = 'Switching camera...';
if (this.video.srcObject) {
this.video.srcObject.getTracks().forEach(t => t.stop());
this.video.srcObject = null;
}
await new Promise(r => setTimeout(r, 500));
const stream = await navigator.mediaDevices.getUserMedia({
video: { deviceId: { exact: this.selectedCam } }
});
this.video.srcObject = stream;
await new Promise(resolve => {
this.video.addEventListener('loadedmetadata', resolve, { once: true });
});
this.width = this.video.videoWidth;
this.height = this.video.videoHeight;
this.displayCanvas.width = this.width;
this.displayCanvas.height = this.height;
this.accCanvas.width = this.width;
this.accCanvas.height = this.height;
this.tmpCanvas.width = this.width;
this.tmpCanvas.height = this.height;
await this.video.play();
this.readExposureCaps();
this.message = '';
},
stackFrame(source) {
this.tmpCtx.drawImage(source, 0, 0, this.width, this.height);
const frame = this.tmpCtx.getImageData(0, 0, this.width, this.height);
const acc = this.accCtx.getImageData(0, 0, this.width, this.height);
const fd = frame.data;
const ad = acc.data;
const thr = this.threshold;
for (let i = 0; i < fd.length; i += 4) {
const fb = (fd[i] + fd[i+1] + fd[i+2]) / 3;
if (fb > thr) {
if (fd[i] > ad[i]) ad[i] = fd[i];
if (fd[i+1] > ad[i+1]) ad[i+1] = fd[i+1];
if (fd[i+2] > ad[i+2]) ad[i+2] = fd[i+2];
}
ad[i+3] = 255;
}
this.accCtx.putImageData(acc, 0, 0);
this.displayCtx.drawImage(this.accCanvas, 0, 0);
},
async loop() {
if (!this.running) return;
if (this.exposureMode === 'manual') {
const track = this.video.srcObject.getVideoTracks()[0];
const ic = new ImageCapture(track);
const blob = await ic.takePhoto();
if (!this.running) return;
const bmp = await createImageBitmap(blob);
if (this.stacking) {
this.stackFrame(bmp);
} else {
this.displayCtx.drawImage(bmp, 0, 0, this.width, this.height);
}
bmp.close();
this.loop();
} else {
if (this.stacking) {
this.stackFrame(this.video);
} else {
this.displayCtx.drawImage(this.video, 0, 0, this.width, this.height);
}
this.video.requestVideoFrameCallback(() => this.loop());
}
},
save() {
this.displayCanvas.toBlob((blob) => {
const link = document.createElement('a');
const now = new Date();
const ts = now.toISOString().replace(/[:.]/g, '-');
link.download = `lightpaint_${ts}.png`;
const url = URL.createObjectURL(blob);
link.href = url;
link.click();
URL.revokeObjectURL(url);
}, 'image/png');
},
}));
});
</script>
</head>
<body>
<div x-data="app" class="min-h-screen bg-black text-white flex flex-col">
<video x-ref="video" playsinline autoplay muted class="hidden"></video>
<canvas x-ref="displayCanvas" class="flex-1"></canvas>
<div class="fixed bottom-0 left-0 right-0 bg-black/80 p-4 flex flex-col gap-3">
<div class="flex items-center gap-2">
<label class="text-sm w-24">Threshold</label>
<input type="range" min="0" max="255" x-model="threshold" class="flex-1">
<span class="text-sm w-8" x-text="threshold"></span>
</div>
<div x-show="message" class="text-center text-yellow-400 text-sm" x-text="message"></div>
<div x-show="cameras.length" class="flex items-center gap-2">
<label class="text-sm w-24">Camera</label>
<select x-model="selectedCam" @change="switchCamera().catch(e => { message = e.name + ': ' + e.message })" class="flex-1 bg-gray-800 text-white rounded p-2">
<template x-for="cam in cameras" :key="cam.deviceId">
<option :value="cam.deviceId" x-text="cam.label"></option>
</template>
</select>
</div>
<div x-show="hasExposureControl" class="flex flex-col gap-2">
<div class="flex items-center gap-2">
<label class="text-sm w-24">Exposure</label>
<select x-model="exposureMode" @change="applyExposure()" class="bg-gray-800 text-white rounded p-1 text-sm">
<option value="continuous">Auto</option>
<option value="manual">Manual</option>
</select>
</div>
<div x-show="exposureMode === 'manual'" class="flex items-center gap-2">
<label class="text-sm w-24">Time (ms)</label>
<input type="range" :min="exposureTimeMin" :max="exposureTimeMax" step="1" x-model="exposureTime" @change="applyExposure()" class="flex-1">
<span class="text-sm w-16" x-text="Math.round(exposureTime) + 'ms'"></span>
</div>
<div x-show="exposureMode === 'manual'" class="flex items-center gap-2">
<label class="text-sm w-24">ISO</label>
<input type="range" :min="isoMin" :max="isoMax" step="1" x-model="iso" @change="applyExposure()" class="flex-1">
<span class="text-sm w-16" x-text="iso"></span>
</div>
</div>
<div class="flex gap-2 justify-center">
<button
x-show="!width"
@click="setupCamera().catch(e => { message = e.name + ': ' + e.message })"
class="bg-yellow-600 px-6 py-3 rounded-lg text-lg font-bold">
Camera
</button>
<button
x-show="width && !running"
@click="start()"
class="bg-green-600 px-6 py-3 rounded-lg text-lg font-bold">
Start
</button>
<button
x-show="running"
@click="stop()"
class="bg-red-600 px-6 py-3 rounded-lg text-lg font-bold">
Stop
</button>
<button
@click="stacking = !stacking"
:class="stacking ? 'bg-purple-600' : 'bg-gray-600'"
class="px-6 py-3 rounded-lg text-lg font-bold">
Stack
</button>
<button
@click="clearAcc()"
class="bg-gray-600 px-6 py-3 rounded-lg text-lg font-bold">
Clear
</button>
<button
@click="save()"
class="bg-blue-600 px-6 py-3 rounded-lg text-lg font-bold">
Save
</button>
</div>
</div>
</div>
</body>
</html>