import debug from "debug";
import { css, html, LitElement, PropertyValueMap, TemplateResult } from "lit";
import { customElement, property, state } from "lit/decorators.js";
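// Shows the device camera on a canvas, makes saturated pixels transparent, and reports how many there were.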
@customElement("light9-camera")
export class Light9Camera extends LitElement {
static styles = [
css`
:host {
display: flex;
}
#video {
display: none;
}
#stack {
position: relative;
width: 640px;
height: 480px;
}
#stack > * {
position: absolute;
left: 0;
top: 0;
}
#stack > :first-child {
position: static;
}
#stack > img {
opacity: 0;
animation: fadeIn 1s 1s ease-in-out forwards;
}
@keyframes fadeIn {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
`,
];
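  // Assigned in firstUpdated once the shadow DOM exists (hence the definite-assignment assertions).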
videoEl!: HTMLVideoElement;
canvas!: HTMLCanvasElement;
ctx!: CanvasRenderingContext2D;
@state()
vtrack: MediaStreamTrack | undefined;
@property() saturatedPixelCount = 0;
@property() saturatedPixelFraction = 0;
@property() videoSettings: MediaTrackSettings & any = {};
render() {
const saturatedCountDisplay = `${this.saturatedPixelCount} (${(this.saturatedPixelFraction * 100).toFixed(2)}%)`;
    return html`
      <video id="video"></video>
      <div id="stack">
        <canvas id="canvas"></canvas>
      </div>
      <div>saturated pixels: ${saturatedCountDisplay}</div>
    `;
}
  protected async firstUpdated(_changedProperties: PropertyValueMap<any> | Map<PropertyKey, unknown>) {
this.videoEl = this.shadowRoot!.getElementById("video") as HTMLVideoElement;
this.canvas = this.shadowRoot!.getElementById("canvas") as HTMLCanvasElement;
this.ctx = this.canvas.getContext("2d", { willReadFrequently: true })!;
const constraints: MediaStreamConstraints = {
video: {
facingMode: { ideal: "environment" },
frameRate: { max: 10 },
},
};
const stream = await navigator.mediaDevices.getUserMedia(constraints);
const t = stream.getVideoTracks()[0];
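    // Manual camera settings. These are non-standard constraints (MediaStream Image Capture),
    // hence the cast; browsers that don't recognize a name generally ignore it.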
await t.applyConstraints({
brightness: 0,
contrast: 32,
colorTemperature: 6600,
exposureMode: "manual",
exposureTime: 250,
whiteBalanceMode: "manual",
focusMode: "manual",
focusDistance: 235,
} as MediaTrackConstraints);
this.vtrack = t;
this.videoEl.srcObject = stream;
this.videoEl.play();
this.videoSettings = this.vtrack.getSettings();
this.redrawLoop();
}
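  // Draw frames forever; skip until the video reports real dimensions.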
redrawLoop() {
if (this.videoEl.videoWidth !== 0 && this.videoEl.videoHeight !== 0) {
this.redraw();
}
    // todo: camera frames arrive more slowly than requestAnimationFrame fires, so this often redraws the same frame
requestAnimationFrame(this.redrawLoop.bind(this));
}
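  // Apply one constraint by name and refresh the cached settings so the UI reflects it.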
public async set(k: string, v: any) {
if (!this.vtrack) {
throw new Error("vtrack");
}
await this.vtrack.applyConstraints({ [k]: v });
this.videoSettings = this.vtrack.getSettings();
}
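  // Size the canvas to the current frame, copy the frame in, then post-process it.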
private redraw() {
this.canvas.width = this.videoEl.videoWidth;
this.canvas.height = this.videoEl.videoHeight;
this.ctx.drawImage(this.videoEl, 0, 0);
this.makeSaturatedPixelsTransparent();
}
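  // Walk the RGBA data; any pixel with a channel at 255 counts as saturated and gets its alpha zeroed.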
private makeSaturatedPixelsTransparent() {
const imageData = this.ctx.getImageData(0, 0, this.canvas.width, this.canvas.height);
const data = imageData.data;
this.saturatedPixelCount = 0;
for (let i = 0; i < data.length; i += 4) {
if (data[i] === 255 || data[i + 1] === 255 || data[i + 2] === 255) {
this.saturatedPixelCount += 1;
data[i + 3] = 0;
}
}
this.saturatedPixelFraction = this.saturatedPixelCount / (data.length / 4);
this.ctx.putImageData(imageData, 0, 0);
}
}
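// Table of browser-supported camera constraints with their current values; some rows get a slider
// that writes back through Light9Camera.set().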
@customElement("light9-camera-settings-table")
export class Light9CameraSettingsTable extends LitElement {
static styles = [
css`
table {
border-collapse: collapse;
}
td {
border: 1px solid gray;
padding: 1px 6px;
}
`,
];
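  // Constraint names that aren't worth a row in the table.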
boring = [
"aspectRatio",
"backgroundBlur",
"channelCount",
"deviceId",
"displaySurface",
"echoCancellation",
"eyeGazeCorrection",
"faceFraming",
"groupId",
"latency",
"noiseSuppression",
"pointsOfInterest",
"resizeMode",
"sampleRate",
"sampleSize",
"suppressLocalAudioPlayback",
"torch",
"voiceIsolation",
];
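  // Hard-coded slider ranges for the constraints exposed for manual adjustment.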
  adjustable: Record<string, { min: string; max: string }> = {
focusDistance: { min: "0", max: "1023" },
brightness: { min: "0", max: "64" },
colorTemperature: { min: "2800", max: "6500" },
exposureTime: { min: "0", max: "400" },
};
@property() cam!: Light9Camera;
@property() videoSettings: MediaTrackSettings & any = {};
supportedByBrowser: MediaTrackSupportedConstraints;
constructor() {
super();
this.supportedByBrowser = navigator.mediaDevices.getSupportedConstraints();
}
render() {
const rows: TemplateResult<1>[] = [];
for (const key of Object.keys(this.supportedByBrowser)) {
if (!this.boring.includes(key)) {
this.renderRow(key, rows);
}
}
    return html`<table>${rows}</table>`;
}
private renderRow(key: string, rows: any[]) {
let valueDisplay = "";
if (this.videoSettings[key] !== undefined) {
valueDisplay = JSON.stringify(this.videoSettings[key]);
}
    let adjuster = html``;
    const conf = this.adjustable[key];
    if (conf !== undefined) {
      adjuster = html`<input
        type="range"
        min=${conf.min}
        max=${conf.max}
        .value=${String(this.videoSettings[key] ?? conf.min)}
        data-param=${key}
        @input=${this.setFromSlider}
      />`;
    }
    rows.push(html`
      <tr>
        <td>${key}</td>
        <td>${valueDisplay}</td>
        <td>${adjuster}</td>
      </tr>
    `);
}
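  // Forward a slider change to the camera element, using data-param to know which constraint it is.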
async setFromSlider(ev: InputEvent) {
const el = ev.target as HTMLInputElement;
await this.cam.set(el.dataset.param as string, parseFloat(el.value));
}
}